# Generating AR(2) processes with real roots (causal and non-causal)
# the roots are lambda1 and lambda2
# Innovations: the causal AR(1) below uses centered chi-square(1) draws and
# the non-causal AR(1) uses t(5) draws. You can change either distribution.
# First generate a causal AR(1)
# Simulate a causal AR(1) process x[t] = lambda1 * x[t-1] + e[t].
# Innovations e[t] are centered chi-square(1) draws (mean zero, skewed,
# non-zero third moment).
#
# lambda1: AR coefficient; |lambda1| < 1 for a stationary causal solution.
# n1:      length of the returned series.
# burnin:  number of initial observations discarded so the series forgets
#          the x[1] = 0 starting value (default 200, as before).
#
# Returns a numeric vector of length n1.
ar1.causal <- function(lambda1, n1, burnin = 200) {
  n <- n1 + burnin
  # Generate the innovations (chi-square shifted to mean zero).
  gen <- rchisq(n, df = 1) - 1
  x <- numeric(n)
  # Forward recursion from the zero initial state; seq_len(n)[-1] is a
  # safe 2:n that is empty (instead of c(2, 1)) when n < 2.
  for (j in seq_len(n)[-1]) {
    x[j] <- lambda1 * x[j - 1] + gen[j]
  }
  # Drop the burn-in segment at the start.
  x[seq.int(burnin + 1, n)]
}
# Here is a non-causal AR(1)
# Simulate a non-causal AR(1) process satisfying
# x[t+1] = lambda1 * x[t] + e[t+1], solved BACKWARD in time:
# x[t] = (1/lambda1) * x[t+1] - (1/lambda1) * e[t+1].
# Innovations e[t] are t-distributed with 5 degrees of freedom.
#
# lambda1: AR coefficient; |lambda1| > 1 for the non-causal case.
# n1:      length of the returned series.
# burnin:  observations discarded at the END of the series (the backward
#          recursion starts from x[n] = 0), default 200 as before.
#
# Returns a numeric vector of length n1.
ar1.noncausal <- function(lambda1, n1, burnin = 200) {
  n <- n1 + burnin
  # Generate the innovations.
  gen <- rt(n, 5)
  x <- numeric(n)
  # Backward recursion; guard so n < 2 skips the loop instead of
  # producing the malformed index sequence c(n-1, ..., 1) from (n-1):1.
  if (n >= 2) {
    for (j in (n - 1):1) {
      x[j] <- (1 / lambda1) * x[j + 1] - (1 / lambda1) * gen[j + 1]
    }
  }
  # Keep the first n1 values; the last `burnin` values are the burn-in.
  x[seq_len(n1)]
}
# To generate the AR(2) process we feed the AR(1) (with coefficient lambda1)
# in as the innovation sequence of another AR(1) with coefficient lambda2.
# lambda1 and lambda2 both less than one (in absolute)
# Simulate a causal AR(2) process with real roots lambda1 and lambda2
# (both |.| < 1) by feeding a causal AR(1) with coefficient lambda1 in
# as the innovation sequence of another causal AR(1) with coefficient
# lambda2.
#
# lambda1, lambda2: the two (real) AR roots, each less than one in
#                   absolute value.
# n1:      length of the returned series.
# burnin:  initial observations discarded (default 200, as before).
#          NOTE(review): ar1.causal applies its own burn-in on top of
#          this one, so the total discarded is larger — harmless.
#
# Returns a numeric vector of length n1.
ar2.causal <- function(lambda1, lambda2, n1, burnin = 200) {
  n <- n1 + burnin
  # The inner causal AR(1) plays the role of the innovations.
  gen2 <- ar1.causal(lambda1, n)
  x <- numeric(n)
  # Forward recursion; seq_len(n)[-1] is a safe 2:n (empty when n < 2).
  for (j in seq_len(n)[-1]) {
    x[j] <- lambda2 * x[j - 1] + gen2[j]
  }
  # Drop the burn-in segment at the start.
  x[seq.int(burnin + 1, n)]
}
# lambda1 less than one and lambda2 greater than one.
# Simulate an AR(2) process with one causal root and one non-causal
# root: a causal AR(1) with coefficient lambda1 (|lambda1| < 1) is fed
# in as the innovation sequence of a non-causal AR(1) with coefficient
# lambda2 (|lambda2| > 1), solved backward in time.
#
# lambda1: causal root, less than one in absolute value.
# lambda2: non-causal root, greater than one in absolute value.
# n1:      length of the returned series.
# burnin:  observations discarded at the END of the series (the
#          backward recursion starts from x[n] = 0), default 200.
#
# Returns a numeric vector of length n1.
ar2.noncausal <- function(lambda1, lambda2, n1, burnin = 200) {
  n <- n1 + burnin
  # The causal AR(1) plays the role of the innovations.
  gen2 <- ar1.causal(lambda1, n)
  x <- numeric(n)
  # Backward recursion; guard so n < 2 skips the loop instead of
  # producing the malformed index sequence from (n-1):1.
  if (n >= 2) {
    for (j in (n - 1):1) {
      x[j] <- (1 / lambda2) * x[j + 1] - (1 / lambda2) * gen2[j + 1]
    }
  }
  # Keep the first n1 values; the last `burnin` values are the burn-in.
  x[seq_len(n1)]
}
## Test: simulate the causal/non-causal AR(1) pair and compare their
## (matching) autocorrelation functions.
# NOTE: arguments are spelled out in full — the original relied on
# partial matching of `lambda` against `lambda1`, which is fragile.
test1c <- ar1.causal(lambda1 = 0.5, n1 = 1000)
test1nc <- ar1.noncausal(lambda1 = 2, n1 = 1000)
par(mfrow = c(2, 1))
acf(test1c)
acf(test1nc)
##### Same comparison for the AR(2) processes: the causal and
##### non-causal versions share the same second-order structure.
test2c <- ar2.causal(lambda1 = 0.6, lambda2 = 0.5, n1 = 3000)
test2nc <- ar2.noncausal(lambda1 = 0.6, lambda2 = 2, n1 = 3000)
par(mfrow = c(2, 1))
acf(test2c)
acf(test2nc)
# To make sure an idea works, bump up the sample size. If it does not work when
# the sample size is huge it probably will not work at all!
######
# However, despite having the same correlation structure, when the innovations are non-Gaussian
# the causal and non-causal series do not have the same distribution, so we can identify differences.
# To see a difference between the causal and noncausal case we use the cross correlations between
# the square of one series and the non-square of the other.
# Note that the cross correlations between transformations of a series are closely related to the higher order
# cumulants.
# Important: the cross correlation between X and X^2 plotted below will only be non-zero if the third moment
# of the innovations is non-zero. If the t-distribution is used, the third moment is zero. The ccf plots below
# will still "look" significant, but this is because
# (a) the error bars (based on the variance of the sample correlations) are wrong, since the data is uncorrelated but not independent, and
# (b) there is correlation between the sample correlations, which is why you "see" the feature.
# If you use chi-square innovations you will see the true cross correlation.
# Cross-correlate each squared series with the series itself; this
# taps into third-order structure that can separate the causal and
# non-causal cases even though their ACFs match.
par(mfrow = c(2, 1))
ccf(test2c^2, test2c)
ccf(test2nc^2, test2nc)