# Program to evaluate crop yield loss to biotic stressors and management decision rules
# in variable environments
# Permanent URL: http://hdl.handle.net/2097/13786
# K. A. Garrett, Department of Plant Pathology, Kansas State University
# KAES Contribution No. 12-371-C
# www.ksu.edu/pdecology
#
# This R program was applied in the following paper, and the conceptual framework
# implemented in this program was developed by this author team:
# Garrett, K. A., A. Dobson, J. Kroschel, B. Natarajan, S. Orlandini, H. E. Z. Tonnang,
# and C. Valdivia. 2012. The effects of climate variability and the color of weather
# time series on agricultural diseases and pests, and decision making for their management.
# Agricultural and Forest Meteorology, in press.
# If you find this code useful in your research, please cite Garrett et al. 2012
#
# This R code produces three figures summarizing model results
# and includes options that can produce additional types of summaries.
# Note that because of the stochastic components of the models,
# the results will be slightly different each time.
#
# This text file, if in the working directory, can be read into R using the command
# source('RScriptFileCYL1.txt')
# The functions included in the script can also be run individually, with different
# parameter values as input, to explore the outcomes for different scenarios
# (an example call to QA is given after the Figure 1 code below).

######################## FIGURE 1

# Examples of the time series generated by the yield loss model.
# When a = 0, the Z_t series is white noise.
# When a = 0.5 or a = 0.9, the Z_t series is lighter pink or darker pink noise, respectively.
# As a increases, the greater level of temporal autocorrelation produces a smoother series Z_t.
# In the no-yield-recovery model, the R_t series of 'weather conduciveness to yield loss
# from pests or diseases' (from Equation 1) has 0 as a minimum value.
# The resulting cumulative yield loss series (from Equation 2)
# also tend to be smoother for higher a.
# The default examples are for m = 0 and sigma^2 = 1 in the no-yield-recovery model.

# QA creates time series of weather conduciveness to disease
# for one parameter combination, and the resulting yield loss
#
# lens is the length of the time series considered
# mu is the mean from Equation 1 (Garrett et al. 2012)
# zsd is the standard deviation of the white noise series W_t used to create AR(1) Z_t
# nrun is the number of time series to be considered
# no.yld.recvry is TRUE if there is no yield recovery in the model;
#   that is, R_t is set to be non-negative
# a.ar is the AR(1) coefficient
#
QA <- function(lens=10, mu=0.2, zsd=0.5, nrun=100, no.yld.recvry=T, a.ar=0.9){
  yvec.end <- vector(length=nrun)          # create final output y vector (yield loss)
  for(m in 1:nrun){                        # repeat for each of the runs
    zvec <- vector(length=lens+100)        # create first-order AR vector, Z_t
    zvec[1] <- rnorm(n=1, mean=0, sd=zsd)  # Z_1 is drawn randomly
    for(k in 2:(100+lens)){zvec[k] <- a.ar * zvec[k-1] + (1 - a.ar)*rnorm(n=1, mean=0, sd=zsd)}
    zvec <- zvec[-(1:100)]                 # the first 100 values of Z_t are discarded
    rvec <- mu + zvec                      # R_t contains rates, conduciveness of weather from Eq 1
    if(no.yld.recvry==T){                  # in the no-yield-recovery model, negative R_t are set to 0
      rvec <- pmax(rvec,0)
    }
    # logistic function
    yvec <- vector(length=lens)            # create vector for yield loss, Y_t, from Eq 2
    yvec[1] <- 1                           # arbitrary starting value for Y_1
    for (k in 2:lens){
      yvec[k] <- yvec[k-1] + rvec[k] * yvec[k-1] * (1 - (yvec[k-1]/100))
      # the discrete logistic can go outside bounds; replace outside values with the bound values
      if(yvec[k] < 0){yvec[k] <- 0} else if(yvec[k] > 100){yvec[k] <- 100}
    }
    yvec.end[m] <- yvec[lens]              # save the final value of each of the nrun Y_t
  }
  # rvec, zvec, and yvec are just from the last run
  list(yvec.end=yvec.end, rvec=rvec, zvec=zvec, yvec=yvec)
}

# Fig1 plots output from a single run
#
# tmu is the mean from Equation 1 (Garrett et al. 2012)
# tzsd is the standard deviation of the white noise series W_t used to create AR(1) Z_t
# ta.ar is the AR(1) coefficient
Fig1 <- function(ta.ar=0.0, tmu=0, tzsd=1){
  Out1 <- QA(lens=20, mu=tmu, zsd=tzsd, nrun=1, no.yld.recvry=T, a.ar=ta.ar)
  plot(Out1$zvec, type='l', xlab='Time', ylab='Zt', main=paste('a = ', ta.ar))
  plot(Out1$rvec, type='l', xlab='Time', ylab='Rt', main=paste('a = ', ta.ar))
  plot(Out1$yvec, type='l', ylim=c(0,100), xlab='Time', ylab='Yield loss', main=paste('a = ', ta.ar))
}

# Create pdf for Figure 1 (as in Garrett et al. 2012)
pdf("fig1.pdf")
par(mfrow=c(3,3))
Fig1(ta.ar=0.0, tmu=0)   # each use of function Fig1 creates a row of plots
                         # for a different value of the AR coefficient
Fig1(ta.ar=0.5, tmu=0)
Fig1(ta.ar=0.9, tmu=0)
dev.off()
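# Example of running QA on its own (not one of the figures in Garrett et al. 2012);
# the parameter values and the object name exQA below are arbitrary illustrations of how
# a single scenario can be explored, and the lines are commented out so that sourcing
# this file still produces only the three figures:
# exQA <- QA(lens=10, mu=0.2, zsd=1, nrun=500, no.yld.recvry=T, a.ar=0.5)
# mean(exQA$yvec.end)                           # mean final yield loss across the 500 runs
# quantile(exQA$yvec.end, c(0.05, 0.5, 0.95))   # spread of final yield loss
# hist(exQA$yvec.end, xlab='Final yield loss (%)', main='a = 0.5, m = 0.2')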
######################## FIGURE 2

# Percentage yield loss (smoothed in plots) for different values of a and m
# for the 'no-yield-recovery' model for 10 time steps (generations).
# When a = 0, there is no temporal correlation,
# and temporal correlation increases with increasing a.
# When m is low, weather conduciveness to disease development is low.

# QA.summary produces summary statistics for multiple time series with different parameters
#
# t.lens is the vector of time series lengths considered
# t.mu is the vector of means from Equation 1 (Garrett et al. 2012)
# t.zsd is the vector of standard deviations of the white noise series W_t used to create AR(1) Z_t
# nrun is the number of time series to be considered for each parameter combination
# ta.ar is the AR(1) coefficient
QA.summary <- function(t.lens=c(5,10,15,20), t.mu=c(-2,0,2), t.zsd=((1:30)/10), nrun=1000, ta.ar=0.0){
  tot <- length(t.lens) * length(t.mu) * length(t.zsd)  # tot is the total number of parameter combinations
  outls <- matrix(nrow=tot*nrun, ncol=8)                # output matrix
  k.out <- 0
  for(ttlens in t.lens){
    for(ttmu in t.mu){      # for each parameter combination do the following in its rows of the matrix
      for(ttzsd in t.zsd){  # (where columns 1-7 have repeated entries of summary stats)
        trange <- (1:nrun) + k.out*nrun
        outls[trange,1] <- ttlens   # record the number of time steps (generations) considered
        outls[trange,2] <- ttmu     # record the mean conduciveness to yield loss to pests/disease
        outls[trange,3] <- ttzsd    # record the standard deviation from the noise process
        # use QA to generate nrun time series for that parameter combination
        temp2 <- QA(lens=ttlens, mu=ttmu, zsd=ttzsd, nrun=nrun, a.ar=ta.ar)
        temp <- temp2$yvec.end      # temp is the vector of final yield loss values from each run
        outls[trange,4] <- mean(temp)            # find the mean of the final yield losses
        outls[trange,5] <- sum(temp == 1)/nrun   # find the freq with which yield loss does not increase
        outls[trange,6] <- sum(temp == 100)/nrun # find the freq with which final yield loss is 100
        outls[trange,7] <- var(temp)             # find the variance in final yield loss
        outls[trange,8] <- temp                  # store the vector of final yield loss values in column 8
        k.out <- k.out + 1                       # go to the next parameter combination
      }
    }
  }
  outls
}

######
out0   <- QA.summary(nrun=1000, ta.ar=0)
out0.5 <- QA.summary(nrun=1000, ta.ar=0.5)
out0.9 <- QA.summary(nrun=1000, ta.ar=0.9)

# Example of code to save output from QA.summary: save(out0,file='Fig2out0')
# To restore: load('Fig2out0')

# plotfig2 plots results of QA.summary for lens generations
#
# lens is the length of the time series considered
# out1 is an output matrix from QA.summary
# ta.ar is the AR(1) coefficient
plotfig2 <- function(out1, ta.ar, lens=10){
  tlens    <- subset(out1, out1[,1]==lens)   # limit eval of out1 to rows where time step = lens
  tlensmn2 <- subset(tlens, tlens[,2]== -2)  # separate objects for different values of the mean
  tlensm0  <- subset(tlens, tlens[,2]== 0)
  tlensm2  <- subset(tlens, tlens[,2]== 2)
  smoothScatter(tlensmn2[,3]^2, tlensmn2[,8], ylim=c(0,100), xlab='Variance', ylab='Yield Loss',
    colramp = colorRampPalette(c("white", "black")),
    main=paste('a = ', ta.ar, ', m = -2', sep=''), nbin=256, nrpoints=0, bandwidth=1)
  smoothScatter(tlensm0[,3]^2, tlensm0[,8], ylim=c(0,100), xlab='Variance', ylab='Yield Loss',
    colramp = colorRampPalette(c("white", "black")),
    main=paste('a = ', ta.ar, ', m = 0', sep=''), nbin=256, nrpoints=0, bandwidth=1)
  smoothScatter(tlensm2[,3]^2, tlensm2[,8], ylim=c(0,100), xlab='Variance', ylab='Yield Loss',
    colramp = colorRampPalette(c("white", "black")),
    main=paste('a = ', ta.ar, ', m = 2', sep=''), nbin=256, nrpoints=0, bandwidth=1)
}

# Create pdf for Figure 2
pdf("fig2.pdf")
par(mfrow=c(3,3))
plotfig2(out0,   ta.ar=0.0, lens=10)
plotfig2(out0.5, ta.ar=0.5, lens=10)
plotfig2(out0.9, ta.ar=0.9, lens=10)
dev.off()
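# Example of inspecting QA.summary output directly (not part of the original figures);
# column numbers follow the comments in QA.summary, and the subset values below are
# arbitrary illustrations. Commented out so that sourcing this file is unchanged:
# ex2 <- subset(out0.9, out0.9[,1]==10 & out0.9[,2]==0 & out0.9[,3]==1)  # lens=10, m=0, sd=1
# ex2[1,4]   # mean final yield loss for that parameter combination
# ex2[1,6]   # proportion of runs in which final yield loss reached 100
# ex2[1,7]   # variance of final yield loss across runs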
######################## FIGURE 3

# Proportion incorrect decisions based on two different decision rules
# about use of mid-season management.
# When a = 0, there is no temporal correlation,
# and temporal correlation increases with increasing a.
# When m is low, mean weather conduciveness to disease development is low.
# Plotted circles indicate performance of decision-making based on current information
# through the fifth of ten generations; squares indicate performance of decision-making
# based on past information from three previous years.
# Filled symbols indicate false negative decisions,
# such that management was not applied when it would have increased profit.
# Open symbols indicate false positive decisions,
# such that management was applied when it decreased profit.

# QB creates time series of weather conduciveness to loss and
# simulates management decisions for one parameter combination
#
# ulens is the length of the time series considered
# umu is the mean from Equation 1 (Garrett et al. 2012)
# uzsd is the standard deviation of the white noise series W_t used to create AR(1) Z_t
# unrun is the number of time series to be considered
# unyr is TRUE if there is no yield recovery in the model;
#   that is, R_t is set to be non-negative
# ua.ar is the AR(1) coefficient
# man.cost is the cost of management (in units of yield %)
# yvecman is the time series of yield loss when management is applied
QB <- function(ulens=10, umu=0.2, uzsd=0.5, unrun=1000, unyr=T, ua.ar=0.9, man.cost=20){
  yvec.end    <- vector(length=unrun)  # yield loss at end of season without management
  yvecman.end <- vector(length=unrun)  # yield loss at end of season with management
  y5vec.end   <- vector(length=unrun)  # yield loss at time 5
  for (m in 1:unrun){
    QAout <- QA(lens=ulens, mu=umu, zsd=uzsd, nrun=1, no.yld.recvry=unyr, a.ar=ua.ar)
    rvec <- QAout$rvec
    yvec <- QAout$yvec
    yvecman <- vector(length=ulens)    # yvecman will contain yield loss with management
    yvecman[1:5] <- yvec[1:5]          # times 1-5 are the same with or without management
    yvecman[6:8] <- yvecman[5]         # three generations without increase
    for(k in 9:ulens) {                # logistic increase in yield loss starting again at time 9
      yvecman[k] <- yvecman[k-1] + rvec[k] * yvecman[k-1] * (1 - (yvecman[k-1]/100))
      if(yvecman[k] < 0){yvecman[k] <- 0} else if(yvecman[k] > 100){yvecman[k] <- 100}
    }
    yvec.end[m]    <- yvec[ulens]      # save final yield loss without management
    y5vec.end[m]   <- yvec[5]          # save yield loss at time 5
    yvecman.end[m] <- yvecman[ulens]   # save final yield loss with management
  }
  # decision based on current season: TRUE if yield loss at step 5 is >= 5 and < 80, FALSE otherwise
  dec.cur <- y5vec.end >= 5 & y5vec.end < 80
  # decision based on past: TRUE if 2 or more of 3 randomly drawn seasons exceed 20, otherwise FALSE
  sammat <- matrix(sample(x=yvec.end, size=3*unrun, replace=T), ncol=3)
  dec.pas <- apply(sammat, MARGIN=1, function(x) sum(x > 20) >= 2)
  # management was correct (man.cor) is TRUE if management reduces yield loss (Y) by more than man.cost
  man.cor <- yvec.end - yvecman.end > man.cost
  cur.falsepos <- dec.cur > man.cor  # false positive based on current info: managed when not needed
  cur.falseneg <- dec.cur < man.cor  # false negative based on current info: did not manage when needed
  pas.falsepos <- dec.pas > man.cor  # false positive based on past info
  pas.falseneg <- dec.pas < man.cor  # false negative based on past info
  list(yvec.end=yvec.end, yvecman.end=yvecman.end, y5vec.end=y5vec.end, dec.cur=dec.cur, dec.pas=dec.pas,
       man.cor=man.cor, cur.falsepos=cur.falsepos, cur.falseneg=cur.falseneg,
       pas.falsepos=pas.falsepos, pas.falseneg=pas.falseneg)
}

######
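# Example of running QB on its own (not part of the original figures); the parameter
# values and the object name exQB are arbitrary illustrations, and the lines are
# commented out so that sourcing this file is unchanged:
# exQB <- QB(ulens=10, umu=0, uzsd=1.5, unrun=1000, unyr=T, ua.ar=0.9, man.cost=20)
# mean(exQB$man.cor)        # proportion of runs in which management would have paid off
# mean(exQB$cur.falsepos)   # proportion of false positive decisions based on current info
# mean(exQB$pas.falseneg)   # proportion of false negative decisions based on past info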
# QB.summary produces multiple time series with different parameter combinations
# and evaluations of management decisions
#
# lens is the length of the time series considered
# t.mu is the vector of means from Equation 1 (Garrett et al. 2012)
# t.zsd is the vector of standard deviations of the white noise series W_t used to create AR(1) Z_t
# nrun is the number of time series to be considered for each parameter combination
# QB.summary uses the no-yield-recovery model; that is, R_t is set to be non-negative
# ta.ar is the AR(1) coefficient
# t.man.cost is the cost of management (in units of yield %)
QB.summary <- function(lens=10, t.mu=c(-2,0,2), t.zsd=((1:30)/10), nrun=1000, ta.ar=0.9, t.man.cost=20){
  tot <- length(t.mu) * length(t.zsd)   # the number of parameter combinations
  out.mat <- matrix(nrow=tot, ncol=14)
  k.out <- 1
  for(ttmu in t.mu){
    for(ttzsd in t.zsd){
      out.mat[k.out,1] <- lens
      out.mat[k.out,2] <- ttmu
      out.mat[k.out,3] <- ttzsd
      temp.all <- QB(ulens=lens, umu=ttmu, uzsd=ttzsd, unrun=nrun, ua.ar=ta.ar, man.cost=t.man.cost)
      temp <- temp.all$yvec.end
      out.mat[k.out,4] <- mean(temp)
      out.mat[k.out,5] <- sum(temp == 1)/nrun
      out.mat[k.out,6] <- sum(temp == 100)/nrun
      out.mat[k.out,7] <- var(temp)
      out.mat[k.out,8]  <- sum(temp.all$man.cor)/nrun       # prop correct to manage
      out.mat[k.out,9]  <- sum(temp.all$dec.cur)/nrun       # prop manage based on current
      out.mat[k.out,10] <- sum(temp.all$dec.pas)/nrun       # prop manage based on past
      out.mat[k.out,11] <- sum(temp.all$cur.falsepos)/nrun  # prop false pos based on current
      out.mat[k.out,12] <- sum(temp.all$cur.falseneg)/nrun  # prop false neg based on current
      out.mat[k.out,13] <- sum(temp.all$pas.falsepos)/nrun  # prop false pos based on past
      out.mat[k.out,14] <- sum(temp.all$pas.falseneg)/nrun  # prop false neg based on past
      k.out <- k.out + 1
    }
  }
  out.mat
}

# Create output from QB.summary for three values of the AR coefficient
out.QB0.0 <- QB.summary(nrun=1000, ta.ar=0.0)
out.QB0.5 <- QB.summary(nrun=1000, ta.ar=0.5)
out.QB0.9 <- QB.summary(nrun=1000, ta.ar=0.9)

# Sm3 creates a plot for one combination of a and m
#
# tmx is a subset of the output matrix from QB.summary
# top1 is the AR coefficient a
# mnum is the value of m (mean of W_t)
Sm3 <- function(tmx, top1, mnum){
  plot(tmx[,3]^2, tmx[,11], ylim=c(0,1), xlab='Variance', ylab='Proportion incorrect decisions',
       main=paste('a = ', top1, ', m = ', mnum, sep=''))  # cur false pos - open circle
  points(tmx[,3]^2, tmx[,12], pch=16)  # cur false neg - filled circle
  points(tmx[,3]^2, tmx[,13], pch=22)  # past false pos - open square
  points(tmx[,3]^2, tmx[,14], pch=15)  # past false neg - filled square
}

# plotfig3 creates plots for a row (one value of a) for 3 values of m
#
# out.QB is an output matrix from QB.summary
# top1 is the AR coefficient a
plotfig3 <- function(out.QB, top1) {
  tmn2 <- subset(out.QB, out.QB[,2]== -2)
  tm0  <- subset(out.QB, out.QB[,2]== 0)
  tm2  <- subset(out.QB, out.QB[,2]== 2)
  Sm3(tmn2, top1, mnum=-2)
  Sm3(tm0,  top1, mnum=0)
  Sm3(tm2,  top1, mnum=2)
}

# Create pdf for Figure 3
pdf("fig3.pdf")
par(mfrow=c(3,3))
plotfig3(out.QB0.0, top1=0.0)
plotfig3(out.QB0.5, top1=0.5)
plotfig3(out.QB0.9, top1=0.9)
dev.off()
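# Example of exploring an additional scenario with QB.summary (not a figure from
# Garrett et al. 2012); the management cost of 40 and the object name out.QB.cost40 are
# arbitrary illustrations, and the lines are commented out so that sourcing this file
# produces only fig1-fig3:
# out.QB.cost40 <- QB.summary(nrun=1000, ta.ar=0.9, t.man.cost=40)
# par(mfrow=c(1,3))
# plotfig3(out.QB.cost40, top1=0.9)   # one row of plots for a = 0.9 with the higher cost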