#Code to fit Density Surface Models to fin whale data described in Sigourney et al.

###WARNINGS###
#Filenames may change. May need to rename this file "Annotated_Model_Code_Sigourney_et_al_R3.r"
#Make sure to set the working directory to the same folder where the code and data are saved

#Load libraries
library(mvtnorm)
library(mrds)
library(pls)
library(reshape)
library(mgcv)
library(lattice)
library(MASS)
library(plyr)
library(Distance)
library(rjags)

#Load data files
BM_Sightings<-read.csv("Fin_Sightings_BM.csv")        #Sightings data for distance sampling formatted for the Bayesian Method
mrds_Sightings<-read.csv("Fin_Sightings_mrds.csv")    #Sightings data formatted for mrds
Fin_Whale_Data<-read.csv("Final_Fin_Whale_Data.csv")  #Input data for both Bayesian Method and Two-Step Method

#Truncation distances in meters (1=Shipboard, 2=Aerial...here and throughout)
W_1<-6000
W_2<-900

#Subset shipboard surveys
All_Dat_Ship<-subset(mrds_Sightings,survey==1 & distance>-1 & distance<W_1)

#Subset aerial surveys
All_Dat_Air<-subset(mrds_Sightings,survey==2 & distance>-1 & distance<W_2)

#Index grid cells with non-zero and zero sighting totals
NonZeroObs<-which(Sightings>0)
Nnonzero<-length(NonZeroObs)
ZeroObs<-which(Sightings==0)
Nzero<-length(ZeroObs)

#MCMC inputs
burnin=15000
nitt=100000
chains=1
thin=100

##########################################
#JAGS code to run Bayesian Method
#######################################
sink("Fin_Model_Bayesian_Method.bugs")
cat("
model{

  ######
  # DETECTABILITY COMPONENT OF MODEL
  #####

  ##### PRIORS
  W_x <- (5/10)*3600
  pi <- 3.141593

  #Priors for g(0) parameters for shipboard data (MR part of MRDS)
  b1.0_1 ~ dnorm(0,0.0001)
  b.dist_1 ~ dnorm(0,0.0001)

  #Priors for hazard rate function (DS part of MRDS)
  b.df.0_1 ~ dnorm(0,0.0001)  #Scale intercept (shipboard)
  b.df.0_2 ~ dnorm(0,0.0001)  #Scale intercept (aerial)

  #Beaufort effect
  b.df.beaufort_1 ~ dnorm(0,0.0001)
  b.df.beaufort_2 ~ dnorm(0,0.0001)

  #Subjective weather effect
  b.df.subj_1 ~ dnorm(0,0.0001)

  #Shape parameters
  b_1 ~ dunif(0,10)
  b_2 ~ dunif(0,10)

  #Power parameter for CPG likelihood
  zeta ~ dunif(1.01,2)
  nu <- (2 - zeta)/(zeta - 1)

  #For group size submodel
  b0.size ~ dnorm(0, 0.001)
  log(lambda.size) <- b0.size

  #Informed beta prior for aerial g(0)
  g.knot ~ dbeta(a_g0, b_g0)

  #Informed beta prior for surface availability
  Av ~ dbeta(a_Av, b_Av)

  #Group size model
  for (g in 1:N.size){
    gs[g] ~ dpois(lambda.size)
  }

  ##### BEGIN MODEL LOOP (for all N observations in detection dataset,
  # i.e., for all detection distances)

  #NE Shipboard
  for(i in 1:N_Sights_1){

    ###################################
    #MR part to calculate g(0) for shipboard surveys
    ################################################
    logit(p1_1[i])<-b1.0_1+b.dist_1*dist_1[i]  #Team 1
    logit(p2_1[i])<-b1.0_1+b.dist_1*dist_1[i]  #Team 2 (no team effect, so same as Team 1)
    logit(p1.0_1[i])<-b1.0_1
    logit(p2.0_1[i])<-b1.0_1

    p.dot_1[i]<-p1_1[i]+p2_1[i]-p1_1[i]*p2_1[i]
    p0_1[i]<-p1.0_1[i]+p2.0_1[i]-p1.0_1[i]*p2.0_1[i]

    pi_1.1[i]<-p1_1[i]*(1-p2_1[i])/p.dot_1[i]  #y10
    pi_1.2[i]<-(1-p1_1[i])*p2_1[i]/p.dot_1[i]  #y01
    pi_1.3[i]<-p1_1[i]*p2_1[i]/p.dot_1[i]      #y11

    Obs_1[i,1]~dbern(pi_1.1[i])
    Obs_1[i,2]~dbern(pi_1.2[i])
    Obs_1[i,3]~dbern(pi_1.3[i])

    ##############################################################################################
    #DS part of MRDS approach (USE HAZARD RATE KEY)
    ############################################################################################
    mu.df_1[i] <- exp(b.df.0_1+b.df.beaufort_1*Beauf_1[i]+b.df.subj_1*Subj_1[i])

    #Numerically integrate hazard rate function
    esw_pred_1[i,1]<-0
    for (c in 1:(steps-1)){
      dx1_1[i,c]<- 1-exp(-((y.step_1[c]/mu.df_1[i])^(-b_1)))
      dx2_1[i,c]<- 1-exp(-((y.step_plus1_1[c]/mu.df_1[i])^(-b_1)))
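      #Trapezoid update: each step adds 0.5*step.size_1*(detection probability at y.step_1[c] + detection probability at y.step_plus1_1[c]);
      #the accumulated value esw_pred_1[i,steps] approximates the integral of the detection function, i.e. the effective strip width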
      esw_pred_1[i,c+1]<-esw_pred_1[i,c]+0.5*step.size_1*(dx1_1[i,c]+dx2_1[i,c])
    }
    esw_1[i]<-esw_pred_1[i,steps]
    f0_1[i] <- 1/esw_1[i]

    #Ones Trick
    L.f0_1[i] <- (1-exp(-((dist_1[i]/mu.df_1[i])^(-b_1)))) * 1/esw_1[i]  #hazard rate likelihood
    phi_1[i]<- L.f0_1[i]/C
    ones.dist_1[i] ~ dbern(phi_1[i])
  }

  mean.esw_1 <- mean(esw_1[])  #Average effective strip width (esw)
  mean.p0_1 <- mean(p0_1[])    #Average g(0)
  Pcds_1<-mean.esw_1/W_1       #Average detection probability from DS component
  P.det_1<- mean.p0_1*Pcds_1   #Average overall detection probability for shipboard surveys

  #NE Aerial
  for(i in 1:N_Sights_2){

    ##############################################################################################
    # USE HAZARD RATE KEY
    ############################################################################################
    mu.df_2[i] <- exp(b.df.0_2+b.df.beaufort_2*Beauf_2[i])

    #Numerically integrate hazard rate function using Heun's (trapezoidal) method
    esw_pred_2[i,1]<-0
    for (c in 1:(steps-1)){
      dx1_2[i,c]<- 1-exp(-((y.step_2[c]/mu.df_2[i])^(-b_2)))
      dx2_2[i,c]<- 1-exp(-((y.step_plus1_2[c]/mu.df_2[i])^(-b_2)))
      esw_pred_2[i,c+1]<-esw_pred_2[i,c]+0.5*step.size_2*(dx1_2[i,c]+dx2_2[i,c])
    }
    esw_2[i]<-esw_pred_2[i,steps]
    #f0_2[i] <- 1/esw_2[i]

    #Ones Trick
    L.f0_2[i] <- (1-exp(-((dist_2[i]/mu.df_2[i])^(-b_2)))) * 1/esw_2[i]  #hazard rate likelihood
    phi_2[i]<- L.f0_2[i]/C
    ones.dist_2[i] ~ dbern(phi_2[i])
  }

  mean.esw_2 <- mean(esw_2[])
  Pcds_2<-mean.esw_2/W_2
  P.det_2<- g.knot*Pcds_2

  #####################################################################
  #Fit Habitat Model (step 2 of the DSM)
  ###################################################################################################################

  #GAM Stuff

  #Parameters for CPG likelihood using Candy (2004) formulation
  gamma1 ~ dunif(-100,100)
  gammaDIST125 ~ dunif(-100,100)
  gammaDEPTH ~ dunif(-100,100)
  gammaDIST2SHORE ~ dunif(-100,100)
  gammaSST ~ dunif(-100,100)

  b.plusgamma[1]<-b[1]+gamma1
  b.plusgamma[2:5]<-b[2:5]+gammaDIST125
  b.plusgamma[6:9]<-b[6:9]+gammaDEPTH
  b.plusgamma[10:13]<-b[10:13]+gammaDIST2SHORE
  b.plusgamma[14:17]<-b[14:17]+gammaSST

  b.minusgamma[1]<-b[1]-gamma1
  b.minusgamma[2:5]<-b[2:5]-gammaDIST125
  b.minusgamma[6:9]<-b[6:9]-gammaDEPTH
  b.minusgamma[10:13]<-b[10:13]-gammaDIST2SHORE
  b.minusgamma[14:17]<-b[14:17]-gammaSST

  eta <- X %*% b  ## linear predictor
  eta.plusgamma<-X %*% b.plusgamma
  eta.minusgamma<-X %*% b.minusgamma

  ## Set up GAM priors (using output from jagam)
  for (i in 1:1) { b[i] ~ dnorm(0,0.0021) }
  ## prior for s(DIST125)...
  K1 <- S1[1:4,1:4] * lambda[1]
  b[2:5] ~ dmnorm(zero[2:5],K1)
  ## prior for s(DEPTH)...
  K2 <- S2[1:4,1:4] * lambda[2]
  b[6:9] ~ dmnorm(zero[6:9],K2)
  ## prior for s(DIST2SHORE)...
  K3 <- S3[1:4,1:4] * lambda[3]
  b[10:13] ~ dmnorm(zero[10:13],K3)
  ## prior for s(SST)...
  K4 <- S4[1:4,1:4] * lambda[4]
  b[14:17] ~ dmnorm(zero[14:17],K4)
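  #The priors above follow the template written by the mgcv::jagam function: a vague normal prior on the
  #intercept b[1] and, for each smooth term, a multivariate normal prior on its 4 basis coefficients whose
  #precision is the jagam penalty matrix (S1-S4) scaled by the corresponding smoothing parameter lambda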
  ## smoothing parameter priors CHECK...
  for (i in 1:4) {
    lambda[i] ~ dgamma(.05,.005)
    rho[i] <- log(lambda[i])
  }

  #Habitat Loop
  for (t in 1:N_t) {

    #Predict detection probability for each grid cell
    new_mu.df_1[t] <- exp(b.df.0_1+b.df.beaufort_1*Beaufort[t]+b.df.subj_1*Subjective[t])
    new_esw_pred_1[t,1]<-0

    #Numerically integrate hazard rate function
    for (c in 1:(steps-1)){
      new_dx1_1[t,c]<- 1-exp(-((y.step_1[c]/new_mu.df_1[t])^(-b_1)))
      new_dx2_1[t,c]<- 1-exp(-((y.step_plus1_1[c]/new_mu.df_1[t])^(-b_1)))
      new_esw_pred_1[t,c+1]<-new_esw_pred_1[t,c]+0.5*step.size_1*(new_dx1_1[t,c]+new_dx2_1[t,c])
    }
    new_esw_1[t]<-new_esw_pred_1[t,steps]

    #Calculate g(0) for shipboard surveys
    new_f0_1[t] <- 1/new_esw_1[t]
    P.0.1_1[t]<-(exp(b1.0_1))/(1+exp(b1.0_1))
    P.0.2_1[t]<-(exp(b1.0_1))/(1+exp(b1.0_1))

    #NE Ship
    P_0_1[t]<- P.0.1_1[t]+P.0.2_1[t]-P.0.1_1[t]*P.0.2_1[t]

    #Overall predicted detection probability for shipboard survey in grid cell t
    P_t_1[t]<- P_0_1[t]*new_esw_1[t]/W_1

    ####################################################################################################################
    #Aerial surveys
    ######################
    # USE HAZARD RATE KEY
    #####################
    new_mu.df_2[t] <- exp(b.df.0_2+b.df.beaufort_2*Beaufort[t])
    new_esw_pred_2[t,1]<-0

    #Numerically integrate hazard rate function
    for (c in 1:(steps-1)){
      new_dx1_2[t,c]<- 1-exp(-((y.step_2[c]/new_mu.df_2[t])^(-b_2)))
      new_dx2_2[t,c]<- 1-exp(-((y.step_plus1_2[c]/new_mu.df_2[t])^(-b_2)))
      new_esw_pred_2[t,c+1]<-new_esw_pred_2[t,c]+0.5*step.size_2*(new_dx1_2[t,c]+new_dx2_2[t,c])
    }
    new_esw_2[t]<-new_esw_pred_2[t,steps]

    #Surface availability for aerial surveys from informed prior
    Av_2[t]<-Av

    #g(0) for aerial surveys from informed prior
    g.knot_2[t]<-g.knot

    #Overall detection probability for aerial survey in grid cell t
    P_t_2[t]<- g.knot_2[t]*new_esw_2[t]/W_2*Av_2[t]

    ################################################################
    #Calculate amount of search effort in grid cell t
    effort_offset_1[t]<-(Effort[t]*2*W_1)
    effort_offset_2[t]<-(Effort[t]*2*W_2)

    #Use indicator to determine which survey occurred in grid cell t (shipboard or aerial)
    P_t[t]<-P_t_1[t]*Indicator_1[t]+P_t_2[t]*Indicator_2[t]
    effort_offset[t]<-effort_offset_1[t]*Indicator_1[t]+effort_offset_2[t]*Indicator_2[t]

    mu[t] <- exp(eta[t])                 ## expected response
    mu_g[t] <- exp(eta.plusgamma[t]/2)   #For CPG formulation of likelihood
    mu_p[t] <- exp(eta.minusgamma[t]/2)  #For CPG formulation of likelihood

    #Multiply predicted density by detection probability and search effort
    eff.mu_p[t]<-mu_p[t]*P_t[t]*effort_offset[t]
    eff.mu[t]<-mu[t]*P_t[t]*effort_offset[t]
    ################################################################
  }

  #Loop through non-zero sightings
  for (i in 1:Nnonzero){
    Sightings[NonZeroObs[i]] ~ dgamma(psi[NonZeroObs[i]]*nu, nu/mu_g[NonZeroObs[i]])
    psi[NonZeroObs[i]] ~ dpois(eff.mu_p[NonZeroObs[i]])
  }

  #Loop through zero sightings
  for (i in 1:Nzero){
    Sightings[ZeroObs[i]] ~ dpois(eff.mu_p[ZeroObs[i]])
    psi[ZeroObs[i]] ~ dpois(eff.mu_p[ZeroObs[i]])
  }

} # end model
",fill=TRUE)
sink()
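#NOTE: the design matrix X, penalty matrices S1-S4, the zero vector, and the initial values b and lambda
#passed to JAGS below are the standard outputs of the mgcv::jagam function (the GAM priors in the model
#above follow the jagam template). The sketch below shows one plausible way to construct such objects;
#it is NOT the original data-preparation code: the response (Sightings), family, basis dimension (k=5,
#which yields the 4 coefficients per smooth used in the model), covariate column names, object name and
#file name are all assumptions.
jagam_setup <- jagam(Sightings ~ s(DIST125, k=5) + s(DEPTH, k=5) + s(DIST2SHORE, k=5) + s(SST, k=5),
                     data = Final_Cov_Mat_Fin, family = poisson,
                     file = "Fin_jagam_template.bugs")  #writes a template model file (not used directly)
X      <- jagam_setup$jags.data$X      #design matrix (intercept + 4 smooths)
S1     <- jagam_setup$jags.data$S1     #penalty matrix for s(DIST125)
S2     <- jagam_setup$jags.data$S2     #penalty matrix for s(DEPTH)
S3     <- jagam_setup$jags.data$S3     #penalty matrix for s(DIST2SHORE)
S4     <- jagam_setup$jags.data$S4     #penalty matrix for s(SST)
zero   <- jagam_setup$jags.data$zero   #zero mean vector for the dmnorm priors
b      <- jagam_setup$jags.ini$b       #initial values for the GAM coefficients
lambda <- jagam_setup$jags.ini$lambda  #initial values for the smoothing parameters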
# Bundle data
Model.data <- list(
  N_Sights_1=dim(Dat_Sightings_1)[1],
  N_Sights_2=dim(Dat_Sightings_2)[1],
  Sightings=Sightings,
  Obs_1=Dat_Sightings_1[,10:12],
  C=10000,
  ones.dist_1=ones.dist_1,
  ones.dist_2=ones.dist_2,
  W_1=W_1/1000,
  W_2=W_2/1000,
  N_t=N_t,                                   #Number of unique grid cells
  gs=gs-1,                                   #subtract 1 for truncated Poisson
  N.size=N.size,
  min.P=min.P,
  dist_1=c(Dat_Sightings_1[,5])/1000+0.015,  #column with distance data
  dist_2=c(Dat_Sightings_2[,5])/1000+0.015,
  Beauf_1=c(Dat_Sightings_1[,6]),
  Beauf_2=c(Dat_Sightings_2[,6]),
  Subj_1=c(Dat_Sightings_1[,7]),
  Subj_2=c(Dat_Sightings_2[,7]),
  Area=Final_Cov_Mat_Fin[,2],
  Effort=Final_Cov_Mat_Fin[,11],
  Beaufort=Final_Cov_Mat_Fin[,12],
  Subjective=Final_Cov_Mat_Fin[,14],
  Indicator_1=Indicator_1,
  Indicator_2=Indicator_2,
  Es=Es,
  Ed=Ed,
  a_g0=a_g0,
  b_g0=b_g0,
  a_Av=a_Av,
  b_Av=b_Av,
  steps=steps,
  #Numerical integration grid and step sizes
  y.step_1=y.step_1_1,
  y.step_plus1_1=y.step_plus1_1_1,
  y.step_2=y.step_2_1,
  y.step_plus1_2=y.step_plus1_2_1,
  step.size_1=step.size_1_1,
  step.size_2=step.size_2_1,
  #GAM Inputs
  X=X,
  S1=S1,
  S2=S2,
  S3=S3,
  S4=S4,
  # S5=S5,
  # S6=S6,
  zero=zero,
  #CPG Inputs
  NonZeroObs=NonZeroObs,
  Nnonzero=Nnonzero,
  ZeroObs=ZeroObs,
  Nzero=Nzero
)

#Initial values
inits <- function(){list(
  b.df.0_1 = runif(1,1,3),
  b.df.0_2 = runif(1,1,3),
  b.df.beaufort_1 = runif(1,-1,1),
  b.df.beaufort_2 = runif(1,-1,1),
  b.df.subj_1 = runif(1,-1,1),
  b_1 = runif(1,0,1),
  b_2 = runif(1,0,1),
  b1.0_1 = runif(1,0,1),
  b.dist_1 = runif(1,0,1),
  psi=as.numeric(Sightings != 0),
  gammaDIST125=0,
  gammaDEPTH=0,
  gammaDIST2SHORE=0,
  gammaSST=0,
  #GAM inits (from jagam function)
  b=b,
  lambda=lambda
)
}

#Define parameters to be monitored
varsToMonitor<-c("b.df.0_1", "b_1", "b.df.beaufort_1", "b.df.subj_1", "b.dist_1", "b1.0_1", "mean.esw_1", "mean.p0_1", "Pcds_1", "P.det_1",
                 "b.df.0_2", "b_2", "b.df.beaufort_2", "mean.esw_2", "Pcds_2", "P.det_2", "P_t", "mu", "eta", "lambda.size", "b0.size",
                 "lambda", "b", "rho", "g.knot", "Av", "gamma1", "eff.mu", "gammaDIST125", "gammaDEPTH", "gammaDIST2SHORE", "gammaSST")

#MCMC inputs
nitt=50000
burnin=20000
chains=2
thin=50

jm <- jags.model("Fin_Model_Bayesian_Method.bugs",
                 data = Model.data,
                 inits = inits,
                 n.adapt = burnin,
                 n.chains = chains)

sam <- jags.samples(jm,
                    varsToMonitor,
                    n.iter = nitt,
                    thin = thin)
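#Post-processing sketch (not part of the fitting code above): summarize a few monitored quantities from
#the mcarray output of jags.samples. Assumptions: the coda package is available, and grid-cell abundance
#is approximated as the expected density mu[t] multiplied by cell area (column 2 of Final_Cov_Mat_Fin,
#as passed in Model.data); adjust to the quantities actually needed.
library(coda)
summary(as.mcmc.list(sam$P.det_1))  #overall shipboard detection probability
summary(as.mcmc.list(sam$P.det_2))  #overall aerial detection probability
mu_mean <- apply(sam$mu, 1, mean)   #posterior mean expected density in each grid cell
N_draws <- apply(sweep(sam$mu, 1, Final_Cov_Mat_Fin[,2], "*"),
                 c(2, 3), sum)      #total abundance for each retained MCMC draw
quantile(N_draws, c(0.025, 0.5, 0.975))  #posterior median and 95% credible interval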