#### Libraries ####
rm(list = ls())

library(ggplot2)
library(FSA)
library(nlme)
library(MuMIn)
library(lmtest)
library(RColorBrewer)
library(cowplot)
library(visreg)
library(plyr)
library(mgcv)

#### Functions ####
#Round x to k decimal places, keeping trailing zeros
specify_decimal <- function(x, k) format(round(x, k), nsmall = k)
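#e.g. (illustrative values): specify_decimal(3.1, 3) returns "3.100",
#whereas round(3.1, 3) alone would drop the trailing zeros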
predictNLME <- function(nls.object, nlme.object, newdata,
                        interval = c("none", "confidence", "prediction"),
                        level = 0.95, reverse = FALSE, ...) {
  #Modified by JDS from:
  #https://www.r-bloggers.com/predictnls-part-2-taylor-approximation-confidence-intervals-for-nls-models/
  require(MASS, quietly = TRUE)
  require(propagate, quietly = TRUE)

  tr <- function(mat) sum(diag(mat), na.rm = TRUE)
  interval <- match.arg(interval)

  ## get right-hand side of formula
  RHS <- as.list(nls.object$call$formula)[[3]]
  if (reverse) {
    temp <- cumulative.time ~ log(1 - perc/100) / -a
    RHS <- as.list(temp)[[3]]
  }
  EXPR <- as.expression(RHS)

  ## all variables in model
  VARS <- all.vars(EXPR)

  ## parameter coefficients (mean of the site-level coefficients)
  COEF <- apply(coef(nlme.object), 2, mean)

  ## extract predictor variable
  predNAME <- setdiff(VARS, names(COEF))

  ## take fitted values, if 'newdata' is missing
  if (missing(newdata)) {
    newdata <- eval(nlme.object$data)[predNAME]
    colnames(newdata) <- predNAME
  }

  ## check that 'newdata' has same name as predVAR
  if (names(newdata)[1] != predNAME)
    stop("newdata should have name '", predNAME, "'!")

  ## get variance-covariance matrix
  VCOV <- vcov(nlme.object)

  ## augment variance-covariance matrix
  ## by adding a column/row for 'error in x'
  NCOL <- ncol(VCOV)
  ADD1 <- matrix(rep(0, NCOL), ncol = 1)
  colnames(ADD1) <- predNAME
  VCOV <- cbind(VCOV, ADD1)
  ADD2 <- matrix(rep(0, NCOL + 1), nrow = 1)
  rownames(ADD2) <- predNAME
  VCOV <- rbind(VCOV, ADD2)

  NR <- nrow(newdata)
  varPLACE <- ncol(VCOV)
  outMAT <- NULL

  ## define counter function
  counter <- function(i) {
    if (i %% 10 == 0) cat(i) else cat(".")
    if (i %% 50 == 0) cat("\n")
    flush.console()
  }

  ## calculate residual variance
  r <- residuals(nlme.object)
  w <- weights(nlme.object)
  rss <- sum(if (is.null(w)) r^2 else r^2 * w)
  df <- df.residual(nls.object)
  res.var <- rss/df

  ## iterate over all entries in 'newdata' as in usual 'predict.' functions
  for (i in 1:NR) {
    counter(i)

    ## get predictor values and optional errors
    predVAL <- newdata[i, 1]
    if (ncol(newdata) == 2) predERROR <- newdata[i, 2] else predERROR <- 0
    names(predVAL) <- predNAME
    names(predERROR) <- predNAME

    ## create mean vector
    meanVAL <- c(COEF, predVAL)

    ## create augmented variance-covariance matrix
    ## by putting error^2 in lower-right position of VCOV
    newVCOV <- VCOV
    newVCOV[varPLACE, varPLACE] <- predERROR^2
    SIGMA <- newVCOV

    ## first-order mean: eval(EXPR), first-order variance: G.S.t(G)
    MEAN1 <- try(eval(EXPR, envir = as.list(meanVAL)), silent = TRUE)
    if (inherits(MEAN1, "try-error"))
      stop("There was an error in evaluating the first-order mean!")
    GRAD <- try(numGrad(EXPR, as.list(meanVAL)), silent = TRUE)
    if (inherits(GRAD, "try-error"))
      stop("There was an error in creating the numeric gradient!")
    VAR1 <- GRAD %*% SIGMA %*% matrix(GRAD)

    ## second-order mean: firstMEAN + 0.5 * tr(H.S),
    ## second-order variance: firstVAR + 0.5 * tr(H.S.H.S)
    HESS <- try(numHess(EXPR, as.list(meanVAL)), silent = TRUE)
    if (inherits(HESS, "try-error"))
      stop("There was an error in creating the numeric Hessian!")
    valMEAN2 <- 0.5 * tr(HESS %*% SIGMA)
    valVAR2 <- 0.5 * tr(HESS %*% SIGMA %*% HESS %*% SIGMA)
    MEAN2 <- MEAN1 + valMEAN2
    VAR2 <- VAR1 + valVAR2

    ## confidence or prediction interval
    if (interval != "none") {
      tfrac <- abs(qt((1 - level)/2, df))
      INTERVAL <- tfrac * switch(interval,
                                 confidence = sqrt(VAR2),
                                 prediction = sqrt(VAR2 + res.var))
      LOWER <- MEAN2 - INTERVAL
      UPPER <- MEAN2 + INTERVAL
      names(LOWER) <- paste((1 - level)/2 * 100, "%", sep = "")
      names(UPPER) <- paste((1 - (1 - level)/2) * 100, "%", sep = "")
    } else {
      LOWER <- NULL
      UPPER <- NULL
    }

    RES <- c(mu.1 = MEAN1, mu.2 = MEAN2, sd.1 = sqrt(VAR1), sd.2 = sqrt(VAR2), LOWER, UPPER)
    outMAT <- rbind(outMAT, RES)
  }
  cat("\n")
  rownames(outMAT) <- NULL
  return(outMAT)
}
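#Usage sketch (illustrative, not run here): predictNLME() expects a paired
#nls/nlme fit of the same formula, like the 'together.nls'/'together.nlme'
#models fit below, and returns one row per row of 'newdata' with the first-
#and second-order Taylor-approximated means (mu.1, mu.2), their SDs
#(sd.1, sd.2), and the requested interval bounds, e.g.:
#  predictNLME(nls.object = together.nls, nlme.object = together.nlme,
#              newdata = data.frame(q.time = c(0.5, 1, 2)),
#              interval = 'confidence', level = 0.95)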
qqnorm.gg <- function(sample, robust = TRUE) {
  #Code by Dr. Blair Sterba-Boatwright - modified by Jason Selwyn
  #Builds a simulation envelope for a normal QQ plot from 5000 sorted normal
  #samples of the same size as the data
  n <- length(sample)
  sample <- sort(sample)
  y.mx <- matrix(0, 5000, n)

  # robust--use median and mad; !robust--use mean and sd
  if (robust) {x.center <- median(sample)} else {x.center <- mean(sample)}
  if (robust) {s.center <- mad(sample)} else {s.center <- sd(sample)}

  for (i in 1:5000) {y.mx[i, ] <- sort(rnorm(n, x.center, s.center))}

  lo.bds <- apply(y.mx, 2, function(x) quantile(x, 0.025))
  hi.bds <- apply(y.mx, 2, function(x) quantile(x, 0.975))
  lo.lo.bds <- apply(y.mx, 2, function(x) quantile(x, 0.005))
  hi.hi.bds <- apply(y.mx, 2, function(x) quantile(x, 0.995))
  meds <- apply(y.mx, 2, median)
  theoretical <- qnorm(ppoints(n))

  for.plots <- data.frame(theoretical, sample, meds, lo.bds, hi.bds, lo.lo.bds, hi.hi.bds)
  for.plots
}

#Solve the fitted saturating curve for time at a given percentage, and vice versa
find.x.at.y <- function(y, model) {
  coefficients <- apply(coef(model), 2, mean)
  unname(log(1 - y/100) / (-1 * coefficients[1]))
}

find.y.at.x <- function(x, model) {
  coefficients <- apply(coef(model), 2, mean)
  unname(100 * (1 - exp(-1 * coefficients[1] * x)))
}
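#These two helpers invert each other through the fitted curve:
#  perc = 100 * (1 - exp(-a * t))  <=>  t = -log(1 - perc/100) / a,
#so, e.g., with a = 1 the time to reach 50% is log(2) ~ 0.69, and
#find.y.at.x(find.x.at.y(50, model), model) returns 50.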
#### Data ####
deplete <- read.csv('Supplement/Supplemental Data S5.csv')

#remove sites
# D6, too many dives
# D5, only 7 fish caught
# D9, depletion was not progressive: 3 middle datapoints are the same value
deplete <- subset(deplete, site != "D5" & site != "D6" & site != "D9")

#replace site numbers so it goes from D1-D7 without holes;
#drop unused levels first so the renaming cannot create duplicate levels
deplete$site <- factor(deplete$site)
levels(deplete$site)[levels(deplete$site) == "D7"] <- "D5"
levels(deplete$site)[levels(deplete$site) == "D8"] <- "D6"
levels(deplete$site)[levels(deplete$site) == "D0"] <- "D7"

#refactor so levels sort D1-D7
deplete$site <- as.factor(as.character(deplete$site))

#per-site sampling summaries (total catch; total, mean, and SD of dive time)
aggregate(deplete$n, by = list(Category = deplete$site), FUN = sum)
aggregate(deplete$time, by = list(Category = deplete$site), FUN = sum)
aggregate(deplete$time, by = list(Category = deplete$site), FUN = mean)
aggregate(deplete$time, by = list(Category = deplete$site), FUN = sd)

#standardize catch and effort per 1,000 m^2
deplete$n <- deplete$n / deplete$area * 1000
deplete$time <- deplete$time / deplete$area * 1000

#create columns to store results (note N_sd/q.sd names match the loop below)
deplete$cumulative.catch <- deplete$cpue <- deplete$cumulative.time <- deplete$perc <- deplete$resid <- deplete$fitted <- numeric(nrow(deplete))
deplete$hi.hi.bds <- deplete$lo.lo.bds <- deplete$hi.bds <- deplete$lo.bds <- deplete$meds <- deplete$sample <- deplete$theoretical <- numeric(nrow(deplete))
deplete$eq <- deplete$SW.p <- deplete$BP.p <- character(nrow(deplete))
deplete$N <- deplete$LCI_N <- deplete$UCI_N <- deplete$q <- deplete$LCI_q <- deplete$UCI_q <- deplete$r2 <- deplete$p <- deplete$N_sd <- deplete$q.sd <- numeric(nrow(deplete))

#### Loop through sites and make Leslie Depletion model for each site ####
qq.cols <- c('theoretical', 'sample', 'meds', 'lo.bds', 'hi.bds', 'lo.lo.bds', 'hi.hi.bds')
for (i in 1:nlevels(deplete$site)) {
  site.rows <- deplete$site == levels(deplete$site)[i]

  #depletion function (Leslie method)
  Leslie_Model <- with(deplete[site.rows, ], depletion(n, time, Ricker.mod = FALSE))

  #extract N0, q, and other relevant data
  deplete[site.rows, 'cumulative.catch'] <- Leslie_Model$K
  deplete[site.rows, 'cpue'] <- Leslie_Model$cpe
  deplete[site.rows, 'eq'][1] <- paste('N0 = ', round(coef(Leslie_Model)[1], 0), '\n',
                                       'q = ', round(coef(Leslie_Model)[2], 2), sep = '')
  deplete[site.rows, 'N'] <- Leslie_Model$est[1, 1]
  deplete[site.rows, 'LCI_N'] <- confint(Leslie_Model)[1, 1]
  deplete[site.rows, 'UCI_N'] <- confint(Leslie_Model)[1, 2]
  deplete[site.rows, 'N_sd'] <- Leslie_Model$est[1, 2]
  deplete[site.rows, 'q'] <- Leslie_Model$est[2, 1]
  deplete[site.rows, 'LCI_q'] <- confint(Leslie_Model)[2, 1]
  deplete[site.rows, 'UCI_q'] <- confint(Leslie_Model)[2, 2]
  deplete[site.rows, 'q.sd'] <- Leslie_Model$est[2, 2]

  #Estimate percent of lionfish caught
  deplete$perc[site.rows] <- cumsum(deplete$n[site.rows]) / round(coef(Leslie_Model)[1], 0) * 100
  deplete$cumulative.time[site.rows] <- cumsum(deplete$time[site.rows])

  #Make regression to check assumptions
  temp.regression <- lm(cpue ~ cumulative.catch, deplete[site.rows, ])
  deplete[site.rows, 'r2'] <- summary(temp.regression)$adj.r.squared
  deplete[site.rows, 'p'] <- summary(temp.regression)$coefficients[2, 4]

  #Extract data to check normality
  deplete[site.rows, qq.cols] <- qqnorm.gg(deplete[site.rows, 'cpue'])
  SW.p <- shapiro.test(deplete[site.rows, 'cpue'])$p.value
  deplete[site.rows, 'SW.p'][1] <- paste('Shapiro-Wilk test\np =', specify_decimal(SW.p, 3))

  #Extract data to check homoscedasticity
  BP.p <- bptest(temp.regression)$p.value
  deplete$resid[site.rows] <- resid(temp.regression)
  deplete$fitted[site.rows] <- fitted(temp.regression)
  deplete[site.rows, 'BP.p'][1] <- paste('Breusch-Pagan test p =', specify_decimal(BP.p, 3))
}
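#The Leslie method behind FSA::depletion() regresses CPUE on cumulative
#prior catch, CPUE_t = q*N0 - q*K_t, so q is the negative of the slope and
#N0 = intercept / q. A hand-rolled sanity check on one site (illustrative;
#assumes site 'D1' is present after the renaming above):
leslie.check <- lm(cpue ~ cumulative.catch, data = subset(deplete, site == 'D1'))
q.check <- -coef(leslie.check)[2]          #catchability coefficient
N0.check <- coef(leslie.check)[1] / q.check #initial population size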
#per-site means of the depletion estimates
aggregate(cbind(deplete$N, deplete$LCI_N, deplete$UCI_N, deplete$q, deplete$LCI_q, deplete$UCI_q, deplete$r2, deplete$p),
          by = list(Category = deplete$site), FUN = mean)

#### Make Leslie Depletion model Plots ####
leslie_plots <- ggplot(data = deplete, aes(x = cumulative.catch, y = cpue)) +
  #geom_text(aes(x=25,y=20,label=eq),size=3.5)+
  geom_smooth(aes(x = cumulative.catch, y = cpue), method = "lm") +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression("Cumulative Catch per 1,000" ~ m^{2})) +
  ylab(expression("CPUE")) +
  geom_point() + facet_wrap(~site, scales = 'free') +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank()) +
  coord_cartesian(ylim = c(0, 25), xlim = c(0, 30))

pdf('Figures and Tables/Figure 2.pdf')
leslie_plots
dev.off()

#### Check linear regression assumptions ####
#Normality
qq.plots <- ggplot(deplete, aes(x = theoretical, y = sample)) + facet_wrap(~site, scales = 'free') +
  geom_point() +
  geom_line(aes(theoretical, meds), colour = "blue") +
  geom_line(aes(theoretical, lo.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, hi.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, lo.lo.bds), colour = "red", linetype = 2) +
  geom_line(aes(theoretical, hi.hi.bds), colour = "red", linetype = 2) +
  geom_text(aes(x = -0.5, y = 20, label = SW.p), size = 3) +
  #geom_abline(aes(intercept=0,slope=1),linetype="dashed")+
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Theoretical Quantiles')) + ylab(expression('Sample Quantiles')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank()) +
  coord_cartesian(ylim = c(-10, 25), xlim = c(-1.5, 1.5))

pdf('Supplement/Supplemental Figure S1.pdf')
qq.plots
dev.off()

#Homoscedasticity
homoscedasticity.plots <- ggplot(deplete, aes(x = fitted, y = resid)) + geom_point() + facet_wrap(~site, scales = 'free') +
  geom_text(aes(x = 12.5, y = -4.5, label = BP.p), size = 3) +
  geom_hline(aes(yintercept = 0), linetype = "dashed") +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Fitted')) + ylab(expression('Residual')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank()) +
  coord_cartesian(ylim = c(-5, 5), xlim = c(0, 25))

pdf('Supplement/Supplemental Figure S2.pdf')
homoscedasticity.plots
dev.off()

#### Regression q v density ####
temp <- aggregate(cbind(deplete$N, deplete$q), by = list(Category = deplete$site), FUN = mean)
colnames(temp) <- c('Site', 'N', 'q')
summary(lm(q ~ N, temp))

#Diagnostics
shapiro.test(temp$q)
q.v.density.qq.plots <- ggplot(qqnorm.gg(temp$q), aes(x = theoretical, y = sample)) +
  geom_point() +
  geom_line(aes(theoretical, meds), colour = "blue") +
  geom_line(aes(theoretical, lo.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, hi.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, lo.lo.bds), colour = "red", linetype = 2) +
  geom_line(aes(theoretical, hi.hi.bds), colour = "red", linetype = 2) +
  #geom_text(aes(x=-1.2,y=1.4,label=SW.p))+
  #geom_abline(aes(intercept=0,slope=1),linetype="dashed")+
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Theoretical Quantiles')) + ylab(expression('Sample Quantiles')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank())

bp.plot.data <- data.frame(fitted = fitted(lm(q ~ N, temp)), resid = resid(lm(q ~ N, temp)))
bptest(lm(q ~ N, temp))
q.v.density.homoscedasticity.plots <- ggplot(bp.plot.data, aes(x = fitted, y = resid)) + geom_point() +
  geom_hline(aes(yintercept = 0), linetype = "dashed") +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Fitted')) + ylab(expression('Residual')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank())

#Plot
initial.v.catchability <- ggplot(data = temp, aes(x = N, y = q)) +
  geom_point() +
  geom_smooth(aes(x = N, y = q), method = "lm") +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression("N"[0] ~ "per 1,000" ~ m^{2})) + ylab(expression('Catchability coefficient')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank())

pdf('Supplement/Supplemental Figure S3.pdf')
ggdraw() +
  draw_plot(initial.v.catchability, 0, .5, 1, .5) +
  draw_plot(q.v.density.qq.plots, 0, 0, .5, .5) +
  draw_plot(q.v.density.homoscedasticity.plots, .5, 0, .5, .5) +
  draw_plot_label(c("A", "B", "C"), c(0, 0, 0.5), c(1, 0.5, 0.5), size = 15)
dev.off()

#### Model Percent of total catch as function of cumulative dive time ####
null.model <- gls(perc ~ 1, data = deplete)

#Exponential
asy.nls <- nls(perc ~ 100*(1 - exp(-a*cumulative.time)), data = deplete, start = list(a = 0.5))
asy.nlme <- nlme(perc ~ 100*(1 - exp(-a*cumulative.time)), data = deplete,
                 fixed = a ~ 1, random = a ~ 1 | site, start = c(a = 0.5))

#Exponential with catchability
asy.nls.q <- nls(perc ~ 100*(1 - exp(-a*cumulative.time*q)), data = deplete, start = list(a = 0.5))
asy.nlme.q <- nlme(perc ~ 100*(1 - exp(-a*cumulative.time*q)), data = deplete,
                   fixed = a ~ 1, random = a ~ 1 | site, start = c(a = 0.5))

#### Check AICc to identify best model ####
all.aiccs <- AICc(asy.nls.q, asy.nlme.q, asy.nls, asy.nlme, null.model)
all.aiccs <- all.aiccs[order(all.aiccs[, 2]), ]
all.aiccs$delta <- round(all.aiccs[, 2] - all.aiccs[1, 2], 2)
all.aiccs$weight <- round(exp(all.aiccs$delta * -0.5) / sum(exp(all.aiccs$delta * -0.5)), 2)
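#The delta/weight columns above implement the standard Akaike weights,
#  w_i = exp(-0.5 * delta_i) / sum_j exp(-0.5 * delta_j),
#so the model with the largest weight carries the most support; the script
#proceeds below with asy.nlme.q. If preferred, MuMIn can compute the same
#(unrounded) weights directly, e.g. (illustrative, not run here):
#  Weights(AICc(asy.nls.q, asy.nlme.q, asy.nls, asy.nlme, null.model))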
summary(asy.nlme.q)

#### Best Fit Model Diagnostics ####
asy.nlme.qq <- qqnorm.gg(resid(asy.nlme.q))
shapiro.test(resid(asy.nlme.q))
asy.nlme.q.qq.plots <- ggplot(asy.nlme.qq, aes(x = theoretical, y = sample)) +
  geom_point() +
  geom_line(aes(theoretical, meds), colour = "blue") +
  geom_line(aes(theoretical, lo.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, hi.bds), colour = "green", linetype = 2) +
  geom_line(aes(theoretical, lo.lo.bds), colour = "red", linetype = 2) +
  geom_line(aes(theoretical, hi.hi.bds), colour = "red", linetype = 2) +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Theoretical Quantiles')) + ylab(expression('Sample Quantiles')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank())

bp.plot.data <- data.frame(fitted = fitted(asy.nlme.q), resid = resid(asy.nlme.q))

#thin-plate spline through the residuals to look for trend
spline.1 <- gam(resid ~ s(fitted, bs = 'tp'), data = bp.plot.data)
spline.data <- visreg(spline.1, plot = FALSE)
smooths <- data.frame(x = spline.data$fit$fitted,
                      smooth = spline.data$fit$visregFit,
                      lwr = spline.data$fit$visregLwr,
                      upr = spline.data$fit$visregUpr)

asy.nlme.q.homoscedasticity.plots <- ggplot() + theme_bw() +
  geom_point(data = bp.plot.data, aes(x = fitted, y = resid)) +
  geom_line(data = smooths, aes(x = x, y = smooth)) +
  geom_ribbon(data = smooths, aes(x = x, ymin = lwr, ymax = upr), alpha = 0.2) +
  geom_hline(aes(yintercept = 0), linetype = "dashed") +
  theme_bw() + theme(legend.position = "none") +
  xlab(expression('Fitted')) + ylab(expression('Residual')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank())

summary(spline.1)

pdf('Supplement/Supplemental Figure S4.pdf')
ggdraw() +
  draw_plot(asy.nlme.q.qq.plots, 0, .5, 1, .5) +
  draw_plot(asy.nlme.q.homoscedasticity.plots, 0, 0, 1, .5) +
  draw_plot_label(c("A", "B"), c(0, 0), c(1, 0.5), size = 15)
dev.off()

#### Calculate estimate stats ####
deplete$q.time <- deplete$cumulative.time * deplete$q
q.mean <- mean(aggregate(deplete$q, by = list(Category = deplete$site), FUN = mean)[, 2])
q.min <- min(aggregate(deplete$q, by = list(Category = deplete$site), FUN = mean)[, 2])
q.max <- max(aggregate(deplete$q, by = list(Category = deplete$site), FUN = mean)[, 2])

together.nls <- nls(perc ~ 100*(1 - exp(-a*q.time)), data = deplete, start = list(a = 0.5))
together.nlme <- nlme(perc ~ 100*(1 - exp(-a*q.time)), data = deplete,
                      fixed = a ~ 1, random = a ~ 1 | site, start = c(a = 0.5))

#percent removed after 1 and 4 h/1,000 m^2 at the mean, min, and max site-level q
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(q.time = c(1, 4)*q.mean), interval = 'confidence', level = 0.95)
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(q.time = c(1, 4)*q.min), interval = 'confidence', level = 0.95)
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(q.time = c(1, 4)*q.max), interval = 'confidence', level = 0.95)

#dive time needed to remove 50% and 90% of lionfish
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(perc = c(50, 90)), interval = 'confidence', level = 0.95, reverse = TRUE)/q.mean
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(perc = c(50, 90)), interval = 'confidence', level = 0.95, reverse = TRUE)/q.min
predictNLME(nls.object = together.nls, nlme.object = together.nlme,
            newdata = data.frame(perc = c(50, 90)), interval = 'confidence', level = 0.95, reverse = TRUE)/q.max
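#The reverse predictions work because the fitted curve inverts cleanly:
#  perc = 100 * (1 - exp(-a * q * t))  =>  t = -log(1 - perc/100) / (a * q),
#which is the 'reverse' formula hard-coded in predictNLME(); dividing the
#q-scaled times by q.mean/q.min/q.max converts them back to dive hours.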
#### Contour Plot ####
#sim.q<-seq(min(deplete$LCI_q),max(deplete$UCI_q),length.out = 100)
sim.q <- seq(0, 2, length.out = 100)
sim.t <- seq(0, 6, length.out = 100)
fit.mat <- predict(asy.nlme.q, newdata = data.frame(expand.grid(cumulative.time = sim.t, q = sim.q)), level = 0)

graph.data <- expand.grid(cumulative.time = sim.t, q = sim.q)
graph.data$f <- fit.mat

contour_plot <- ggplot() + theme_bw() +
  xlab(expression("Cumulative Dive Time" ~ (h/"1,000"~m^{2}))) +
  ylab(expression('Catchability Coefficient (q)')) +
  geom_tile(data = graph.data, aes(x = cumulative.time, y = q, fill = f), alpha = 1) +
  stat_contour(data = graph.data, aes(x = cumulative.time, y = q, z = f), bins = 20, colour = 'black') +
  stat_contour(data = graph.data, aes(x = cumulative.time, y = q, z = f), breaks = c(50, 90), colour = 'red') +
  stat_contour(data = graph.data, aes(x = cumulative.time, y = q, z = f), breaks = c(75), colour = 'blue') +
  scale_fill_gradientn(colours = grey(0:15/15), name = "Cumulative Percentage\nLionfish Caught",
                       breaks = c(0, 25, 50, 75, 100), limits = c(0, 100), labels = c(0, 25, 50, 75, 100)) +
  theme(legend.position = "top", legend.box = "horizontal") +
  geom_point(data = deplete, aes(x = cumulative.time, y = q, colour = site), size = 1.5) +
  geom_errorbar(data = deplete, aes(x = cumulative.time, ymin = q - q.sd, ymax = q + q.sd, group = site, colour = site)) +
  scale_colour_manual(values = brewer.pal(nlevels(deplete$site), 'Set1'), name = 'Site') +
  theme(panel.grid = element_blank()) + # delete grid lines
  scale_x_continuous(limits = c(0, 6), expand = c(0, 0)) + # set x limits
  scale_y_continuous(limits = c(0, 2), expand = c(0, 0)) +
  theme(axis.title.x = element_text(size = 12), axis.title.y = element_text(size = 12),
        axis.text.x = element_text(size = 12, colour = 'black'),
        axis.text.y = element_text(size = 12, colour = 'black'),
        axis.ticks = element_line(colour = 'black'))

#### Plot Best Model ####
x.seq <- seq(0, 3.5, length.out = 1000)
confidence.intervals <- predictNLME(nls.object = together.nls, nlme.object = together.nlme,
                                    newdata = data.frame(q.time = x.seq), interval = 'confidence')
prediction.intervals <- predictNLME(nls.object = together.nls, nlme.object = together.nlme,
                                    newdata = data.frame(q.time = x.seq), interval = 'prediction')

x.50 <- find.x.at.y(50, asy.nlme.q)
x.90 <- find.x.at.y(90, asy.nlme.q)

q.time.plot <- ggplot() + theme_bw() +
  geom_line(aes(x = x.seq, y = confidence.intervals[, 1]), col = 'black') +
  geom_ribbon(aes(x = x.seq, ymin = confidence.intervals[, 5], ymax = confidence.intervals[, 6]), alpha = 0.5, fill = "black") +
  geom_ribbon(aes(x = x.seq, ymin = prediction.intervals[, 5], ymax = prediction.intervals[, 6]), alpha = 0.2, fill = "black") +
  geom_point(data = deplete, aes(x = q.time, y = perc, colour = site), size = 1.5) +
  #scale_shape_manual(values=c(16:20,7:8),name='Site')+
  scale_colour_manual(values = brewer.pal(nlevels(deplete$site), 'Set1'), name = 'Site') +
  theme(panel.background = element_rect(fill = 'white', colour = 'black')) +
  theme(panel.grid.major = element_blank(), panel.grid.minor = element_blank()) +
  xlab(expression("Catchability Coefficient (q) * Cumulative Dive Time" ~ (h/"1,000"~m^{2}))) +
  ylab(expression("Cumulative % Lionfish Caught")) +
  theme(axis.title.x = element_text(size = 12), axis.title.y = element_text(size = 12),
        axis.text.x = element_text(size = 12, colour = 'black'),
        axis.text.y = element_text(size = 12, colour = 'black'),
        axis.ticks = element_line(colour = 'black')) +
  scale_x_continuous(breaks = seq(0, 3.6, by = 0.2),
                     labels = c(0, rep("", 4), 1, rep("", 4), 2, rep("", 4), 3, rep("", 3)),
                     limits = c(0, 3.6), expand = c(0, 0)) +
  coord_cartesian(xlim = c(0, 3.2), ylim = c(0, 102)) +
  theme(legend.position = 'none') +
  geom_segment(aes(x = c(0, 0), y = c(50, 90), xend = c(x.50, x.90), yend = c(50, 90)), lty = 2, col = 'red') +
  geom_segment(aes(x = c(x.50, x.90), y = c(-10, -10), xend = c(x.50, x.90), yend = c(50, 90)), lty = 2, col = 'red')

pdf('Figures and Tables/Figure 3.pdf')
ggdraw() +
  draw_plot(q.time.plot, 0, .5, 1, .5) +
  draw_plot(contour_plot, 0, 0, 1, .5) +
  draw_plot_label(c("A", "B"), c(0, 0), c(1, 0.5), size = 15)
dev.off()