#'=================================================================================================#
#'                        "Dynamic Brownian Bridge Movement Model"                                  #
#'=================================================================================================#

#' NB!!!! Set the working directory
setwd("C:/Shambala/data/")

#' MAKE SURE THE WORKING DIRECTORY IS CORRECT i.e. [1] "C:/Shambala/data"
getwd()

####------------------------------------- Load Packages ----------------------------------------####
library(adehabitatLT)
library(lattice)
library(ggplot2)
library(extrafont)
library(plyr)
library(colorspace)
library(lubridate)
library(rgdal)
library(stringr)
library(adehabitatHR)
library(move)
library(maptools)
require(sp)
require(rgeos)

#'----------------------------------- 1- Import Data ----------------------------------------#
#' Import .csv data (columns: animal, lon, lat, timestamp (or date and time))
#' thanda_cheetah_clean.csv or kruger_buffalo_clean.csv
data_tracking <- read.csv("data_clean.csv")

#' Import the study area boundary .shp file
#' The dsn must not end in '/'
#' Thanda_boundary or kruger
#study_area <- readOGR(dsn = '.', layer = 'kruger')

#'----------------------------- 2- Prepare the Imported Data ----------------------------------#
#' Set timestamps to the correct time zone and make them POSIXct objects so that R knows they are dates/times
data_tracking$timestamp <- as.POSIXct(data_tracking$timestamp, tz = "Africa/Johannesburg")

#' move doesn't accept long names for IDs when calculating the dBBMM, so they may need to be renamed
unique(data_tracking$animal)
data_tracking$animal_orig <- data_tracking$animal
#data_tracking$animal <- strtrim(data_tracking$animal, 3)
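#'------------------------------ Optional: quick timestamp check ------------------------------#
#' A minimal sketch, not part of the original workflow: before building temporal labels it can
#' help to confirm that no timestamps failed to parse and to look at the median fix interval per
#' animal. It only assumes the data_tracking columns created above (animal, timestamp); plyr is
#' already loaded.
sum(is.na(data_tracking$timestamp))   # should be 0; NAs here mean as.POSIXct could not parse the text
ddply(data_tracking, "animal", summarise,
      n_fixes     = length(timestamp),
      med_gap_hrs = median(as.numeric(diff(sort(timestamp)), units = "hours")))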
#'----------------------------------- 3- Choose Temporal Scale -------------------------------------#
#' Choose a temporal scale to calculate the home ranges based on the question/s you want to answer    #
#' and your data, i.e. do you want annual, seasonal, monthly, weekly or daily ranges?                 #
#'                                                                                                    #
#' 1) The code adds the year to each of the time frames, because we don't want to calculate a range   #
#'    for months/days/weeks using locations from different years;                                     #
#'                                                                                                    #
#' 2) Change/update the months you want to assign to a particular season;                             #
#'                                                                                                    #
#'------------------------------------------ RUN AS IS ----------------------------------------------#
#' ALL
data_tracking$Total <- paste(data_tracking$animal, sep = "")

#' WEEK
data_tracking$Lweek <- paste(data_tracking$animal, year(data_tracking$timestamp),
                             str_pad(week(data_tracking$timestamp), 2, pad = "0"), sep = "")

#' DAY
data_tracking$Lday <- paste(data_tracking$animal, year(data_tracking$timestamp),
                            str_pad(yday(data_tracking$timestamp), 3, pad = "0"), sep = "")

#' SEASON is a bit more complicated as the wet season falls over two years, so Jan and Feb need
#' to use year - 1
#' Add a season column to the data frame
data_tracking$Season <- as.character(NA)

#' Update the season column with the correct season (change the values to the correct months if necessary)
#' NB. This is another reason why it is important to choose the correct collaring dates
data_tracking$Lmonth <- month(data_tracking$timestamp)
data_tracking$Lyear  <- year(data_tracking$timestamp)

#' WET
data_tracking$Season[data_tracking$Lmonth %in% c(11,12)] <- paste(data_tracking$animal[data_tracking$Lmonth %in% c(11,12)],
                                                                  data_tracking$Lyear[data_tracking$Lmonth %in% c(11,12)], "w", sep = "")
data_tracking$Season[data_tracking$Lmonth %in% c(1,2)]   <- paste(data_tracking$animal[data_tracking$Lmonth %in% c(1,2)],
                                                                  as.numeric(data_tracking$Lyear[data_tracking$Lmonth %in% c(1,2)]) - 1, "w", sep = "")

#' DRY
data_tracking$Season[data_tracking$Lmonth %in% c(5,6,7,8)] <- paste(data_tracking$animal[data_tracking$Lmonth %in% c(5,6,7,8)],
                                                                    data_tracking$Lyear[data_tracking$Lmonth %in% c(5,6,7,8)], "d", sep = "")

#' FALL
data_tracking$Season[data_tracking$Lmonth %in% c(3,4)] <- paste(data_tracking$animal[data_tracking$Lmonth %in% c(3,4)],
                                                                data_tracking$Lyear[data_tracking$Lmonth %in% c(3,4)], "f", sep = "")

#' SPRING
data_tracking$Season[data_tracking$Lmonth %in% c(9,10)] <- paste(data_tracking$animal[data_tracking$Lmonth %in% c(9,10)],
                                                                 data_tracking$Lyear[data_tracking$Lmonth %in% c(9,10)], "s", sep = "")

#' MONTH, extract year-month and save it to the data frame
data_tracking$Lmonth <- paste(data_tracking$animal, data_tracking$Lyear,
                              str_pad(month(data_tracking$timestamp), 2, pad = "0"), sep = "")

#' ANNUAL, here we extract the year of each location and save it to the data frame
data_tracking$Lyear <- paste(data_tracking$animal, year(data_tracking$timestamp), sep = "")

#' Make sure it is correct
head(data_tracking)
#'-------------------------------------- END RUN AS IS ------------------------------------------#

#'--------------------------------------- + User Input ------------------------------------------#
#' Specify the temporal scale, choose 1-Year for the first run
user <- menu(c("Year", "Season", "Month", "Week", "Day", "All"),
             title = "What temporal scale do you want to create ranges for?")
id <- NA

#' Creates unique labels to subset the data and create ranges with
if(user == 1){
  subset <- unique(data_tracking$Lyear)
  id <- "Lyear"
  data_tracking$subset <- data_tracking$Lyear
}else if(user == 2){
  subset <- unique(data_tracking$Season)
  id <- "Season"
  data_tracking$subset <- data_tracking$Season
}else if(user == 3){
  subset <- unique(data_tracking$Lmonth)
  id <- "Lmonth"
  data_tracking$subset <- data_tracking$Lmonth
}else if(user == 4){
  subset <- unique(data_tracking$Lweek)
  id <- "Lweek"
  data_tracking$subset <- data_tracking$Lweek
}else if(user == 5){
  subset <- unique(data_tracking$Lday)
  id <- "Lday"
  data_tracking$subset <- data_tracking$Lday
}else if(user == 6){
  subset <- unique(data_tracking$animal)
  id <- "animal"
  data_tracking$subset <- data_tracking$animal
}

#' Summary of the home ranges that will be made, make sure these data are correct before continuing
paste(length(subset), "ranges will be calculated")
subset
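#'------------------------- Optional: fixes per subset (sanity check) -------------------------#
#' A minimal sketch, not part of the original workflow: the dBBMM fits a moving window along each
#' track, so subsets with very few locations (roughly fewer than the window.size set in section 5,
#' 17 here) are likely to fail or give unstable variance estimates. This only assumes the
#' data_tracking$subset column created above; the threshold of 17 simply mirrors the value used below.
n_fixes <- table(data_tracking$subset)
sort(n_fixes)[1:min(10, length(n_fixes))]   # the 10 smallest subsets
names(n_fixes)[n_fixes < 17]                # subsets that may be too short for window.size = 17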
#'----------------------------------- 4- Create move object ----------------------------------#
#' Subsetting the data reduces running time; loop over each subset in turn
all_poly <- NULL
for(ele in unique(data_tracking$subset)){

  data_sub <- data_tracking[data_tracking$subset == ele,]
  print(ele)

  #' Take the longitude and latitude from the data and save them as a SpatialPoints object with a projection
  sp_latlong <- SpatialPoints(data_sub[, c("lon", "lat")], proj4string = CRS("+proj=longlat +ellps=WGS84"))

  #' Project to real-world coordinates
  sp_proj <- spTransform(sp_latlong, CRS("+init=epsg:32736"), center = TRUE)

  #' Project other layers that you may want to plot
  #study_area_proj <- spTransform(study_area, projection(sp_proj), center=TRUE)

  #' Add the projected locations to the data frame
  locs <- as.data.frame(sp_proj@coords)
  data_sub$x <- locs$lon
  data_sub$y <- locs$lat
  data_sub$id <- data_sub[, colnames(data_sub) == id]

  #' Order by id
  data_sub <- data_sub[order(data_sub$id),]

  #' Create the Move object, animal = THE ID COLUMN
  obj.move <- move(x = data_sub$x, y = data_sub$y, time = data_sub$timestamp,
                   data = data_sub, proj = CRS("+init=epsg:32736"), animal = data_sub$id)

  #' Summary of obj.move
  summary(obj.move)

  #' Plot
  plot(obj.move)
  #plot(study_area_proj, add=T)

  #'---------------------------------------- 5- Run dBBMM --------------------------------------#
  #' Dynamic Brownian bridge movement model (dBBMM)
  #'
  #' -> Takes into account the time dependence between successive locations
  #' -> Takes into account the location error of the GPS points
  #'---------------------------------------- + User Input --------------------------------------#
  #' A smaller raster cell and a larger extent = longer processing time

  #' Resolution of the square raster cells (in map units),
  #' i.e. raster is the size of the raster cell in metres; it is kept fairly large here to reduce processing time
  raster <- 30

  #' The error associated with each location, or a single value for all locations (based on which collar was used)
  locationError <- 23

  #' The amount of extension of the bounding box around the animal track,
  #' i.e. the buffer around the created raster; the model will warn you if it is too small
  extent <- 1.8

  #' The size of the moving window along the track. Larger windows provide more stable/accurate estimates
  #' of the Brownian motion variance but are less able to capture frequent changes in behaviour.
  windowSize <- 17

  #' The margin used for the behavioural change point analysis
  marginSize <- 7

  #'----------------------------------------- + Run model --------------------------------------#
  #' Run the dBBMM model
  dbbmm <- brownian.bridge.dyn(obj.move, raster = raster, location.error = locationError,
                               window.size = windowSize, margin = marginSize, ext = extent)

  #' Model summary; dBBMM if there were multiple dBBMMs to run (multiple individuals or temporal scales)
  dbbmm

  #' Transform study_area or any other shapefiles you may want to plot later with the results
  #study_area_proj <- spTransform(study_area, CRS("+init=epsg:32736"))

  #'-------------------------------------- 6- Inspect Results ---------------------------------#
  plot(dbbmm, asp = 1, lwd = 3)

  #' Just sets the extent to the size of the study_area
  plot(dbbmm, asp = 1, add = T)
  #plot(study_area_proj, asp=1, lwd=3, add=T)
  title(ele)

  #' Plot the contours
  contour(dbbmm, levels = c(0.5, 0.95), lwd = 2, col = c(6, 2), add = T)

  #' Save the raster output from above to the output directory you created
  writeRaster(dbbmm, filename = paste0("C:/Shambala/data/dbbmm_30m_", ele, ".tif"), overwrite = T)

  #' Calculate the UD area size and save it to a polygon (one shapefile that contains all the polygons)
  #' Create a 95% contour line for the dbbmm
  con <- raster2contour(dbbmm, levels = 0.95)

  #' Convert it to a polygon
  con_poly_set <- SpatialLines2PolySet(SL = con)
  con_poly <- PolySet2SpatialPolygons(PS = con_poly_set, close_polys = T)
  projection(con_poly) <- proj4string(obj.move)

  #' Add the name
  con_poly$id <- ele

  #' Calculate the area; raster:: denotes it must use the area method from raster, /1e+6 converts m2 to km2
  con_poly$area <- raster::area(con_poly)/1e+6

  #' Collect the polygons from each run into one SpatialPolygonsDataFrame
  if(is.null(all_poly)){
    all_poly <- con_poly
  }else{
    all_poly <- rbind(all_poly, con_poly)
  }
}

#' View all_poly
all_poly
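#'--------------------- Optional: cross-check the 95% area from the UD itself -------------------#
#' A minimal sketch, not part of the original workflow: the polygon areas in all_poly can be
#' cross-checked against the utilisation distribution directly with move::getVolumeUD(). Note that
#' after the loop, dbbmm only holds the model from the LAST subset, so this is just a spot check
#' for that one range (assuming a single animal per subset).
vol <- getVolumeUD(dbbmm)                                # cumulative UD volume per cell
area95_km2 <- sum(values(vol) <= 0.95) * prod(res(vol)) / 1e+6
area95_km2                                               # compare with the matching row in all_poly$area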
#'------------------------------------- 7- Export Results ------------------------------------#
#' Clean up any self-intersecting geometries with a zero-width buffer; gBuffer can return the
#' geometry only, so re-attach the attribute table (id and area) before writing
all_poly_buf <- gBuffer(all_poly, byid = T, width = 0)
if(!inherits(all_poly_buf, "SpatialPolygonsDataFrame")){
  all_poly_buf <- SpatialPolygonsDataFrame(all_poly_buf, data = all_poly@data, match.ID = F)
}
all_poly <- all_poly_buf

#' Write the results to a shapefile
writeOGR(all_poly, ".", "dbbmm_hr_95_30m", driver = "ESRI Shapefile", overwrite_layer = T)
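#'------------------------ Optional: save the area table alongside the shapefile -----------------#
#' A minimal sketch, not part of the original workflow: the id and area (km2) columns stored in
#' all_poly can also be written out as a plain .csv for quick reporting; the file name below is
#' just a suggestion.
write.csv(all_poly@data, "dbbmm_hr_95_30m_areas.csv", row.names = FALSE)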