
R code

mapping and distribution modelling

#load libraries
library(raster)
library(ggplot2)
library(rnaturalearthdata)
library(rnaturalearth)
library(rgdal)
library(rgeos)
library(sdmpredictors)
library(cowplot)
library(mgcv)

#set seed
set.seed(6)

#bounding box of the study region: xmin, xmax, ymin, ymax
bbox<-c(-87.5,-81.0,25.0,30.5)

#number of samples
n_s<-1000

#URL of the world land shapefile (Natural Earth)
URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/10m/physical/ne_10m_land.zip"
fil <- basename(URL)
if (!file.exists(fil)) download.file(URL, fil)
fils <- unzip(fil)
land <- readOGR(grep("shp$", fils, value=TRUE), "ne_10m_land",
                 stringsAsFactors=FALSE, verbose=FALSE)

#crop to your study region
studyland <- crop(land, extent(bbox))

#URL of the world ocean shapefile (Natural Earth)
URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/physical/ne_110m_ocean.zip"
fil <- basename(URL)
if (!file.exists(fil)) download.file(URL, fil)
fils <- unzip(fil)
ocean <- readOGR(grep("shp$", fils, value=TRUE), "ne_110m_ocean",
                stringsAsFactors=FALSE, verbose=FALSE)

#crop to your study region
study <- crop(ocean, extent(bbox))

#draw random sample locations within the ocean polygon
points<-spsample(study,n=n_s,"random")

#simulate abundance at each location from a negative binomial distribution
data<-data.frame(lon=points@coords[,1],
                 lat=points@coords[,2],
                 abundance=rnbinom(n = n_s,size = 1,mu = 0.9))

#plot points size dependent on abundance
p<-ggplot()+
  theme_bw()+
  theme(panel.grid.minor = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.5, linetype = 'dashed'),
        panel.grid.major = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.8),
        panel.border = element_blank(),
        panel.ontop = TRUE,
        panel.background = element_rect(fill = NA),
        text = element_text(size=14),
        legend.position = 'right',
        aspect.ratio = 1)+
  geom_polygon(data=studyland, aes(x=long, y=lat, group=group),fill = 'black', size = 1)+
  geom_point(data=data,aes(x=lon,y=lat,size=abundance,fill=abundance),shape=21,color='white',alpha=0.8)+
  scale_fill_gradientn(colours = c("#3B9AB2",  "#EBCC2A", "#E1AF00", "#F21A00"),na.value = 'white')+
  xlab('longitude')+
  ylab('latitude')+
  scale_x_continuous(expand = c(0,0))+
  scale_y_continuous(expand = c(0,0))+
  scale_size(guide='none')

#download mean bathymetry and sea surface temperature layers (Bio-ORACLE, via sdmpredictors)
depth <- load_layers("BO_bathymean")
sst<-load_layers("BO_sstmean")
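
#optional (not in the original post): available layer codes can be browsed with sdmpredictors::list_layers()
head(list_layers())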

#stack the rasters (bathymetry is negated so depth values are positive)
st<-stack(-depth,sst)
st<-crop(st,bbox)

#create an additional synthetic predictor 'var' filled with random noise, on the same grid as the stack
var<-raster(nrows=st@nrows, ncols=st@ncols, xmn=bbox[1], xmx=bbox[2], ymn=bbox[3], ymx=bbox[4], 
       crs=crs(st), ext=st@extent, resolution=res(st))
values(var)<-rnorm(n = ncell(var))
names(var)<-'var'

#add layer var
st<-addLayer(st,var)

#name stack
names(st)<-c('depth','sst','var')
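
#optional check (not in the original post): plot the three predictor layers to confirm extent and values
plot(st)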

#promote data to a SpatialPointsDataFrame using lon/lat as coordinates
coordinates(data)<-~lon+lat

#extract values of the stack at the sample locations and bind them to the data
env<-data.frame(extract(st,data))
data<-data.frame(as.data.frame(data),env)

#fit a negative binomial GAM of abundance on the three predictors
mod1<-gam(abundance ~ s(depth) + s(sst) + s(var),family = nb(),data=data.frame(data))
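
#optional (not in the original post): inspect the fitted smooths and basic model diagnostics
summary(mod1)
gam.check(mod1)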

#predict over the study area
prediction<-predict(st,mod1,type='response')
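
#optional (not in the original post): a version of the prediction masked to the ocean polygon, if land cells should be NA
prediction_masked<-mask(prediction,study)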

#plot predicted raster
pp<-rasterVis::gplot(prediction) +
  geom_tile(aes(fill=value))+
  theme_bw()+
  theme(panel.grid.minor = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.5, linetype = 'dashed'),
        panel.grid.major = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.8),
        panel.border = element_blank(),
        panel.ontop = TRUE,
        panel.background = element_rect(fill = NA),
        text = element_text(size=14),
        legend.position = 'right',
        aspect.ratio = 1)+
  geom_polygon(data=studyland, aes(x=long, y=lat, group=group),fill = 'black', size = 1)+
  scale_fill_gradientn(colours = c("#3B9AB2",  "#EBCC2A", "#E1AF00", "#F21A00"),na.value = 'white',name='abundance')+
  xlab('longitude')+
  ylab('latitude')+
  scale_x_continuous(expand = c(0,0))+
  scale_y_continuous(expand = c(0,0))

#plot both maps
plot_grid(p,pp,
          nrow = 1,
          align = 'vh',
          labels = c('A','B'))
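
#optional (not in the original post): save the combined maps to a file (file name and size are illustrative)
ggsave('abundance_maps.png',
       plot_grid(p,pp,nrow = 1,align = 'vh',labels = c('A','B')),
       width = 10,height = 5,dpi = 300)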

#create a list to store plots
plist<-list()

#loop for each environmental variable
for (i in 1:length(mod1$var.summary)) {

  #name variable
  v<-names(mod1$var.summary)[i]
 
  #prepare data to predict the effect of the focal variable on the species
  newdata<-expand.grid(seq(minValue(st[[v]]),maxValue(st[[v]]),length.out = 1000))
  names(newdata)<-v
 
  #prepare other variables
  for (j in names(mod1$var.summary)[-i]) {
    #hold the other variables at their mean value across the study area
    newdata[[j]]<-cellStats(st[[j]],stat='mean')
  }
 
  #predict
  y=predict(mod1,type='response',newdata=newdata,se.fit=F)
 
  #standardize predictions by their maximum
  y=y/max(y)
 
  #create dataframe
  envdata<-data.frame(env=newdata[,v],pred=y)
 
  #plot
  plot<-ggplot()+
    geom_line(data=envdata,aes(x=env,y=pred),color='darkred')+
    theme_bw()+
    theme(panel.grid.minor = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.5, linetype = 'dashed'),
            panel.grid.major = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.8),
            panel.border = element_blank(),
            panel.ontop = TRUE,
            panel.background = element_rect(fill = NA),
            text = element_text(size=14),
            legend.position = 'right',
            aspect.ratio = 1)+
      xlab(v)+
    scale_y_continuous(limits = c(0,1),expand = c(0,0))+
    scale_x_continuous(expand = c(0,0))
    
  #conditional ylab  
  if (i==1) {
    plot<-plot+ylab('relative abundance')
  } else {
    plot<-plot+ylab('')
  }
 
  #store plot into the list
  plist[[v]]<-plot 
}

#plot environmental response functions
plot_grid(plotlist = plist, nrow = 1,align = 'vh')

[Figures: plot1_rcode.jpg, plot2_rcode.jpg — output maps and environmental response curves from the code above]

R Shiny app

#load libraries
library(shiny)
library(gridExtra)
library(ggplot2)
library(dplyr)
library(grDevices)
library(png)
library(RCurl)
library(withr)
library(scales)
library(ggsn)
library(shinythemes)
library(RColorBrewer)
library(ggthemes)
library(colortools)
library(tableHTML)
library(lemon)
library(ggpubr)
library(shinyBS)
library(rgeos)
library(rgdal)
library(raster)
library(sp)
library(ggspatial)
library(DT)
library(cowplot)

# Define UI for the app
ui<-fluidPage(
  # App title ----
  titlePanel("marine sp sampler"),
 
  # Sidebar layout with input and output definitions ----
  sidebarLayout(
    
    # Sidebar panel for inputs ----
    sidebarPanel(
 
   # Input: sliders for the study region and number of samples ----
   sliderInput("lon", "Longitudinal range",
                min = -180, max = +180,
                value = c(-87,-81)),
    sliderInput("lat", "Latitudinal range",
               min = -180, max = +180,
               value = c(25,30)),
    sliderInput("samples", label = 'number of samples', min = 0, 
                max = 1000, value = 50)),
  mainPanel(tabsetPanel( id = "tabs",
                         tabPanel("Map", br(),
                                  p("Samples of virtual species"),
                                  value='map', plotOutput(outputId = "plot1")),
                         tabPanel("Summary",br(),
                                  p("Abundance summary for the selected region"),
                                  value='summary', DT::dataTableOutput('summary'))))
))
 
# Define server 
server<-function(input, output, session) {
 
  # Reactive bounding box built from the longitude/latitude sliders
  bbox <- reactive({
    c(input$lon[1],input$lon[2],input$lat[1],input$lat[2])
  })
 
  n_s <- reactive({
    input$samples
  })
 
  #plot map
  output$plot1 <- renderPlot({

    #URL of the world land shapefile (Natural Earth)
    URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/10m/physical/ne_10m_land.zip"
    fil <- basename(URL)
    if (!file.exists(fil)) download.file(URL, fil)
    fils <- unzip(fil)
    land <- readOGR(grep("shp$", fils, value=TRUE), "ne_10m_land",
                    stringsAsFactors=FALSE, verbose=FALSE)
    
    #crop to your study region
    studyland <- crop(land, extent(bbox()))
    
    #URL of the world ocean shapefile (Natural Earth)
    URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/physical/ne_110m_ocean.zip"
    fil <- basename(URL)
    if (!file.exists(fil)) download.file(URL, fil)
    fils <- unzip(fil)
    ocean <- readOGR(grep("shp$", fils, value=TRUE), "ne_110m_ocean",
                     stringsAsFactors=FALSE, verbose=FALSE)
    
    #crop to your study region
    study <- crop(ocean, extent(bbox()))
    
    #draw random sample locations within the ocean polygon
    points<-spsample(study,n=n_s(),"random")
    
    #simulate abundance at each location from a negative binomial distribution
    data<-data.frame(lon=points@coords[,1],
                     lat=points@coords[,2],
                     abundance=rnbinom(n = n_s(),size = 1,mu = 0.9))
    
    #plot points size dependent on abundance
    ggplot()+
      theme_bw()+
      theme(panel.grid.minor = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.5, linetype = 'dashed'),
            panel.grid.major = element_line(color = rgb(235, 235, 235, 100, maxColorValue = 500), size = 0.8),
            panel.border = element_blank(),
            panel.ontop = TRUE,
            panel.background = element_rect(fill = NA),
            text = element_text(size=14),
            legend.position = 'right')+
      geom_polygon(data=studyland, aes(x=long, y=lat, group=group),fill = 'black', size = 1)+
      geom_point(data=data,aes(x=lon,y=lat,size=abundance,fill=abundance),shape=21,color='white',alpha=0.8)+
      scale_fill_gradientn(colours = c("#3B9AB2",  "#EBCC2A", "#E1AF00", "#F21A00"),na.value = 'white')+
      xlab('longitude')+
      ylab('latitude')+
      scale_x_continuous(expand = c(0,0))+
      scale_y_continuous(expand = c(0,0))+
      scale_size(guide='none')+
      coord_fixed()
  })
 
  #render table with abundance samples
  output$summary <- DT::renderDataTable({   
    
    #URL of the world land shapefile (Natural Earth)
    URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/10m/physical/ne_10m_land.zip"
    fil <- basename(URL)
    if (!file.exists(fil)) download.file(URL, fil)
    fils <- unzip(fil)
    land <- readOGR(grep("shp$", fils, value=TRUE), "ne_10m_land",
                    stringsAsFactors=FALSE, verbose=FALSE)
    
    #crop to your study region
    studyland <- crop(land, extent(bbox()))
    
    #URL of the world ocean shapefile (Natural Earth)
    URL <- "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/physical/ne_110m_ocean.zip"
    fil <- basename(URL)
    if (!file.exists(fil)) download.file(URL, fil)
    fils <- unzip(fil)
    ocean <- readOGR(grep("shp$", fils, value=TRUE), "ne_110m_ocean",
                     stringsAsFactors=FALSE, verbose=FALSE)
    
    #crop to your study region
    study <- crop(ocean, extent(bbox()))
    
    #draw random sample locations within the ocean polygon
    points<-spsample(study,n=n_s(),"random")
    
    #simulate abundance at each location from a negative binomial distribution
    data<-data.frame(lon=round(points@coords[,1],digits=3),
                     lat=round(points@coords[,2],digits=3),
                     abundance=rnbinom(n = n_s(),size = 1,mu = 0.9))
    
    #return the sampled data as a table
    data
    
  },rownames= FALSE, extensions = 'Buttons', options = list(pageLength = 1000,
                                                            dom = 'Bfrtip',
                                                            buttons = c('copy', 'csv', 'excel', 'pdf')))
}

#run the app
shinyApp(ui = ui, server = server)
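
If the UI and server code above is saved as app.R (an assumed file name) in the working directory, the app can also be launched from the console:

#assumes the code above is saved as app.R in the current working directory
shiny::runApp('.')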
