## ----knitropts, include = FALSE-----------------------------------------------
# Vignette-wide chunk defaults: display code and messages, suppress warnings,
# and skip evaluation (chunks are shown but not run).
knitr::opts_chunk$set(
  echo = TRUE,
  eval = FALSE,
  message = TRUE,
  warning = FALSE
)

## -----------------------------------------------------------------------------
# ## Load packages
# library(spatsoc)
# library(data.table)
# library(asnipe)
# library(igraph)

## ----echo = FALSE, eval = TRUE------------------------------------------------
# Pin data.table to a single thread for this document.
# NOTE(review): presumably to comply with CRAN's limit on parallel threads
# during checks — confirm against package check setup.
data.table::setDTthreads(1)

## -----------------------------------------------------------------------------
# ## Read data as a data.table
# DT <- fread(system.file("extdata", "DT.csv", package = "spatsoc"))
# 
# ## Cast datetime column to POSIXct
# DT[, datetime := as.POSIXct(datetime)]
# 
# ## Calculate the year of the relocation
# DT[, yr := year(datetime)]

## -----------------------------------------------------------------------------
# ## Temporal groups
# group_times(DT, datetime = 'datetime', threshold = '5 minutes')
# 
# ## Spatial groups
# group_pts(
#   DT,
#   threshold = 50,
#   id = 'ID',
#   coords = c('X', 'Y'),
#   timegroup = 'timegroup'
# )
# 

## ----eval = FALSE-------------------------------------------------------------
# # EPSG code for relocations
# utm <- 32736
# 
# ## Group relocations by julian day
# group_times(DT, datetime = 'datetime', threshold = '1 day')
# 
# ## Group lines for each individual and julian day
# group_lines(
#   DT,
#   threshold = 50,
#   crs = utm,
#   id = 'ID',
#   coords = c('X', 'Y'),
#   timegroup = 'timegroup',
#   sortBy = 'datetime'
# )

## ----eval = FALSE-------------------------------------------------------------
# # EPSG code for relocations
# utm <- 32736
# 
# ## Option 1: area = FALSE and home range intersection 'group' column added to DT
# group_polys(
#   DT,
#   area = FALSE,
#   hrType = 'mcp',
#   hrParams = list(percent = 95),
#   crs = utm,
#   id = 'ID',
#   coords = c('X', 'Y')
# )
# 
# ## Option 2: area = TRUE
# #  results must be assigned to a new variable
# #  data.table returned has ID1, ID2 and proportion and area overlap
# areaDT <- group_polys(
#   DT,
#   area = TRUE,
#   hrType = 'mcp',
#   hrParams = list(percent = 95),
#   crs = utm,
#   id = 'ID',
#   coords = c('X', 'Y')
# )
# 

## -----------------------------------------------------------------------------
# ## Subset DT to only year 2016
# subDT <- DT[yr == 2016]
# 
# ## Generate group by individual matrix
# # group column generated by spatsoc::group_pts
# gbiMtrx <- get_gbi(DT = subDT, group = 'group', id = 'ID')

## -----------------------------------------------------------------------------
# ## Generate observed network
# net <- get_network(gbiMtrx,
#                    data_format = "GBI",
#                    association_index = "SRI")

## -----------------------------------------------------------------------------
# # Calculate year column to ensure randomization only occurs within years
# #   since data spans multiple years
# DT[, yr := year(datetime)]
# 
# ## Step type randomizations
# #  providing 'timegroup' (from group_times) as datetime
# #  splitBy = 'yr' to force randomization only within year
# randStep <- randomizations(
#    DT,
#    type = 'step',
#    id = 'ID',
#    group = 'group',
#    coords = NULL,
#    datetime = 'timegroup',
#    iterations = 3,
#    splitBy = 'yr'
# )

## -----------------------------------------------------------------------------
# # Calculate year column to ensure randomization only occurs within years
# #   since data spans multiple years
# DT[, yr := year(datetime)]
# 
# ## Daily type randomizations
# # splitBy = 'yr' to force randomization only within year
# randDaily <- randomizations(
#    DT,
#    type = 'daily',
#    id = 'ID',
#    group = 'group',
#    coords = NULL,
#    datetime = 'datetime',
#    splitBy = 'yr',
#    iterations = 20
# )

## -----------------------------------------------------------------------------
# # Calculate year column to ensure randomization only occurs within years
# #   since data spans multiple years
# DT[, yr := year(datetime)]
# 
# ## Trajectory type randomization
# randTraj <- randomizations(
#    DT,
#    type = 'trajectory',
#    id = 'ID',
#    group = NULL,
#    coords = c('X', 'Y'),
#    datetime = 'datetime',
#    splitBy = 'yr',
#    iterations = 20
# )

## -----------------------------------------------------------------------------
# ## Create a data.table of unique combinations of iteration and year, excluding observed rows
# iterYearLs <- unique(randStep[!(observed), .(iteration, yr)])
# 
# ## Generate group by individual matrix
# # for each combination of iteration number and year
# # 'group' generated by spatsoc::group_pts
# # 'randomID' used instead of observed ID (type = 'step')
# gbiLs <- mapply(FUN = function(i, y) {
#   get_gbi(randStep[iteration == i & yr == y],
#           'group', 'randomID')
#   },
#   i = iterYearLs$iteration,
#   y = iterYearLs$yr,
#   SIMPLIFY = FALSE
# )
# 
# ## Generate a list of random networks
# netLs <- lapply(gbiLs, FUN = get_network,
#                 data_format = "GBI", association_index = "SRI")
# 

## -----------------------------------------------------------------------------
# ## Generate fake population
# randDaily[, population := sample(1:2, .N, replace = TRUE)]
# 
# ## Create a data.table of unique combinations of iteration, year, and population,
# #    excluding observed rows
# iterYearLs <- unique(randDaily[!(observed), .(iteration, yr, population)])
# 
# ## Generate group by individual matrix
# # for each combination of iteration number and year
# # 'group' generated by spatsoc::group_pts
# # 'randomID' used instead of observed ID (type = 'step')
# gbiLs <- mapply(FUN = function(i, y, p) {
#   get_gbi(randDaily[iteration == i &
#                       yr == y & population == p],
#           'group', 'randomID')
#   },
#   i = iterYearLs$iteration,
#   y = iterYearLs$yr,
#   p = iterYearLs$population,
#   SIMPLIFY = FALSE
# )
# 
# ## Generate a list of random networks
# netLs <- lapply(gbiLs, FUN = get_network,
#                 data_format = "GBI", association_index = "SRI")
# 

## -----------------------------------------------------------------------------
# ## Randomized temporal groups
# # 'datetime' is the randomdatetime produced by randomizations(type = 'trajectory')
# group_times(randTraj, datetime = 'randomdatetime', threshold = '5 minutes')
# 
# ## Randomized spatial groups
# # 'iteration' used in splitBy to ensure only points within each iteration are grouped
# group_pts(randTraj, threshold = 50, id = 'ID', coords = c('X', 'Y'),
#           timegroup = 'timegroup', splitBy = 'iteration')
# 
# ## Create a data.table of unique combinations of iteration and year, excluding observed rows
# iterYearLs <- unique(randTraj[!(observed), .(iteration, yr)])
# 
# ## Generate group by individual matrix
# # for each combination of iteration number and year
# # 'group' generated by spatsoc::group_pts
# # 'ID' used since datetimes were randomized within individuals
# gbiLs <- mapply(FUN = function(i, y) {
#   get_gbi(randTraj[iteration == i & yr == y],
#           'group', 'ID')
#   },
#   i = iterYearLs$iteration,
#   y = iterYearLs$yr,
#   SIMPLIFY = FALSE
# )
# 
# ## Generate a list of random networks
# netLs <- lapply(gbiLs, FUN = get_network,
#                 data_format = "GBI", association_index = "SRI")
# 

## -----------------------------------------------------------------------------
# ## Generate graph
# g <- graph_from_adjacency_matrix(net, 'undirected',
#                                  diag = FALSE, weighted = TRUE)
# 
# ## Metrics for all individuals
# observed <- data.table(
#   centrality = eigen_centrality(g)$vector,
#   strength = strength(g),
#   degree = degree(g),
#   ID = names(degree(g)),
#   yr = subDT[, unique(yr)]
# )

## -----------------------------------------------------------------------------
# ## Generate graph and calculate network metrics
# mets <- lapply(seq_along(netLs), function(n) {
#   g <- graph_from_adjacency_matrix(netLs[[n]], 'undirected',
#                                    diag = FALSE, weighted = TRUE)
# 
#   data.table(
#     centrality = eigen_centrality(g)$vector,
#     strength = strength(g),
#     degree = degree(g),
#     ID = names(degree(g)),
#     iteration = iterYearLs$iteration[[n]],
#     yr = iterYearLs$yr[[n]]
#     )
# })
# 
# ## Metrics for all individuals across all iterations and years
# random <- rbindlist(mets)
# 
# ## Mean values for each individual and year
# meanMets <- random[, lapply(.SD, mean), by = .(ID, yr),
#                 .SDcols = c('centrality', 'strength', 'degree')]

## -----------------------------------------------------------------------------
# ## Create a data.table of unique combinations of iteration and year, including observed and random rows
# iterYearLs <- unique(randStep[, .(iteration, yr)])
# 
# ## Generate group by individual matrix
# # for each combination of iteration and year
# # 'group' generated by spatsoc::group_pts
# # 'randomID' used instead of observed ID (type = 'step')
# gbiLs <- mapply(FUN = function(i, y) {
#   get_gbi(randStep[iteration == i & yr == y],
#           'group', 'randomID')
#   },
#   i = iterYearLs$iteration,
#   y = iterYearLs$yr,
#   SIMPLIFY = FALSE
# )
# 
# ## Generate a list of random networks
# netLs <- lapply(gbiLs, FUN = get_network,
#                 data_format = "GBI", association_index = "SRI")
# 
# ## Generate graph and calculate network metrics
# mets <- lapply(seq_along(netLs), function(n) {
#   g <- graph_from_adjacency_matrix(netLs[[n]], 'undirected',
#                                    diag = FALSE, weighted = TRUE)
# 
#   data.table(
#     centrality = eigen_centrality(g)$vector,
#     strength = strength(g),
#     ID = names(degree(g)),
#     iteration = iterYearLs$iteration[[n]],
#     yr = iterYearLs$yr[[n]]
#     )
# })
# 
# ## Observed and random for all individuals across all iterations and years
# out <- rbindlist(mets)
# 
# ## Split observed and random
# out[, observed := iteration == 0]
# 
# ## Mean values for each individual and year, by observed/random
# meanMets <- out[, lapply(.SD, mean), by = .(ID, yr, observed),
#                 .SDcols = c('centrality', 'strength')]
# 

