Parallel_Extract.R
# Parallel Extract
# Original code by Travis Nauman([email protected]), modified by Dave White([email protected])
# Extracts raster values at each point in parallel; this process is useful for large datasets
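# Assumed inputs, based on the calls below: a point shapefile ("field_data_0206")
# and a folder of GeoTIFF covariate rasters ("tif" files) covering the point locations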
## Install and Load packages
required.packages <- c("rgdal", "raster", "sp", "snow", "snowfall")
new.packages <- required.packages[!(required.packages %in% installed.packages()[,"Package"])]
if(length(new.packages)) install.packages(new.packages)
lapply(required.packages, require, character.only = TRUE)
rm(required.packages, new.packages)
setwd("C:/cov/")## Folder with points
shp.pts <-readOGR(dsn=".", layer="field_data_0206")
setwd("C:/cov/")## Folder with rasters
# read in raster layers to create raster stack
r.list <- list.files(getwd(), pattern = "tif$", full.names = FALSE)
r.list
#create raster stack of covariates
r.stack <- stack(r.list)
names(r.stack)
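# note: stack() requires that all covariate rasters share the same extent and resolution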
# convert raster stack to list of single raster layers
r.stack.list <- unstack(r.stack)
names(r.stack.list) <- names(r.stack)
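# unstacking gives a plain list of single RasterLayer objects so each layer can be
# handed to a separate worker in the parallel extract below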
## Parallelized extract: (larger datasets)
sfInit(parallel = TRUE, cpus = parallel::detectCores() - 1)
sfLibrary(raster)
sfLibrary(rgdal)
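# sfLibrary() loads the raster and rgdal packages on every worker so extract() is available there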
# run parallelized 'extract'
e.df <- sfSapply(r.stack.list, extract, y=shp.pts)
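# e.df is a matrix of extracted values with one column per raster layer and one row per point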
sfStop()
gc()
# convert extracted values to a data frame
DF <- as.data.frame(e.df)
names(DF) = tools::file_path_sans_ext(basename(names(r.stack.list)))
names(DF)
head(DF)
DF$ID <- seq.int(nrow(DF))
shp.pts$ID <- seq.int(nrow(shp.pts))
# merge by ID to attach the extracted covariate values to the spatial points
train.pts <- merge(shp.pts, DF, by = "ID")
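# train.pts is now a SpatialPointsDataFrame holding the original attributes plus one covariate column per raster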
## Save points
#setwd("D:/Jornada LRU Update/Modeling/")
writeOGR(train.pts, dsn = ".", layer = "trainptscov", driver = "ESRI Shapefile", overwrite_layer = TRUE)
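# Optional sanity check (a minimal sketch; "trainptscov.csv" is a hypothetical file name,
# not part of the original workflow): inspect the merged attributes and keep a flat
# copy of the covariate table for later modeling
head(train.pts@data)
write.csv(train.pts@data, "trainptscov.csv", row.names = FALSE)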