I am not sure I completely understand the question, but if the goal is to sample points from each state raster according to the probability values stored in the raster, I think this solution should work:
# Bring in libraries and set random seed
library(data.table)
library(raster)
set.seed(123)
# Bring in tifs and subset to your 16 states
tifFiles <- list.files(".", pattern=".TIF$", full.names = TRUE)
tifFiles <- paste0("./Suitability_Prob_v",c(30:39,310:315),".TIF")
# How many points do we want to sample from each state?
# I am not sure what is in your csv file, so I drew example counts from a Poisson distribution
sampPoints <- data.table(State=tifFiles, N=rpois(length(tifFiles),6))
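# If your csv already holds the per-state counts, it could be read in directly instead,
# for example (the file name and the State/N column names are assumptions about your file):
# sampPoints <- fread("state_counts.csv")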
# Function to read in raster and sample points
readRasterAndSample <- function(rasFile, count) {
# Read in raster and set negative values (e.g. nodata flags) to 0
ras <- raster(rasFile)
ras[ras < 0] <- 0
# Convert to data.table
rasDT <- as.data.frame(ras, xy=TRUE)
setDT(rasDT)
setnames(rasDT, c("x","y","values"))
# Sample non-zero points according to desired count
# Use raster probability as sample prob
outPoints <- rasDT[values > 0]
outPoints <- outPoints[sample(1:.N, count, prob=values, replace=TRUE)]
# Jitter points randomly within their cell (x and y resolution handled separately)
outPoints[, x := x + runif(.N, -0.5*res(ras)[1], 0.5*res(ras)[1])]
outPoints[, y := y + runif(.N, -0.5*res(ras)[2], 0.5*res(ras)[2])]
outPoints[, state := basename(rasFile)]
# Combine into list and return
outList <- list(ras=ras, pts=outPoints)
return(outList)
}
# Apply function to all states
rasterList <- lapply(1:nrow(sampPoints), function(x)
with(sampPoints[x], readRasterAndSample(rasFile=State, count=N)))
# Collect all rasters and merge
rasterAll <- do.call(merge, lapply(rasterList, "[[", "ras"))
# Collect all state points and convert to SpatialPointsDataframe
pointDat <- rbindlist(lapply(rasterList, "[[", "pts"), fill=TRUE)
coordinates(pointDat) <- ~x+y
proj4string(pointDat) <- crs(rasterAll)
# Plot to check
plot(rasterAll)
plot(pointDat, add=TRUE)
Thank you for sending me in the right direction with exactextractr, @Spacedman. Here's the solution that I came up with:
library(terra)
library(exactextractr)
library(dplyr)
library(sf)
f <- system.file("ex/lux.shp", package="terra")
v <- st_read(f)
v <- v[1:2,]
z <- rast(vect(v), resolution=.09, names="test")
values(z) <- 1:ncell(z)
levels(z) <- 0:ncell(z)
plot(z)
plot(v, add = T)
#extract the area of each cell that is contained within each polygon
x <- exact_extract(z, v, coverage_area = TRUE)
#add polygon names that the results will be grouped by
names(x) <- v$NAME_2
#bind the list output into a df and calculate the proportion cover for each category
test <- bind_rows(x, .id = "region") %>%
group_by(region, value) %>%
summarize(total_area = sum(coverage_area)) %>%
group_by(region) %>%
mutate(proportion = total_area/sum(total_area))
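As a quick sanity check (an addition, not part of the original answer), the proportions should sum to 1 within each region:
test %>% group_by(region) %>% summarize(check = sum(proportion))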
Edit: With a big dataset, I was having memory issues with the code above. Here's a more memory efficient version:
### Summarizing function (wrapping the result in list() keeps one data frame per polygon, so names can be assigned below)
sum_cover <- function(x){
list(x %>%
group_by(value) %>%
summarize(total_area = sum(coverage_area)) %>%
mutate(proportion = total_area/sum(total_area)))
}
#extract the area of each raster cell covered by the plot and summarize
x <- exact_extract(z, v, coverage_area = TRUE, summarize_df = TRUE, fun = sum_cover)
#add plot names to the elements of the output list
names(x) <- v$NAME_2
#merge the list elements into a df
test <- bind_rows(x, .id = "Plot_buffer")
Best Answer
You can use the global function from the {terra} library.
For a reproducible example, I'm adding a few random rasters loaded into a list:
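A minimal sketch of such a list, assuming small random SpatRasters (the dimensions, values, and the name rasterList are illustrative):
library(terra)
set.seed(1)
# Build a small list of random rasters (purely illustrative)
rasterList <- lapply(1:3, function(i) {
  r <- rast(nrows = 10, ncols = 10)
  values(r) <- runif(ncell(r))
  r
})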
For computing the mean for every element of the raster list, use lapply with the global function:
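A sketch of that step, using the hypothetical rasterList from above:
# Compute the mean value of every raster in the list
meansList <- lapply(rasterList, function(r) global(r, fun = "mean", na.rm = TRUE))
# Stack the one-row results into a single data frame
meansDF <- do.call(rbind, meansList)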
For saving the result to your machine, use write.csv:
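Continuing the sketch, with an illustrative output file name:
# Save the per-raster means to disk
write.csv(meansDF, "raster_means.csv", row.names = TRUE)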
Adaptation to OP code: