forked from lawaFreshwater/WQualityStateTrend
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathlawa_intersect_WFS.R
131 lines (98 loc) · 4.67 KB
/
lawa_intersect_WFS.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
#===================================================================================================
# LAWA DATA PREPARATION - Intersect WFS Sites with Catchments
# Horizons Regional Council
#
# 28 August 2016
#
# Jorn Sijbertsma
# Sean Hodges
# Horizons Regional Council
#===================================================================================================
# Clearing workspace so every run starts from a known-empty environment.
# NOTE(review): rm(list = ls()) and setwd() in scripts are generally discouraged,
# but all downstream relative paths depend on this working directory, so both
# are kept for compatibility with the existing pipeline.
rm(list = ls())
ANALYSIS<-"Intersect WFS"
# Set working directory, remembering the original in `od` so it can be restored.
od <- getwd()
wd <- "\\\\file\\herman\\R\\OA\\08\\02\\2018\\Water Quality\\R\\lawa_state"
setwd(wd)
#/* -===Include required function libraries===- */
source("scripts\\WQualityStateTrend\\lawa_state_functions.R")
# Use library() rather than require() for mandatory dependencies: require()
# returns FALSE on a missing package and lets the script continue to a
# confusing later failure, whereas library() stops immediately with a clear
# error naming the missing package.
library(rgeos)
library(spatialEco)
library(maptools)
library(rgdal)
## Supplementary functions
# Download the XML document at `url` and return it parsed.
#
# Args:
#   url - character scalar; URL of an XML resource (e.g. a WFS endpoint).
# Returns:
#   The parsed XML document from xmlParse().
#
# Uses a unique temporary file (tempfile()) instead of the original fixed
# "tmp" name so concurrent runs cannot clobber each other's download, and
# guarantees the temp file is removed via on.exit() even if xmlParse() fails.
# NOTE(review): method = "wininet" is Windows-only; xmlParse() comes from the
# XML package, presumably attached by the sourced function library — confirm.
ld <- function(url){
  tmpf <- tempfile()
  on.exit(unlink(tmpf), add = TRUE)
  download.file(url, destfile = tmpf, method = "wininet")
  xmlParse(file = tmpf)
}
# Spatial point-in-polygon join: returns the subset of `pts` that falls inside
# a polygon of `polys` with a non-NA LAWA_CATCH value, with that polygon's
# attributes appended to the point attributes.
#
# Args:
#   pts   - SpatialPointsDataFrame (or SpatialPoints; but @data is assumed).
#   polys - SpatialPolygonsDataFrame carrying a LAWA_CATCH attribute column.
# Returns:
#   A SpatialPointsDataFrame subset of `pts` (CRS preserved) whose @data has
#   the matched polygon attributes bound on column-wise.
points.in.polys <- function (pts, polys) {
  if (!inherits(polys, "SpatialPolygonsDataFrame"))
    stop("MUST BE SP SpatialPolygonsDataFrame OBJECT")
  if ((inherits(pts, "SpatialPointsDataFrame") | inherits(pts,
    "SpatialPoints")) == FALSE)
    stop("Must be sp SpatialPointsDataFrame object")
  # Single over() call. The original ran sp::over() twice and filtered the
  # two results by DIFFERENT criteria (any geometry hit vs. non-NA
  # LAWA_CATCH); a point inside a polygon whose LAWA_CATCH is NA would then
  # misalign the rows bound together below. Filtering both the points and
  # the attribute table by the same mask keeps them in lockstep.
  y <- sp::over(pts, polys)
  keep <- !is.na(y$LAWA_CATCH)
  z <- pts[keep, ]
  z@data <- data.frame(z@data, y[keep, ])
  z@proj4string <- pts@proj4string
  z
}
# ======================================
# Load WFS locations from CSV
## Load csv with WFS locations
siteTable <- read.csv("LAWA_Site_Table.csv",stringsAsFactors=FALSE)
# Coerce coordinates to numeric; non-numeric entries become NA (with a warning).
siteTable$Lat <- as.numeric(siteTable$Lat)
siteTable$Long <- as.numeric(siteTable$Long)
# Drop sites missing EITHER coordinate. The original filtered on Lat only, so
# a row with Lat present but Long NA would reach coordinates() and fail there.
pts <- siteTable[complete.cases(siteTable[, c("Lat", "Long")]), ]
# Cast dataframe as SpatialPointsDataFrame (sp promotes `pts` in place)
coordinates(pts) <- ~ Long + Lat
# Load catchment polys as SpatialPolygonsDataFrame
# LAWA_CATCHMENTS_WGS84 - original - replaced by the eIDI FW catchment file
polys <- readOGR(dsn="\\\\file\\herman\\R\\OA\\08\\02\\Mapping\\data\\2018\\eIDI-FW-Catchments.shp",
                 layer="eIDI-FW-Catchments",p4s = NULL,
                 stringsAsFactors = FALSE)
# Just use the Parent Catchments
#polys <- subset(polys,CatchType=="Parent")
# Set the projection of the wq sites to match the catchments, since everything in WGS84
pts@proj4string <- polys@proj4string
# Intersect points and polygons using user-defined function
pip <- points.in.polys(pts,polys)
# Just keeping required fields
pip.data <- pip@data[,c(4,2:3,5:11,15:22)] ## field list(LawaSiteID,SiteID,CouncilSiteID,SWQuality:Agency,)
#pip.data$LAWA_CATCH[pip.data$LAWA_CATCH==0] <- pip.data$CatchID[pip.data$LAWA_CATCH==0]
# Split NRWQN sites from council sites; the ID swaps below apply only to the
# council subset (dd), the NRWQN subset (df) passes through untouched.
dd <- pip.data[!grepl("NRWQN", pip.data$LawaSiteID, ignore.case = TRUE), ]
df <- pip.data[grepl("NRWQN", pip.data$LawaSiteID, ignore.case = TRUE), ]
# Some columns need to be moved around so that correct id's are in correct order
# This should be dealt with during the initial pull, but for the time-being, we'll
# deal with this through post-processing feeds
# EBOP and NRC need to have SiteId and CouncilSiteID's swapped
region <- c("Auckland","Bay of Plenty","Northland","Gisborne")
for (rg in region) {
  # Compute the region mask once per region (the original repeated the same
  # grepl() three times per iteration) and swap SiteID <-> CouncilSiteID.
  sel <- grepl(rg, dd$Region, ignore.case = TRUE)
  tmp <- dd$CouncilSiteID[sel]
  dd$CouncilSiteID[sel] <- dd$SiteID[sel]
  dd$SiteID[sel] <- tmp
}
# Two individual Horizons sites also need their IDs swapped.
site <- c("HRC-00036","HRC-00042")
for (s in site) {
  sel <- dd$LawaSiteID == s
  tmp <- dd$CouncilSiteID[sel]
  dd$CouncilSiteID[sel] <- dd$SiteID[sel]
  dd$SiteID[sel] <- tmp
}
# Recombine council and NRWQN sites and tidy whitespace in the identifiers.
pip.data <- rbind.data.frame(dd, df)
pip.data$SiteID <- trimws(pip.data$SiteID)
pip.data$CouncilSiteID <- trimws(pip.data$CouncilSiteID)
#pip.data <- read.csv("LAWA_Site_Table1.csv", stringsAsFactors=FALSE)
# Report how many site names contain '&' before replacing it.
chk <- grepl("\\&",pip.data$CouncilSiteID)
cat(sum(chk), ": site names including '&' character\n")
pip.data$CouncilSiteID <- gsub("\\&","%26",pip.data$CouncilSiteID) # replace "&" symbols (as a reserved character) with ascii representation %26
#pip.data$LawaCatchm[pip.data$LawaCatchm==0]<-1 # Make sure all catchments set to 1 in case filters set later to exclude zeros.
pip.data <- unique(pip.data)
# NOTE(review): a second identical cat() of the pre-replacement count was
# removed here — it re-printed the stale `chk` total after the '&' characters
# had already been replaced, which was misleading diagnostic output.
write.csv(pip.data,"LAWA_Site_Table1.csv")
#write.csv(pip.data,"LAWA_Site_Table.csv")
siteTableMerge <- merge(siteTable,pip.data,by="LawaSiteID",all.x=TRUE)
write.csv(siteTableMerge,"reviewWFS-SiteList.csv")