-
Notifications
You must be signed in to change notification settings - Fork 2
/
Baker_sockeye_ISU.Rmd
352 lines (300 loc) · 15.8 KB
/
Baker_sockeye_ISU.Rmd
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
---
title: Baker River Sockeye Run-Size In-Season Update
author: Thomas Buehrens ([email protected]) & Casey Ruff ([email protected])
output:
html_document:
code_folding: hide
fig_caption: yes
theme: cerulean
toc: yes
toc_depth: 3
toc_float: yes
pdf_document:
toc: yes
toc_depth: '3'
---
<script>
$(document).ready(function() {
$head = $('#header');
$head.prepend('<img src=\"https://privatelands.wdfw.wa.gov/wdfwlogo_clrnotxt.png\" style=\"float: right;width: 150px;\"/>')
});
</script>
<script>
$(document).ready(function() {
$head = $('#header');
$head.prepend('<img src=\"https://www.tribalnationsmaps.com/uploads/1/0/4/5/10451178/s366865341169104376_p1137_i1_w1260.jpeg?width=640\" style=\"float: right;width: 250px;\"/>')
});
</script>
***
Last Updated `r format(Sys.time(), '%m/%d/%Y')`.
***
# Overview
This script fits Seasonal Auto-Regressive Integrated Moving Average (SARIMA) models to in-season salmon return data to predict total return. It does this by aggregating the run-size into two periods: counts prior to a particular date (often the date of the most recent observation), and counts after that date (i.e., the remainder of the run). It then predicts the remainder of the run using the pattern of abundance before and after that date in previous years, in conjunction with covariates. This software evaluates the model's skill at predicting the remainder of the run-size based on data collected up to a particular date. A forecast for the final year, including confidence intervals, is produced.
<!-- ## Setup -->
<!-- All analyses require R software [**(link)**](https://cran.r-project.org/) (v3.4.3) for data retrieval, data processing, and summarizing model results. Here we configure R to perform our analysis and generate our outputs -->
```{r set_options, echo = TRUE, message = FALSE}
# Reproducibility and output configuration for the whole document.
set.seed(123)                          # fix RNG so model fits are reproducible
options(width = 100)                   # wider console output in rendered report
knitr::opts_chunk$set(message = FALSE) # suppress package/pipeline messages globally
```
<!-- We also need a couple of helper functions which we will define -->
```{r load_funcs, message = FALSE, warning = FALSE,results = "hide"}
# Source every helper-function script in the "functions" directory
# (e.g. install_or_load_pack(), prepare_data(), inseason_forecast()).
# lapply() with full.names = TRUE replaces the original
# sapply(FUN = source, paste(...)): sapply's attempted simplification of
# the result is never wanted here, and invisible() discards the return
# value, which was only a side effect of sourcing.
wd_functions <- "functions"
invisible(lapply(
  list.files(wd_functions, full.names = TRUE),
  source
))
```
<!-- Here we will load & install packages we need to use (needs internet connection if packages not already installed) -->
```{r load_packages, message = FALSE, warning = FALSE,results = "hide"}
# Packages required by this analysis. install_or_load_pack() (defined in the
# "functions" directory) installs any that are missing, then loads them all.
# NOTE(review): "kableExtra" appeared twice in the original vector; the
# duplicate entry has been removed (no behavior change — loading is idempotent).
packages_list <- c(
  "tidyverse",
  "forecast",
  "mgcv",
  "ggplot2",
  "MASS",
  "RColorBrewer",
  "kableExtra",
  "lubridate",
  "modelr",
  "reshape2",
  "ggfortify",
  "clock",
  "smooth",
  "scales",
  "here",
  "MuMIn",
  "CombMSC"
)
install_or_load_pack(pack = packages_list)
```
<!-- ## User Inputs -->
Look at this section to see user inputs regarding the years and dates of data analyzed and covariates used.
```{r user_inputs_data, message = FALSE, warning = FALSE,results = "show"}
#=========
# Raw Data
#=========
yr_start <- 1992            # first year of count/flow data to retrieve
yr_end <- year(Sys.Date())  # retrieve data through the current year
dam <- "Baker"              # dam name used in table captions
species <- "Sock"           # species code used in table captions
#===========================
# Summarization for analysis
#===========================
date_start_analysis <- ymd("1992/6/1") #this will be the first date of data used in the analysis (and this month/day first in years thereafter)
date_end_analysis <- ymd("2022/12/31") #this will be the last date of data used in the analysis (and this month/day last in years preceding)
forecast_period_start_m <- 6 #this will be the month associated with the first month/day of the seasonal estimate period each year...can be after first data used to estimate it or the same
forecast_period_start_d <- 1 #this will be the month day associated with the first month/day of the seasonal estimate period each year...can be after first data used to estimate it or the same
# TRUE/FALSE spelled out (the original used the re-assignable shorthand T/F).
use_freshest_data <- TRUE #use all data up to "today" or only up to beginning of forecast period
last_data <- as.Date("2022-07-11") # date of the most recent in-season observation
#==================
#forecasting params
#==================
leave_yrs <- 11 # number of hold-out years for the retrospective skill evaluation
covariates <- c("lag2_NPGO","lag1_log_SAR2","lag2_log_smolts","var_flow", "lag1_PDO","lag1_NPGO","lag2_PDO","pink_ind","lag1_log_SAR1","zl_flow")
p1_covariates_only <- c("var_flow","zl_flow") # covariates applied to period 1 only
plot_results <- FALSE
first_forecast_period <- 2
write_model_summaries <- TRUE
```
<!-- ## Get Raw Data -->
In this section, data used in the analysis is loaded. Here a snapshot of the data (before aggregation into the two periods described in the overview).
```{r get_data, message=FALSE, warning=FALSE, results="show"}
# Daily sockeye counts by area: pivot the per-area columns to long format,
# then sum across areas to one total abundance per date.
dat<-read_csv(here("data","tbl_totalSockeyeCountByArea.csv"))%>%
pivot_longer(-c("ID","CountDate","Comment"),names_to = "location",values_to = "count")%>%
mutate(date=mdy(CountDate),year=year(date))%>%
group_by(date,year)%>%
summarise(abundance=sum(count), .groups = "keep")%>%
# NOTE(review): both ifelse() branches are "Sockeye", so this is effectively a
# constant label (it yields NA only when abundance is NA) — confirm intent.
mutate(species=ifelse(abundance >= 0,"Sockeye","Sockeye"))%>%
filter(!is.na(date))
#=========================================================
#get PDO data
#=========================================================
# Annual mean PDO index from NOAA, lagged 1 and 2 years for use as covariates.
# Values < -99 are the file's missing-data sentinel and are dropped.
PDO<-read_table("https://psl.noaa.gov/pdo/data/pdo.timeseries.ersstv5.csv",skip=1,col_names=F,comment="#")%>%
dplyr::rename(Date=X1,PDO=X2)%>%
filter(!PDO < -99)%>%
mutate(Date=as.Date(Date),Month=month(Date),Year=year(Date))%>%
group_by(Year)%>%
add_tally()%>%
#filter(!Month>6)%>% #use only spring (Jan-June) NPGO
#filter(!n < 12)%>% #use only complete years
group_by(Year)%>%
dplyr::summarise(PDO=mean(PDO))%>%
mutate(lag2_PDO = lag(PDO,2), lag1_PDO = lag(PDO,1))%>%
dplyr::select(year=Year,lag2_PDO, lag1_PDO)
#=========================================================
#get NPGO data
#=========================================================
# Annual mean NPGO index; incomplete years (< 12 monthly values) are dropped.
# The Year+1 shift means lag1_NPGO for return year y is the NPGO of year y-1.
NPGO<-read_table("http://www.o3d.org/npgo/npgo.php",skip=29,col_names=F,comment="#")%>%
filter(!is.na(X2))%>%
dplyr::rename(Year=X1,Month=X2,NPGO=X3)%>%
mutate(Year=as.numeric(Year))%>%
group_by(Year)%>%
add_tally()%>%
#filter(!Month>6)%>% #use only spring (Jan-June) NPGO
filter(!n < 12)%>% #use only complete years
group_by(Year)%>%
dplyr::summarise(NPGO=mean(NPGO))%>%
mutate(Year=Year+1, lag1_NPGO = NPGO,lag2_NPGO = lag(NPGO))%>%
dplyr::select(year=Year,lag1_NPGO,lag2_NPGO)
#=========================================================
#get Salmon Ocean Abundance Data
#=========================================================
# log total pink salmon numbers, a proxy for ocean salmon abundance/competition.
OceanSalmon<-read_csv("https://raw.githubusercontent.com/tbuehrens/Salmon_Forecast_Example/main/data/pink_covariates_1952-2020.csv")%>%
dplyr::rename(Year=year)%>%
mutate(log_tot_pk_num = log(tot_pk_num))%>%
dplyr::select(year=Year,log_tot_pk_num)
#=========================================================
#get SAR survival/age data
#=========================================================
# Smolt-to-adult return (SAR) rates, smoothed with penalized-spline binomial
# GAMs of recruits vs. smolts by out-migration year. SAR1/SAR2 appear to be
# ocean-age-1 and ocean-age-2 returns respectively — TODO confirm against the
# baker_sockeye_SAR.csv schema. The leading NA for SAR2 accounts for the
# shorter fitted series (one fewer year).
SAR<-read_csv(here("data","baker_sockeye_SAR.csv"))%>%
filter(Year<2021)
SAR<-SAR%>%bind_cols(SAR1=data.frame(SAR1=gam(cbind(round(OA1_Recruits),smolts-round(OA1_Recruits))~s(Year,k=(dim(SAR)[1]),m=1,bs="ps"),family=quasibinomial,data=SAR)$fitted))%>%
bind_cols(SAR2=data.frame(SAR2=c(NA,gam(cbind(round(OA2_Recruits),smolts-round(OA2_Recruits))~s(Year,k=(dim(SAR)[1]-1),m=1,bs="ps"),family=quasibinomial,data=SAR)$fitted)))%>%
# year = Year + 2 shifts out-migration year to (approximate) return year.
mutate(year=Year+2,lag1_log_SAR1 = lag(log(SAR1),1),lag1_log_SAR2=log(SAR2))%>%
dplyr::select(year=year,lag1_log_SAR1,lag1_log_SAR2)
#=========================================================
#get smolts
#=========================================================
# Smolt out-migration counts, log-transformed and lagged 2 years to the
# return year (same shift as SAR above).
Smolts<-read_csv(here("data","baker_sockeye_SAR.csv"))%>%
dplyr::mutate(year=Year+2,lag2_log_smolts=log(smolts))%>%
dplyr::select(year,lag2_log_smolts)
#=========================================================
#get mainstem and baker river flow data (might affect timing)
#=========================================================
# flow_site<-12200500
# flow_url <- paste0("https://waterdata.usgs.gov/nwis/dv?&format=rdb&site_no=",flow_site,
# "&period=&begin_date=",yr_start,"-01-01",
# "&end_date=",yr_end,"-12-31")
# flow<-readr::read_delim(flow_url,comment = '#')%>%
# filter(agency_cd=="USGS")%>%
# dplyr::rename(date=datetime,CFS=`149429_00060_00003`)%>%
# dplyr::select(date,CFS)%>%
# mutate(date=ymd(date),CFS = as.numeric(CFS),flow_diff = log(lag(CFS,1)) - log(CFS))
#
#
# flow<-flow%>%
# mutate(year=year(date),month=month(date),yday=yday(date))%>%
# filter(yday <= yday(last_data) & yday >= yday(last_data-13))%>%
# group_by(year)%>%
# dplyr::summarise(zl_flow=mean(log(CFS),na.rm=T),var_flow=sd(flow_diff,na.rm=T),.groups = "keep")%>%
# ungroup()%>%
# mutate(zl_flow=as.vector(scale(zl_flow)),var_flow = as.vector(scale(var_flow)))
## try with baker river flows
# Baker River daily discharge from USGS NWIS; flow_diff is the day-over-day
# log-flow change. NOTE(review): the backtick column name `149403_00060_00003`
# is the NWIS timeseries id for this site and will break if USGS renumbers it.
flow_site<-12193400
flow_url <- paste0("https://waterdata.usgs.gov/nwis/dv?&format=rdb&site_no=",flow_site,
"&period=&begin_date=",yr_start,"-01-01",
"&end_date=",yr_end,"-12-31")
flow<-readr::read_delim(flow_url,comment = '#')%>%
filter(agency_cd=="USGS")%>%
dplyr::rename(date=datetime,CFS=`149403_00060_00003`)%>%
dplyr::select(date,CFS)%>%
mutate(date=ymd(date),CFS = as.numeric(CFS),flow_diff = log(lag(CFS,1)) - log(CFS))
# Summarize flow over the 14-day window ending on yday(last_data) in each year:
# mean log-flow (zl_flow) and SD of daily log-flow changes (var_flow), then
# z-score both across years so they are comparable covariates.
flow<-flow%>%
mutate(year=year(date),month=month(date),yday=yday(date))%>%
filter(yday <= yday(last_data) & yday >= yday(last_data-13))%>%
group_by(year)%>%
dplyr::summarise(zl_flow=mean(log(CFS),na.rm=T),var_flow=sd(flow_diff,na.rm=T),.groups = "keep")%>%
ungroup()%>%
mutate(zl_flow=as.vector(scale(zl_flow)),var_flow = as.vector(scale(var_flow)))
# Show the current year's flow covariates for a quick sanity check.
flow %>%
filter(year == 2022)
#================================================================
# Join all covariates onto the daily count series (left joins on year, so
# covariate NAs are possible). pink_ind flags odd years from 1999 on —
# presumably pink-salmon return years; confirm with the authors.
dat<-dat%>%
left_join(PDO)%>%
left_join(NPGO)%>%
left_join(OceanSalmon)%>%
left_join(SAR)%>%
left_join(Smolts)%>%
left_join(flow)%>%
mutate(pink_ind = ifelse(year< 1999 | year%%2==0,0,1))
# Run timing: for each year, the day of year whose cumulative proportion of the
# run is closest to 50% (mean() resolves ties if two days are equally close).
timing<-dat%>%
mutate(yday=yday(date))%>%
group_by(year)%>%
mutate(cumpct=cumsum(abundance)/sum(abundance),diff50=abs(0.5-cumpct),date50=ifelse(diff50==min(diff50),1,0))%>%
filter(date50==1)%>%
summarise(yday=mean(yday))
# Truncate the series at the most recent observation before analysis.
dat<-dat%>%
filter(date <= last_data)
print(head(dat))
```
```{r run_timing_plot, message=FALSE, warning=FALSE, results="show", fig.show = 'asis', fig.cap = "Figure 1. The day of year when 50% of the Baker Sockeye run has arrived at Baker Dam and/or been caught downstream of the dam."}
# Annual median run timing: the day of year by which ~50% of the run had
# arrived (computed in the get_data chunk).
timing_plot <- ggplot(timing, aes(x = year, y = yday)) +
  geom_line() +
  geom_point() +
  ylab("Day of Year 50% of Run @ Trap")
timing_plot
```
<!-- ## Summarize Data for Analysis -->
```{r summarize_data, message=FALSE, warning=FALSE, results="show"}
# Aggregate the daily series into the two analysis periods (pre/post the
# forecast-period start date) using the project helper prepare_data()
# (sourced from the "functions" directory in the load_funcs chunk).
series<-prepare_data(series = dat,
date_start_analysis = date_start_analysis,
date_end_analysis = date_end_analysis,
forecast_period_start_m = forecast_period_start_m, #inclusive
forecast_period_start_d = forecast_period_start_d, #inclusive
use_freshest_data = use_freshest_data,
covariates = covariates,
p1_covariates_only = p1_covariates_only
)
# Echo the current year's var_flow covariate as a sanity check on the join.
series$series$var_flow[which(series$series$year %in% 2022)]
```
## Results
This section presents a results table showing the performance of the prediction model in previous years, as well as a forecast for the current year. The results in the table are graphed below.
```{r Analysis_v2, message=FALSE, warning=FALSE, results="show"}
# Optional automated covariate selection (disabled; the covariate set below
# was chosen manually).
#best_covariates<-all_subsets(series=series$series,covariates=covariates,min=0,max=5)
# best_covariates[[2]]%>%
# slice(1:10)%>%
# kbl(caption = "Table 1. Covariate Model Selection Results.",digits =3)%>%
# kable_classic(full_width = F, html_font = "Cambria")
#best_covariates[[1]][[best_covariates[[2]]$model_num[1]]]
# Fit the SARIMA in-season update model, producing retrospective one-year-ahead
# forecasts for the last `leave_yrs` years plus the current-year forecast.
# inseason_forecast() is a project helper sourced in the load_funcs chunk.
results<-inseason_forecast(series$series,
leave_yrs=leave_yrs,
covariates= c("lag2_NPGO","lag1_log_SAR2","lag2_log_smolts","var_flow","zl_flow"), #use to automate variable selection: best_covariates[[1]][[best_covariates[[2]]$model_num[1]]],
first_forecast_period = first_forecast_period,
plot_results = plot_results,
write_model_summaries = write_model_summaries,
forecast_period_start_m = forecast_period_start_m, #inclusive
forecast_period_start_d = forecast_period_start_d, #inclusive
obs_period_2 = series$obs_period_2,
p1_covariates_only = p1_covariates_only
)
p_2_start_m<-series$p_2_start_m
p_2_start_d<-series$p_2_start_d
# Table 1: per-year forecasts with intervals, dropping internal bookkeeping
# columns before display.
results%>%
ungroup()%>%
dplyr::select(!c("period","train_test"))%>%
kbl(caption = paste0("Table 1. Forecasts of ",dam," dam counts of ",species, " from ",forecast_period_start_m,"/",forecast_period_start_d,"-",month(date_end_analysis),"/",mday(date_end_analysis)," using only past years' data for the forecast period and in-season counts from ", month(date_start_analysis),"/",mday(date_start_analysis)," through ", month(max(dat$date)),"/",mday(max(dat$date)), " in each year. Additional covariates are included in the table below."),digits =3)%>%
kable_classic(full_width = F, html_font = "Cambria")
# print(paste0("Mean Absolute Percent Error (MAPE) = ",mean(abs((results$error/results$abundance)*100),na.rm = T)))
# print(paste0("Median Symmetric Accuracy; MSA) = ",100*(exp(median(abs(log(results$predicted_abundance/results$abundance)),na.rm = T))-1)))
# In-season update (ISU) skill metrics: MAPE and Median Symmetric Accuracy.
ISU_pct_error <- mean(abs((results$error/results$abundance)*100),na.rm = T)
ISU_msa <- 100*(exp(median(abs(log(results$predicted_abundance/results$abundance)),na.rm = T))-1)
##load pre-season forecast performance table
# 50% intervals are reconstructed on the log scale; 0.675 is the ~75th
# percentile of the standard normal.
psf_performance <- read_csv("https://raw.githubusercontent.com/casruff/Salmon_Forecast_Example/test/sockeye_preseason_fcst_performance.csv")%>%
filter(Year %in% 2012:2021)%>%
mutate(type="PSF")%>%
rename(year = Year,predicted_abundance = Estimate,`Lo 95` = L95,`Hi 95` = U95)%>%
mutate(`Lo 50` = exp(log(predicted_abundance)- 0.675 * sd), `Hi 50` = exp(log(predicted_abundance)+ 0.675 * sd))%>%
left_join(results%>%dplyr::select(year,abundance))
ISU_performance <- results %>%
filter(year %in% 2012:2021)
# NOTE(review): this subtraction is positional and assumes psf_performance and
# ISU_performance cover the same years in the same order — verify upstream.
psf_error <- psf_performance$predicted_abundance - ISU_performance$abundance
psf_pct_error <- mean(abs((psf_error/ISU_performance$abundance)*100))
psf_msa <- 100*(exp(median(abs(log(psf_performance$predicted_abundance/ISU_performance$abundance))))-1)
# Table 2: head-to-head skill comparison of pre-season (PSF) vs in-season (ISU).
# NOTE(review): the caption says "2012 - 2020" but both filters above use
# 2012:2021 — confirm which range is intended.
performance_comp <- data.frame(Method = c("PSF","ISU"),MAPE = c(psf_pct_error,ISU_pct_error),MSA = c(psf_msa,ISU_msa))
performance_comp %>%
kbl(caption = paste0("Table 2. Comparison of forecast performance of ISU using catch and Baker dam counts versus stack weighted pre-season forecast for years 2012 - 2020. ISUs were made using in-season counts from ", month(date_start_analysis),"/",mday(date_start_analysis)," through ", month(max(dat$date)),"/",mday(max(dat$date)), " in each year"),digits =3)%>%
kable_classic(full_width = F, html_font = "Cambria")
# Stack ISU and PSF results for the faceted comparison plot in Figure 2.
results<-results%>%
mutate(type="ISU")%>%
bind_rows(psf_performance)
```
```{r performance_plot, message=FALSE, warning=FALSE, results="show", fig.show = 'asis', fig.cap = "Figure 2. Past performance of pre-season and in-season forecasts. Points are actual final run sizes, lines are 'best forecasts', and shading shows 50% (dark) and 95% (light) prediction intervals."}
# Faceted comparison of in-season (ISU) and pre-season (PSF) forecasts
# against the observed final run sizes.
forecast_plot <- ggplot(results, aes(x = year, y = predicted_abundance, group = type)) +
  geom_ribbon(aes(ymin = `Lo 95`, ymax = `Hi 95`, fill = type), color = NA, alpha = 0.5) + # 95% interval
  geom_ribbon(aes(ymin = `Lo 50`, ymax = `Hi 50`, fill = type), color = NA, alpha = 0.5) + # 50% interval (overlaid, so darker)
  geom_line() +
  # geom_line(data = psf_performance, mapping = aes(x = year, y = predicted_performance), color = "red") +
  geom_point(aes(x = year, y = abundance)) + # observed final run sizes
  scale_x_continuous(breaks = unique(results$year)) +
  facet_wrap(~type, ncol = 2) +
  ylim(0, NA) +
  theme(axis.text.x = element_text(angle = 90, vjust = 0.5, hjust = 1))
print(forecast_plot)
```