Skip to content

Commit 7cd6564

Browse files
add webb network helper, load data for pkernels
1 parent 1f9ab18 commit 7cd6564

8 files changed

Lines changed: 3269 additions & 0 deletions

File tree

src/acquisition_master.R

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -292,6 +292,13 @@ ms_globals <- c(ls(all.names = TRUE), 'ms_globals')
292292

293293
dir.create('logs', showWarnings = FALSE)
294294

295+
# NOTE: this should be moved I believe, and made to work with the raw data
296+
# documentation of the latest iteration...
297+
# this function will update the citation sheet with the data and url of raw data download
298+
scrape_data_download_urls()
299+
300+
## change string in line below to find row index of your desired domain
301+
## dmnrow <- which(network_domain$domain == 'loch_vale')
295302
for(dmnrow in 1:nrow(network_domain)){
296303

297304
# drop_automated_entries('.') #use with caution!
@@ -322,12 +329,15 @@ for(dmnrow in 1:nrow(network_domain)){
322329
n = network,
323330
d = domain))
324331

332+
# this should only run when you have your products.csv
333+
# and processing kernels prod information matching
325334
update_product_statuses(network = network,
326335
domain = domain)
327336

328337
get_all_local_helpers(network = network,
329338
domain = domain)
330339

340+
# stop here and go to processing_kernels.R to continue
331341
ms_retrieve(network = network,
332342
# prodname_filter = c('stream_chemistry'),
333343
domain = domain)
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
retrieve_sleepers_product <- function(network,
                                      domain,
                                      prodname_ms,
                                      site_code,
                                      tracker,
                                      url){

    # Resolve the processing kernel by name. Kernel names end with the
    # product code taken from products.csv, i.e. "process_0_<prodcode>".
    pkernel <- get(paste0('process_0_',
                          prodcode_from_prodname_ms(prodname_ms)))

    # Retrieval-tracker entry for this product/site (tracks the held "version").
    retrieval_info <- tracker[[prodname_ms]][[site_code]]$retrieve

    held_dt <- as.POSIXct(retrieval_info$held_version,
                          tz = 'UTC')

    # Bundle the per-product details (originally a row of products.csv)
    # that every processing kernel receives as its set_details argument.
    details <- list(prodname_ms = prodname_ms,
                    site_code = site_code,
                    component = retrieval_info$component,
                    last_mod_dt = held_dt,
                    url = url)

    # This function is invoked from retrieve.R in a loop over the product
    # names listed in products.csv, which is why those names must match the
    # suffixes of the processing kernels written to retrieve each product.

    # If you're developing kernels rather than actually running this
    # function, uncomment and run these lines:
    ## set_details = details
    ## network = network
    ## domain = domain

    result <- do.call(pkernel,
                      args = list(set_details = details,
                                  network = network,
                                  domain = domain))

    new_status <- evaluate_result_status(result)

    # Prefer the kernel-reported access time over the held version when the
    # kernel supplies one.
    if('access_time' %in% names(result) && any(! is.na(result$access_time))){
        details$last_mod_dt <- result$access_time[! is.na(result$access_time)][1]
    }

    update_data_tracker_r(network = network,
                          domain = domain,
                          tracker_name = 'held_data',
                          set_details = details,
                          new_status = new_status)

    source_urls <- get_source_urls(result_obj = result,
                                   processing_func = pkernel)

    write_metadata_r(murl = source_urls,
                     network = network,
                     domain = domain,
                     prodname_ms = prodname_ms)
}
62+
63+
retrieve_usgs_sleeper_daily_q <- function(set_details) {

    # Retrieve daily discharge (USGS parameter code 00060) from NWIS for the
    # Sleepers River watershed named in set_details$component.
    #
    # Args:
    #   set_details: list with a $component string identifying the watershed
    #     ("w5" -> NWIS site 01135300; "w3" -> NWIS site 01135150).
    # Returns: the data.frame produced by dataRetrieval::readNWISdv().

    if(grepl("w5", set_details$component)) {
        q <- dataRetrieval::readNWISdv(siteNumbers = "01135300",
                                       parameterCd = "00060")
    } else if(grepl("w3", set_details$component)) {
        q <- dataRetrieval::readNWISdv(siteNumbers = "01135150",
                                       parameterCd = "00060")
    } else {
        # Previously an unmatched component fell through and errored with the
        # uninformative "object 'q' not found"; fail loudly instead.
        stop('retrieve_usgs_sleeper_daily_q: unrecognized component "',
             set_details$component,
             '"; expected a string containing "w5" or "w3"',
             call. = FALSE)
    }

    return(q)
}

0 commit comments

Comments
 (0)