-
Notifications
You must be signed in to change notification settings - Fork 2
/
build.R
98 lines (89 loc) · 3.04 KB
/
build.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
library(community)
# Build (or restore from a local cache) a GEOID -> region-type map, used later
# to validate the repository and to select states for map downloads.
entities_file <- "../entities.rds"
if (file.exists(entities_file)) {
  entities <- readRDS(entities_file)
} else {
  file <- tempfile(fileext = ".csv.xz")
  # mode = "wb" keeps the xz archive intact on Windows (text mode would
  # corrupt the binary stream).
  download.file(paste0(
    "https://raw.githubusercontent.com/uva-bi-sdad/sdc.geographies/main/",
    "docs/distribution/geographies_metadata.csv.xz"
  ), file, mode = "wb")
  # Read geoid explicitly as character: type guessing would parse FIPS-style
  # codes such as "01001" as the number 1001, dropping the leading zero and
  # breaking later matching against character IDs.
  entities <- vroom::vroom(file, col_types = vroom::cols(geoid = "c"))
  # Keep one row per GEOID, with only the columns downstream steps use.
  entities <- entities[!duplicated(entities$geoid), c("geoid", "region_type")]
  saveRDS(entities, entities_file, compress = "xz")
}
# Render measure info: each JSON file under a /code/ directory is rendered to
# a measure_info.json in the corresponding /data/ directory.
all_json <- list.files(".", "\\.json$", recursive = TRUE, full.names = TRUE)
dynamic_info_files <- all_json[grepl("/code/", all_json, fixed = TRUE)]
for (info_file in dynamic_info_files) {
  target <- sub("/code/", "/data/", info_file, fixed = TRUE)
  target <- sub("/[^/]+\\.json", "/measure_info.json", target)
  data_measure_info(info_file, render = target, open_after = FALSE)
}
# Validate the data files and measure info against the known entity types
# (a region_type vector named by GEOID).
check_repository(
  dataset = setNames(entities$region_type, entities$geoid)
)
# Rebuild site
## Unify original files: collect every existing */data/distribution
## directory and reformat its CSVs into docs/data.
datasets <- file.path(list.dirs("."), "data", "distribution")
datasets <- datasets[dir.exists(datasets)]
data_reformat_sdad(
  list.files(datasets, "\\.csv", full.names = TRUE),
  "docs/data",
  metadata = entities,
  entity_info = NULL
)
# Merge every dataset's measure_info.json into one aggregate file, keeping
# all entries when the same measure name appears in several files.
info_files <- list.files(datasets, "measure_info\\.json", full.names = TRUE)
info <- lapply(info_files, jsonlite::read_json)
agg_info <- list()
for (measure_set in info) {
  for (measure in names(measure_set)) {
    # c(NULL, x) is x, so this appends whether or not the name exists yet.
    agg_info[[measure]] <- c(agg_info[[measure]], measure_set[[measure]])
  }
}
if (length(agg_info) > 0) {
  jsonlite::write_json(
    agg_info, "docs/data/measure_info.json",
    auto_unbox = TRUE, pretty = TRUE
  )
}
## Add unified files
# Full paths of every unified csv.xz in docs/data.
files <- list.files("docs/data", "\\.csv\\.xz$", full.names = TRUE)
### Make complete maps: one GeoJSON per geography level per year, built only
### when docs/maps is empty (acts as a cache of prior builds).
dir.create("docs/maps", showWarnings = FALSE)
map_files <- list.files("docs/maps")
if (length(map_files) == 0) {
  # catchment provides download_census_shapes; install it on first use.
  # requireNamespace is the robust existence check (require() returns FALSE
  # rather than erroring, and attaches as a side effect).
  if (!requireNamespace("catchment", quietly = TRUE)) {
    remotes::install_github("uva-bi-sdad/catchment")
  }
  library(catchment)
  # All entity IDs appearing in any unified data file.
  ids <- unique(unlist(lapply(files, function(f) {
    unique(vroom::vroom(f, col_select = "ID", show_col_types = FALSE)[[1]])
  })))
  # State FIPS prefixes (first 2 characters) for IDs whose region type is
  # county, tract, or block group.
  states <- unique(substring(ids[
    ids %in% entities$geoid[entities$region_type %in% c("county", "tract", "block group")]
  ], 1, 2))
  # Every year observed in any data file's time column.
  years <- as.numeric(unique(unlist(lapply(files, function(f) {
    unique(vroom::vroom(f, col_select = "time", show_col_types = FALSE)[[1]])
  }))))
  # Census shapes are only fetched for 2013-2022.
  years <- years[years > 2012 & years < 2023]
  for (y in years) {
    for (l in c("county", "tract", "bg")) {
      f <- paste0("docs/maps/", l, "_", y, ".geojson")
      if (!file.exists(f)) {
        # Bind the per-state shapes into one layer, keeping only GEOID.
        ms <- do.call(rbind, lapply(states, function(s) {
          download_census_shapes(
            fips = s, entity = l, name = paste0(l, y, s), year = y
          )[, "GEOID", drop = FALSE]
        }))
        sf::st_write(ms, f)
      }
    }
  }
}
# Register the unified files with the site, naming each by its base dataset
# name (path prefix and .csv.xz suffix stripped), and point at the shared
# ID / time / measure-info metadata.
dataset_names <- gsub("^docs/data/|\\.csv\\.xz$", "", files)
data_add(
  setNames(files, dataset_names),
  meta = list(
    ids = list(variable = "ID"),
    time = "time",
    variables = "docs/data/measure_info.json"
  ),
  dir = "docs/data"
)
# Build the site and serve it locally.
site_build(".", serve = TRUE, aggregate = FALSE, open_after = TRUE)