forked from reustle/covid19japan-data
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: generate.js
137 lines (119 loc) · 4.89 KB
/
generate.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
const fs = require('fs')
const moment = require('moment')
const _ = require('lodash')
const FetchPatientData = require('./src/fetch_patient_data.js')
const Summarize = require('./src/summarize.js')
const FetchSheet = require('./src/fetch_sheet.js')
const MergePatients = require('./src/merge_patients.js')
const Prefectures = require('./src/prefectures.js')
const { request } = require('http')
// Determine the "last updated" timestamp for the generated output.
//
// If the freshly fetched patient list differs in size from the previously
// published ./docs/patient_data/latest.json, the data is considered updated
// and "now" (JST) is used. Otherwise the timestamp is carried over from
// ./docs/summary/latest.json. If neither file yields a usable value
// (missing or unparseable), fall back to "now" (JST).
//
// @param {Array<Object>} patients - merged patient records just fetched.
// @returns {Promise<string>} ISO 8601 timestamp string with +09:00 offset.
const generateLastUpdated = async (patients) => {
  let lastUpdated = null

  // Check if the patient list changed size; if it did, refresh lastUpdated.
  // readFileSync/JSON.parse throw on a missing or corrupt file — treat that
  // the same as "no previous data" and fall through to the next source.
  try {
    const existingPatientsData = fs.readFileSync(`./docs/patient_data/latest.json`)
    if (existingPatientsData) {
      const existingPatients = JSON.parse(existingPatientsData)
      if (existingPatients && existingPatients.length && existingPatients.length != patients.length) {
        // Add 540 = UTC+9 for JST.
        lastUpdated = moment().utcOffset(540).format()
        console.log(`Patients data updated. New: ${patients.length} Old: ${existingPatients.length}`)
      }
    }
  } catch (err) {
    console.log(`Could not read previous patient data: ${err.message}`)
  }

  // Patient data didn't get updated; pull lastUpdated from the latest summary.
  if (lastUpdated == null) {
    try {
      const existingSummaryData = fs.readFileSync(`./docs/summary/latest.json`)
      if (existingSummaryData) {
        const existingSummary = JSON.parse(existingSummaryData)
        if (existingSummary && existingSummary.updated && typeof existingSummary.updated === 'string') {
          lastUpdated = existingSummary.updated
        }
      }
    } catch (err) {
      console.log(`Could not read previous summary: ${err.message}`)
    }
  }

  // If it's still null, we don't know. So just use the latest timestamp (JST).
  if (lastUpdated == null) {
    lastUpdated = moment().utcOffset(540).format()
  }
  return lastUpdated
}
// Fetch all spreadsheet data, merge the per-prefecture patient tabs, and
// write the patient list plus daily/prefectural summaries under ./docs/.
//
// @param {string} dateString - date (YYYY-MM-DD, JST) for this run; kept for
//   interface compatibility with the caller. NOTE(review): currently unused
//   inside this function — confirm whether per-date output is still wanted.
// @returns {Promise<void>}
const fetchAndSummarize = async (dateString) => {
  const prefectureNames = Prefectures.prefectureNamesEn()
  const regions = Prefectures.regionPrefectures()
  const latestSheetId = '1vkw_Lku7F_F3F_iNmFFrDq9j7-tQ6EmZPOLpLt-s3TY'

  // These four sheets are independent of one another — fetch them in
  // parallel instead of awaiting each sequentially.
  const [daily, prefectures, cruiseCounts, recoveries] = await Promise.all([
    FetchSheet.fetchRows(latestSheetId, 'Sum By Day'),
    FetchSheet.fetchRows(latestSheetId, 'Prefecture Data'),
    FetchSheet.fetchRows(latestSheetId, 'Cruise Sum By Day'),
    FetchSheet.fetchRows(latestSheetId, 'Recoveries')
  ])

  // Strip fields that should not appear in the published patient JSON.
  const filterPatientForOutput = (patient) => {
    let filtered = Object.assign({}, patient)
    if (patient.ageBracket == -1) {
      delete filtered.ageBracket
    }
    delete filtered.patientCount
    return filtered
  }

  // Merge the raw per-tab patient lists and write all output files.
  // Declared async and awaited below so that any write/summarize error
  // reaches the catch handler instead of becoming an unhandled rejection
  // (the original used a floating .then() chain with no .catch()).
  const mergeAndOutput = async (allPatients) => {
    const patients = MergePatients.mergePatients(allPatients)
    console.log(`Total patients fetched: ${patients.length}`)
    const lastUpdated = await generateLastUpdated(patients)

    // Write patient data.
    const patientOutputFilename = `./docs/patient_data/latest.json`
    const patientOutput = patients.map(filterPatientForOutput)
    fs.writeFileSync(patientOutputFilename, JSON.stringify(patientOutput))

    // Write daily and prefectural summary.
    const summary = Summarize.summarize(patients, daily, prefectures, cruiseCounts, recoveries, prefectureNames, regions, lastUpdated)
    const summaryOutputFilename = `./docs/summary/latest.json`
    fs.writeFileSync(summaryOutputFilename, JSON.stringify(summary, null, ' '))

    // Write minified version of the daily/prefectural summary.
    const summaryMinifiedOutputFilename = `./docs/summary_min/latest.json`
    fs.writeFileSync(summaryMinifiedOutputFilename, JSON.stringify(summary))
    console.log('Success.')
  }

  const tabsBatchSize = 6
  let tabs = [
    'Patient Data',
    'Aichi',
    'Chiba',
    'Fukuoka',
    'Hokkaido',
    'Kanagawa',
    'Osaka',
    'Saitama',
    'Tokyo'
  ]

  // Split tabs into requests with maximum tabsBatchSize tabs each
  // to prevent any single response from being too long.
  const requests = []
  while (tabs.length > 0) {
    const thisRequestTabs = tabs.slice(0, tabsBatchSize)
    tabs = tabs.slice(tabsBatchSize)
    requests.push(FetchPatientData.fetchPatientDataFromSheets([{
      sheetId: latestSheetId,
      tabs: thisRequestTabs
    }]))
  }

  // Execute the requests; log (don't rethrow) failures, matching the
  // original best-effort behavior.
  try {
    const patientLists = await Promise.all(requests)
    await mergeAndOutput(_.flatten(patientLists))
  } catch (error) {
    console.log(error)
  }
}
// Write the patients detected in a single prefecture to their own JSON file
// at ./docs/patients/<prefecture>_<dateString>.json.
//
// @param {string} prefectureName - English prefecture name, e.g. 'Hokkaido'.
// @param {Array<Object>} allPatients - full merged patient list to filter.
// @param {string} dateString - date suffix (YYYY-MM-DD) for the filename.
const writePerPrefecturePatients = (prefectureName, allPatients, dateString) => {
  const lowercasePrefecture = _.camelCase(prefectureName)
  const prefecturePatientsFilename = `./docs/patients/${lowercasePrefecture}_${dateString}.json`
  // Bug fix: previously filtered `patients`, an undefined identifier, which
  // threw a ReferenceError whenever this function was called.
  const prefecturePatients = _.filter(allPatients, v => { return v.detectedPrefecture == prefectureName })
  fs.writeFileSync(prefecturePatientsFilename, JSON.stringify(prefecturePatients, null, ' '))
}
// Entry point: run the fetch-and-summarize pipeline for today's date (JST).
try {
  // Add 540 = UTC+9 for JST.
  const dateString = moment().utcOffset(540).format('YYYY-MM-DD')
  // fetchAndSummarize is async, so the surrounding try/catch only covers
  // synchronous throws — attach a rejection handler so async failures are
  // logged instead of surfacing as unhandled promise rejections.
  fetchAndSummarize(dateString).catch((e) => {
    console.error(e)
  })
} catch (e) {
  console.error(e)
}