jsonl.rs
static USAGE: &str = r#"
Convert newline-delimited JSON (JSONL/NDJSON) to CSV.

The command does its best, but since JSON lines cannot always be converted
to CSV losslessly, the process may drop some complex fields from the input.

It will also fail if the JSON documents are not consistent with one another,
as the first JSON line is used to infer the headers of the CSV output.

For examples, see https://github.com/dathere/qsv/blob/master/tests/test_jsonl.rs.

Usage:
    qsv jsonl [options] [<input>]
    qsv jsonl --help

jsonl options:
    --ignore-errors        Skip malformed input lines.
    -j, --jobs <arg>       The number of jobs to run in parallel.
                           When not set, the number of jobs is set to the
                           number of CPUs detected.
    -b, --batch <size>     The number of rows per batch to load into memory,
                           before running in parallel. Set to 0 to load all
                           rows in one batch. [default: 50000]

Common options:
    -h, --help             Display this message
    -o, --output <file>    Write output to <file> instead of stdout.
    -d, --delimiter <arg>  The delimiter to use when writing CSV data.
                           Must be a single character. [default: ,]
"#;
use std::{
    fs,
    io::{self, BufRead, BufReader},
};

use rayon::{
    iter::{IndexedParallelIterator, ParallelIterator},
    prelude::IntoParallelRefIterator,
};
use serde::Deserialize;
use serde_json::Value;

use crate::{
    config::{Config, Delimiter, DEFAULT_RDR_BUFFER_CAPACITY},
    util, CliResult,
};

#[derive(Deserialize)]
struct Args {
    arg_input:          Option<String>,
    flag_output:        Option<String>,
    flag_delimiter:     Option<Delimiter>,
    flag_ignore_errors: bool,
    flag_jobs:          Option<usize>,
    flag_batch:         usize,
}
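
// The `arg_*`/`flag_*` field names map docopt-style onto the USAGE string
// above; util::get_args deserializes the parsed arguments into this struct.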
fn recurse_to_infer_headers(value: &Value, headers: &mut Vec<Vec<String>>, path: &[String]) {
    match value {
        Value::Object(map) => {
            for (key, value) in map {
                match value {
                    Value::Null
                    | Value::Bool(_)
                    | Value::Number(_)
                    | Value::String(_)
                    | Value::Array(_) => {
                        let mut full_path = path.to_owned();
                        full_path.push(key.to_string());

                        headers.push(full_path);
                    },
                    Value::Object(_) => {
                        let mut new_path = path.to_owned();
                        new_path.push(key.to_string());

                        recurse_to_infer_headers(value, headers, &new_path);
                    },
                    #[allow(unreachable_patterns)]
                    _ => {},
                }
            }
        },
        _ => {
            headers.push(vec![String::from("value")]);
        },
    }
}
fn infer_headers(value: &Value) -> Vec<Vec<String>> {
    let mut headers: Vec<Vec<String>> = Vec::new();

    recurse_to_infer_headers(value, &mut headers, &Vec::new());

    headers
}
fn get_value_at_path(value: &Value, path: &[String]) -> Option<Value> {
    let mut current = value;

    for key in path {
        match current.get(key) {
            Some(new_value) => {
                current = new_value;
            },
            None => {
                return None;
            },
        }
    }

    Some(current.clone())
}
#[inline]
fn json_line_to_csv_record(value: &Value, headers: &[Vec<String>]) -> csv::StringRecord {
    let mut record = csv::StringRecord::new();

    for path in headers {
        let value = get_value_at_path(value, path);

        if let Some(value) = value {
            record.push_field(&match value {
                Value::Bool(v) => {
                    if v {
                        String::from("true")
                    } else {
                        String::from("false")
                    }
                },
                Value::Number(v) => v.to_string(),
                Value::String(v) => v,
                Value::Array(v) => v
                    .iter()
                    .map(std::string::ToString::to_string)
                    .collect::<Vec<_>>()
                    .join(","),
                _ => String::new(),
            });
        } else {
            record.push_field("");
        }
    }

    record
}
pub fn run(argv: &[&str]) -> CliResult<()> {
    let args: Args = util::get_args(USAGE, argv)?;
    let mut wtr = Config::new(args.flag_output.as_ref())
        .delimiter(args.flag_delimiter)
        .writer()?;

    let mut is_stdin = false;
    let mut rdr: Box<dyn BufRead> = match &args.arg_input {
        None => {
            is_stdin = true;
            Box::new(BufReader::new(io::stdin()))
        },
        Some(p) => Box::new(BufReader::with_capacity(
            DEFAULT_RDR_BUFFER_CAPACITY,
            fs::File::open(p)?,
        )),
    };

    let mut headers: Vec<Vec<String>> = Vec::new();
    let mut headers_emitted: bool = false;

    // amortize memory allocation by reusing the line buffer
    let mut batch_line = String::new();

    // reuse batch buffers
    let batchsize: usize = if args.flag_batch == 0 {
        if is_stdin {
            // with stdin, we don't know how many lines there are,
            // so just use a reasonably big batch size
            1_000_000
        } else {
            // safety: we know arg_input is Some because of the is_stdin check above
            util::count_lines_in_file(args.arg_input.as_ref().unwrap())? as usize
        }
    } else {
        args.flag_batch
    };
    let mut batch = Vec::with_capacity(batchsize);
    let mut batch_results = Vec::with_capacity(batchsize);

    util::njobs(args.flag_jobs);

    let mut result_idx = 0_u64;
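
    // The loop below works in batches: read up to `batchsize` lines serially,
    // parse them into CSV records in parallel with rayon, then write the
    // results serially. collect_into_vec() preserves input order, so output
    // rows stay in the same order as the input lines.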
    'batch_loop: loop {
        for _ in 0..batchsize {
            batch_line.clear();
            match rdr.read_line(&mut batch_line) {
                Ok(0) => {
                    // EOF
                    break;
                },
                Ok(_) => {
                    batch.push(batch_line.clone());
                },
                Err(e) => {
                    if args.flag_ignore_errors {
                        continue;
                    }
                    return fail_clierror!(
                        r#"Could not read input line: {e}
Use `--ignore-errors` option to skip malformed input lines.
Use `tojsonl` command to convert _to_ jsonl instead of _from_ jsonl."#,
                    );
                },
            }
        }

        if batch.is_empty() {
            break 'batch_loop; // EOF
        }

        if !headers_emitted {
            let value: Value = match serde_json::from_str(&batch[0]) {
                Ok(v) => v,
                Err(e) => {
                    return fail_clierror!(
                        "Could not parse first input line as JSON to infer headers: {e}",
                    );
                },
            };
            headers = infer_headers(&value);

            let headers_formatted = headers.iter().map(|v| v.join(".")).collect::<Vec<String>>();
            let headers_record = csv::StringRecord::from(headers_formatted);
            wtr.write_record(&headers_record)?;

            headers_emitted = true;
        }

        // do actual work via rayon
        batch
            .par_iter()
            .map(|json_line| match serde_json::from_str(json_line) {
                Ok(v) => Some(json_line_to_csv_record(&v, &headers)),
                Err(e) => {
                    if !args.flag_ignore_errors {
                        log::error!("serde_json::from_str error: {:#?}", e);
                    }
                    None
                },
            })
            .collect_into_vec(&mut batch_results);

        // rayon's collect_into_vec() guarantees the original order,
        // so we can just append the results of each batch
        for result_record in &batch_results {
            result_idx += 1;
            if let Some(record) = result_record {
                wtr.write_record(record)?;
            } else if !args.flag_ignore_errors {
                // there was an error parsing a json line
                return fail_clierror!(
                    r#"Could not parse input line {result_idx} as JSON
Use `--ignore-errors` option to skip malformed input lines.
Use `tojsonl` command to convert _to_ jsonl instead of _from_ jsonl."#,
                );
            }
        }

        batch.clear();
    } // end batch loop

    Ok(wtr.flush()?)
}
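
// A minimal sketch of unit tests for the helpers above (hypothetical; the
// project's real coverage lives in tests/test_jsonl.rs), assuming the helpers
// stay private to this module:
#[cfg(test)]
mod jsonl_sketch_tests {
    use super::*;

    #[test]
    fn infers_nested_headers() {
        let value: Value = serde_json::from_str(r#"{"a": 1, "b": {"c": "x"}}"#).unwrap();
        let headers = infer_headers(&value);
        // one path per scalar leaf, nested keys as path segments
        assert!(headers.contains(&vec!["a".to_string()]));
        assert!(headers.contains(&vec!["b".to_string(), "c".to_string()]));
    }

    #[test]
    fn flattens_scalars_and_arrays() {
        let value: Value = serde_json::from_str(r#"{"a": true, "b": [1, 2]}"#).unwrap();
        let headers = vec![vec!["a".to_string()], vec!["b".to_string()]];
        let record = json_line_to_csv_record(&value, &headers);
        // booleans stringify; arrays are comma-joined into a single field
        assert_eq!(&record[0], "true");
        assert_eq!(&record[1], "1,2");
    }
}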