EC^2 Baseline: with minibatching (tasks are drawn in random minibatches via --taskReranker randomShuffle --taskBatchSize N)
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_batch_list "python bin/list.py --compressor pypy --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --testingTimeout 720 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -RS 5000 --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_batch_text "python bin/text.py --compressor pypy -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 -RS 5000 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 baseline_ec2_batch_logo "python bin/logo.py --compressor pypy --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 -RS 5000 --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -c -k -z n1-highmem-64 baseline_ec2_batch_tower "python bin/tower.py --compressor pypy -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_batch_regex "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --compressor pypy --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced "; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-20 baseline_ec2_batch_rational "python bin/rational.py --compressor pypy --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -i 20 --structurePenalty 1.0 --pseudoCounts 30.0 -t 120 --recognitionTimeout 3600 -RS 5000 --testingTimeout 600 --CPUs 20 --seed $SEED"; done
EC^2 Baseline: without minibatching (no --taskReranker/--taskBatchSize; each iteration works on the full task set)
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_no_batch_list "python bin/list.py --compressor pypy --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --testingTimeout 600 --storeTaskMetrics -RS 5000 --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_no_batch_text "python bin/text.py --compressor pypy -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --testingTimeout 600 --storeTaskMetrics --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 baseline_ec2_no_batch_logo "python bin/logo.py --compressor pypy --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 -RS 5000 --seed $SEED"; done
for SEED in `seq 1 5`; do python bin/launch.py -c -k -z n1-highmem-64 baseline_ec2_no_batch_tower "python bin/tower.py --compressor pypy -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec2_no_batch_regex "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --compressor pypy --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 600 --seed $SEED --primitives strConst --use_str_const --primitives reduced "; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-20 baseline_ec2_no_batch_rational "python bin/rational.py --compressor pypy --storeTaskMetrics -i 20 --structurePenalty 1.0 --pseudoCounts 30.0 -t 120 --recognitionTimeout 3600 -RS 5000 --testingTimeout 600 --CPUs 20 --seed $SEED"; done
EC^2 Baseline: origami programming and physics
python bin/launch.py -k -c -z n1-highmem-64 baseline_ec2_McCarthy "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 4 -g -t 57600 --taskReranker unsolved --topK 5 --maximumFrontier 5 --compressor pypy"
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 baseline_ec2_scientific "python bin/scientificLaws.py -t 14400 --topK 5 --arity 3 --maximumFrontier 5 -i 20 -R 30 -RS 5000 -r 0. --taskReranker unsolved --compressor pypy"; done
To see the billing information broken down by instance type:
https://console.aws.amazon.com/cost-reports/home#/custom?groupBy=InstanceType&hasBlended=false&hasAmortized=false&excludeDiscounts=true&excludeTaggedResources=false&timeRangeOption=Last12Months&granularity=Monthly&reportName=&reportType=CostUsage&isTemplate=true&startDate=2018-01-01&endDate=2018-12-31&filter=%5B%7B%22dimension%22:%22RecordType%22,%22values%22:%5B%22Refund%22,%22Credit%22%5D,%22include%22:false,%22children%22:null%7D%5D&forecastTimeRangeOption=None&usageAs=usageQuantity&chartStyle=Stack
Enumeration baseline (no learning: -g with a single iteration and -t 1 for training, then 24 hours of test-time enumeration via --testingTimeout 86400):
python bin/launch.py -k -c -z n1-standard-64 baseline_enumeration_text "python bin/text.py -g -t 1 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 1 --storeTaskMetrics --testingTimeout 86400 --taskReranker randomShuffle --taskBatchSize 10"
python bin/launch.py -k -c -z n1-standard-64 baseline_enumeration_list "python bin/list.py -g --split 0.5 -t 1 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 1 -R 3600 --storeTaskMetrics --testingTimeout 86400 --taskReranker randomShuffle --taskBatchSize 10 "
python bin/launch.py -k -c -z n1-standard-96 baseline_enumeration_logo "python bin/logo.py -g --split 0.5 -t 1 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 1 --storeTaskMetrics --testingTimeout 86400 --taskReranker randomShuffle --taskBatchSize 50 "
python bin/launch.py -c -k -z n1-highmem-64 baseline_enumeration_tower "python bin/tower.py -g -t 1 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 1 --storeTaskMetrics --split 0.5 --testingTimeout 86400 --taskReranker randomShuffle --taskBatchSize 50 --primitives new "
for SEED in `seq 0 0`; do python bin/launch.py -k -c -z n1-standard-64 baseline_enumeration_regex "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 1 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 1 -R 3600 --testingTimeout 86400 --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g"; done
python bin/launch.py -k -c -z n1-standard-64 baseline_enumeration_rational "python bin/rational.py --arity 0 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -g -i 1 --structurePenalty 1.0 --pseudoCounts 30.0 -t 1 --testingTimeout 86400"
Google Cloud experiments, each repeated over multiple random seeds.
Text:
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_latest "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --latest --noUnfold --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 text_only_challenge "python bin/text.py -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 --auxiliary --ensembleSize 1 -RS 5000 --latest --noUnfold --seed $SEED --onlyChallenge" ; done
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 text_no_length_no_map_no_unfold "python bin/text.py --noLength --noMap --noUnfold -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 text_mask "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --mask --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_no_dsl "python bin/text.py -t 720 --pseudoCounts 30 --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 2 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_no_generative "python bin/text.py -t 720 --topK 2 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --ensembleSize 1 -RS 5000 --no-dsl --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 text_no_generative_mask "python bin/text.py -t 720 --topK 2 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --ensembleSize 1 -RS 5000 --no-dsl --mask --seed $SEED" ; done
for SEED in `seq 4 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_no_recognition "python bin/text.py -g -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 baseline_ec_text "python bin/text.py -g -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_memorize "python bin/text.py --compressor memorize -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 text_memorize_no_recognition "python bin/text.py --compressor memorize -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 --seed $SEED -g " ; done
List:
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 list "python bin/list.py --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_length_no_map_no_unfold "python bin/list.py --noLength --noMap --noUnfold --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 list_1h_no_length_no_map_no_unfold "python bin/list.py --taskReranker randomShuffle --taskBatchSize 40 --noLength --noMap --noUnfold --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 list_small_train "python bin/list.py --split 0.25 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 3 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_tiny_train "python bin/list.py --split 0.15 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 list_small_train_no_dsl "python bin/list.py --no-dsl --split 0.25 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_tiny_train_no_dsl "python bin/list.py --no-dsl --split 0.15 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 list_mask "python bin/list.py --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --mask --seed $SEED" ; done
for SEED in `seq 4 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_dsl_no_ensemble "python bin/list.py --aic 1000.0 --split 0.5 -t 720 --pseudoCounts 30 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_generative "python bin/list.py --split 0.5 -t 720 --topK 2 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --no-dsl --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 1`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_generative_no_batch "python bin/list.py --split 0.5 -t 7200 --topK 2 --maximumFrontier 5 -i 5 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --no-dsl --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_generative_mask "python bin/list.py --split 0.5 -t 720 --topK 2 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --no-dsl --ensembleSize 1 -RS 5000 --mask --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_generative_old_recognition "python bin/list.py --split 0.5 -t 720 --topK 2 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --no-dsl --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 4 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_no_recognition "python bin/list.py -g --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 baseline_ec_list "python bin/list.py -g --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_memorize "python bin/list.py --compressor memorize --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_memorize_no_recognition "python bin/list.py --compressor memorize --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 1 --seed $SEED -g " ; done
LOGO:
for SEED in `seq 1 1`; do python bin/launch.py -k -c -z n1-megamem-96 logo_no_batch_2h "python bin/logo.py --split 0.5 -t 7200 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 6 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_batch_40_1h "python bin/logo.py --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 6 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_batch_50_1h "python bin/logo.py --split 0.5 -t 3600 --pseudoCounts 30 --auxiliary --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_auxiliary_batch_40_1h "python bin/logo.py --auxiliary --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 1`; do python bin/launch.py -k -c -z n1-megamem-96 logo_auxiliary_no_batch_2h "python bin/logo.py --split 0.5 -t 7200 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 6 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual -RS 5000 --seed $SEED --auxiliary" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_no_dsl_auxiliary_batch_40_1h "python bin/logo.py --no-dsl --auxiliary --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_no_dsl_auxiliary_batch_50_1h "python bin/logo.py --no-dsl --auxiliary --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_no_recognition_batch_40_1h "python bin/logo.py -g --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_no_recognition_batch_50_1h "python bin/logo.py -g --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-standard-96 baseline_ec_logo "python bin/logo.py -g --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_memorize "python bin/logo.py --compressor memorize --split 0.5 -t 3600 --pseudoCounts 30 --auxiliary --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --storeTaskMetrics --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 -RS 5000 --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-megamem-96 logo_memorize_no_recognition "python bin/logo.py --compressor memorize --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 -g --seed $SEED" ; done
McCarthy:
python bin/launch.py -k -c -z n1-highmem-64 McCarthy_unsolved_4h "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 4 -g -t 14400 --taskReranker unsolved --topK 2 --maximumFrontier 5"
python bin/launch.py -k -c -z n1-highmem-64 McCarthy_unsolved_4h_tk5 "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 4 -g -t 14400 --taskReranker unsolved --topK 5 --maximumFrontier 5"
python bin/launch.py -k -c -z n1-highmem-64 McCarthy_unsolved_8h_tk5 "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 4 -g -t 28800 --taskReranker unsolved --topK 5 --maximumFrontier 5"
python bin/launch.py -k -c -z n1-highmem-64 McCarthy_unsolved_8h_tk5_l15 "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1.5 --pseudoCounts 30 --arity 4 -g -t 28800 --taskReranker unsolved --topK 5 --maximumFrontier 5"
python bin/launch.py -k -c -z n1-highmem-64 McCarthy_unsolved_16h_tk5 "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 4 -g -t 57600 --taskReranker unsolved --topK 5 --maximumFrontier 5"
python bin/launch.py -k -c -z n1-highmem-64 baseline_ec_McCarthy_unsolved_16h_tk5 "python bin/list.py --primitives McCarthy --dataset bootstrap --structurePenalty 1. --pseudoCounts 30 --arity 0 -g -t 57600 --taskReranker unsolved --topK 5 --maximumFrontier 5 -i 10"
Scientific laws:
python bin/launch.py -k -c -z n1-highmem-64 scientific_1h "python bin/scientificLaws.py -t 3600 --topK 5 --arity 3 --maximumFrontier 5 -i 10 -R 3600 -RS 5000 --biasOptimal --contextual --mask -r 0."
python bin/launch.py -k -c -z n1-highmem-64 scientific_unsolved_1h "python bin/scientificLaws.py -t 3600 --topK 5 --arity 3 --maximumFrontier 5 -i 10 -R 3600 -RS 5000 --biasOptimal --contextual --mask -r 0. --taskReranker unsolved"
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 scientific_unsolved_4h "python bin/scientificLaws.py -t 14400 --topK 5 --arity 3 --maximumFrontier 5 -i 20 -R 30 -RS 5000 --biasOptimal --contextual --mask -r 0. --taskReranker unsolved"; done
python bin/launch.py -k -c -z n1-highmem-64 scientific_unsolved_8h "python bin/scientificLaws.py -t 28800 --topK 5 --arity 3 --maximumFrontier 5 -i 20 -g --taskReranker unsolved"
python bin/launch.py -k -c -z n1-highmem-64 scientific_12m_10b "python bin/scientificLaws.py -t 720 --taskReranker randomShuffle --taskBatchSize 10 --topK 5 --arity 3 --maximumFrontier 5 -i 10 -R 3600 -RS 5000 --biasOptimal --contextual --mask -r 0."
python bin/launch.py -k -c -z n1-highmem-64 scientific_60m_10b "python bin/scientificLaws.py -t 3600 --taskReranker randomShuffle --taskBatchSize 10 --topK 5 --arity 3 --maximumFrontier 5 -i 10 -R 3600 -RS 5000 --biasOptimal --contextual --mask -r 0."
python bin/launch.py -k -c -z n1-highmem-64 scientific_30m_40b "python bin/scientificLaws.py -t 3600 --taskReranker randomShuffle --taskBatchSize 40 --topK 5 --arity 3 --maximumFrontier 5 -i 10 -R 3600 -RS 5000 --biasOptimal --contextual --mask -r 0."
for SEED in `seq 1 1`; do python bin/launch.py -k -c -z n1-highmem-64 baseline_ec_scientific_unsolved_4h "python bin/scientificLaws.py -t 14400 --topK 5 --arity 0 -g --maximumFrontier 5 -i 10 --taskReranker unsolved"; done
REGEX:
Reduced Primitives:
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_mask "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_bigram "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_unigram "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_nodsl_mask "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --no-dsl --seed $SEED" ; done
#for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_norec "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 -g --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_nodsl_bigram "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --no-dsl --seed $SEED" ; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_reduced_nodsl_unigram "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --no-dsl --seed $SEED" ; done
for SEED in `seq 1 1`; do python bin/launch.py -k -c -z n1-highmem-64 regex_2h_mask "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 7200 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --seed $SEED" ; done
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 regex_batch_40_mask "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
No DSL baseline:
Need to run:
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 regex_no_dsl_batch_40_mask "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --no-dsl --seed $SEED" ; done
No recognition baseline:
python bin/launch.py -k -c -z n1-highmem-64 test "python bin/regexes.py --tasks new --taskReranker randomShuffle --taskBatchSize 40 --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --testingTimeout 1800 --seed 0 -g"
for SEED in `seq 1 2`; do python bin/launch.py -k -c -z n1-highmem-64 test "python bin/regexes.py --tasks new --taskReranker randomShuffle --taskBatchSize 40 --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --testingTimeout 1800 --seed $SEED -g" ; done
Need to do:
for SEED in `seq 3 5`; do python bin/launch.py -k -c -z n1-highmem-64 test "python bin/regexes.py --tasks new --taskReranker randomShuffle --taskBatchSize 40 --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --testingTimeout 1800 --seed $SEED -g" ; done
# 1 hour checkpoint (resume from the saved pickle via --checkpoint and --resume):
python bin/launch.py --checkpoint experimentOutputs/regex/2019-02-12T21:51:03.349621/regex_aic=1.0_arity=3_aux=True_BO=True_CO=True_ES=1_ET=3600_HR=0.5_it=10_mask=True_MF=10_pc=30.0_RT=3600_RR=False_RW=False_STM=True_L=1.5_batch=40_TRR=randomShuffle_K=2_topkNotMAP=True.pickle -k -c -z n1-highmem-64 regex_batch_40_reduced_mask_checkpoint "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 7200 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed 0 --resume experimentOutputs/regex_aic=1.0_arity=3_aux=True_BO=True_CO=True_ES=1_ET=3600_HR=0.5_it=10_mask=True_MF=10_pc=30.0_RT=3600_RR=False_RW=False_STM=True_L=1.5_batch=40_TRR=randomShuffle_K=2_topkNotMAP=True.pickle"
python bin/launch.py --checkpoint experimentOutputs/regex/2019-02-12T21:51:03.349621/regex_aic=1.0_arity=3_aux=True_BO=True_CO=True_ES=1_ET=3600_HR=0.5_it=10_mask=True_MF=10_pc=30.0_RT=3600_RR=False_RW=False_STM=True_L=1.5_batch=40_TRR=randomShuffle_K=2_topkNotMAP=True.pickle -k -c -z n1-highmem-64 regex_batch_40_reduced_mask_noC "python bin/regexes.py --primitives reduced --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 7200 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed 0 --resume experimentOutputs/regex_aic=1.0_arity=3_aux=True_BO=True_CO=True_ES=1_ET=3600_HR=0.5_it=10_mask=True_MF=10_pc=30.0_RT=3600_RR=False_RW=False_STM=True_L=1.5_batch=40_TRR=randomShuffle_K=2_topkNotMAP=True.pickle"
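The resume pattern in general, with a placeholder pickle path and an illustrative run name (note that in the runs above, the local path given to launch.py --checkpoint and the on-instance path given to --resume differ, and -i is set past the checkpoint's iteration count so the run continues):
python bin/launch.py --checkpoint <checkpoint>.pickle -k -c -z n1-highmem-64 my_resumed_run "python bin/regexes.py <same flags as the original run> --resume <checkpoint>.pickle"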
# fixed bigram posterior:
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_mask_posterior "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED" ; done
# Runs with string constants:
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_strConst "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_strConst_7m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b10_strConst_7m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b10_strConst_7m_no_dsl "python bin/regexes.py --no-dsl --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b10_strConst_2m_no_dsl "python bin/regexes.py --no-dsl --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 120 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 120 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_strConst_2m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 120 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 120 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_strConst_noTC "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff None --split 0.5 -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 3600 --testingTimeout 1800 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const"; done
# Runs with string constants but without the mask
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b10_strConst_7m_no_mask "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 720 --biasOptimal --contextual --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const"; done
for SEED in `seq 1 3`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b10_strConst_7m_no_mask_no_dsl "python bin/regexes.py --tasks new --no-dsl --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 11 -R 3600 --testingTimeout 720 --biasOptimal --contextual --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const"; done
TODO, runs still wanted (see the sketch after this list):
reduced:
  full model
  norec (-g)
  nodsl (--no-dsl)
reduced+unigram:
  full model
  norec (-g)
  nodsl (--no-dsl)
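A sketch of those three variants as one loop over a single extra flag (the run name regex_red_ablation is hypothetical, and the other flags are copied from the full-model run below; note that the actual norec runs also drop the recognition-model flags --biasOptimal --contextual --mask):
for ABLATION in "" "-g" "--no-dsl"; do for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_red_ablation "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced $ABLATION"; done; done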
# full model:
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_str_red_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_str_red_norec_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-standard-64 baseline_ec_regex "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-standard-64 regex_memorize "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --compressor memorize --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const --primitives reduced --biasOptimal"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-standard-64 regex_memorize_no_recognition "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --compressor memorize --maximumFrontier 5 -i 25 --testingTimeout 720 --auxiliary --taskReranker randomShuffle --taskBatchSize 40 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g "; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_b40_str_red_nodsl_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --biasOptimal --contextual --mask --auxiliary --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced --no-dsl"; done
# unigram:
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_str_red_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --taskReranker randomShuffle --biasOptimal --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_str_red_nodsl_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced --no-dsl --biasOptimal"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_str_red_norec_12m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 720 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g"; done
# unigram, with the maybe primitive, yet to run:
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_mayb_30m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 1800 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 1800 --taskReranker randomShuffle --biasOptimal --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_mayb_nodsl_30m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 1800 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 1800 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced --no-dsl --biasOptimal"; done
for SEED in `seq 42 46`; do python bin/launch.py -k -c -z n1-highmem-64 regex_uni_b40_mayb_norec_30m "python bin/regexes.py --tasks new --maxTasks 256 --ll_cutoff bigram None --split 0.5 -t 1800 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 25 -R 3600 --testingTimeout 1800 --taskReranker randomShuffle --taskBatchSize 10 --seed $SEED --primitives strConst --use_str_const --primitives reduced -g"; done
TOWER:
for SEED in `seq 1 3`; do python bin/launch.py -c -k -z n1-highmem-64 tower_random_shuffle_10_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --primitives old --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done # did not do well
python bin/launch.py -c -k -z n1-highmem-64 tower_no_batch_1h "python bin/tower.py -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --primitives new --recognitionTimeout 3600 -RS 5000" # does very well! both quantitatively and qualitatively
python bin/launch.py -c -k -z n1-highmem-64 tower_batch_10_720 "python bin/tower.py -t 720 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 10 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --primitives new --recognitionTimeout 3600 -RS 5000"
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_batch_40_3600 "python bin/tower.py -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_batch_50_3600 "python bin/tower.py -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_no_dsl_batch_40_3600 "python bin/tower.py --no-dsl -t 3600 --pseudoCounts 30 --tasks new --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_no_dsl_batch_50_3600 "python bin/tower.py --no-dsl -t 3600 --pseudoCounts 30 --tasks new --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_no_recognition_batch_40_3600 "python bin/tower.py -g -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 40 --primitives new --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_no_recognition_batch_50_3600 "python bin/tower.py -g -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --primitives new --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 baseline_ec_tower "python bin/tower.py -g -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 0 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --primitives new --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_memorize "python bin/tower.py --compressor memorize -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 50 --primitives new --recognitionTimeout 3600 -RS 5000 --seed $SEED" ; done
for SEED in `seq 0 4`; do python bin/launch.py -c -k -z n1-highmem-64 tower_memorize_no_recognition "python bin/tower.py --compressor memorize -g -t 3600 --pseudoCounts 30 --tasks new --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 50 --primitives new --seed $SEED" ; done
The hyperparameters here are not consistent. A key thing we need to
do is make sure that we are always using the same hyperparameters
everywhere, or, where we are not, have a good reason for the
difference. For example, the structure penalty on text is larger than
for the other domains, but this is justifiable, and the topK for
regular expressions is larger than for the other domains, which is
also justifiable. In contrast, the pseudocounts really should be the
same everywhere.
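One way to enforce this, sketched on the list domain (the SHARED variable and the run name list_shared_hparams are ours, not from the runs above; the flag values are the ones most runs already share): factor the common hyperparameters into one string and splice it into every launch.
SHARED="--pseudoCounts 30 --aic 1.0 --structurePenalty 1.5 --topK 2 --arity 3 --maximumFrontier 5"
for SEED in `seq 1 5`; do python bin/launch.py -k -c -z n1-highmem-64 list_shared_hparams "python bin/list.py $SHARED --split 0.5 -t 720 -i 20 -R 3600 --testingTimeout 600 --storeTaskMetrics --seed $SEED"; done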
LOGO:
Full model without batching:
python bin/launch.py -k -z x1.32xlarge DreamLogo "python bin/logo.py -t 7200 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 3600"
Random shuffle (batches of 10), 720s: [DONE, solves 36/73 testing]
python bin/launch.py -k -z r4.16xlarge LogoBatch "python bin/logo.py -t 720 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 3600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -i 20 -R 1800 --reuseRecognition"
Random shuffle (batches of 10), 1440s (24 min): [DONE: solves 40/73 and ends up converging to a bad DSL]
python bin/launch.py -k -z x1.32xlarge LogoBatch_24m "python bin/logo.py -t 1440 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 3600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -i 30 -R 1800 "
Random shuffle (batches of 10), 3600s: [DONE: solves 40/73 and ends up converging to a bad DSL]
python bin/launch.py -k -z x1.32xlarge logo_batch_10_1h "python bin/logo.py -t 3600 --structurePenalty 1.0 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 10 -i 30 -R 1800 "
Random shuffle (batches of 20), 30min: [DONE: solves 50/73 and still converges to a bad DSL]
python bin/launch.py -k -z x1.32xlarge logo_batch_20_30m "python bin/logo.py -t 1800 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 20 -i 30 -R 1800 "
Random shuffle (batches of 40), 60min: [DONE: solves almost 90%, slightly worse than no batching]
python bin/launch.py -k -z x1.32xlarge logo_batch_40_60m "python bin/logo.py -t 3600 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 40 -i 20 -R 1800 "
Old recognition model, random shuffle (batches of 40), 60min: [RUNNING]
python bin/launch.py -k -z x1.32xlarge logo_batch_40_60m_old_recognition "python bin/logo.py -t 3600 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 40 -i 20 -R 3600 "
Baseline without recognition model (batches of 40), 3600s: [RUNNING]
python bin/launch.py -k -z x1.32xlarge logo_no_recognition_batch_40_60m "python bin/logo.py -t 3600 --structurePenalty 1.5 --pseudoCounts 30.0 --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 40 -i 20 -g"
Baseline without DSL learning (batches of 40): [RUNNING]
python bin/launch.py -k -z x1.32xlarge logo_no_dsl_batch_40_60m "python bin/logo.py -t 3600 --structurePenalty 1.5 --pseudoCounts 30.0 --biasOptimal --contextual --split 0.5 --testingTimeout 600 --storeTaskMetrics --taskReranker randomShuffle --taskBatchSize 40 -i 20 -R 1800 --aic 1000.0"
TEXT:
Full model without batching:
python bin/launch.py -k -z x1.32xlarge TextDream "python bin/text.py -i 6 -t 7200 --pseudoCounts 30 --testingTimeout 1800 --contextual --biasOptimal -l 5 --maximumFrontier 2"
Derby (you can replace this checkpoint with a different one and it will run the derby with it):
python bin/launch.py -k -z c4.8xlarge --checkpoint experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.5_it=19_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle batched_derby "python bin/text.py --compete experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.5_it=19_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle"
python bin/launch.py -k -z c4.8xlarge --checkpoint experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.5_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle batched_derby_it20 "python bin/text.py --compete experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.5_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle"
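The generic pattern, with a placeholder pickle path and an illustrative run name (the same checkpoint is passed twice: once to launch.py via --checkpoint, presumably so it is copied onto the instance, and once to text.py via --compete):
python bin/launch.py -k -z c4.8xlarge --checkpoint <checkpoint>.pickle my_derby "python bin/text.py --compete <checkpoint>.pickle"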
Baseline without batching, 3600s: text_baseline_3600s [Done: solves up to 80 tasks.]
python bin/launch.py -k -z r4.16xlarge text_baseline_3600s "python bin/text.py -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 7 -R 7200 --storeTaskMetrics --testingTimeout 3600 --biasOptimal --contextual --taskReranker default"
Baseline without batching, 7200s: text_baseline_7200s [Done: solves 87 tasks.]
python bin/launch.py -k -z r4.16xlarge text_baseline_7200s "python bin/text.py -t 7200 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 7 -R 7200 --storeTaskMetrics --testingTimeout 7200 --biasOptimal --contextual --taskReranker default"
Baseline without recognition model (batches of 10), 720s: [DONE]
python bin/launch.py -k -z r4.16xlarge text_no_recognition_random_shuffle_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --testingTimeout 600 -g --taskReranker randomShuffle --taskBatchSize 10"
Baseline without DSL learning (batches of 10), 720s: [DONE]
python bin/launch.py -k -z r4.16xlarge text_no_dsl_random_shuffle_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Baseline without DSL learning + old recognition model (batches of 10), 720s: [RUNNING]
python bin/launch.py -k -z r4.16xlarge text_no_dsl_old_recognition_random_shuffle_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 10), 720s: text_random_shuffle_10_720s [DONE - solves 91 tasks.]
python bin/launch.py -k -z r4.16xlarge text_random_shuffle_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 40), 3600s: text_random_shuffle_40_3600s [RUNNING]
python bin/launch.py -k -z r4.16xlarge text_random_shuffle_40_3600s "python bin/text.py -t 3600 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 40"
Random shuffle (batches of 10), 720s, structure penalty: text_random_shuffle_10_720s_sp [Done - solves 56/108.]
python bin/launch.py -k -z r4.16xlarge text_random_shuffle_10_720s_sp "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 10), 720s, more dreams: text_random_shuffle_10_720s_r [Done - solves 59/108.]
python bin/launch.py -k -z r4.16xlarge text_random_shuffle_10_720s_r "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 -r 0.9"
Random shuffle_r: resuming (text_random_shuffle_10_720s_r) with more time: *text_resume_1440s (the resume skeleton is sketched after the three resume entries below)
python bin/launch.py -k --ssh_key openmind -z r4.16xlarge --checkpoint experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle text_resume_1440s "python bin/text.py -t 1440 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 31 -R 3600 --storeTaskMetrics --testingTimeout 1440 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 -r 0.9 --resume experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle"
Random shuffle_r: resuming (text_random_shuffle_10_720s_r) with more time on just unsolved: *text_resume_unsolved_1440s
python bin/launch.py -k --ssh_key openmind -z r4.16xlarge --checkpoint experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle text_resume_unsolved_1440s "python bin/text.py -t 1440 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 31 -R 3600 --storeTaskMetrics --testingTimeout 1440 --biasOptimal --contextual --taskReranker unsolved --taskBatchSize 10 -r 0.9 --resume experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle"
Random shuffle_r: resuming (text_random_shuffle_10_720s_r) with more time on just unsolved: *text_resume_unsolved_2160s
python bin/launch.py -k --ssh_key openmind -z r4.16xlarge --checkpoint experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle text_resume_unsolved_2160s "python bin/text.py -t 2160 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 31 -R 3600 --storeTaskMetrics --testingTimeout 2160 --biasOptimal --contextual --taskReranker unsolved --taskBatchSize 10 -r 0.9 --resume experimentOutputs/text_aic=1.0_arity=3_BO=True_CO=True_ET=720_HR=0.9_it=20_MF=5_baseline=False_pc=30.0_RT=7200_RW=False_storeTask=True_L=1.0_batch=10_taskReranker=randomShuffle_K=2_topkNotMAP=False_rec=True_feat=LearnedFeatureExtractor.pickle"
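The resume pattern in the three entries above, reduced to its skeleton: pass the checkpoint pickle both to launch.py's --checkpoint (so it is shipped to the instance) and to the solver's --resume, and set -i beyond the iteration at which the checkpoint stopped. CKPT and the job name are placeholders; a real invocation keeps all of the original search flags, as in the commands above:
CKPT=experimentOutputs/previous_text_run.pickle   # placeholder for the pickle written by the earlier run
python bin/launch.py -k --ssh_key openmind -z r4.16xlarge --checkpoint "$CKPT" my_text_resume "python bin/text.py -t 1440 --testingTimeout 1440 -i 31 --taskReranker unsolved --taskBatchSize 10 --resume $CKPT"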
Random shuffle (batches of 20), 1440s: text_random_shuffle_20_1440s [DONE - solves 56 tasks.]
python bin/launch.py -k -z r4.16xlarge text_random_shuffle_20_1440s "python bin/text.py -t 1440 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 7200 --storeTaskMetrics --testingTimeout 1440 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 20"
Random kNN (batches of 10), 720s: test_random_knn_10_720s [Done: solves up to 87 tasks.]
python bin/launch.py -k -z r4.16xlarge test_random_knn_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker randomkNN --taskBatchSize 10"
Unsolved (ranked by entropy, batches of 10), 720s: text_unsolved_entropy_10_720s [Done - solves up to 91 tasks.]
python bin/launch.py -k -z r4.16xlarge text_unsolved_entropy_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker unsolvedEntropy --taskBatchSize 10"
Curriculum (batch size 10), 720s: text_default_10_720s [Done - solves up to 91 tasks.]
python bin/launch.py -k -z r4.16xlarge text_default_10_720s "python bin/text.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --testingTimeout 720 --biasOptimal --contextual --taskReranker default --taskBatchSize 10"
LIST:
1/18 Replication Runs: # Cap 10,000 gradient steps. (A launch-loop sketch follows these replication entries.)
Best full model, random shuffle (batches of 10), structure penalty, no vectorization: *list_random_shuffle_10_720s_sp_no_vec_1: replication 1. [Best 103/109]
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_no_vec_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Replication 2: Done. Best 90/109.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_no_vec_2 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Replication 3: *list_random_shuffle_10_720s_sp_no_vec_3 Done. Best 84/109.
Best full model, random shuffle (batches of 10), structure penalty, vectorization with gradient steps capped. Best: 104/109.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 "
Done: best 103/109.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_2 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 "
Best full model, no vectorization, auxiliary loss. Best: 88/109.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_no_vec_aux_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary"
One replication launched; best: 104/109.
Best full model, vectorization, auxiliary loss. Best: 104/109.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_aux_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary"
Best full model, vectorization with many steps (sanity check).
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_many_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 150000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 "
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_many_2 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 150000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 "
Best full model, vectorization with gradient steps capped, ensemble = 3.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_ensemble_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --ensembleSize 3"
Replication: list_random_shuffle_10_720s_sp_vec_ensemble_2
Best full model, vectorization, auxiliary loss, ensemble = 3.
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_aux_ensemble_1 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --auxiliary --ensembleSize 3"
Replication: list_random_shuffle_10_720s_sp_vec_aux_ensemble_2.
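Since each replication above is the identical command relaunched under a numbered name, the same thing can be written as a loop; the rep$N suffix scheme here is hypothetical (pick fresh suffixes so names do not collide with the runs already logged):
for N in 1 2 3; do python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_vec_rep$N "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -RS 10000 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"; done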
---
Full model without batching:
python bin/launch.py -k -z x1.32xlarge ListDream "python bin/list.py -t 7200 --split 0.5 --testingTimeout 600 --contextual --biasOptimal -i 6 --maximumFrontier 5 --pseudoCounts 10. --structurePenalty 2"
Full model without batching, 3600s timeout: list_baseline_3600 [DONE: 103/109.]
python bin/launch.py -k -z x1.32xlarge list_baseline_3600 "python bin/list.py -t 3600 --pseudoCounts 10 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 10 -i 7 -R 3600 --storeTaskMetrics --split 0.5 --testingTimeout 3600 --biasOptimal --contextual --taskReranker default"
Full model without batching, 7200s timeout: list_baseline_7200 [DONE - solves 109/109 tasks.]
python bin/launch.py -k -z x1.32xlarge list_baseline_7200 "python bin/list.py -t 7200 --pseudoCounts 10 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 10 -i 7 -R 3600 --storeTaskMetrics --split 0.5 --testingTimeout 7200 --biasOptimal --contextual --taskReranker default"
Random shuffle (batches of 10), 720s: list_random_shuffle_10_720s [Done: Solves 101/109]
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Baseline without DSL learning (batches of 10), 720s: [RUNNING]
python bin/launch.py -k -z r4.16xlarge list_no_dsl_random_shuffle_10_720s "python bin/list.py -t 720 --pseudoCounts 30 --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Baseline without DSL learning + old recognition model (batches of 10), 720s: [RUNNING]
python bin/launch.py -k -z r4.16xlarge list_no_dsl_old_recognition_random_shuffle_10_720s "python bin/list.py -t 720 --pseudoCounts 30 --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --taskReranker randomShuffle --taskBatchSize 10"
Baseline without recognition model (batches of 10), 720s: list_no_recognition_random_shuffle_10_720s [RUNNING]
python bin/launch.py -k -z r4.16xlarge list_no_recognition_random_shuffle_10_720s "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --split 0.5 --testingTimeout 600 -g --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 10), structure penalty: list_random_shuffle_10_720s_sp [Done: solves 105/109.]
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Running for replication: *list_random_shuffle_10_720s_sp_v2
python bin/launch.py --ssh_key openmind -k -z r4.16xlarge list_random_shuffle_10_720s_sp_v2 "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 10), more dreams: list_random_shuffle_10_720s_r [Done: solves 104/109.]
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_r "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 -r 0.9"
Random shuffle (batches of 20), 1440s: list_random_shuffle_20_1440s [DONE: solves 102/109.]
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_20_1440s "python bin/list.py -t 1440 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 10 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 1440 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 20"
Random shuffle (batches of 10), structure penalty, retraining: list_random_shuffle_10_720s_sp_retrain [DONE: solves 85/109].
python bin/launch.py -k -z r4.16xlarge list_random_shuffle_10_720s_sp_retrain "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 1800 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --reuseRecognition"
Random kNN (batches of 10), 720s: list_random_knn_10_720s (launched as list_random_knn_10_720s_catwong) [Done - solves 84/109]
python bin/launch.py -k -z r4.16xlarge list_random_knn_10_720s_catwong "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomkNN --taskBatchSize 10"
Random kNN (batches of 10), regularized: list_random_knn_10_720s_reg [Done - solves 105/109.]
python bin/launch.py -k -z r4.16xlarge list_random_knn_10_720s_reg "python bin/list.py -t 720 --pseudoCounts 30 --aic 1.0 --structurePenalty 2.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 7200 --storeTaskMetrics --split 0.5 --testingTimeout 720 --biasOptimal --contextual --taskReranker randomkNN --taskBatchSize 10 -r 0.9"
TOWER:
Full model without batching: (Solves up to 51 tasks)
python bin/launch.py -k -z m4.16xlarge TowerDream5 "python bin/tower.py -i 6 -t 300 --pseudoCounts 30 --tasks old --maximumFrontier 5 --biasOptimal --contextual --testingTimeout 600 --split 0.5 --structurePenalty 1"
Full model without batching and without test/train split, new primitives: [RUNNING]
python bin/launch.py -k -z m4.16xlarge tower_new_everything "python bin/tower.py -i 6 -t 300 --pseudoCounts 30 --tasks old --maximumFrontier 5 --biasOptimal --contextual --structurePenalty 1 --primitives new"
Full model without batching, new primitives: [Done, solves 50/56]
python bin/launch.py -k -z m4.16xlarge tower_new "python bin/tower.py --recognitionTimeout 3600 -i 6 -t 3600 --pseudoCounts 30 --tasks old --maximumFrontier 5 --biasOptimal --contextual --testingTimeout 600 --split 0.5 --structurePenalty 1 --primitives new"
Random shuffle batches of 10, new primitives: [Done, solves 44/56]
python bin/launch.py -k -z m4.16xlarge tower_new_random_shuffle_10_360s "python bin/tower.py --recognitionTimeout 3600 -i 20 -t 120 --pseudoCounts 30 --tasks old --maximumFrontier 5 --biasOptimal --contextual --testingTimeout 600 --split 0.5 --structurePenalty 1 --primitives new --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle batches of 10, 720s, new primitives: [RUNNING]
python bin/launch.py -k -z m4.16xlarge tower_new_random_shuffle_10_720s "python bin/tower.py --recognitionTimeout 3600 -i 20 -t 720 --pseudoCounts 30 --tasks old --maximumFrontier 5 --biasOptimal --contextual --testingTimeout 600 --split 0.5 --structurePenalty 1 --primitives new --taskReranker randomShuffle --taskBatchSize 10"
Random shuffle (batches of 5), 60s: tower_random_shuffle_5_60s [Done, solves 42/56]
python bin/launch.py -k -z m4.16xlarge tower_random_shuffle_5_60s "python bin/tower.py -t 60 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 300 --storeTaskMetrics --split 0.5 --testingTimeout 60 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 5"
Random shuffle (batches of 5), 120s: tower_random_shuffle_5_120s [Done, solves 45/56].
python bin/launch.py -k -z m4.16xlarge tower_random_shuffle_5_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 300 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 5"
Random shuffle (batches of 10), 120s: tower_random_shuffle_10_120s [Done, solves 51/56]
python bin/launch.py -k -z m4.16xlarge tower_random_shuffle_10_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --primitives old --recognitionTimeout 3600"
Re-run with new random shuffle (v2): solves 49/56.
Baseline without recognition model: tower_no_recognition_random_shuffle_10_120s [RUNNING]
python bin/launch.py -k -z m4.16xlarge tower_no_recognition_random_shuffle_10_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --taskReranker randomShuffle --taskBatchSize 10 --primitives old -g"
Baseline without DSL learning: tower_no_dsl_random_shuffle_10_120s [RUNNING]
python bin/launch.py -k -z m4.16xlarge tower_no_dsl_random_shuffle_10_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1000.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --primitives old --recognitionTimeout 3600"
Random shuffle (batches of 10), 120s, retraining: tower_random_shuffle_10_120s [12/18]
python bin/launch.py -k -z m4.16xlarge --ssh_key openmind tower_random_shuffle_10_120s "python bin/tower.py -t 120 --pseudoCounts 30 --tasks old --aic 1.0 --structurePenalty 1.0 --topK 2 --arity 3 --maximumFrontier 5 -i 20 -R 300 --storeTaskMetrics --split 0.5 --testingTimeout 600 --biasOptimal --contextual --taskReranker randomShuffle --taskBatchSize 10 --reuseRecognition"
REGEX:
Best run so far (uses the context-free model):
python bin/launch.py -g -z "p3.16xlarge" "regex_gpu_contextfreeBO.95HR" "python bin/regexes.py -t 3600 --testingTimeout 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --biasOptimal -r .95"
Best contextual run:
python bin/launch.py -g -z "p3.16xlarge" "regex_gpu_contextualBO.95HR" "python bin/regexes.py -t 3600 --testingTimeout 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .95"
A task-batching run:
python bin/launch.py -g -z "p3.16xlarge" "regex_gpu_batch_contextualBO.95HR" "python bin/regexes.py -t 720 --testingTimeout 720 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .95 --taskReranker randomShuffle --taskBatchSize 10"
Task batching with rec model reuse:
python bin/launch.py -g -z "p3.16xlarge" "regex_gpu_batch_reuserec_contextualBO.95HR" "python bin/regexes.py -t 720 --testingTimeout 720 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .95 --taskReranker randomShuffle --taskBatchSize 10 --reuseRecognition"
***
#100% Helmholtz:
python bin/launch.py -g -z "g3.16xlarge" "regex_gpu_batch_100HR" "python bin/regexes.py -t 720 --testingTimeout 720 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r 1 --taskReranker randomShuffle --taskBatchSize 10"
#Can use --seed to change the subset of tasks; default is 42. (A seed-sweep sketch follows the two commands below.)
#Reuse vs. no reuse:
python bin/launch.py -g -z "g3.16xlarge" "regex_batch_.95HR" "python bin/regexes.py -t 720 --testingTimeout 720 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .95 --taskReranker randomShuffle --taskBatchSize 10"
#Task batching with rec model reuse:
python bin/launch.py -g -z "g3.16xlarge" "regex_batch_reuserec.95HR" "python bin/regexes.py -t 720 --testingTimeout 720 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .95 --taskReranker randomShuffle --taskBatchSize 10 --reuseRecognition"
LONGER RUNS, NORMAL STATS
python bin/launch.py -g -z "g3.16xlarge" "regex_batch_50HR" "python bin/regexes.py -i 30 -t 720 --testingTimeout 900 -R 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .5 --taskReranker randomShuffle --taskBatchSize 10"
python bin/launch.py -g -z "g3.16xlarge" "regex_batch_reuserec50HR" "python bin/regexes.py -i 30 -t 720 --testingTimeout 900 -R 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .5 --taskReranker randomShuffle --taskBatchSize 10 --reuseRecognition"
Full model with batching, 0.5 Helmholtz ratio, long recognition-model training time, bounded rank of 32: [RUNNING]
python bin/launch.py -g -z "g3.16xlarge" "regex_batch_40_1800s_r32" "python bin/regexes.py -t 1800 --testingTimeout 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 --contextual --biasOptimal -r .5 --taskReranker randomShuffle --taskBatchSize 40 --recognitionTimeout 1800 --reuseRecognition --matrixRank 32"
Baseline without recognition model:
python bin/launch.py -k -z m4.16xlarge --tail regex_no_recognition_batch_40_1800 "python bin/regexes.py -t 1800 --testingTimeout 1800 --tasks new --maxTasks 256 --ll_cutoff bigram --split 0.5 -k 10 -g --taskReranker randomShuffle --taskBatchSize 40 -i 20"