* dataset: opus
* model: transformer
* source language(s): acm afb amh apc apc_Latn ara ara_Latn arq arq_Latn ary arz heb mlt phn_Phnx tir tmr_Hebr
* target language(s): eng
* pre-processing: normalization + SentencePiece (spm32k,spm32k)
* download: opus-2020-06-28.zip
* test set translations: opus-2020-06-28.test.txt
* test set scores: opus-2020-06-28.eval.txt
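The pre-processing listed above has to be applied to source text before decoding: normalization followed by SentencePiece segmentation with the 32k source model. A minimal sketch, assuming the unpacked zip contains a `source.spm` file (a placeholder path; check the actual archive layout):

```python
# A minimal sketch of the "normalization + SentencePiece (spm32k)" step,
# assuming the unpacked zip provides a source.spm model (the usual layout
# of these downloads); the path below is a placeholder.
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="opus-2020-06-28/source.spm")

text = "שלום עולם"  # Hebrew source sentence
pieces = sp.encode(text, out_type=str)
# Marian expects the space-joined subword pieces as decoder input.
print(" ".join(pieces))
```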
| testset | BLEU | chr-F |
|---------|------|-------|
| Tatoeba-test.amh-eng.amh.eng | 35.2 | 0.539 |
| Tatoeba-test.ara-eng.ara.eng | 36.9 | 0.548 |
| Tatoeba-test.heb-eng.heb.eng | 43.1 | 0.597 |
| Tatoeba-test.mlt-eng.mlt.eng | 46.3 | 0.618 |
| Tatoeba-test.multi.eng | 40.0 | 0.570 |
| Tatoeba-test.phn-eng.phn.eng | 0.2 | 0.040 |
| Tatoeba-test.tir-eng.tir.eng | 12.3 | 0.327 |
| Tatoeba-test.tmr-eng.tmr.eng | 0.7 | 0.147 |
* dataset: opus
* model: transformer
* source language(s): acm afb amh apc apc_Latn ara ara_Latn arq arq_Latn ary arz heb mlt tir
* target language(s): eng
* pre-processing: normalization + SentencePiece (spm32k,spm32k)
* download: opus-2020-07-04.zip
* test set translations: opus-2020-07-04.test.txt
* test set scores: opus-2020-07-04.eval.txt
| testset | BLEU | chr-F |
|---------|------|-------|
| Tatoeba-test.amh-eng.amh.eng | 37.8 | 0.558 |
| Tatoeba-test.ara-eng.ara.eng | 37.2 | 0.549 |
| Tatoeba-test.heb-eng.heb.eng | 43.3 | 0.597 |
| Tatoeba-test.mlt-eng.mlt.eng | 45.7 | 0.617 |
| Tatoeba-test.multi.eng | 40.3 | 0.572 |
| Tatoeba-test.tir-eng.tir.eng | 14.8 | 0.349 |
| testset | BLEU | chr-F |
|---------|------|-------|
| Tatoeba-test.amh-eng.amh.eng | 42.4 | 0.592 |
| Tatoeba-test.ara-eng.ara.eng | 37.5 | 0.555 |
| Tatoeba-test.heb-eng.heb.eng | 43.4 | 0.599 |
| Tatoeba-test.mlt-eng.mlt.eng | 48.1 | 0.647 |
| Tatoeba-test.multi.eng | 40.6 | 0.578 |
| Tatoeba-test.tir-eng.tir.eng | 15.5 | 0.341 |
| testset | BLEU | chr-F |
|---------|------|-------|
| Tatoeba-test.amh-eng.amh.eng | 37.5 | 0.565 |
| Tatoeba-test.ara-eng.ara.eng | 38.9 | 0.566 |
| Tatoeba-test.heb-eng.heb.eng | 44.6 | 0.610 |
| Tatoeba-test.mlt-eng.mlt.eng | 53.7 | 0.688 |
| Tatoeba-test.multi.eng | 41.7 | 0.588 |
| Tatoeba-test.tir-eng.tir.eng | 18.3 | 0.370 |
| testset | BLEU | chr-F |
|---------|------|-------|
| Tatoeba-test.amh-eng.amh.eng | 41.8 | 0.587 |
| Tatoeba-test.ara-eng.ara.eng | 39.4 | 0.570 |
| Tatoeba-test.heb-eng.heb.eng | 45.0 | 0.612 |
| Tatoeba-test.mlt-eng.mlt.eng | 51.5 | 0.670 |
| Tatoeba-test.multi.eng | 42.3 | 0.592 |
| Tatoeba-test.tir-eng.tir.eng | 17.1 | 0.357 |
| testset | BLEU | chr-F | #sent | #words | BP |
|---------|------|-------|-------|--------|----|
| Tatoeba-test.acm-eng | 16.0 | 0.507 | 3 | 24 | 1.000 |
| Tatoeba-test.afb-eng | 34.0 | 0.517 | 36 | 175 | 1.000 |
| Tatoeba-test.amh-eng | 0.2 | 0.103 | 190 | 1001 | 1.000 |
| Tatoeba-test.apc-eng | 14.3 | 0.346 | 5 | 29 | 0.852 |
| Tatoeba-test.ara-eng | 37.5 | 0.554 | 10000 | 73964 | 1.000 |
| Tatoeba-test.arq-eng | 7.2 | 0.231 | 403 | 3058 | 1.000 |
| Tatoeba-test.ary-eng | 34.8 | 0.429 | 18 | 98 | 0.926 |
| Tatoeba-test.arz-eng | 11.1 | 0.304 | 181 | 1178 | 1.000 |
| Tatoeba-test.heb-eng | 43.0 | 0.597 | 10000 | 73559 | 0.971 |
| Tatoeba-test.jpa-eng | 3.0 | 0.185 | 4 | 38 | 1.000 |
| Tatoeba-test.mlt-eng | 30.9 | 0.475 | 203 | 1165 | 0.891 |
| Tatoeba-test.multi-eng | 39.7 | 0.570 | 10000 | 73229 | 0.989 |
| Tatoeba-test.oar-eng | 0.8 | 0.089 | 6 | 71 | 1.000 |
| Tatoeba-test.oar_Hebr-eng | 1.0 | 0.085 | 3 | 39 | 1.000 |
| Tatoeba-test.oar_Syrc-eng | 1.5 | 0.094 | 3 | 32 | 0.794 |
| Tatoeba-test.phn-eng | 1.1 | 0.069 | 5 | 51 | 0.382 |
| Tatoeba-test.tir-eng | 0.3 | 0.127 | 69 | 503 | 1.000 |
| Tatoeba-test.tmr-eng | 2.1 | 0.139 | 19 | 147 | 0.862 |
| tico19-test.amh-eng | 1.7 | 0.180 | 2100 | 56848 | 1.000 |
| tico19-test.ara-eng | 26.7 | 0.548 | 2100 | 56347 | 1.000 |
| tico19-test.en-ti_ER.tir-eng | 2.1 | 0.191 | 2100 | 56848 | 1.000 |
| tico19-test.en-ti_ET.tir-eng | 2.4 | 0.194 | 2100 | 56848 | 1.000 |
| tico19-test.tir-eng | 2.3 | 0.188 | 2100 | 56848 | 1.000 |
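The BLEU, chr-F, and BP (brevity penalty) columns are corpus-level scores in the style of sacrebleu, with #sent and #words giving the size of each test set. A rough sketch of recomputing such scores, assuming you have extracted line-aligned hypothesis and reference files from the released *.test.txt (hyp.txt and ref.txt are placeholder names):

```python
# Hedged sketch: recomputing corpus BLEU and chr-F with sacrebleu.
# hyp.txt / ref.txt are placeholders for line-aligned hypothesis and
# reference files extracted from the released test translations.
import sacrebleu

with open("hyp.txt", encoding="utf-8") as f:
    hyps = [line.rstrip("\n") for line in f]
with open("ref.txt", encoding="utf-8") as f:
    refs = [line.rstrip("\n") for line in f]

bleu = sacrebleu.corpus_bleu(hyps, [refs])
chrf = sacrebleu.corpus_chrf(hyps, [refs])

print(f"BLEU  = {bleu.score:.1f} (BP = {bleu.bp:.3f})")
# Recent sacrebleu reports chr-F on a 0-100 scale; the tables above use 0-1.
print(f"chr-F = {chrf.score / 100:.3f}")
```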
* download: opus4m+btTCv20210807-2021-10-01.zip
| testset | BLEU | chr-F | #sent | #words | BP |
|---------|------|-------|-------|--------|----|
| Tatoeba-test-v2021-08-07.multi-eng | 42.1 | 0.592 | 10000 | 74081 | 0.991 |
| Tatoeba-test-v2021-08-07.multi-multi | 42.1 | 0.592 | 10000 | 74081 | 0.991 |
| tico19-test.amh-eng | 9.8 | 0.252 | 2100 | 56848 | 1.000 |
| tico19-test.ara-eng | 35.1 | 0.621 | 2100 | 56347 | 1.000 |
| tico19-test.en-ti_ER.tir-eng | 34.7 | 0.462 | 2100 | 56848 | 1.000 |
| tico19-test.en-ti_ET.tir-eng | 33.4 | 0.452 | 2100 | 56848 | 1.000 |
| tico19-test.tir-eng | 30.6 | 0.426 | 2100 | 56848 | 1.000 |
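For quick experiments without running Marian directly, Helsinki-NLP also publishes converted checkpoints of these models on the Hugging Face Hub. A usage sketch, assuming `Helsinki-NLP/opus-mt-sem-en` is the converted multilingual checkpoint matching the source-language list above (verify against the release you need, since the Hub copy tracks one specific snapshot):

```python
# Hedged usage sketch via Hugging Face transformers; the checkpoint name
# Helsinki-NLP/opus-mt-sem-en is an assumption based on the source-language
# list above, not something this README states.
from transformers import MarianMTModel, MarianTokenizer

name = "Helsinki-NLP/opus-mt-sem-en"
tokenizer = MarianTokenizer.from_pretrained(name)
model = MarianMTModel.from_pretrained(name)

batch = tokenizer(["שלום עולם"], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```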