GLUE-LM-GAP

GLUE-LM-GAP is an LM-GAP challenge based on the GLUE benchmark. [ver. 2.0.6]

# submitter when ver. description dev-0 PerplexityHashed test-A PerplexityHashed
152 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-Local-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 1006246.726444 1008375.972086
130 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=ByT5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 854668.733905 855967.364657
125 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=ByT5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 577506.468555 584584.351424
124 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 544241.485156 562529.862349
123 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-efficient-mini top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 493098.539241 510519.379968
117 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-efficient-tiny top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 408282.416232 418979.060903
116 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 420124.898821 415089.099066
112 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 306911.104213 324604.408047
109 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large-v1_1-lm-adapt top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 337413.972666 300663.100084
108 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 290626.365308 282303.546243
107 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-efficient-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 303742.494167 277887.449243
106 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 290005.373172 267972.646440
102 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small-v1_1-lm-adapt top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 239393.520998 233990.717024
101 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-TGlobal-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 199552.610743 211414.917005
96 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-efficient-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 152750.729495 124430.988625
95 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 103892.908235 118104.568290
94 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-efficient-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 105072.298767 84932.099781
92 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base-v1_1-lm-adapt top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 74652.507386 63737.130182
61 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 4691.781349 3976.321841
55 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 3046.209545 2491.979920
46 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 2055.165101 1615.687190
29 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 995.334314 782.865991
18 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=Switch-base-8 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 425.605103 349.968606
16 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 358.614059 274.694340
13 kaczla 2023-03-06 15:48 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 253.863088 192.323406
151 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 870218.401911 876508.944709
149 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869793.785438 875278.906409
147 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869064.584648 875114.462710
145 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869876.865963 873852.303343
143 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 871216.070328 870332.354133
141 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869304.616131 869819.109384
139 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 872216.010612 869300.217146
137 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 874622.829677 869124.251622
135 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 868924.084221 867987.178408
119 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-en token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 408239.470759 428281.101166
87 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-17-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 40697.047197 52129.248038
82 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=German-BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 27421.852502 27859.712465
80 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-100-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 13760.997821 15046.785077
76 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=PolishRoBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 13507.011385 12559.885478
69 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-tiny-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 6677.054334 7540.606730
64 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=ClinicalBERT token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 4364.651919 4443.596582
63 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=SportsBERT token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 4562.718171 4407.714425
59 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=FinBERT token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 3268.202377 3527.824565
58 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=CamemBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 4236.441996 3469.326138
56 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-mini-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 2471.697715 2714.984099
52 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=SciBERT-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 2132.405410 2072.331054
47 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=SciBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1695.065247 1681.679705
45 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-small-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1438.074270 1577.662051
44 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1678.076625 1575.853755
42 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1419.618051 1477.047924
40 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1278.244697 1303.640415
38 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-medium-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1158.238019 1260.641963
37 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=CodeBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1482.468166 1174.661467
35 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 957.703457 933.385723
33 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=MobileBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 852.327720 913.525374
31 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 756.596451 804.253307
28 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-xxlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A 746.990635
26 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-xlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 739.702271 730.841384
24 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 642.869447 670.671724
22 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 709.262387 635.838112
20 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 563.054777 494.413642
17 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BioMed-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 424.568655 312.169270
15 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 254.756477 193.883054
12 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilRoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 231.609393 179.690259
10 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 198.220916 172.101095
8 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 179.035358 138.830588
6 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-large-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 147.667554 125.733251
4 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 114.419134 91.907166
2 kaczla 2023-02-25 16:30 2.0.6 Transformer encoder models (MLM) method=simple model_name=RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 86.217101 69.393009
154 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-Local-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 1006327.841621 1008648.894465
133 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 853010.711229 860545.059404
129 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 616699.817328 622033.924463
127 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-TGlobal-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 599174.609809 594610.012669
122 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 462216.715180 456565.137625
115 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small-v1_1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 341731.446603 361959.431007
111 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 291276.702614 309516.650005
105 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 266041.436835 266466.400683
100 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 170274.560487 182789.640921
98 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 163267.421887 158446.962205
91 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 56915.639536 60987.821059
78 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 16713.044290 14703.538644
75 kaczla 2023-02-24 12:21 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 10171.933229 9998.940853
153 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-Local-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 1006327.841621 1008648.894465
132 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 853010.711229 860545.059404
128 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large-v1.1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 616699.817328 622033.924463
126 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=LongT5-TGlobal-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 599174.609809 594610.012669
121 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base-v1.1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 462216.715180 456565.137625
114 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small-v1.1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 341731.446603 361959.431007
110 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 291276.702614 309516.650005
104 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 266041.436835 266466.400683
99 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-large top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 170274.560487 182789.640921
97 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 163267.421887 158446.962205
90 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 56915.639536 60987.821059
77 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=mT5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 16713.044290 14703.538644
74 kaczla 2023-02-24 12:04 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 10171.933229 9998.940853
131 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 853010.711229 860545.059404
120 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base-v1.1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 462216.715180 456565.137625
113 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small-v1.1 top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 341731.446603 361959.431007
103 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=FLAN-T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 266041.436835 266466.400683
89 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-base top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 56915.639536 60987.821059
73 kaczla 2023-02-23 11:14 2.0.6 Transformer encoder-decoder models (seq2seq) depth=1 model_name=T5-small top_k=15 transformer huggingface-transformers transformer-encoder-decoder seq2seq 10171.933229 9998.940853
93 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=PolishGPT-2-small top_k=15 transformer huggingface-transformers transformer-decoder clm 79627.908898 79278.556763
88 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=PolishGPT-2-medium top_k=15 transformer huggingface-transformers transformer-decoder clm 59754.664598 58446.342067
85 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=PolishGPT-2-large top_k=15 transformer huggingface-transformers transformer-decoder clm 50521.897871 47767.398812
84 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=GPT-fr-base top_k=15 transformer huggingface-transformers transformer-decoder clm 46956.696551 46362.459344
83 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=GPT-fr-small top_k=15 transformer huggingface-transformers transformer-decoder clm 38383.860649 38199.695407
72 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=DistilGPT-2 top_k=15 transformer huggingface-transformers transformer-decoder clm 8885.673813 9714.724445
70 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=BioGPT top_k=15 transformer huggingface-transformers transformer-decoder clm 10345.246977 9377.074086
68 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=Pythia-70M top_k=15 transformer huggingface-transformers transformer-decoder clm 7424.531362 7329.966876
67 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=Pythia-160M top_k=15 transformer huggingface-transformers transformer-decoder clm 5019.867530 4845.613045
66 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=GPT-2-base top_k=15 transformer huggingface-transformers transformer-decoder clm 4732.154178 4700.318867
62 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=GPT-Neo-125M top_k=15 transformer huggingface-transformers transformer-decoder clm 4711.535675 4373.950153
60 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=Pythia-410M top_k=15 transformer huggingface-transformers transformer-decoder clm 3894.413763 3685.007192
54 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=GPT-2-medium top_k=15 transformer huggingface-transformers transformer-decoder clm 2591.721115 2250.943823
51 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=OPT-125M top_k=15 transformer huggingface-transformers transformer-decoder clm 2560.284962 2021.615293
49 kaczla 2023-02-13 09:07 2.0.6 Transformer decoder models (CLM) depth=1 model_name=OPT-350M top_k=15 transformer huggingface-transformers transformer-decoder clm 2257.973076 1767.228754
223 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
222 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
221 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=XLM-en token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
220 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=XLM-17-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
219 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=XLM-100-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
218 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=SportsBERT token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
217 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=SciBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
216 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=SciBERT-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
215 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
214 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
213 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=PolishRoBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
212 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MobileBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
211 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
210 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
209 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
208 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
207 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
206 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
205 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
204 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
203 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
202 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=German-BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
201 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=FinBERT token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
200 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=DistilRoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
199 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
198 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
197 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=CodeBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
196 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=CamemBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
195 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BioMed-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
194 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
193 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-large-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
192 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
191 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
190 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
189 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
188 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=ALBERT-xxlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
187 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=ALBERT-xlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
186 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=ALBERT-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
185 kaczla 2023-02-13 08:56 2.0.5 Transformer encoder models (MLM) method=simple model_name=ALBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A N/A
71 kaczla 2023-02-06 14:35 2.0.4 Transformer decoder models (CLM) depth=1 model_name=DistilGPT-2 top_k=15 transformer huggingface-transformers transformer-decoder clm 8885.673813 9714.724445
65 kaczla 2023-02-06 14:35 2.0.4 Transformer decoder models (CLM) depth=1 model_name=GPT-2-base top_k=15 transformer huggingface-transformers transformer-decoder clm 4732.154178 4700.318867
53 kaczla 2023-02-06 14:35 2.0.4 Transformer decoder models (CLM) depth=1 model_name=GPT-2-medium top_k=15 transformer huggingface-transformers transformer-decoder clm 2591.721115 2250.943823
50 kaczla 2023-02-06 14:35 2.0.4 Transformer decoder models (CLM) depth=1 model_name=OPT-125M top_k=15 transformer huggingface-transformers transformer-decoder clm 2560.284962 2021.615293
48 kaczla 2023-02-06 14:35 2.0.4 Transformer decoder models (CLM) depth=1 model_name=OPT-350M top_k=15 transformer huggingface-transformers transformer-decoder clm 2257.973076 1767.228754
150 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 870218.401911 876508.944709
148 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869793.785438 875278.906409
146 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869064.584648 875114.462710
144 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869876.865963 873852.303343
142 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 871216.070328 870332.354133
140 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869304.616131 869819.109384
138 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 872216.010612 869300.217146
136 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 874622.829677 869124.251622
134 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 868924.084221 867987.178408
118 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-en token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 408239.470759 428281.101166
86 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-17-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 40697.047197 52129.248038
81 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=German-BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 27421.852502 27859.712465
79 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-100-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 13760.997821 15046.785077
57 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=CamemBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 4236.441996 3469.326138
43 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1678.076625 1575.853755
41 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1419.618051 1477.047924
39 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1278.244697 1303.640415
36 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=PolishRoBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A 1024.000000
34 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 957.703457 933.385723
32 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=MobileBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 852.327720 913.525374
30 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 756.596451 804.253307
27 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-xxlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A 746.990635
25 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=ALBERT-xlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 739.702271 730.841384
23 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 642.869447 670.671724
21 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 709.262387 635.838112
19 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 563.054777 494.413642
14 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 254.756477 193.883054
11 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=DistilRoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 231.609393 179.690259
9 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 198.220916 172.101095
7 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 179.035358 138.830588
5 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=BERT-large-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 147.667554 125.733251
3 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 114.419134 91.907166
1 kaczla 2023-02-06 13:06 2.0.6 Transformer encoder models (MLM) method=simple model_name=RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 86.217101 69.393009
184 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 179.035358 138.830588
183 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=XLM-RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 254.756477 193.883054
182 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=XLM-en token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 408239.470759 428281.101166
181 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=XLM-17-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 40697.047197 52129.248038
180 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=XLM-100-lang token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 13760.997821 15046.785077
179 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 86.217101 69.393009
178 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=RoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 114.419134 91.907166
177 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MobileBERT-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 852.327720 913.525374
176 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 874622.829677 869124.251622
175 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869876.865963 873852.303343
174 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H768-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 871216.070328 870332.354133
173 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 868924.084221 867987.178408
172 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869793.785438 875278.906409
171 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869304.616131 869819.109384
170 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L6-H384-BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 872216.010612 869300.217146
169 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-XLMR-Large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 870218.401911 876508.944709
168 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=MiniLM-L12-H384-RoBERTa-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 869064.584648 875114.462710
167 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=DistilRoBERTa-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 231.609393 179.690259
166 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1278.244697 1303.640415
165 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=DistilBERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 563.054777 494.413642
164 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-large-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 642.869447 670.671724
163 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-large-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 147.667554 125.733251
162 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-base-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 756.596451 804.253307
161 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-uncased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1419.618051 1477.047924
160 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-base-multilingual-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 709.262387 635.838112
159 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=BERT-base-cased token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 198.220916 172.101095
158 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=ALBERT-xxlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm N/A 746.990635
157 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=ALBERT-xlarge token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 739.702271 730.841384
156 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=ALBERT-large token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 957.703457 933.385723
155 kaczla 2022-12-29 14:08 2.0.3 Transformer encoder models (MLM) method=simple model_name=ALBERT-base token_length=1 top_k=15 transformer huggingface-transformers transformer-encoder mlm 1678.076625 1575.853755