xuanricheng committed
Commit 1b13dd5 · verified · 1 Parent(s): 3f54425

Add results for yi.daiteng01

yi.daiteng01/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,386 @@
+{
+    "config_general": {
+        "model_name": "yi.daiteng01",
+        "model_dtype": "float16",
+        "model_size": 0
+    },
+    "results": {
+        "ChartQA": {
+            "acc": 79.24,
+            "acc_stderr": 0,
+            "accuracy": 79.24,
+            "human_test": {
+                "total": 1250,
+                "correct": 824,
+                "accuracy": 65.92
+            },
+            "augmented_test": {
+                "total": 1250,
+                "correct": 1157,
+                "accuracy": 92.56
+            }
+        },
+        "CMMMU": {
+            "acc": 39.56,
+            "acc_stderr": 0,
+            "\u5546\u4e1a": {
+                "num": 126,
+                "correct": 35,
+                "accuracy": 27.78
+            },
+            "\u79d1\u5b66": {
+                "num": 204,
+                "correct": 68,
+                "accuracy": 33.33
+            },
+            "overall": {
+                "num": 900,
+                "correct": 356,
+                "accuracy": 39.56
+            },
+            "accuracy": 39.56,
+            "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+                "num": 153,
+                "correct": 73,
+                "accuracy": 47.71
+            },
+            "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+                "num": 244,
+                "correct": 89,
+                "accuracy": 36.48
+            },
+            "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+                "num": 88,
+                "correct": 56,
+                "accuracy": 63.64
+            },
+            "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+                "num": 85,
+                "correct": 35,
+                "accuracy": 41.18
+            }
+        },
+        "CMMU": {
+            "acc": 23.88,
+            "acc_stderr": 0,
+            "val": {
+                "multiple-choice": {
+                    "hard": {
+                        "total": 150,
+                        "correct": 29,
+                        "accuracy": 19.33
+                    },
+                    "normal": {
+                        "total": 1205,
+                        "correct": 351,
+                        "accuracy": 29.13
+                    }
+                },
+                "fill-in-the-blank": {
+                    "hard": {
+                        "total": 300,
+                        "correct": 43,
+                        "accuracy": 14.33
+                    },
+                    "normal": {
+                        "total": 507,
+                        "correct": 74,
+                        "accuracy": 14.6
+                    }
+                },
+                "multiple-response": {
+                    "hard": {
+                        "total": 94,
+                        "correct": 12,
+                        "accuracy": 12.77
+                    },
+                    "normal": {
+                        "total": 33,
+                        "correct": 4,
+                        "accuracy": 12.12
+                    }
+                }
+            },
+            "test": {
+                "multiple-choice": {
+                    "hard": {
+                        "total": 150,
+                        "correct": 22,
+                        "accuracy": 14.67
+                    },
+                    "normal": {
+                        "total": 1205,
+                        "correct": 387,
+                        "accuracy": 32.12
+                    }
+                },
+                "fill-in-the-blank": {
+                    "hard": {
+                        "total": 296,
+                        "correct": 42,
+                        "accuracy": 14.19
+                    },
+                    "normal": {
+                        "total": 529,
+                        "correct": 83,
+                        "accuracy": 15.69
+                    }
+                },
+                "multiple-response": {
+                    "hard": {
+                        "total": 95,
+                        "correct": 10,
+                        "accuracy": 10.53
+                    },
+                    "normal": {
+                        "total": 32,
+                        "correct": 7,
+                        "accuracy": 21.88
+                    }
+                }
+            },
+            "val-overall": {
+                "total": 2289,
+                "correct": 513,
+                "accuracy": 22.41,
+                "bias_rate": 7.79
+            },
+            "test-overall": {
+                "total": 2307,
+                "correct": 551,
+                "accuracy": 23.88,
+                "bias_rate": 2.61
+            }
+        },
+        "MMMU": {
+            "acc": 44.33,
+            "acc_stderr": 0,
+            "accuracy": 44.33,
+            "subject_score": {
+                "Art": 71.67,
+                "Math": 46.67,
+                "Basic": 46.67,
+                "Music": 26.67,
+                "Design": 80.0,
+                "Energy": 23.33,
+                "Manage": 43.33,
+                "Public": 66.67,
+                "Biology": 40.0,
+                "Finance": 30.0,
+                "History": 63.33,
+                "Physics": 36.67,
+                "Clinical": 50.0,
+                "Computer": 36.67,
+                "Pharmacy": 26.67,
+                "Chemistry": 20.0,
+                "Economics": 40.0,
+                "Geography": 53.33,
+                "Marketing": 46.67,
+                "Materials": 20.0,
+                "Sociology": 60.0,
+                "Accounting": 40.0,
+                "Literature": 83.33,
+                "Mechanical": 26.67,
+                "Psychology": 46.67,
+                "Agriculture": 50.0,
+                "Diagnostics": 46.67,
+                "Electronics": 23.33,
+                "Architecture": 13.33
+            },
+            "difficulty_score": {
+                "Easy": 58.98,
+                "Hard": 27.07,
+                "Medium": 41.51
+            }
+        },
+        "MMMU_Pro_standard": {
+            "acc": 30.77,
+            "acc_stderr": 0,
+            "accuracy": 30.77,
+            "reject_info": {
+                "reject_rate": 0.06,
+                "reject_number": 1,
+                "total_question": 1730
+            },
+            "subject_score": {
+                "Art": 35.85,
+                "Math": 23.33,
+                "Music": 25.0,
+                "Design": 53.33,
+                "Manage": 28.0,
+                "Biology": 32.2,
+                "Finance": 25.0,
+                "History": 50.0,
+                "Physics": 28.33,
+                "Pharmacy": 35.09,
+                "Chemistry": 28.33,
+                "Economics": 33.9,
+                "Geography": 36.54,
+                "Marketing": 25.42,
+                "Materials": 10.17,
+                "Sociology": 42.59,
+                "Accounting": 22.41,
+                "Art_Theory": 67.27,
+                "Literature": 71.15,
+                "Psychology": 30.0,
+                "Agriculture": 28.33,
+                "Electronics": 23.33,
+                "Public_Health": 29.31,
+                "Computer_Science": 25.0,
+                "Energy_and_Power": 15.52,
+                "Clinical_Medicine": 25.42,
+                "Basic_Medical_Science": 34.62,
+                "Mechanical_Engineering": 8.47,
+                "Architecture_and_Engineering": 11.67,
+                "Diagnostics_and_Laboratory_Medicine": 28.33
+            },
+            "difficulty_score": {
+                "Easy": 45.08,
+                "Hard": 18.25,
+                "Medium": 27.59
+            }
+        },
+        "MMMU_Pro_vision": {
+            "acc": 52.69,
+            "acc_stderr": 0,
+            "accuracy": 52.69,
+            "reject_info": {
+                "reject_rate": 0.06,
+                "reject_number": 1,
+                "total_question": 1730
+            },
+            "subject_score": {
+                "Art": 41.51,
+                "Math": 56.67,
+                "Music": 41.67,
+                "Design": 55.0,
+                "Manage": 48.0,
+                "Biology": 55.93,
+                "Finance": 51.67,
+                "History": 51.79,
+                "Physics": 56.67,
+                "Pharmacy": 56.14,
+                "Chemistry": 66.67,
+                "Economics": 50.0,
+                "Geography": 53.85,
+                "Marketing": 35.59,
+                "Materials": 41.67,
+                "Sociology": 61.11,
+                "Accounting": 43.1,
+                "Art_Theory": 50.91,
+                "Literature": 57.69,
+                "Psychology": 61.67,
+                "Agriculture": 55.0,
+                "Electronics": 51.67,
+                "Public_Health": 43.1,
+                "Computer_Science": 61.67,
+                "Energy_and_Power": 55.17,
+                "Clinical_Medicine": 67.8,
+                "Basic_Medical_Science": 61.54,
+                "Mechanical_Engineering": 52.54,
+                "Architecture_and_Engineering": 35.0,
+                "Diagnostics_and_Laboratory_Medicine": 60.0
+            }
+        },
+        "OCRBench": {
+            "acc": 68.1,
+            "acc_stderr": 0,
+            "accuracy": 68.1,
+            "final_score": [
+                681,
+                1000
+            ],
+            "Doc-oriented VQA": [
+                147,
+                200
+            ],
+            "Scene Text-centric VQA": [
+                170,
+                200
+            ],
+            "Handwriting Recognition": [
+                29,
+                50
+            ],
+            "Digit String Recognition": [
+                20,
+                50
+            ],
+            "Regular Text Recognition": [
+                48,
+                50
+            ],
+            "Artistic Text Recognition": [
+                43,
+                50
+            ],
+            "Irregular Text Recognition": [
+                41,
+                50
+            ],
+            "Key Information Extraction": [
+                148,
+                200
+            ],
+            "Non-Semantic Text Recognition": [
+                32,
+                50
+            ],
+            "Handwritten Mathematical Expression Recognition": [
+                3,
+                100
+            ]
+        },
+        "MathVision": {
+            "acc": 15.2,
+            "acc_stderr": 0,
+            "accuracy": 15.2
+        },
+        "CII-Bench": {
+            "acc": 45.36,
+            "acc_stderr": 0,
+            "accuracy": 45.36,
+            "domain_score": {
+                "Art": 44.85,
+                "CTC": 46.67,
+                "Env.": 57.41,
+                "Life": 40.26,
+                "Society": 46.49,
+                "Politics": 54.17
+            },
+            "emotion_score": {
+                "Neutral": 48.5,
+                "Negative": 44.91,
+                "Positive": 42.31
+            }
+        },
+        "Blink": {
+            "acc": 47.26,
+            "acc_stderr": 0,
+            "Jigsaw": 54.67,
+            "IQ Test": 28.67,
+            "Counting": 60.0,
+            "accuracy": 47.26,
+            "Art Style": 59.83,
+            "reject_info": {
+                "reject_rate": 0.05,
+                "reject_number": 1,
+                "total_question": 1901
+            },
+            "Relative Depth": 51.61,
+            "Spatial Relation": 74.83,
+            "Visual Similarity": 73.33,
+            "Forensic Detection": 38.64,
+            "Object Localization": 56.56,
+            "Multi-view Reasoning": 45.86,
+            "Relative Reflectance": 31.34,
+            "Visual Correspondence": 40.35,
+            "Semantic Correspondence": 30.94,
+            "Functional Correspondence": 20.0
+        }
+    },
+    "versions": {},
+    "config_tasks": {},
+    "summary_tasks": {},
+    "summary_general": {}
+}
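
For reference, a minimal sketch (Python, standard library only, not part of this commit) of how a results file with this layout could be read. It assumes the relative path of the file added above and relies on every entry under "results" carrying an "acc" field, as in the JSON shown:

import json

# Path of the results file added in this commit (relative to the repository root).
path = "yi.daiteng01/results_2025-01-25T10-42-53.190540.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Model metadata lives under "config_general".
print(data["config_general"]["model_name"])  # yi.daiteng01

# Each benchmark entry under "results" carries an overall "acc" score,
# alongside benchmark-specific breakdowns (subject_score, reject_info, ...).
for benchmark, scores in data["results"].items():
    print(f"{benchmark}: {scores['acc']}")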