xuanricheng committed
Commit a38999a · verified · 1 Parent(s): 955b7e9

Add results for gemini-2.0-pro-exp-02-05

gemini-2.0-pro-exp-02-05/results_2025-03-10T11-01-14.184463.json ADDED
@@ -0,0 +1,406 @@
+ {
+   "config_general": {
+     "model_name": "gemini-2.0-pro-exp-02-05",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "ChartQA": {
+       "acc": 21.82,
+       "acc_stderr": 0,
+       "accuracy": 21.82,
+       "human_test": {
+         "total": 1250,
+         "correct": 234,
+         "accuracy": 18.72
+       },
+       "reject_info": {
+         "reject_rate": 0.08,
+         "reject_number": 2,
+         "total_question": 2500
+       },
+       "augmented_test": {
+         "total": 1248,
+         "correct": 311,
+         "accuracy": 24.92
+       }
+     },
+     "CMMMU": {
+       "acc": 61.11,
+       "acc_stderr": 0,
+       "\u5546\u4e1a": {
+         "num": 126,
+         "correct": 53,
+         "accuracy": 42.06
+       },
+       "\u79d1\u5b66": {
+         "num": 204,
+         "correct": 114,
+         "accuracy": 55.88
+       },
+       "overall": {
+         "num": 900,
+         "correct": 550,
+         "accuracy": 61.11
+       },
+       "accuracy": 61.11,
+       "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+         "num": 153,
+         "correct": 103,
+         "accuracy": 67.32
+       },
+       "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+         "num": 244,
+         "correct": 148,
+         "accuracy": 60.66
+       },
+       "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+         "num": 88,
+         "correct": 69,
+         "accuracy": 78.41
+       },
+       "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+         "num": 85,
+         "correct": 63,
+         "accuracy": 74.12
+       }
+     },
+     "CMMU": {
+       "acc": 53.71,
+       "acc_stderr": 0,
+       "val": {
+         "multiple-choice": {
+           "hard": {
+             "total": 150,
+             "correct": 68,
+             "accuracy": 45.33
+           },
+           "normal": {
+             "total": 1205,
+             "correct": 668,
+             "accuracy": 55.44
+           }
+         },
+         "fill-in-the-blank": {
+           "hard": {
+             "total": 300,
+             "correct": 156,
+             "accuracy": 52.0
+           },
+           "normal": {
+             "total": 506,
+             "correct": 267,
+             "accuracy": 52.77
+           }
+         },
+         "multiple-response": {
+           "hard": {
+             "total": 94,
+             "correct": 48,
+             "accuracy": 51.06
+           },
+           "normal": {
+             "total": 33,
+             "correct": 16,
+             "accuracy": 48.48
+           }
+         }
+       },
+       "test": {
+         "multiple-choice": {
+           "hard": {
+             "total": 150,
+             "correct": 71,
+             "accuracy": 47.33
+           },
+           "normal": {
+             "total": 1205,
+             "correct": 696,
+             "accuracy": 57.76
+           }
+         },
+         "fill-in-the-blank": {
+           "hard": {
+             "total": 296,
+             "correct": 142,
+             "accuracy": 47.97
+           },
+           "normal": {
+             "total": 529,
+             "correct": 277,
+             "accuracy": 52.36
+           }
+         },
+         "multiple-response": {
+           "hard": {
+             "total": 95,
+             "correct": 40,
+             "accuracy": 42.11
+           },
+           "normal": {
+             "total": 32,
+             "correct": 13,
+             "accuracy": 40.62
+           }
+         }
+       },
+       "reject_info": {
+         "reject_rate": 0.01,
+         "reject_number": 1,
+         "total_question": 12705
+       },
+       "val-overall": {
+         "total": 2288,
+         "correct": 1223,
+         "accuracy": 53.45,
+         "bias_rate": 9.6
+       },
+       "test-overall": {
+         "total": 2307,
+         "correct": 1239,
+         "accuracy": 53.71,
+         "bias_rate": 3.96
+       }
+     },
+     "MMMU": {
+       "acc": 62.89,
+       "acc_stderr": 0,
+       "accuracy": 62.89,
+       "subject_score": {
+         "Art": 88.33,
+         "Math": 53.33,
+         "Basic": 73.33,
+         "Music": 36.67,
+         "Design": 86.67,
+         "Energy": 50.0,
+         "Manage": 63.33,
+         "Public": 73.33,
+         "Biology": 56.67,
+         "Finance": 30.0,
+         "History": 80.0,
+         "Physics": 76.67,
+         "Clinical": 66.67,
+         "Computer": 56.67,
+         "Pharmacy": 86.67,
+         "Chemistry": 56.67,
+         "Economics": 66.67,
+         "Geography": 63.33,
+         "Marketing": 60.0,
+         "Materials": 53.33,
+         "Sociology": 73.33,
+         "Accounting": 50.0,
+         "Literature": 90.0,
+         "Mechanical": 46.67,
+         "Psychology": 73.33,
+         "Agriculture": 66.67,
+         "Diagnostics": 46.67,
+         "Electronics": 36.67,
+         "Architecture": 36.67
+       },
+       "difficulty_score": {
+         "Easy": 72.2,
+         "Hard": 49.17,
+         "Medium": 62.26
+       }
+     },
+     "MMMU_Pro_standard": {
+       "acc": 45.9,
+       "acc_stderr": 0,
+       "accuracy": 45.9,
+       "subject_score": {
+         "Art": 66.04,
+         "Math": 46.67,
+         "Music": 31.67,
+         "Design": 66.67,
+         "Manage": 40.0,
+         "Biology": 44.07,
+         "Finance": 28.33,
+         "History": 55.36,
+         "Physics": 40.0,
+         "Pharmacy": 49.12,
+         "Chemistry": 51.67,
+         "Economics": 44.07,
+         "Geography": 40.38,
+         "Marketing": 45.76,
+         "Materials": 21.67,
+         "Sociology": 59.26,
+         "Accounting": 36.21,
+         "Art_Theory": 69.09,
+         "Literature": 59.62,
+         "Psychology": 50.0,
+         "Agriculture": 35.0,
+         "Electronics": 51.67,
+         "Public_Health": 46.55,
+         "Computer_Science": 46.67,
+         "Energy_and_Power": 34.48,
+         "Clinical_Medicine": 52.54,
+         "Basic_Medical_Science": 57.69,
+         "Mechanical_Engineering": 33.9,
+         "Architecture_and_Engineering": 36.67,
+         "Diagnostics_and_Laboratory_Medicine": 43.33
+       },
+       "difficulty_score": {
+         "Easy": 56.82,
+         "Hard": 32.42,
+         "Medium": 45.44
+       }
+     },
+     "MMMU_Pro_vision": {
+       "acc": 40.54,
+       "acc_stderr": 0,
+       "accuracy": 40.54,
+       "reject_info": {
+         "reject_rate": 0.06,
+         "reject_number": 1,
+         "total_question": 1730
+       },
+       "subject_score": {
+         "Art": 57.69,
+         "Math": 33.33,
+         "Music": 30.0,
+         "Design": 70.0,
+         "Manage": 50.0,
+         "Biology": 40.68,
+         "Finance": 20.0,
+         "History": 50.0,
+         "Physics": 36.67,
+         "Pharmacy": 49.12,
+         "Chemistry": 41.67,
+         "Economics": 40.68,
+         "Geography": 42.31,
+         "Marketing": 30.51,
+         "Materials": 25.0,
+         "Sociology": 55.56,
+         "Accounting": 32.76,
+         "Art_Theory": 69.09,
+         "Literature": 67.31,
+         "Psychology": 38.33,
+         "Agriculture": 31.67,
+         "Electronics": 36.67,
+         "Public_Health": 25.86,
+         "Computer_Science": 38.33,
+         "Energy_and_Power": 18.97,
+         "Clinical_Medicine": 44.07,
+         "Basic_Medical_Science": 57.69,
+         "Mechanical_Engineering": 28.81,
+         "Architecture_and_Engineering": 31.67,
+         "Diagnostics_and_Laboratory_Medicine": 35.0
+       }
+     },
+     "OCRBench": {
+       "acc": 86.186,
+       "acc_stderr": 0,
+       "accuracy": 86.186,
+       "final_score": [
+         861,
+         999
+       ],
+       "reject_info": {
+         "reject_rate": 0.1,
+         "reject_number": 1,
+         "total_question": 1000
+       },
+       "Doc-oriented VQA": [
+         175,
+         200
+       ],
+       "Scene Text-centric VQA": [
+         187,
+         200
+       ],
+       "Handwriting Recognition": [
+         38,
+         50
+       ],
+       "Digit String Recognition": [
+         39,
+         50
+       ],
+       "Regular Text Recognition": [
+         49,
+         50
+       ],
+       "Artistic Text Recognition": [
+         47,
+         50
+       ],
+       "Irregular Text Recognition": [
+         47,
+         50
+       ],
+       "Key Information Extraction": [
+         186,
+         200
+       ],
+       "Non-Semantic Text Recognition": [
+         50,
+         50
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         43,
+         99
+       ]
+     },
+     "MathVision": {
+       "acc": 53.79,
+       "acc_stderr": 0,
+       "accuracy": 53.79,
+       "reject_info": {
+         "reject_rate": 0.07,
+         "reject_number": 2,
+         "total_question": 3040
+       }
+     },
+     "CII-Bench": {
+       "acc": 67.72,
+       "acc_stderr": 0,
+       "accuracy": 67.72,
+       "reject_info": {
+         "reject_rate": 0.39,
+         "reject_number": 3,
+         "total_question": 765
+       },
+       "domain_score": {
+         "Art": 70.37,
+         "CTC": 57.46,
+         "Env.": 66.67,
+         "Life": 70.56,
+         "Society": 69.02,
+         "Politics": 75.0
+       },
+       "emotion_score": {
+         "Neutral": 71.05,
+         "Negative": 65.78,
+         "Positive": 66.09
+       }
+     },
+     "Blink": {
+       "acc": 64.7,
+       "acc_stderr": 0,
+       "Jigsaw": 57.72,
+       "IQ Test": 21.33,
+       "Counting": 70.0,
+       "accuracy": 64.7,
+       "Art Style": 87.18,
+       "reject_info": {
+         "reject_rate": 0.16,
+         "reject_number": 3,
+         "total_question": 1901
+       },
+       "Relative Depth": 84.68,
+       "Spatial Relation": 74.65,
+       "Visual Similarity": 81.48,
+       "Forensic Detection": 78.79,
+       "Object Localization": 60.66,
+       "Multi-view Reasoning": 59.4,
+       "Relative Reflectance": 33.83,
+       "Visual Correspondence": 83.72,
+       "Semantic Correspondence": 63.31,
+       "Functional Correspondence": 53.08
+     }
+   },
+   "versions": {},
+   "config_tasks": {},
+   "summary_tasks": {},
+   "summary_general": {}
+ }
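
For readers who want to consume these result files programmatically, here is a minimal sketch, not part of this commit, that loads the file with only the Python standard library. The path is the one shown in the file header above, and the keys follow the JSON added here; adjust the path to your local checkout.

```python
import json

# Path as it appears in this commit; adjust to wherever the dataset is checked out.
PATH = "gemini-2.0-pro-exp-02-05/results_2025-03-10T11-01-14.184463.json"

with open(PATH, encoding="utf-8") as f:
    data = json.load(f)

print("model:", data["config_general"]["model_name"])

# Every benchmark entry under "results" carries an aggregate "acc" score;
# some also report a "reject_info" block (rate apparently given as a
# percentage, e.g. 2 rejected of 2500 questions -> 0.08).
for benchmark, scores in data["results"].items():
    reject = scores.get("reject_info", {}).get("reject_rate")
    suffix = f", reject_rate={reject}" if reject is not None else ""
    print(f"{benchmark}: acc={scores['acc']}{suffix}")
```

Run from the repository root, this prints one line per benchmark (ChartQA, CMMMU, CMMU, MMMU, MMMU_Pro_standard, MMMU_Pro_vision, OCRBench, MathVision, CII-Bench, Blink) with its aggregate score.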