xuanricheng committed
Commit 955b7e9 · verified · 1 Parent(s): 4cf9a96

Add results for claude-3-7-sonnet-20250219

claude-3-7-sonnet-20250219/results_2025-03-10T11-01-07.004431.json ADDED
@@ -0,0 +1,386 @@
+ {
+     "config_general": {
+         "model_name": "claude-3-7-sonnet-20250219",
+         "model_dtype": "float16",
+         "model_size": 0
+     },
+     "results": {
+         "ChartQA": {
+             "acc": 27.56,
+             "acc_stderr": 0,
+             "accuracy": 27.56,
+             "human_test": {
+                 "total": 1250,
+                 "correct": 390,
+                 "accuracy": 31.2
+             },
+             "augmented_test": {
+                 "total": 1250,
+                 "correct": 299,
+                 "accuracy": 23.92
+             }
+         },
+         "CMMMU": {
+             "acc": 49.11,
+             "acc_stderr": 0,
+             "商业": {
+                 "num": 126,
+                 "correct": 43,
+                 "accuracy": 34.13
+             },
+             "科学": {
+                 "num": 204,
+                 "correct": 83,
+                 "accuracy": 40.69
+             },
+             "overall": {
+                 "num": 900,
+                 "correct": 442,
+                 "accuracy": 49.11
+             },
+             "accuracy": 49.11,
+             "健康与医学": {
+                 "num": 153,
+                 "correct": 89,
+                 "accuracy": 58.17
+             },
+             "技术与工程": {
+                 "num": 244,
+                 "correct": 112,
+                 "accuracy": 45.9
+             },
+             "艺术与设计": {
+                 "num": 88,
+                 "correct": 64,
+                 "accuracy": 72.73
+             },
+             "人文社会科学": {
+                 "num": 85,
+                 "correct": 51,
+                 "accuracy": 60.0
+             }
+         },
+         "CMMU": {
+             "acc": 25.36,
+             "acc_stderr": 0,
+             "val": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 15,
+                         "accuracy": 10.0
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 300,
+                         "accuracy": 24.9
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 300,
+                         "correct": 87,
+                         "accuracy": 29.0
+                     },
+                     "normal": {
+                         "total": 507,
+                         "correct": 171,
+                         "accuracy": 33.73
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 94,
+                         "correct": 11,
+                         "accuracy": 11.7
+                     },
+                     "normal": {
+                         "total": 33,
+                         "correct": 7,
+                         "accuracy": 21.21
+                     }
+                 }
+             },
+             "test": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 21,
+                         "accuracy": 14.0
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 294,
+                         "accuracy": 24.4
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 296,
+                         "correct": 84,
+                         "accuracy": 28.38
+                     },
+                     "normal": {
+                         "total": 529,
+                         "correct": 165,
+                         "accuracy": 31.19
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 95,
+                         "correct": 13,
+                         "accuracy": 13.68
+                     },
+                     "normal": {
+                         "total": 32,
+                         "correct": 8,
+                         "accuracy": 25.0
+                     }
+                 }
+             },
+             "val-overall": {
+                 "total": 2289,
+                 "correct": 591,
+                 "accuracy": 25.82,
+                 "bias_rate": 58.07
+             },
+             "test-overall": {
+                 "total": 2307,
+                 "correct": 585,
+                 "accuracy": 25.36,
+                 "bias_rate": 46.77
+             }
+         },
+         "MMMU": {
+             "acc": 52.33,
+             "acc_stderr": 0,
+             "accuracy": 52.33,
+             "subject_score": {
+                 "Art": 81.67,
+                 "Math": 10.0,
+                 "Basic": 73.33,
+                 "Music": 30.0,
+                 "Design": 80.0,
+                 "Energy": 43.33,
+                 "Manage": 53.33,
+                 "Public": 56.67,
+                 "Biology": 60.0,
+                 "Finance": 33.33,
+                 "History": 66.67,
+                 "Physics": 60.0,
+                 "Clinical": 63.33,
+                 "Computer": 40.0,
+                 "Pharmacy": 70.0,
+                 "Chemistry": 40.0,
+                 "Economics": 53.33,
+                 "Geography": 43.33,
+                 "Marketing": 46.67,
+                 "Materials": 40.0,
+                 "Sociology": 73.33,
+                 "Accounting": 50.0,
+                 "Literature": 83.33,
+                 "Mechanical": 20.0,
+                 "Psychology": 76.67,
+                 "Agriculture": 56.67,
+                 "Diagnostics": 40.0,
+                 "Electronics": 13.33,
+                 "Architecture": 30.0
+             },
+             "difficulty_score": {
+                 "Easy": 65.08,
+                 "Hard": 28.73,
+                 "Medium": 53.54
+             }
+         },
+         "MMMU_Pro_standard": {
+             "acc": 38.61,
+             "acc_stderr": 0,
+             "accuracy": 38.61,
+             "subject_score": {
+                 "Art": 66.04,
+                 "Math": 21.67,
+                 "Music": 20.0,
+                 "Design": 63.33,
+                 "Manage": 30.0,
+                 "Biology": 32.2,
+                 "Finance": 33.33,
+                 "History": 48.21,
+                 "Physics": 36.67,
+                 "Pharmacy": 43.86,
+                 "Chemistry": 40.0,
+                 "Economics": 38.98,
+                 "Geography": 38.46,
+                 "Marketing": 32.2,
+                 "Materials": 16.67,
+                 "Sociology": 53.7,
+                 "Accounting": 32.76,
+                 "Art_Theory": 61.82,
+                 "Literature": 71.15,
+                 "Psychology": 41.67,
+                 "Agriculture": 35.0,
+                 "Electronics": 43.33,
+                 "Public_Health": 50.0,
+                 "Computer_Science": 35.0,
+                 "Energy_and_Power": 24.14,
+                 "Clinical_Medicine": 47.46,
+                 "Basic_Medical_Science": 36.54,
+                 "Mechanical_Engineering": 20.34,
+                 "Architecture_and_Engineering": 25.0,
+                 "Diagnostics_and_Laboratory_Medicine": 28.33
+             },
+             "difficulty_score": {
+                 "Easy": 52.65,
+                 "Hard": 26.68,
+                 "Medium": 35.33
+             }
+         },
+         "MMMU_Pro_vision": {
+             "acc": 68.03,
+             "acc_stderr": 0,
+             "accuracy": 68.03,
+             "subject_score": {
+                 "Art": 66.04,
+                 "Math": 70.0,
+                 "Music": 61.67,
+                 "Design": 70.0,
+                 "Manage": 70.0,
+                 "Biology": 66.1,
+                 "Finance": 53.33,
+                 "History": 69.64,
+                 "Physics": 65.0,
+                 "Pharmacy": 70.18,
+                 "Chemistry": 70.0,
+                 "Economics": 64.41,
+                 "Geography": 76.92,
+                 "Marketing": 57.63,
+                 "Materials": 65.0,
+                 "Sociology": 68.52,
+                 "Accounting": 72.41,
+                 "Art_Theory": 63.64,
+                 "Literature": 69.23,
+                 "Psychology": 83.33,
+                 "Agriculture": 70.0,
+                 "Electronics": 55.0,
+                 "Public_Health": 63.79,
+                 "Computer_Science": 71.67,
+                 "Energy_and_Power": 82.76,
+                 "Clinical_Medicine": 74.58,
+                 "Basic_Medical_Science": 76.92,
+                 "Mechanical_Engineering": 76.27,
+                 "Architecture_and_Engineering": 60.0,
+                 "Diagnostics_and_Laboratory_Medicine": 60.0
+             }
+         },
+         "OCRBench": {
+             "acc": 79.819,
+             "acc_stderr": 0,
+             "accuracy": 79.819,
+             "final_score": [
+                 795,
+                 996
+             ],
+             "reject_info": {
+                 "reject_rate": 0.4,
+                 "reject_number": 4,
+                 "total_question": 1000
+             },
+             "Doc-oriented VQA": [
+                 173,
+                 196
+             ],
+             "Scene Text-centric VQA": [
+                 169,
+                 200
+             ],
+             "Handwriting Recognition": [
+                 38,
+                 50
+             ],
+             "Digit String Recognition": [
+                 33,
+                 50
+             ],
+             "Regular Text Recognition": [
+                 49,
+                 50
+             ],
+             "Artistic Text Recognition": [
+                 44,
+                 50
+             ],
+             "Irregular Text Recognition": [
+                 45,
+                 50
+             ],
+             "Key Information Extraction": [
+                 176,
+                 200
+             ],
+             "Non-Semantic Text Recognition": [
+                 47,
+                 50
+             ],
+             "Handwritten Mathematical Expression Recognition": [
+                 21,
+                 100
+             ]
+         },
+         "MathVision": {
+             "acc": 45.38,
+             "acc_stderr": 0,
+             "accuracy": 45.38,
+             "reject_info": {
+                 "reject_rate": 0.03,
+                 "reject_number": 1,
+                 "total_question": 3040
+             }
+         },
+         "CII-Bench": {
+             "acc": 59.26,
+             "acc_stderr": 0,
+             "accuracy": 59.26,
+             "reject_info": {
+                 "reject_rate": 0.52,
+                 "reject_number": 4,
+                 "total_question": 765
+             },
+             "domain_score": {
+                 "Art": 55.15,
+                 "CTC": 52.24,
+                 "Env.": 62.75,
+                 "Life": 63.64,
+                 "Society": 59.46,
+                 "Politics": 70.83
+             },
+             "emotion_score": {
+                 "Neutral": 59.62,
+                 "Negative": 59.25,
+                 "Positive": 58.87
+             }
+         },
+         "Blink": {
+             "acc": 63.34,
+             "acc_stderr": 0,
+             "Jigsaw": 63.33,
+             "IQ Test": 26.67,
+             "Counting": 65.0,
+             "accuracy": 63.34,
+             "Art Style": 83.76,
+             "Relative Depth": 75.81,
+             "Spatial Relation": 75.52,
+             "Visual Similarity": 86.67,
+             "Forensic Detection": 57.58,
+             "Object Localization": 60.66,
+             "Multi-view Reasoning": 54.14,
+             "Relative Reflectance": 31.34,
+             "Visual Correspondence": 91.28,
+             "Semantic Correspondence": 50.36,
+             "Functional Correspondence": 63.85
+         }
+     },
+     "versions": {},
+     "config_tasks": {},
+     "summary_tasks": {},
+     "summary_general": {}
+ }
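The added file follows the leaderboard's usual results layout: a `config_general` block with model metadata, and a `results` map keyed by benchmark, where each entry carries a flat `acc`/`accuracy` score alongside benchmark-specific breakdowns (subject scores, difficulty scores, reject info, and so on). As a minimal sketch of how such a file could be consumed, assuming only the path from the diff header and the Python standard library (the `summarize` helper is illustrative, not part of this repository):

```python
import json

# Path of the results file added in this commit (taken from the diff header above).
RESULTS_PATH = "claude-3-7-sonnet-20250219/results_2025-03-10T11-01-07.004431.json"

def summarize(path: str) -> None:
    """Print the model name and the top-level 'acc' score of each benchmark."""
    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    print("model:", data["config_general"]["model_name"])
    for benchmark, scores in data["results"].items():
        # Each benchmark entry in this file exposes a flat "acc" field;
        # finer-grained breakdowns (e.g. "subject_score") sit alongside it.
        print(f"{benchmark:>20}: {scores['acc']}")

if __name__ == "__main__":
    summarize(RESULTS_PATH)
```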