xuanricheng committed
Commit 3f54425 · verified · 1 Parent(s): 0c4e490

Add results for Qwen/Qwen2-VL-2B-Instruct

Qwen/Qwen2-VL-2B-Instruct/results_2025-01-25T05-05-06.871936.json ADDED
@@ -0,0 +1,371 @@
+ {
+     "config_general": {
+         "model_name": "Qwen/Qwen2-VL-2B-Instruct",
+         "model_dtype": "float16",
+         "model_size": 0
+     },
+     "results": {
+         "ChartQA": {
+             "acc": 75.8,
+             "acc_stderr": 0,
+             "accuracy": 75.8,
+             "human_test": {
+                 "total": 1250,
+                 "correct": 740,
+                 "accuracy": 59.2
+             },
+             "augmented_test": {
+                 "total": 1250,
+                 "correct": 1155,
+                 "accuracy": 92.4
+             }
+         },
+         "CMMMU": {
+             "acc": 35.89,
+             "acc_stderr": 0,
+             "\u5546\u4e1a": {
+                 "num": 126,
+                 "correct": 26,
+                 "accuracy": 20.63
+             },
+             "\u79d1\u5b66": {
+                 "num": 204,
+                 "correct": 59,
+                 "accuracy": 28.92
+             },
+             "overall": {
+                 "num": 900,
+                 "correct": 323,
+                 "accuracy": 35.89
+             },
+             "accuracy": 35.89,
+             "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+                 "num": 153,
+                 "correct": 60,
+                 "accuracy": 39.22
+             },
+             "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+                 "num": 244,
+                 "correct": 91,
+                 "accuracy": 37.3
+             },
+             "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+                 "num": 88,
+                 "correct": 47,
+                 "accuracy": 53.41
+             },
+             "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+                 "num": 85,
+                 "correct": 40,
+                 "accuracy": 47.06
+             }
+         },
+         "CMMU": {
+             "acc": 15.86,
+             "acc_stderr": 0,
+             "val": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 16,
+                         "accuracy": 10.67
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 218,
+                         "accuracy": 18.09
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 300,
+                         "correct": 44,
+                         "accuracy": 14.67
+                     },
+                     "normal": {
+                         "total": 507,
+                         "correct": 84,
+                         "accuracy": 16.57
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 94,
+                         "correct": 5,
+                         "accuracy": 5.32
+                     },
+                     "normal": {
+                         "total": 33,
+                         "correct": 1,
+                         "accuracy": 3.03
+                     }
+                 }
+             },
+             "test": {
+                 "multiple-choice": {
+                     "hard": {
+                         "total": 150,
+                         "correct": 12,
+                         "accuracy": 8.0
+                     },
+                     "normal": {
+                         "total": 1205,
+                         "correct": 221,
+                         "accuracy": 18.34
+                     }
+                 },
+                 "fill-in-the-blank": {
+                     "hard": {
+                         "total": 296,
+                         "correct": 44,
+                         "accuracy": 14.86
+                     },
+                     "normal": {
+                         "total": 529,
+                         "correct": 84,
+                         "accuracy": 15.88
+                     }
+                 },
+                 "multiple-response": {
+                     "hard": {
+                         "total": 95,
+                         "correct": 3,
+                         "accuracy": 3.16
+                     },
+                     "normal": {
+                         "total": 32,
+                         "correct": 2,
+                         "accuracy": 6.25
+                     }
+                 }
+             },
+             "val-overall": {
+                 "total": 2289,
+                 "correct": 368,
+                 "accuracy": 16.08,
+                 "bias_rate": 76.19
+             },
+             "test-overall": {
+                 "total": 2307,
+                 "correct": 366,
+                 "accuracy": 15.86,
+                 "bias_rate": 64.2
+             }
+         },
+         "MMMU": {
+             "acc": 39.67,
+             "acc_stderr": 0,
+             "accuracy": 39.67,
+             "subject_score": {
+                 "Art": 65.0,
+                 "Math": 26.67,
+                 "Basic": 43.33,
+                 "Music": 33.33,
+                 "Design": 60.0,
+                 "Energy": 40.0,
+                 "Manage": 33.33,
+                 "Public": 50.0,
+                 "Biology": 30.0,
+                 "Finance": 20.0,
+                 "History": 60.0,
+                 "Physics": 23.33,
+                 "Clinical": 46.67,
+                 "Computer": 20.0,
+                 "Pharmacy": 36.67,
+                 "Chemistry": 13.33,
+                 "Economics": 33.33,
+                 "Geography": 33.33,
+                 "Marketing": 50.0,
+                 "Materials": 23.33,
+                 "Sociology": 40.0,
+                 "Accounting": 63.33,
+                 "Literature": 86.67,
+                 "Mechanical": 36.67,
+                 "Psychology": 50.0,
+                 "Agriculture": 33.33,
+                 "Diagnostics": 20.0,
+                 "Electronics": 23.33,
+                 "Architecture": 30.0
+             },
+             "difficulty_score": {
+                 "Easy": 48.14,
+                 "Hard": 27.62,
+                 "Medium": 38.92
+             }
+         },
+         "MMMU_Pro_standard": {
+             "acc": 27.11,
+             "acc_stderr": 0,
+             "accuracy": 27.11,
+             "subject_score": {
+                 "Art": 50.94,
+                 "Math": 20.0,
+                 "Music": 26.67,
+                 "Design": 53.33,
+                 "Manage": 26.0,
+                 "Biology": 30.51,
+                 "Finance": 13.33,
+                 "History": 42.86,
+                 "Physics": 16.67,
+                 "Pharmacy": 21.05,
+                 "Chemistry": 26.67,
+                 "Economics": 23.73,
+                 "Geography": 34.62,
+                 "Marketing": 16.95,
+                 "Materials": 13.33,
+                 "Sociology": 42.59,
+                 "Accounting": 15.52,
+                 "Art_Theory": 45.45,
+                 "Literature": 67.31,
+                 "Psychology": 16.67,
+                 "Agriculture": 16.67,
+                 "Electronics": 31.67,
+                 "Public_Health": 20.69,
+                 "Computer_Science": 26.67,
+                 "Energy_and_Power": 20.69,
+                 "Clinical_Medicine": 20.34,
+                 "Basic_Medical_Science": 25.0,
+                 "Mechanical_Engineering": 20.34,
+                 "Architecture_and_Engineering": 18.33,
+                 "Diagnostics_and_Laboratory_Medicine": 20.0
+             },
+             "difficulty_score": {
+                 "Easy": 35.98,
+                 "Hard": 19.45,
+                 "Medium": 25.09
+             }
+         },
+         "MMMU_Pro_vision": {
+             "acc": 9.83,
+             "acc_stderr": 0,
+             "accuracy": 9.83,
+             "subject_score": {
+                 "Art": 24.53,
+                 "Math": 3.33,
+                 "Music": 10.0,
+                 "Design": 18.33,
+                 "Manage": 18.0,
+                 "Biology": 11.86,
+                 "Finance": 0.0,
+                 "History": 10.71,
+                 "Physics": 6.67,
+                 "Pharmacy": 15.79,
+                 "Chemistry": 8.33,
+                 "Economics": 3.39,
+                 "Geography": 7.69,
+                 "Marketing": 6.78,
+                 "Materials": 5.0,
+                 "Sociology": 14.81,
+                 "Accounting": 3.45,
+                 "Art_Theory": 23.64,
+                 "Literature": 32.69,
+                 "Psychology": 13.33,
+                 "Agriculture": 11.67,
+                 "Electronics": 5.0,
+                 "Public_Health": 3.45,
+                 "Computer_Science": 11.67,
+                 "Energy_and_Power": 0.0,
+                 "Clinical_Medicine": 3.39,
+                 "Basic_Medical_Science": 7.69,
+                 "Mechanical_Engineering": 8.47,
+                 "Architecture_and_Engineering": 1.67,
+                 "Diagnostics_and_Laboratory_Medicine": 10.0
+             }
+         },
+         "OCRBench": {
+             "acc": 77.2,
+             "acc_stderr": 0,
+             "accuracy": 77.2,
+             "final_score": [
+                 772,
+                 1000
+             ],
+             "Doc-oriented VQA": [
+                 136,
+                 200
+             ],
+             "Scene Text-centric VQA": [
+                 174,
+                 200
+             ],
+             "Handwriting Recognition": [
+                 41,
+                 50
+             ],
+             "Digit String Recognition": [
+                 31,
+                 50
+             ],
+             "Regular Text Recognition": [
+                 49,
+                 50
+             ],
+             "Artistic Text Recognition": [
+                 47,
+                 50
+             ],
+             "Irregular Text Recognition": [
+                 46,
+                 50
+             ],
+             "Key Information Extraction": [
+                 174,
+                 200
+             ],
+             "Non-Semantic Text Recognition": [
+                 43,
+                 50
+             ],
+             "Handwritten Mathematical Expression Recognition": [
+                 31,
+                 100
+             ]
+         },
+         "MathVision": {
+             "acc": 14.77,
+             "acc_stderr": 0,
+             "accuracy": 14.77
+         },
+         "CII-Bench": {
+             "acc": 39.22,
+             "acc_stderr": 0,
+             "accuracy": 39.22,
+             "domain_score": {
+                 "Art": 41.18,
+                 "CTC": 45.19,
+                 "Env.": 51.85,
+                 "Life": 31.6,
+                 "Society": 40.0,
+                 "Politics": 33.33
+             },
+             "emotion_score": {
+                 "Neutral": 38.72,
+                 "Negative": 39.25,
+                 "Positive": 39.74
+             }
+         },
+         "Blink": {
+             "acc": 38.82,
+             "acc_stderr": 0,
+             "Jigsaw": 52.67,
+             "IQ Test": 16.0,
+             "Counting": 54.17,
+             "accuracy": 38.82,
+             "Art Style": 47.01,
+             "Relative Depth": 50.81,
+             "Spatial Relation": 69.93,
+             "Visual Similarity": 52.59,
+             "Forensic Detection": 19.7,
+             "Object Localization": 52.46,
+             "Multi-view Reasoning": 27.07,
+             "Relative Reflectance": 27.61,
+             "Visual Correspondence": 29.07,
+             "Semantic Correspondence": 22.3,
+             "Functional Correspondence": 28.46
+         }
+     },
+     "versions": {},
+     "config_tasks": {},
+     "summary_tasks": {},
+     "summary_general": {}
+ }
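
The added file follows the layout used throughout this results dataset: a top-level "config_general" block with model metadata, and a "results" map keyed by benchmark name, where every benchmark entry carries an "acc" (and "acc_stderr") field alongside benchmark-specific breakdowns (subjects, splits, subtasks). A minimal sketch of reading it with Python's standard library, assuming the file sits at the relative path shown in the diff header:

import json

# Path as it appears in the diff header; adjust to your local checkout.
path = "Qwen/Qwen2-VL-2B-Instruct/results_2025-01-25T05-05-06.871936.json"

with open(path, encoding="utf-8") as f:
    report = json.load(f)

print(report["config_general"]["model_name"])  # Qwen/Qwen2-VL-2B-Instruct

# Every benchmark entry exposes a top-level "acc" score; the remaining
# keys are benchmark-specific breakdowns and vary per benchmark.
for benchmark, scores in sorted(report["results"].items()):
    print(f"{benchmark}: {scores['acc']}")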