xuanricheng committed · Commit e6f8dbd (verified) · 1 Parent(s): 2529b9e

Add results for deepseek-ai/Janus-Pro-7B

deepseek-ai/Janus-Pro-7B/results_2025-02-17T04-40-24.381356.json ADDED
@@ -0,0 +1,371 @@
+{
+    "config_general": {
+        "model_name": "deepseek-ai/Janus-Pro-7B",
+        "model_dtype": "float16",
+        "model_size": 0
+    },
+    "results": {
+        "ChartQA": {
+            "acc": 26.08,
+            "acc_stderr": 0,
+            "accuracy": 26.08,
+            "human_test": {
+                "total": 1250,
+                "correct": 262,
+                "accuracy": 20.96
+            },
+            "augmented_test": {
+                "total": 1250,
+                "correct": 390,
+                "accuracy": 31.2
+            }
+        },
+        "CMMMU": {
+            "acc": 32.33,
+            "acc_stderr": 0,
+            "\u5546\u4e1a": {
+                "num": 126,
+                "correct": 24,
+                "accuracy": 19.05
+            },
+            "\u79d1\u5b66": {
+                "num": 204,
+                "correct": 58,
+                "accuracy": 28.43
+            },
+            "overall": {
+                "num": 900,
+                "correct": 291,
+                "accuracy": 32.33
+            },
+            "accuracy": 32.33,
+            "\u5065\u5eb7\u4e0e\u533b\u5b66": {
+                "num": 153,
+                "correct": 61,
+                "accuracy": 39.87
+            },
+            "\u6280\u672f\u4e0e\u5de5\u7a0b": {
+                "num": 244,
+                "correct": 75,
+                "accuracy": 30.74
+            },
+            "\u827a\u672f\u4e0e\u8bbe\u8ba1": {
+                "num": 88,
+                "correct": 42,
+                "accuracy": 47.73
+            },
+            "\u4eba\u6587\u793e\u4f1a\u79d1\u5b66": {
+                "num": 85,
+                "correct": 31,
+                "accuracy": 36.47
+            }
+        },
+        "CMMU": {
+            "acc": 9.1,
+            "acc_stderr": 0,
+            "val": {
+                "multiple-choice": {
+                    "hard": {
+                        "total": 150,
+                        "correct": 7,
+                        "accuracy": 4.67
+                    },
+                    "normal": {
+                        "total": 1205,
+                        "correct": 136,
+                        "accuracy": 11.29
+                    }
+                },
+                "fill-in-the-blank": {
+                    "hard": {
+                        "total": 300,
+                        "correct": 21,
+                        "accuracy": 7.0
+                    },
+                    "normal": {
+                        "total": 507,
+                        "correct": 41,
+                        "accuracy": 8.09
+                    }
+                },
+                "multiple-response": {
+                    "hard": {
+                        "total": 94,
+                        "correct": 1,
+                        "accuracy": 1.06
+                    },
+                    "normal": {
+                        "total": 33,
+                        "correct": 1,
+                        "accuracy": 3.03
+                    }
+                }
+            },
+            "test": {
+                "multiple-choice": {
+                    "hard": {
+                        "total": 150,
+                        "correct": 9,
+                        "accuracy": 6.0
+                    },
+                    "normal": {
+                        "total": 1205,
+                        "correct": 141,
+                        "accuracy": 11.7
+                    }
+                },
+                "fill-in-the-blank": {
+                    "hard": {
+                        "total": 296,
+                        "correct": 23,
+                        "accuracy": 7.77
+                    },
+                    "normal": {
+                        "total": 529,
+                        "correct": 37,
+                        "accuracy": 6.99
+                    }
+                },
+                "multiple-response": {
+                    "hard": {
+                        "total": 95,
+                        "correct": 0,
+                        "accuracy": 0.0
+                    },
+                    "normal": {
+                        "total": 32,
+                        "correct": 0,
+                        "accuracy": 0.0
+                    }
+                }
+            },
+            "val-overall": {
+                "total": 2289,
+                "correct": 207,
+                "accuracy": 9.04,
+                "bias_rate": 62.9
+            },
+            "test-overall": {
+                "total": 2307,
+                "correct": 210,
+                "accuracy": 9.1,
+                "bias_rate": 66.36
+            }
+        },
+        "MMMU": {
+            "acc": 36.78,
+            "acc_stderr": 0,
+            "accuracy": 36.78,
+            "subject_score": {
+                "Art": 58.33,
+                "Math": 26.67,
+                "Basic": 36.67,
+                "Music": 33.33,
+                "Design": 66.67,
+                "Energy": 26.67,
+                "Manage": 40.0,
+                "Public": 23.33,
+                "Biology": 23.33,
+                "Finance": 23.33,
+                "History": 30.0,
+                "Physics": 30.0,
+                "Clinical": 23.33,
+                "Computer": 26.67,
+                "Pharmacy": 43.33,
+                "Chemistry": 23.33,
+                "Economics": 26.67,
+                "Geography": 30.0,
+                "Marketing": 36.67,
+                "Materials": 26.67,
+                "Sociology": 56.67,
+                "Accounting": 33.33,
+                "Literature": 66.67,
+                "Mechanical": 50.0,
+                "Psychology": 46.67,
+                "Agriculture": 50.0,
+                "Diagnostics": 26.67,
+                "Electronics": 36.67,
+                "Architecture": 23.33
+            },
+            "difficulty_score": {
+                "Easy": 42.03,
+                "Hard": 30.39,
+                "Medium": 35.85
+            }
+        },
+        "MMMU_Pro_standard": {
+            "acc": 23.76,
+            "acc_stderr": 0,
+            "accuracy": 23.76,
+            "subject_score": {
+                "Art": 39.62,
+                "Math": 21.67,
+                "Music": 25.0,
+                "Design": 45.0,
+                "Manage": 26.0,
+                "Biology": 15.25,
+                "Finance": 18.33,
+                "History": 37.5,
+                "Physics": 16.67,
+                "Pharmacy": 31.58,
+                "Chemistry": 18.33,
+                "Economics": 15.25,
+                "Geography": 30.77,
+                "Marketing": 13.56,
+                "Materials": 13.33,
+                "Sociology": 29.63,
+                "Accounting": 17.24,
+                "Art_Theory": 38.18,
+                "Literature": 53.85,
+                "Psychology": 16.67,
+                "Agriculture": 28.33,
+                "Electronics": 30.0,
+                "Public_Health": 15.52,
+                "Computer_Science": 25.0,
+                "Energy_and_Power": 17.24,
+                "Clinical_Medicine": 11.86,
+                "Basic_Medical_Science": 7.69,
+                "Mechanical_Engineering": 18.64,
+                "Architecture_and_Engineering": 20.0,
+                "Diagnostics_and_Laboratory_Medicine": 21.67
+            },
+            "difficulty_score": {
+                "Easy": 27.27,
+                "Hard": 19.95,
+                "Medium": 23.35
+            }
+        },
+        "MMMU_Pro_vision": {
+            "acc": 12.37,
+            "acc_stderr": 0,
+            "accuracy": 12.37,
+            "subject_score": {
+                "Art": 1.89,
+                "Math": 6.67,
+                "Music": 20.0,
+                "Design": 6.67,
+                "Manage": 16.0,
+                "Biology": 10.17,
+                "Finance": 11.67,
+                "History": 12.5,
+                "Physics": 13.33,
+                "Pharmacy": 21.05,
+                "Chemistry": 18.33,
+                "Economics": 6.78,
+                "Geography": 13.46,
+                "Marketing": 13.56,
+                "Materials": 13.33,
+                "Sociology": 7.41,
+                "Accounting": 6.9,
+                "Art_Theory": 10.91,
+                "Literature": 13.46,
+                "Psychology": 11.67,
+                "Agriculture": 11.67,
+                "Electronics": 11.67,
+                "Public_Health": 8.62,
+                "Computer_Science": 13.33,
+                "Energy_and_Power": 22.41,
+                "Clinical_Medicine": 13.56,
+                "Basic_Medical_Science": 9.62,
+                "Mechanical_Engineering": 18.64,
+                "Architecture_and_Engineering": 13.33,
+                "Diagnostics_and_Laboratory_Medicine": 11.67
+            }
+        },
+        "OCRBench": {
+            "acc": 60.9,
+            "acc_stderr": 0,
+            "accuracy": 60.9,
+            "final_score": [
+                609,
+                1000
+            ],
+            "Doc-oriented VQA": [
+                79,
+                200
+            ],
+            "Scene Text-centric VQA": [
+                156,
+                200
+            ],
+            "Handwriting Recognition": [
+                33,
+                50
+            ],
+            "Digit String Recognition": [
+                35,
+                50
+            ],
+            "Regular Text Recognition": [
+                49,
+                50
+            ],
+            "Artistic Text Recognition": [
+                49,
+                50
+            ],
+            "Irregular Text Recognition": [
+                40,
+                50
+            ],
+            "Key Information Extraction": [
+                82,
+                200
+            ],
+            "Non-Semantic Text Recognition": [
+                38,
+                50
+            ],
+            "Handwritten Mathematical Expression Recognition": [
+                48,
+                100
+            ]
+        },
+        "MathVision": {
+            "acc": 13.98,
+            "acc_stderr": 0,
+            "accuracy": 13.98
+        },
+        "CII-Bench": {
+            "acc": 27.19,
+            "acc_stderr": 0,
+            "accuracy": 27.19,
+            "domain_score": {
+                "Art": 31.62,
+                "CTC": 28.89,
+                "Env.": 27.78,
+                "Life": 25.54,
+                "Society": 24.32,
+                "Politics": 29.17
+            },
+            "emotion_score": {
+                "Neutral": 27.07,
+                "Negative": 24.53,
+                "Positive": 30.34
+            }
+        },
+        "Blink": {
+            "acc": 42.24,
+            "acc_stderr": 0,
+            "Jigsaw": 59.33,
+            "IQ Test": 19.33,
+            "Counting": 44.17,
+            "accuracy": 42.24,
+            "Art Style": 47.01,
+            "Relative Depth": 59.68,
+            "Spatial Relation": 76.92,
+            "Visual Similarity": 56.3,
+            "Forensic Detection": 23.48,
+            "Object Localization": 50.82,
+            "Multi-view Reasoning": 50.38,
+            "Relative Reflectance": 31.34,
+            "Visual Correspondence": 30.81,
+            "Semantic Correspondence": 25.18,
+            "Functional Correspondence": 20.77
+        }
+    },
+    "versions": {},
+    "config_tasks": {},
+    "summary_tasks": {},
+    "summary_general": {}
+}