daiteng01 committed 4cf9a96 (verified) · 1 parent: 2d4dcad

Upload 34 files

Files changed (34):
  1. Aria/results_2025-01-25T10-42-53.190540.json +86 -0
  2. Claude-3.5-Sonnet-20241022/results_2025-01-25T10-42-53.190540.json +270 -0
  3. Claude3-Opus-20240229/results_2025-01-25T10-42-53.190540.json +101 -0
  4. Doubao-Pro-Vision-32k-241028/results_2025-01-25T10-42-53.190540.json +265 -0
  5. GLM-4V-Plus/results_2025-01-25T10-42-53.190540.json +96 -0
  6. GPT-4o-20240806/results_2025-01-25T10-42-53.190540.json +96 -0
  7. GPT-4o-20241120/results_2025-01-25T10-42-53.190540.json +180 -0
  8. GPT-4o-mini-20240718/results_2025-01-25T10-42-53.190540.json +91 -0
  9. Gemini-1.5-Flash/results_2025-01-25T10-42-53.190540.json +91 -0
  10. Gemini-1.5-Pro/results_2025-01-25T10-42-53.190540.json +133 -0
  11. Idefics3-8B-Llama3/results_2025-01-25T10-42-53.190540.json +86 -0
  12. InternVL2-2B/results_2025-01-25T10-42-53.190540.json +155 -0
  13. InternVL2-8B/results_2025-01-25T10-42-53.190540.json +86 -0
  14. InternVL2-Llama3-76B/results_2025-01-25T10-42-53.190540.json +86 -0
  15. Janus-1.3B/results_2025-01-25T10-42-53.190540.json +86 -0
  16. LLaVA-OneVision-0.5B/results_2025-01-25T10-42-53.190540.json +260 -0
  17. LLaVA-OneVision-7B/results_2025-01-25T10-42-53.190540.json +86 -0
  18. LLaVA-Onevision-72B/results_2025-01-25T10-42-53.190540.json +136 -0
  19. Llama-3.2-11B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +86 -0
  20. Llama-3.2-90B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +159 -0
  21. MiniCPM-V-2.6/results_2025-01-25T10-42-53.190540.json +86 -0
  22. Molmo-72B-0924/results_2025-01-25T10-42-53.190540.json +86 -0
  23. Molmo-7B-D/results_2025-01-25T10-42-53.190540.json +86 -0
  24. Mono-InternVL-2B/results_2025-01-25T10-42-53.190540.json +86 -0
  25. NVLM-D-72B/results_2025-01-25T10-42-53.190540.json +91 -0
  26. Phi-3.5-Vision-Instruct/results_2025-01-25T10-42-53.190540.json +86 -0
  27. Pixtral-12B-2409/results_2025-01-25T10-42-53.190540.json +120 -0
  28. Qwen-VL-Max/results_2025-01-25T10-42-53.190540.json +116 -0
  29. Qwen2-VL-2B-Instruct/results_2025-01-25T10-42-53.190540.json +86 -0
  30. Qwen2-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json +91 -0
  31. Qwen2-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json +86 -0
  32. Step-1V-32k/results_2025-01-25T10-42-53.190540.json +123 -0
  33. XGen-MM-Instruct-Interleave-v1.5/results_2025-01-25T10-42-53.190540.json +86 -0
  34. Yi-Vision/results_2025-01-25T10-42-53.190540.json +111 -0
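
All 34 files share one schema: a `config_general` block (`model_name`, `model_dtype`, `model_size`) and a `results` map keyed by benchmark name, where every benchmark entry carries `accuracy`, `acc_stderr`, and `acc`, plus optional breakdowns such as `subject_score`, `domain_score`, `emotion_score`, or `reject_info`. A minimal reading sketch in Python, assuming the files are checked out locally under the paths listed above:

```python
import json

# Hypothetical local path mirroring the layout of this commit.
path = "Aria/results_2025-01-25T10-42-53.190540.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

print(data["config_general"]["model_name"])
# Every benchmark entry exposes a scalar "acc" next to any nested breakdowns.
for bench, entry in data["results"].items():
    print(f"{bench}: {entry['acc']}")
```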
Aria/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "Aria",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 43.56,
+       "acc_stderr": 0,
+       "acc": 43.56
+     },
+     "MMMU": {
+       "accuracy": 48.67,
+       "acc_stderr": 0,
+       "acc": 48.67
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 32.08,
+       "acc_stderr": 0,
+       "acc": 32.08
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 52.02,
+       "acc_stderr": 0,
+       "acc": 52.02
+     },
+     "OCRBench": {
+       "accuracy": 71.9,
+       "Regular Text Recognition": 48,
+       "Irregular Text Recognition": 41,
+       "Artistic Text Recognition": 44,
+       "Handwriting Recognition": 30,
+       "Digit String Recognition": 23,
+       "Non-Semantic Text Recognition": 41,
+       "Scene Text-centric VQA": 174,
+       "Doc-oriented VQA": 157,
+       "Key Information Extraction": 155,
+       "Handwritten Mathematical Expression Recognition": 6,
+       "acc_stderr": 0,
+       "acc": 71.9
+     },
+     "MathVision": {
+       "accuracy": 15.56,
+       "acc_stderr": 0,
+       "acc": 15.56
+     },
+     "CII-Bench": {
+       "accuracy": 46.27,
+       "domain_score": {
+         "Life": 40.26,
+         "Art": 45.59,
+         "CTC": 48.89,
+         "Society": 46.49,
+         "Env.": 62.96,
+         "Politics": 54.17
+       },
+       "emotion_score": {
+         "Neutral": 46.62,
+         "Negative": 46.04,
+         "Positive": 46.15
+       },
+       "acc_stderr": 0,
+       "acc": 46.27
+     },
+     "Blink": {
+       "accuracy": 52.18,
+       "Art Style": 70.09,
+       "Counting": 58.33,
+       "Forensic Detection": 58.33,
+       "Functional Correspondence": 23.08,
+       "IQ Test": 29.33,
+       "Jigsaw": 74.0,
+       "Multi-view Reasoning": 53.38,
+       "Object Localization": 54.1,
+       "Relative Depth": 41.94,
+       "Relative Reflectance": 34.33,
+       "Semantic Correspondence": 30.22,
+       "Spatial Relation": 79.02,
+       "Visual Correspondence": 42.44,
+       "Visual Similarity": 85.19,
+       "acc_stderr": 0,
+       "acc": 52.18
+     }
+   }
+ }
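
A note on the OCRBench entry above: when no `final_score` pair is present, the per-category values are raw correct-answer counts, and they appear to sum to the score out of 1000 that `accuracy` reports (for Aria: 719 correct, i.e. 71.9). A quick consistency check, assuming that reading:

```python
# Aria's OCRBench per-category counts, copied from the file above.
ocrbench_counts = [48, 41, 44, 30, 23, 41, 174, 157, 155, 6]
total = sum(ocrbench_counts)           # 719 correct out of 1000 items
print(round(total / 1000 * 100, 1))    # 71.9, matching the reported "accuracy"
```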
Claude-3.5-Sonnet-20241022/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,270 @@
+ {
+   "config_general": {
+     "model_name": "Claude-3.5-Sonnet-20241022",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "艺术与设计": {
+         "num": 88,
+         "correct": 66,
+         "accuracy": 75.0
+       },
+       "overall": {
+         "num": 900,
+         "correct": 457,
+         "accuracy": 50.78
+       },
+       "商业": {
+         "num": 126,
+         "correct": 35,
+         "accuracy": 27.78
+       },
+       "科学": {
+         "num": 204,
+         "correct": 91,
+         "accuracy": 44.61
+       },
+       "健康与医学": {
+         "num": 153,
+         "correct": 93,
+         "accuracy": 60.78
+       },
+       "人文社会科学": {
+         "num": 85,
+         "correct": 53,
+         "accuracy": 62.35
+       },
+       "技术与工程": {
+         "num": 244,
+         "correct": 119,
+         "accuracy": 48.77
+       },
+       "accuracy": 50.78,
+       "acc_stderr": 0,
+       "acc": 50.78
+     },
+     "MMMU": {
+       "accuracy": 56.11,
+       "subject_score": {
+         "Accounting": 56.67,
+         "Agriculture": 56.67,
+         "Architecture": 20.0,
+         "Art": 81.67,
+         "Basic": 73.33,
+         "Biology": 53.33,
+         "Chemistry": 43.33,
+         "Clinical": 73.33,
+         "Computer": 60.0,
+         "Design": 80.0,
+         "Diagnostics": 40.0,
+         "Economics": 56.67,
+         "Electronics": 33.33,
+         "Energy": 36.67,
+         "Finance": 56.67,
+         "Geography": 63.33,
+         "History": 66.67,
+         "Literature": 76.67,
+         "Manage": 60.0,
+         "Marketing": 50.0,
+         "Materials": 26.67,
+         "Math": 30.0,
+         "Mechanical": 20.0,
+         "Music": 40.0,
+         "Pharmacy": 70.0,
+         "Physics": 50.0,
+         "Psychology": 70.0,
+         "Public": 80.0,
+         "Sociology": 76.67
+       },
+       "difficulty_score": {
+         "Medium": 57.31,
+         "Easy": 68.47,
+         "Hard": 33.15
+       },
+       "acc_stderr": 0,
+       "acc": 56.11
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 42.14,
+       "subject_score": {
+         "History": 46.43,
+         "Art": 66.04,
+         "Design": 60.0,
+         "Literature": 57.69,
+         "Agriculture": 35.0,
+         "Finance": 63.33,
+         "Sociology": 48.15,
+         "Accounting": 51.72,
+         "Energy_and_Power": 20.69,
+         "Pharmacy": 56.14,
+         "Architecture_and_Engineering": 28.33,
+         "Clinical_Medicine": 45.76,
+         "Public_Health": 58.62,
+         "Physics": 31.67,
+         "Art_Theory": 67.27,
+         "Electronics": 45.0,
+         "Psychology": 38.33,
+         "Biology": 30.51,
+         "Manage": 38.0,
+         "Economics": 52.54,
+         "Mechanical_Engineering": 23.73,
+         "Diagnostics_and_Laboratory_Medicine": 35.0,
+         "Basic_Medical_Science": 38.46,
+         "Computer_Science": 48.33,
+         "Math": 28.33,
+         "Music": 28.33,
+         "Materials": 16.67,
+         "Marketing": 42.37,
+         "Chemistry": 33.33,
+         "Geography": 34.62
+       },
+       "difficulty_score": {
+         "Medium": 40.45,
+         "Easy": 54.17,
+         "Hard": 29.68
+       },
+       "acc_stderr": 0,
+       "acc": 42.14
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 56.65,
+       "subject_score": {
+         "History": 62.5,
+         "Art": 50.94,
+         "Design": 65.0,
+         "Literature": 63.46,
+         "Agriculture": 61.67,
+         "Finance": 46.67,
+         "Sociology": 66.67,
+         "Accounting": 65.52,
+         "Energy_and_Power": 46.55,
+         "Pharmacy": 54.39,
+         "Architecture_and_Engineering": 38.33,
+         "Clinical_Medicine": 49.15,
+         "Public_Health": 68.97,
+         "Physics": 58.33,
+         "Art_Theory": 56.36,
+         "Electronics": 51.67,
+         "Psychology": 78.33,
+         "Biology": 61.02,
+         "Manage": 58.0,
+         "Economics": 57.63,
+         "Mechanical_Engineering": 55.93,
+         "Diagnostics_and_Laboratory_Medicine": 58.33,
+         "Basic_Medical_Science": 59.62,
+         "Computer_Science": 66.67,
+         "Math": 60.0,
+         "Music": 53.33,
+         "Materials": 40.0,
+         "Marketing": 42.37,
+         "Chemistry": 55.0,
+         "Geography": 48.08
+       },
+       "acc_stderr": 0,
+       "acc": 56.65
+     },
+     "OCRBench": {
+       "final_score": [
+         793,
+         1000
+       ],
+       "accuracy": 79.3,
+       "Regular Text Recognition": [
+         49,
+         50
+       ],
+       "Irregular Text Recognition": [
+         48,
+         50
+       ],
+       "Artistic Text Recognition": [
+         45,
+         50
+       ],
+       "Handwriting Recognition": [
+         44,
+         50
+       ],
+       "Digit String Recognition": [
+         34,
+         50
+       ],
+       "Non-Semantic Text Recognition": [
+         46,
+         50
+       ],
+       "Scene Text-centric VQA": [
+         170,
+         200
+       ],
+       "Doc-oriented VQA": [
+         164,
+         200
+       ],
+       "Key Information Extraction": [
+         173,
+         200
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         20,
+         100
+       ],
+       "acc_stderr": 0,
+       "acc": 79.3
+     },
+     "MathVision": {
+       "reject_info": {
+         "reject_rate": 0.03,
+         "reject_number": 1,
+         "total_question": 3040
+       },
+       "accuracy": 38.3,
+       "acc_stderr": 0,
+       "acc": 38.3
+     },
+     "CII-Bench": {
+       "reject_info": {
+         "reject_rate": 0.52,
+         "reject_number": 4,
+         "total_question": 765
+       },
+       "accuracy": 63.07,
+       "domain_score": {
+         "Life": 64.5,
+         "Art": 61.76,
+         "CTC": 57.46,
+         "Society": 64.86,
+         "Env.": 62.75,
+         "Politics": 75.0
+       },
+       "emotion_score": {
+         "Neutral": 65.28,
+         "Negative": 63.4,
+         "Positive": 60.17
+       },
+       "acc_stderr": 0,
+       "acc": 63.07
+     },
+     "Blink": {
+       "accuracy": 60.34,
+       "Art Style": 83.76,
+       "Counting": 60.0,
+       "Forensic Detection": 49.24,
+       "Functional Correspondence": 57.69,
+       "IQ Test": 29.33,
+       "Jigsaw": 59.33,
+       "Multi-view Reasoning": 38.35,
+       "Object Localization": 53.28,
+       "Relative Depth": 67.74,
+       "Relative Reflectance": 35.07,
+       "Semantic Correspondence": 53.24,
+       "Spatial Relation": 78.32,
+       "Visual Correspondence": 87.21,
+       "Visual Similarity": 89.63,
+       "acc_stderr": 0,
+       "acc": 60.34
+     }
+   }
+ }
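
The `reject_info` blocks above record questions the model declined or failed to answer, and they are internally consistent: `reject_rate` appears to be `reject_number / total_question` expressed as a percentage and rounded to two decimals. A small sketch under that assumption:

```python
def reject_rate(reject_number: int, total_question: int) -> float:
    """Rejection rate as a percentage, rounded to two decimal places."""
    return round(reject_number / total_question * 100, 2)

print(reject_rate(1, 3040))  # 0.03, matching the MathVision entry above
print(reject_rate(4, 765))   # 0.52, matching the CII-Bench entry above
```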
Claude3-Opus-20240229/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,101 @@
+ {
+   "config_general": {
+     "model_name": "Claude3-Opus-20240229",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 40.44,
+       "acc_stderr": 0,
+       "acc": 40.44
+     },
+     "MMMU": {
+       "accuracy": 47.0,
+       "acc_stderr": 0,
+       "acc": 47.0
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 30.46,
+       "acc_stderr": 0,
+       "acc": 30.46
+     },
+     "MMMU_Pro_vision": {
+       "reject_info": {
+         "reject_rate": 1.62,
+         "reject_number": 28,
+         "total_question": 1730
+       },
+       "accuracy": 52.0,
+       "acc_stderr": 0,
+       "acc": 52.0
+     },
+     "OCRBench": {
+       "accuracy": 70.0,
+       "Regular Text Recognition": 49,
+       "Irregular Text Recognition": 42,
+       "Artistic Text Recognition": 46,
+       "Handwriting Recognition": 40,
+       "Digit String Recognition": 30,
+       "Non-Semantic Text Recognition": 44,
+       "Scene Text-centric VQA": 157,
+       "Doc-oriented VQA": 145,
+       "Key Information Extraction": 139,
+       "Handwritten Mathematical Expression Recognition": 8,
+       "acc_stderr": 0,
+       "acc": 70.0
+     },
+     "MathVision": {
+       "reject_info": {
+         "reject_rate": 0.03,
+         "reject_number": 1,
+         "total_question": 3040
+       },
+       "accuracy": 24.61,
+       "acc_stderr": 0,
+       "acc": 24.61
+     },
+     "CII-Bench": {
+       "accuracy": 47.71,
+       "domain_score": {
+         "Life": 43.29,
+         "Art": 53.68,
+         "CTC": 45.93,
+         "Society": 43.24,
+         "Env.": 61.11,
+         "Politics": 70.83
+       },
+       "emotion_score": {
+         "Neutral": 48.12,
+         "Negative": 49.43,
+         "Positive": 45.3
+       },
+       "acc_stderr": 0,
+       "acc": 47.71
+     },
+     "Blink": {
+       "reject_info": {
+         "reject_rate": 0.16,
+         "reject_number": 3,
+         "total_question": 1901
+       },
+       "accuracy": 44.42,
+       "Art Style": 69.23,
+       "Counting": 50.83,
+       "Forensic Detection": 36.36,
+       "Functional Correspondence": 23.08,
+       "IQ Test": 22.0,
+       "Jigsaw": 54.67,
+       "Multi-view Reasoning": 48.12,
+       "Object Localization": 54.1,
+       "Relative Depth": 53.23,
+       "Relative Reflectance": 37.31,
+       "Semantic Correspondence": 29.41,
+       "Spatial Relation": 53.15,
+       "Visual Correspondence": 30.23,
+       "Visual Similarity": 69.63,
+       "acc_stderr": 0,
+       "acc": 44.42
+     }
+   }
+ }
Doubao-Pro-Vision-32k-241028/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,265 @@
+ {
+   "config_general": {
+     "model_name": "Doubao-Pro-Vision-32k-241028",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "艺术与设计": {
+         "num": 88,
+         "correct": 69,
+         "accuracy": 78.41
+       },
+       "overall": {
+         "num": 900,
+         "correct": 557,
+         "accuracy": 61.89
+       },
+       "商业": {
+         "num": 126,
+         "correct": 52,
+         "accuracy": 41.27
+       },
+       "科学": {
+         "num": 204,
+         "correct": 127,
+         "accuracy": 62.25
+       },
+       "健康与医学": {
+         "num": 153,
+         "correct": 102,
+         "accuracy": 66.67
+       },
+       "人文社会科学": {
+         "num": 85,
+         "correct": 65,
+         "accuracy": 76.47
+       },
+       "技术与工程": {
+         "num": 244,
+         "correct": 142,
+         "accuracy": 58.2
+       },
+       "accuracy": 61.89,
+       "acc_stderr": 0,
+       "acc": 61.89
+     },
+     "MMMU": {
+       "accuracy": 62.33,
+       "subject_score": {
+         "Accounting": 63.33,
+         "Agriculture": 53.33,
+         "Architecture": 46.67,
+         "Art": 81.67,
+         "Basic": 60.0,
+         "Biology": 53.33,
+         "Chemistry": 60.0,
+         "Clinical": 73.33,
+         "Computer": 70.0,
+         "Design": 76.67,
+         "Diagnostics": 56.67,
+         "Economics": 73.33,
+         "Electronics": 46.67,
+         "Energy": 63.33,
+         "Finance": 53.33,
+         "Geography": 63.33,
+         "History": 80.0,
+         "Literature": 93.33,
+         "Manage": 60.0,
+         "Marketing": 70.0,
+         "Materials": 36.67,
+         "Math": 43.33,
+         "Mechanical": 50.0,
+         "Music": 26.67,
+         "Pharmacy": 73.33,
+         "Physics": 63.33,
+         "Psychology": 56.67,
+         "Public": 73.33,
+         "Sociology": 66.67
+       },
+       "difficulty_score": {
+         "Medium": 58.49,
+         "Easy": 73.9,
+         "Hard": 52.49
+       },
+       "acc_stderr": 0,
+       "acc": 62.33
+     },
+     "MMMU_Pro_standard": {
+       "reject_info": {
+         "reject_rate": 0.06,
+         "reject_number": 1,
+         "total_question": 1730
+       },
+       "accuracy": 44.59,
+       "subject_score": {
+         "History": 60.71,
+         "Art": 64.15,
+         "Design": 63.33,
+         "Literature": 75.0,
+         "Agriculture": 30.0,
+         "Finance": 33.33,
+         "Sociology": 57.41,
+         "Accounting": 29.31,
+         "Energy_and_Power": 20.69,
+         "Pharmacy": 51.79,
+         "Architecture_and_Engineering": 40.0,
+         "Clinical_Medicine": 42.37,
+         "Public_Health": 41.38,
+         "Physics": 38.33,
+         "Art_Theory": 74.55,
+         "Electronics": 61.67,
+         "Psychology": 38.33,
+         "Biology": 49.15,
+         "Manage": 38.0,
+         "Economics": 50.85,
+         "Mechanical_Engineering": 37.29,
+         "Diagnostics_and_Laboratory_Medicine": 33.33,
+         "Basic_Medical_Science": 48.08,
+         "Computer_Science": 48.33,
+         "Math": 36.67,
+         "Music": 28.33,
+         "Materials": 28.33,
+         "Marketing": 47.46,
+         "Chemistry": 40.0,
+         "Geography": 38.46
+       },
+       "difficulty_score": {
+         "Medium": 40.82,
+         "Easy": 57.69,
+         "Hard": 34.91
+       },
+       "acc_stderr": 0,
+       "acc": 44.59
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 40.06,
+       "subject_score": {
+         "History": 35.71,
+         "Art": 18.87,
+         "Design": 31.67,
+         "Literature": 40.38,
+         "Agriculture": 20.0,
+         "Finance": 33.33,
+         "Sociology": 29.63,
+         "Accounting": 39.66,
+         "Energy_and_Power": 63.79,
+         "Pharmacy": 52.63,
+         "Architecture_and_Engineering": 46.67,
+         "Clinical_Medicine": 15.25,
+         "Public_Health": 39.66,
+         "Physics": 60.0,
+         "Art_Theory": 43.64,
+         "Electronics": 41.67,
+         "Psychology": 31.67,
+         "Biology": 32.2,
+         "Manage": 52.0,
+         "Economics": 42.37,
+         "Mechanical_Engineering": 55.93,
+         "Diagnostics_and_Laboratory_Medicine": 13.33,
+         "Basic_Medical_Science": 38.46,
+         "Computer_Science": 55.0,
+         "Math": 58.33,
+         "Music": 40.0,
+         "Materials": 48.33,
+         "Marketing": 37.29,
+         "Chemistry": 51.67,
+         "Geography": 30.77
+       },
+       "acc_stderr": 0,
+       "acc": 40.06
+     },
+     "OCRBench": {
+       "final_score": [
+         826,
+         1000
+       ],
+       "accuracy": 82.6,
+       "Regular Text Recognition": [
+         48,
+         50
+       ],
+       "Irregular Text Recognition": [
+         45,
+         50
+       ],
+       "Artistic Text Recognition": [
+         46,
+         50
+       ],
+       "Handwriting Recognition": [
+         34,
+         50
+       ],
+       "Digit String Recognition": [
+         36,
+         50
+       ],
+       "Non-Semantic Text Recognition": [
+         39,
+         50
+       ],
+       "Scene Text-centric VQA": [
+         179,
+         200
+       ],
+       "Doc-oriented VQA": [
+         165,
+         200
+       ],
+       "Key Information Extraction": [
+         169,
+         200
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         65,
+         100
+       ],
+       "acc_stderr": 0,
+       "acc": 82.6
+     },
+     "MathVision": {
+       "accuracy": 35.56,
+       "acc_stderr": 0,
+       "acc": 35.56
+     },
+     "CII-Bench": {
+       "accuracy": 67.97,
+       "domain_score": {
+         "Life": 68.4,
+         "Art": 65.44,
+         "CTC": 62.96,
+         "Society": 70.27,
+         "Env.": 70.37,
+         "Politics": 83.33
+       },
+       "emotion_score": {
+         "Neutral": 70.3,
+         "Negative": 66.42,
+         "Positive": 67.09
+       },
+       "acc_stderr": 0,
+       "acc": 67.97
+     },
+     "Blink": {
+       "accuracy": 60.81,
+       "Art Style": 77.78,
+       "Counting": 71.67,
+       "Forensic Detection": 59.09,
+       "Functional Correspondence": 36.15,
+       "IQ Test": 25.33,
+       "Jigsaw": 77.33,
+       "Multi-view Reasoning": 48.87,
+       "Object Localization": 61.48,
+       "Relative Depth": 70.97,
+       "Relative Reflectance": 35.82,
+       "Semantic Correspondence": 56.12,
+       "Spatial Relation": 81.82,
+       "Visual Correspondence": 67.44,
+       "Visual Similarity": 83.7,
+       "acc_stderr": 0,
+       "acc": 60.81
+     }
+   }
+ }
GLM-4V-Plus/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "config_general": {
+     "model_name": "GLM-4V-Plus",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 43.56,
+       "acc_stderr": 0,
+       "acc": 43.56
+     },
+     "MMMU": {
+       "accuracy": 54.44,
+       "acc_stderr": 0,
+       "acc": 54.44
+     },
+     "MMMU_Pro_standard": {
+       "reject_info": {
+         "reject_rate": 0.06,
+         "reject_number": 1,
+         "total_question": 1730
+       },
+       "accuracy": 37.19,
+       "acc_stderr": 0,
+       "acc": 37.19
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 23.47,
+       "acc_stderr": 0,
+       "acc": 23.47
+     },
+     "OCRBench": {
+       "reject_info": {
+         "reject_rate": 0.2,
+         "reject_number": 2,
+         "total_question": 1000
+       },
+       "accuracy": 81.663,
+       "Regular Text Recognition": 47,
+       "Irregular Text Recognition": 47,
+       "Artistic Text Recognition": 44,
+       "Handwriting Recognition": 32,
+       "Digit String Recognition": 35,
+       "Non-Semantic Text Recognition": 30,
+       "Scene Text-centric VQA": 177,
+       "Doc-oriented VQA": 154,
+       "Key Information Extraction": 165,
+       "Handwritten Mathematical Expression Recognition": 84,
+       "acc_stderr": 0,
+       "acc": 81.663
+     },
+     "MathVision": {
+       "accuracy": 17.66,
+       "acc_stderr": 0,
+       "acc": 17.66
+     },
+     "CII-Bench": {
+       "accuracy": 60.86,
+       "domain_score": {
+         "Life": 55.41,
+         "Art": 59.56,
+         "CTC": 62.96,
+         "Society": 64.67,
+         "Env.": 66.67,
+         "Politics": 66.67
+       },
+       "emotion_score": {
+         "Neutral": 60.38,
+         "Negative": 60.75,
+         "Positive": 61.54
+       },
+       "acc_stderr": 0,
+       "acc": 60.86
+     },
+     "Blink": {
+       "accuracy": 55.44,
+       "Art Style": 51.28,
+       "Counting": 75.0,
+       "Forensic Detection": 13.64,
+       "Functional Correspondence": 35.38,
+       "IQ Test": 24.67,
+       "Jigsaw": 69.33,
+       "Multi-view Reasoning": 52.63,
+       "Object Localization": 63.93,
+       "Relative Depth": 69.35,
+       "Relative Reflectance": 28.36,
+       "Semantic Correspondence": 48.92,
+       "Spatial Relation": 86.71,
+       "Visual Correspondence": 72.09,
+       "Visual Similarity": 82.22,
+       "acc_stderr": 0,
+       "acc": 55.44
+     }
+   }
+ }
GPT-4o-20240806/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "config_general": {
+     "model_name": "GPT-4o-20240806",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 50.78,
+       "acc_stderr": 0,
+       "acc": 50.78
+     },
+     "MMMU": {
+       "accuracy": 57.22,
+       "acc_stderr": 0,
+       "acc": 57.22
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 37.17,
+       "acc_stderr": 0,
+       "acc": 37.17
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 46.53,
+       "acc_stderr": 0,
+       "acc": 46.53
+     },
+     "OCRBench": {
+       "accuracy": 80.4,
+       "Regular Text Recognition": 50,
+       "Irregular Text Recognition": 48,
+       "Artistic Text Recognition": 49,
+       "Handwriting Recognition": 41,
+       "Digit String Recognition": 36,
+       "Non-Semantic Text Recognition": 46,
+       "Scene Text-centric VQA": 177,
+       "Doc-oriented VQA": 170,
+       "Key Information Extraction": 163,
+       "Handwritten Mathematical Expression Recognition": 24,
+       "acc_stderr": 0,
+       "acc": 80.4
+     },
+     "MathVision": {
+       "reject_info": {
+         "reject_rate": 0.03,
+         "reject_number": 1,
+         "total_question": 3040
+       },
+       "accuracy": 28.79,
+       "acc_stderr": 0,
+       "acc": 28.79
+     },
+     "CII-Bench": {
+       "accuracy": 59.22,
+       "domain_score": {
+         "Life": 60.61,
+         "Art": 60.29,
+         "CTC": 45.93,
+         "Society": 63.24,
+         "Env.": 68.52,
+         "Politics": 62.5
+       },
+       "emotion_score": {
+         "Neutral": 59.77,
+         "Negative": 60.75,
+         "Positive": 56.84
+       },
+       "acc_stderr": 0,
+       "acc": 59.22
+     },
+     "Blink": {
+       "reject_info": {
+         "reject_rate": 0.11,
+         "reject_number": 2,
+         "total_question": 1901
+       },
+       "accuracy": 64.19,
+       "Art Style": 86.32,
+       "Counting": 71.67,
+       "Forensic Detection": 77.27,
+       "Functional Correspondence": 58.46,
+       "IQ Test": 26.0,
+       "Jigsaw": 66.67,
+       "Multi-view Reasoning": 39.1,
+       "Object Localization": 63.11,
+       "Relative Depth": 79.84,
+       "Relative Reflectance": 29.1,
+       "Semantic Correspondence": 59.71,
+       "Spatial Relation": 76.92,
+       "Visual Correspondence": 87.65,
+       "Visual Similarity": 78.52,
+       "acc_stderr": 0,
+       "acc": 64.19
+     }
+   }
+ }
GPT-4o-20241120/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,180 @@
+ {
+   "config_general": {
+     "model_name": "GPT-4o-20241120",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "艺术与设计": {
+         "num": 88,
+         "correct": 64,
+         "accuracy": 72.73
+       },
+       "overall": {
+         "num": 900,
+         "correct": 438,
+         "accuracy": 48.67
+       },
+       "商业": {
+         "num": 126,
+         "correct": 44,
+         "accuracy": 34.92
+       },
+       "科学": {
+         "num": 204,
+         "correct": 90,
+         "accuracy": 44.12
+       },
+       "健康与医学": {
+         "num": 153,
+         "correct": 86,
+         "accuracy": 56.21
+       },
+       "人文社会科学": {
+         "num": 85,
+         "correct": 51,
+         "accuracy": 60.0
+       },
+       "技术与工程": {
+         "num": 244,
+         "correct": 103,
+         "accuracy": 42.21
+       },
+       "accuracy": 48.67,
+       "acc_stderr": 0,
+       "acc": 48.67
+     },
+     "MMMU": {
+       "reject_info": {
+         "reject_rate": 0.67,
+         "reject_number": 6,
+         "total_question": 900
+       },
+       "accuracy": 60.4,
+       "acc_stderr": 0,
+       "acc": 60.4
+     },
+     "MMMU_Pro_standard": {
+       "reject_info": {
+         "reject_rate": 0.64,
+         "reject_number": 11,
+         "total_question": 1730
+       },
+       "accuracy": 43.22,
+       "acc_stderr": 0,
+       "acc": 43.22
+     },
+     "MMMU_Pro_vision": {
+       "reject_info": {
+         "reject_rate": 0.81,
+         "reject_number": 14,
+         "total_question": 1730
+       },
+       "accuracy": 45.4,
+       "acc_stderr": 0,
+       "acc": 45.4
+     },
+     "OCRBench": {
+       "reject_info": {
+         "reject_rate": 0.5,
+         "reject_number": 5,
+         "total_question": 1000
+       },
+       "final_score": [
+         800,
+         995
+       ],
+       "accuracy": 80.402,
+       "Regular Text Recognition": [
+         49,
+         50
+       ],
+       "Irregular Text Recognition": [
+         48,
+         50
+       ],
+       "Artistic Text Recognition": [
+         49,
+         50
+       ],
+       "Handwriting Recognition": [
+         41,
+         50
+       ],
+       "Digit String Recognition": [
+         37,
+         50
+       ],
+       "Non-Semantic Text Recognition": [
+         47,
+         50
+       ],
+       "Scene Text-centric VQA": [
+         179,
+         200
+       ],
+       "Doc-oriented VQA": [
+         162,
+         195
+       ],
+       "Key Information Extraction": [
+         165,
+         200
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         23,
+         100
+       ],
+       "acc_stderr": 0,
+       "acc": 80.402
+     },
+     "MathVision": {
+       "reject_info": {
+         "reject_rate": 0.56,
+         "reject_number": 17,
+         "total_question": 3040
+       },
+       "accuracy": 29.61,
+       "acc_stderr": 0,
+       "acc": 29.61
+     },
+     "CII-Bench": {
+       "accuracy": 61.05,
+       "domain_score": {
+         "Art": 63.24,
+         "CTC": 51.11,
+         "Life": 59.31,
+         "Society": 65.41,
+         "Env.": 74.07,
+         "Politics": 58.33
+       },
+       "emotion_score": {
+         "Negative": 63.77,
+         "Positive": 58.97,
+         "Neutral": 60.15
+       },
+       "acc_stderr": 0,
+       "acc": 61.05
+     },
+     "Blink": {
+       "accuracy": 65.65,
+       "Art Style": 73.5,
+       "Counting": 73.33,
+       "Forensic Detection": 79.55,
+       "Functional Correspondence": 55.38,
+       "IQ Test": 24.0,
+       "Jigsaw": 66.67,
+       "Multi-view Reasoning": 49.62,
+       "Object Localization": 63.11,
+       "Relative Depth": 81.45,
+       "Relative Reflectance": 32.84,
+       "Semantic Correspondence": 64.75,
+       "Spatial Relation": 81.82,
+       "Visual Correspondence": 87.79,
+       "Visual Similarity": 85.19,
+       "acc_stderr": 0,
+       "acc": 65.65
+     }
+   }
+ }
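
The GPT-4o-20241120 file above shows how rejections feed into OCRBench scoring: 5 of the 1000 questions were rejected, the `final_score` denominator drops to 995 (and Doc-oriented VQA is scored out of 195 rather than 200), so `accuracy` seems to be computed over attempted questions only. A check under that reading:

```python
score, attempted = 800, 995               # "final_score" from the entry above
print(round(score / attempted * 100, 3))  # 80.402, the reported "accuracy"
```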
GPT-4o-mini-20240718/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,91 @@
+ {
+   "config_general": {
+     "model_name": "GPT-4o-mini-20240718",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 39.44,
+       "acc_stderr": 0,
+       "acc": 39.44
+     },
+     "MMMU": {
+       "accuracy": 49.78,
+       "acc_stderr": 0,
+       "acc": 49.78
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 31.39,
+       "acc_stderr": 0,
+       "acc": 31.39
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 23.35,
+       "acc_stderr": 0,
+       "acc": 23.35
+     },
+     "OCRBench": {
+       "accuracy": 75.0,
+       "Regular Text Recognition": 49,
+       "Irregular Text Recognition": 47,
+       "Artistic Text Recognition": 48,
+       "Handwriting Recognition": 36,
+       "Digit String Recognition": 33,
+       "Non-Semantic Text Recognition": 44,
+       "Scene Text-centric VQA": 168,
+       "Doc-oriented VQA": 147,
+       "Key Information Extraction": 164,
+       "Handwritten Mathematical Expression Recognition": 14,
+       "acc_stderr": 0,
+       "acc": 75.0
+     },
+     "MathVision": {
+       "reject_info": {
+         "reject_rate": 0.03,
+         "reject_number": 1,
+         "total_question": 3040
+       },
+       "accuracy": 26.95,
+       "acc_stderr": 0,
+       "acc": 26.95
+     },
+     "CII-Bench": {
+       "accuracy": 45.75,
+       "domain_score": {
+         "Life": 40.69,
+         "Art": 51.47,
+         "CTC": 40.74,
+         "Society": 47.57,
+         "Env.": 57.41,
+         "Politics": 50.0
+       },
+       "emotion_score": {
+         "Neutral": 45.86,
+         "Negative": 49.43,
+         "Positive": 41.45
+       },
+       "acc_stderr": 0,
+       "acc": 45.75
+     },
+     "Blink": {
+       "accuracy": 54.87,
+       "Art Style": 82.05,
+       "Counting": 53.33,
+       "Forensic Detection": 47.73,
+       "Functional Correspondence": 33.85,
+       "IQ Test": 28.67,
+       "Jigsaw": 72.0,
+       "Multi-view Reasoning": 50.38,
+       "Object Localization": 54.1,
+       "Relative Depth": 55.65,
+       "Relative Reflectance": 32.09,
+       "Semantic Correspondence": 45.32,
+       "Spatial Relation": 71.33,
+       "Visual Correspondence": 60.47,
+       "Visual Similarity": 82.22,
+       "acc_stderr": 0,
+       "acc": 54.87
+     }
+   }
+ }
Gemini-1.5-Flash/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,91 @@
+ {
+   "config_general": {
+     "model_name": "Gemini-1.5-Flash",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 42.89,
+       "acc_stderr": 0,
+       "acc": 42.89
+     },
+     "MMMU": {
+       "accuracy": 48.78,
+       "acc_stderr": 0,
+       "acc": 48.78
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 31.97,
+       "acc_stderr": 0,
+       "acc": 31.97
+     },
+     "MMMU_Pro_vision": {
+       "reject_info": {
+         "reject_rate": 0.06,
+         "reject_number": 1,
+         "total_question": 1730
+       },
+       "accuracy": 33.02,
+       "acc_stderr": 0,
+       "acc": 33.02
+     },
+     "OCRBench": {
+       "accuracy": 74.3,
+       "Regular Text Recognition": 48,
+       "Irregular Text Recognition": 39,
+       "Artistic Text Recognition": 46,
+       "Handwriting Recognition": 32,
+       "Digit String Recognition": 31,
+       "Non-Semantic Text Recognition": 33,
+       "Scene Text-centric VQA": 172,
+       "Doc-oriented VQA": 147,
+       "Key Information Extraction": 171,
+       "Handwritten Mathematical Expression Recognition": 24,
+       "acc_stderr": 0,
+       "acc": 74.3
+     },
+     "MathVision": {
+       "accuracy": 21.15,
+       "acc_stderr": 0,
+       "acc": 21.15
+     },
+     "CII-Bench": {
+       "accuracy": 45.23,
+       "domain_score": {
+         "Life": 41.56,
+         "Art": 51.47,
+         "CTC": 37.04,
+         "Society": 47.57,
+         "Env.": 51.85,
+         "Politics": 58.33
+       },
+       "emotion_score": {
+         "Neutral": 45.49,
+         "Negative": 49.43,
+         "Positive": 40.17
+       },
+       "acc_stderr": 0,
+       "acc": 45.23
+     },
+     "Blink": {
+       "accuracy": 55.71,
+       "Art Style": 73.5,
+       "Counting": 55.83,
+       "Forensic Detection": 47.73,
+       "Functional Correspondence": 31.54,
+       "IQ Test": 26.67,
+       "Jigsaw": 72.0,
+       "Multi-view Reasoning": 48.12,
+       "Object Localization": 60.66,
+       "Relative Depth": 68.55,
+       "Relative Reflectance": 33.58,
+       "Semantic Correspondence": 50.36,
+       "Spatial Relation": 69.23,
+       "Visual Correspondence": 64.53,
+       "Visual Similarity": 78.52,
+       "acc_stderr": 0,
+       "acc": 55.71
+     }
+   }
+ }
Gemini-1.5-Pro/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,133 @@
+ {
+   "config_general": {
+     "model_name": "Gemini-1.5-Pro",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 50.0,
+       "acc_stderr": 0,
+       "acc": 50.0
+     },
+     "MMMU": {
+       "accuracy": 61.78,
+       "acc_stderr": 0,
+       "acc": 61.78
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 47.8,
+       "subject_score": {
+         "History": 55.36,
+         "Design": 68.33,
+         "Literature": 75.0,
+         "Sociology": 55.56,
+         "Pharmacy": 47.37,
+         "Art": 69.81,
+         "Clinical_Medicine": 32.2,
+         "Accounting": 58.62,
+         "Agriculture": 35.0,
+         "Public_Health": 58.62,
+         "Physics": 41.67,
+         "Art_Theory": 67.27,
+         "Energy_and_Power": 41.38,
+         "Psychology": 51.67,
+         "Architecture_and_Engineering": 36.67,
+         "Finance": 58.33,
+         "Manage": 40.0,
+         "Biology": 35.59,
+         "Diagnostics_and_Laboratory_Medicine": 36.67,
+         "Economics": 45.76,
+         "Basic_Medical_Science": 46.15,
+         "Mechanical_Engineering": 37.29,
+         "Electronics": 55.0,
+         "Computer_Science": 51.67,
+         "Math": 46.67,
+         "Music": 18.33,
+         "Materials": 21.67,
+         "Marketing": 57.63,
+         "Chemistry": 51.67,
+         "Geography": 44.23
+       },
+       "difficulty_score": {
+         "Medium": 43.57,
+         "Hard": 38.15,
+         "Easy": 61.55
+       },
+       "acc_stderr": 0,
+       "acc": 47.8
+     },
+     "MMMU_Pro_vision": {
+       "reject_info": {
+         "reject_rate": 3.29,
+         "reject_number": 57,
+         "total_question": 1730
+       },
+       "accuracy": 47.7,
+       "acc_stderr": 0,
+       "acc": 47.7
+     },
+     "OCRBench": {
+       "accuracy": 80.4,
+       "Regular Text Recognition": 49,
+       "Irregular Text Recognition": 42,
+       "Artistic Text Recognition": 47,
+       "Handwriting Recognition": 33,
+       "Digit String Recognition": 33,
+       "Non-Semantic Text Recognition": 44,
+       "Scene Text-centric VQA": 178,
+       "Doc-oriented VQA": 173,
+       "Key Information Extraction": 182,
+       "Handwritten Mathematical Expression Recognition": 23,
+       "acc_stderr": 0,
+       "acc": 80.4
+     },
+     "MathVision": {
+       "accuracy": 44.64,
+       "acc_stderr": 0,
+       "acc": 44.64
+     },
+     "CII-Bench": {
+       "accuracy": 59.61,
+       "domain_score": {
+         "Art": 61.03,
+         "Society": 61.08,
+         "Env.": 61.11,
+         "CTC": 48.89,
+         "Life": 62.77,
+         "Politics": 66.67
+       },
+       "emotion_score": {
+         "Negative": 62.64,
+         "Positive": 55.13,
+         "Neutral": 60.53
+       },
+       "acc_stderr": 0,
+       "acc": 59.61
+     },
+     "Blink": {
+       "reject_info": {
+         "reject_rate": 0.05,
+         "reject_number": 1,
+         "total_question": 1901
+       },
+       "accuracy": 64.11,
+       "Art Style": 77.78,
+       "Counting": 63.33,
+       "Forensic Detection": 71.97,
+       "Functional Correspondence": 54.62,
+       "IQ Test": 28.67,
+       "Jigsaw": 79.33,
+       "Multi-view Reasoning": 54.14,
+       "Object Localization": 63.93,
+       "Relative Depth": 75.81,
+       "Relative Reflectance": 30.83,
+       "Semantic Correspondence": 53.96,
+       "Spatial Relation": 76.22,
+       "Visual Correspondence": 83.72,
+       "Visual Similarity": 81.48,
+       "acc_stderr": 0,
+       "acc": 64.11
+     }
+   }
+ }
Idefics3-8B-Llama3/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "Idefics3-8B-Llama3",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 33.89,
+       "acc_stderr": 0,
+       "acc": 33.89
+     },
+     "MMMU": {
+       "accuracy": 42.22,
+       "acc_stderr": 0,
+       "acc": 42.22
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 27.86,
+       "acc_stderr": 0,
+       "acc": 27.86
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 13.53,
+       "acc_stderr": 0,
+       "acc": 13.53
+     },
+     "OCRBench": {
+       "accuracy": 55.3,
+       "Regular Text Recognition": 30,
+       "Irregular Text Recognition": 24,
+       "Artistic Text Recognition": 23,
+       "Handwriting Recognition": 5,
+       "Digit String Recognition": 1,
+       "Non-Semantic Text Recognition": 1,
+       "Scene Text-centric VQA": 171,
+       "Doc-oriented VQA": 138,
+       "Key Information Extraction": 160,
+       "Handwritten Mathematical Expression Recognition": 0,
+       "acc_stderr": 0,
+       "acc": 55.3
+     },
+     "MathVision": {
+       "accuracy": 16.25,
+       "acc_stderr": 0,
+       "acc": 16.25
+     },
+     "CII-Bench": {
+       "accuracy": 39.22,
+       "domain_score": {
+         "Life": 35.93,
+         "Art": 41.18,
+         "CTC": 35.56,
+         "Society": 39.46,
+         "Env.": 53.7,
+         "Politics": 45.83
+       },
+       "emotion_score": {
+         "Neutral": 38.72,
+         "Negative": 41.89,
+         "Positive": 36.75
+       },
+       "acc_stderr": 0,
+       "acc": 39.22
+     },
+     "Blink": {
+       "accuracy": 48.13,
+       "Art Style": 64.96,
+       "Counting": 61.67,
+       "Forensic Detection": 28.79,
+       "Functional Correspondence": 25.38,
+       "IQ Test": 28.0,
+       "Jigsaw": 47.33,
+       "Multi-view Reasoning": 43.61,
+       "Object Localization": 56.56,
+       "Relative Depth": 57.26,
+       "Relative Reflectance": 38.06,
+       "Semantic Correspondence": 33.81,
+       "Spatial Relation": 79.72,
+       "Visual Correspondence": 42.44,
+       "Visual Similarity": 72.59,
+       "acc_stderr": 0,
+       "acc": 48.13
+     }
+   }
+ }
InternVL2-2B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "config_general": {
+     "model_name": "InternVL2-2B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "艺术与设计": {
+         "num": 88,
+         "correct": 35,
+         "accuracy": 39.77
+       },
+       "overall": {
+         "num": 900,
+         "correct": 263,
+         "accuracy": 29.22
+       },
+       "商业": {
+         "num": 126,
+         "correct": 22,
+         "accuracy": 17.46
+       },
+       "科学": {
+         "num": 204,
+         "correct": 43,
+         "accuracy": 21.08
+       },
+       "健康与医学": {
+         "num": 153,
+         "correct": 52,
+         "accuracy": 33.99
+       },
+       "人文社会科学": {
+         "num": 85,
+         "correct": 37,
+         "accuracy": 43.53
+       },
+       "技术与工程": {
+         "num": 244,
+         "correct": 74,
+         "accuracy": 30.33
+       },
+       "accuracy": 29.22,
+       "acc_stderr": 0,
+       "acc": 29.22
+     },
+     "MMMU": {
+       "accuracy": 32.89,
+       "acc_stderr": 0,
+       "acc": 32.89
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 20.4,
+       "acc_stderr": 0,
+       "acc": 20.4
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 10.81,
+       "acc_stderr": 0,
+       "acc": 10.81
+     },
+     "OCRBench": {
+       "final_score": [
+         714,
+         1000
+       ],
+       "accuracy": 71.4,
+       "Regular Text Recognition": [
+         45,
+         50
+       ],
+       "Irregular Text Recognition": [
+         45,
+         50
+       ],
+       "Artistic Text Recognition": [
+         44,
+         50
+       ],
+       "Handwriting Recognition": [
+         26,
+         50
+       ],
+       "Digit String Recognition": [
+         40,
+         50
+       ],
+       "Non-Semantic Text Recognition": [
+         39,
+         50
+       ],
+       "Scene Text-centric VQA": [
+         161,
+         200
+       ],
+       "Doc-oriented VQA": [
+         114,
+         200
+       ],
+       "Key Information Extraction": [
+         138,
+         200
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         62,
+         100
+       ],
+       "acc_stderr": 0,
+       "acc": 71.4
+     },
+     "MathVision": {
+       "accuracy": 14.01,
+       "acc_stderr": 0,
+       "acc": 14.01
+     },
+     "CII-Bench": {
+       "accuracy": 38.95,
+       "domain_score": {
+         "Life": 29.0,
+         "CTC": 40.74,
+         "Art": 42.65,
+         "Society": 41.62,
+         "Env.": 55.56,
+         "Politics": 45.83
+       },
+       "emotion_score": {
+         "Neutral": 42.11,
+         "Positive": 39.32,
+         "Negative": 35.47
+       },
+       "acc_stderr": 0,
+       "acc": 38.95
+     },
+     "Blink": {
+       "accuracy": 40.35,
+       "Art Style": 52.99,
+       "Counting": 46.67,
+       "Forensic Detection": 18.94,
+       "Functional Correspondence": 18.46,
+       "IQ Test": 28.67,
+       "Jigsaw": 42.0,
+       "Multi-view Reasoning": 46.62,
+       "Object Localization": 45.08,
+       "Relative Depth": 51.61,
+       "Relative Reflectance": 30.6,
+       "Semantic Correspondence": 28.06,
+       "Spatial Relation": 72.73,
+       "Visual Correspondence": 33.14,
+       "Visual Similarity": 53.33,
+       "acc_stderr": 0,
+       "acc": 40.35
+     }
+   }
+ }
InternVL2-8B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "InternVL2-8B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 42.56,
+       "acc_stderr": 0,
+       "acc": 42.56
+     },
+     "MMMU": {
+       "accuracy": 47.56,
+       "acc_stderr": 0,
+       "acc": 47.56
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 31.27,
+       "acc_stderr": 0,
+       "acc": 31.27
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 35.55,
+       "acc_stderr": 0,
+       "acc": 35.55
+     },
+     "OCRBench": {
+       "accuracy": 74.2,
+       "Regular Text Recognition": 47,
+       "Irregular Text Recognition": 45,
+       "Artistic Text Recognition": 43,
+       "Handwriting Recognition": 22,
+       "Digit String Recognition": 35,
+       "Non-Semantic Text Recognition": 37,
+       "Scene Text-centric VQA": 168,
+       "Doc-oriented VQA": 143,
+       "Key Information Extraction": 151,
+       "Handwritten Mathematical Expression Recognition": 51,
+       "acc_stderr": 0,
+       "acc": 74.2
+     },
+     "MathVision": {
+       "accuracy": 19.77,
+       "acc_stderr": 0,
+       "acc": 19.77
+     },
+     "CII-Bench": {
+       "accuracy": 51.9,
+       "domain_score": {
+         "Life": 42.86,
+         "Art": 55.88,
+         "CTC": 50.37,
+         "Society": 57.3,
+         "Env.": 68.52,
+         "Politics": 45.83
+       },
+       "emotion_score": {
+         "Neutral": 52.63,
+         "Negative": 53.21,
+         "Positive": 49.57
+       },
+       "acc_stderr": 0,
+       "acc": 51.9
+     },
+     "Blink": {
+       "accuracy": 48.03,
+       "Art Style": 70.94,
+       "Counting": 60.83,
+       "Forensic Detection": 24.24,
+       "Functional Correspondence": 20.77,
+       "IQ Test": 25.33,
+       "Jigsaw": 70.0,
+       "Multi-view Reasoning": 51.13,
+       "Object Localization": 52.46,
+       "Relative Depth": 66.13,
+       "Relative Reflectance": 31.34,
+       "Semantic Correspondence": 35.25,
+       "Spatial Relation": 83.22,
+       "Visual Correspondence": 38.95,
+       "Visual Similarity": 47.41,
+       "acc_stderr": 0,
+       "acc": 48.03
+     }
+   }
+ }
InternVL2-Llama3-76B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "InternVL2-Llama3-76B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 42.0,
+       "acc_stderr": 0,
+       "acc": 42.0
+     },
+     "MMMU": {
+       "accuracy": 55.89,
+       "acc_stderr": 0,
+       "acc": 55.89
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 36.3,
+       "acc_stderr": 0,
+       "acc": 36.3
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 13.82,
+       "acc_stderr": 0,
+       "acc": 13.82
+     },
+     "OCRBench": {
+       "accuracy": 77.7,
+       "Regular Text Recognition": 49,
+       "Irregular Text Recognition": 47,
+       "Artistic Text Recognition": 45,
+       "Handwriting Recognition": 21,
+       "Digit String Recognition": 27,
+       "Non-Semantic Text Recognition": 39,
+       "Scene Text-centric VQA": 181,
+       "Doc-oriented VQA": 146,
+       "Key Information Extraction": 159,
+       "Handwritten Mathematical Expression Recognition": 63,
+       "acc_stderr": 0,
+       "acc": 77.7
+     },
+     "MathVision": {
+       "accuracy": 17.07,
+       "acc_stderr": 0,
+       "acc": 17.07
+     },
+     "CII-Bench": {
+       "accuracy": 53.99,
+       "domain_score": {
+         "CTC": 53.33,
+         "Society": 56.76,
+         "Env.": 66.67,
+         "Life": 46.32,
+         "Art": 57.35,
+         "Politics": 62.5
+       },
+       "emotion_score": {
+         "Positive": 53.42,
+         "Negative": 52.08,
+         "Neutral": 56.39
+       },
+       "acc_stderr": 0,
+       "acc": 53.99
+     },
+     "Blink": {
+       "accuracy": 57.34,
+       "Art Style": 80.34,
+       "Counting": 65.83,
+       "Forensic Detection": 38.64,
+       "Functional Correspondence": 30.0,
+       "IQ Test": 28.67,
+       "Jigsaw": 69.33,
+       "Multi-view Reasoning": 54.89,
+       "Object Localization": 52.46,
+       "Relative Depth": 80.65,
+       "Relative Reflectance": 29.85,
+       "Semantic Correspondence": 39.57,
+       "Spatial Relation": 88.81,
+       "Visual Correspondence": 63.95,
+       "Visual Similarity": 82.22,
+       "acc_stderr": 0,
+       "acc": 57.34
+     }
+   }
+ }
Janus-1.3B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "Janus-1.3B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 25.44,
+       "acc_stderr": 0,
+       "acc": 25.44
+     },
+     "MMMU": {
+       "accuracy": 30.0,
+       "acc_stderr": 0,
+       "acc": 30.0
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 15.09,
+       "acc_stderr": 0,
+       "acc": 15.09
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 10.75,
+       "acc_stderr": 0,
+       "acc": 10.75
+     },
+     "OCRBench": {
+       "accuracy": 49.2,
+       "Regular Text Recognition": 45,
+       "Irregular Text Recognition": 41,
+       "Artistic Text Recognition": 44,
+       "Handwriting Recognition": 23,
+       "Digit String Recognition": 34,
+       "Non-Semantic Text Recognition": 29,
+       "Scene Text-centric VQA": 138,
+       "Doc-oriented VQA": 46,
+       "Key Information Extraction": 42,
+       "Handwritten Mathematical Expression Recognition": 50,
+       "acc_stderr": 0,
+       "acc": 49.2
+     },
+     "MathVision": {
+       "accuracy": 14.21,
+       "acc_stderr": 0,
+       "acc": 14.21
+     },
+     "CII-Bench": {
+       "accuracy": 21.96,
+       "domain_score": {
+         "Life": 16.88,
+         "Art": 27.21,
+         "CTC": 21.48,
+         "Society": 23.78,
+         "Env.": 31.48,
+         "Politics": 8.33
+       },
+       "emotion_score": {
+         "Neutral": 24.44,
+         "Negative": 21.51,
+         "Positive": 19.66
+       },
+       "acc_stderr": 0,
+       "acc": 21.96
+     },
+     "Blink": {
+       "accuracy": 38.56,
+       "Art Style": 52.99,
+       "Counting": 42.5,
+       "Forensic Detection": 22.73,
+       "Functional Correspondence": 24.62,
+       "IQ Test": 23.33,
+       "Jigsaw": 52.67,
+       "Multi-view Reasoning": 44.36,
+       "Object Localization": 41.8,
+       "Relative Depth": 58.87,
+       "Relative Reflectance": 28.36,
+       "Semantic Correspondence": 27.34,
+       "Spatial Relation": 46.15,
+       "Visual Correspondence": 27.33,
+       "Visual Similarity": 53.33,
+       "acc_stderr": 0,
+       "acc": 38.56
+     }
+   }
+ }
LLaVA-OneVision-0.5B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,260 @@
+ {
+   "config_general": {
+     "model_name": "LLaVA-OneVision-0.5B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "艺术与设计": {
+         "num": 88,
+         "correct": 36,
+         "accuracy": 40.91
+       },
+       "overall": {
+         "num": 900,
+         "correct": 243,
+         "accuracy": 27.0
+       },
+       "商业": {
+         "num": 126,
+         "correct": 16,
+         "accuracy": 12.7
+       },
+       "科学": {
+         "num": 204,
+         "correct": 34,
+         "accuracy": 16.67
+       },
+       "健康与医学": {
+         "num": 153,
+         "correct": 55,
+         "accuracy": 35.95
+       },
+       "人文社会科学": {
+         "num": 85,
+         "correct": 32,
+         "accuracy": 37.65
+       },
+       "技术与工程": {
+         "num": 244,
+         "correct": 70,
+         "accuracy": 28.69
+       },
+       "accuracy": 27.0,
+       "acc_stderr": 0,
+       "acc": 27.0
+     },
+     "MMMU": {
+       "accuracy": 33.11,
+       "subject_score": {
+         "Accounting": 40.0,
+         "Agriculture": 26.67,
+         "Architecture": 20.0,
+         "Art": 45.0,
+         "Basic": 36.67,
+         "Biology": 26.67,
+         "Chemistry": 23.33,
+         "Clinical": 23.33,
+         "Computer": 26.67,
+         "Design": 53.33,
+         "Diagnostics": 23.33,
+         "Economics": 33.33,
+         "Electronics": 23.33,
+         "Energy": 30.0,
+         "Finance": 20.0,
+         "Geography": 23.33,
+         "History": 40.0,
+         "Literature": 66.67,
+         "Manage": 26.67,
+         "Marketing": 40.0,
+         "Materials": 43.33,
+         "Math": 33.33,
+         "Mechanical": 40.0,
+         "Music": 26.67,
+         "Pharmacy": 30.0,
+         "Physics": 26.67,
+         "Psychology": 20.0,
+         "Public": 36.67,
+         "Sociology": 43.33
+       },
+       "difficulty_score": {
+         "Medium": 35.38,
+         "Easy": 35.59,
+         "Hard": 23.76
+       },
+       "acc_stderr": 0,
+       "acc": 33.11
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 17.28,
+       "subject_score": {
+         "Literature": 48.08,
+         "Agriculture": 10.0,
+         "History": 19.64,
+         "Sociology": 18.52,
+         "Design": 20.0,
+         "Finance": 6.67,
+         "Art": 20.75,
+         "Public_Health": 17.24,
+         "Accounting": 15.52,
+         "Energy_and_Power": 18.97,
+         "Clinical_Medicine": 10.17,
+         "Architecture_and_Engineering": 15.0,
+         "Pharmacy": 17.54,
+         "Physics": 11.67,
+         "Electronics": 13.33,
+         "Psychology": 15.0,
+         "Art_Theory": 16.36,
+         "Economics": 6.78,
+         "Manage": 22.0,
+         "Diagnostics_and_Laboratory_Medicine": 11.67,
+         "Mechanical_Engineering": 20.34,
+         "Basic_Medical_Science": 19.23,
+         "Computer_Science": 26.67,
+         "Math": 10.0,
+         "Biology": 18.64,
+         "Marketing": 16.95,
+         "Music": 20.0,
+         "Materials": 8.33,
+         "Chemistry": 20.0,
+         "Geography": 30.77
+       },
+       "difficulty_score": {
+         "Easy": 18.75,
+         "Medium": 18.48,
+         "Hard": 12.97
+       },
+       "acc_stderr": 0,
+       "acc": 17.28
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 11.97,
+       "subject_score": {
+         "Art": 13.21,
+         "History": 5.36,
+         "Agriculture": 8.33,
+         "Finance": 3.33,
+         "Literature": 11.54,
+         "Sociology": 12.96,
+         "Design": 5.0,
+         "Public_Health": 6.9,
+         "Clinical_Medicine": 8.47,
+         "Accounting": 6.9,
+         "Architecture_and_Engineering": 10.0,
+         "Pharmacy": 29.82,
+         "Energy_and_Power": 6.9,
+         "Psychology": 10.0,
+         "Physics": 5.0,
+         "Electronics": 13.33,
+         "Art_Theory": 9.09,
+         "Manage": 20.0,
+         "Mechanical_Engineering": 16.95,
+         "Economics": 3.39,
+         "Biology": 11.86,
+         "Diagnostics_and_Laboratory_Medicine": 10.0,
+         "Basic_Medical_Science": 15.38,
+         "Computer_Science": 16.67,
+         "Math": 13.33,
+         "Music": 31.67,
+         "Materials": 11.67,
+         "Marketing": 8.47,
+         "Chemistry": 23.33,
+         "Geography": 11.54
+       },
+       "acc_stderr": 0,
+       "acc": 11.97
+     },
+     "OCRBench": {
+       "final_score": [
+         587,
+         1000
+       ],
+       "accuracy": 58.7,
+       "Regular Text Recognition": [
+         39,
+         50
+       ],
+       "Irregular Text Recognition": [
+         33,
+         50
+       ],
+       "Artistic Text Recognition": [
+         46,
+         50
+       ],
+       "Handwriting Recognition": [
+         19,
+         50
+       ],
+       "Digit String Recognition": [
+         41,
+         50
+       ],
+       "Non-Semantic Text Recognition": [
+         31,
+         50
+       ],
+       "Scene Text-centric VQA": [
+         161,
+         200
+       ],
+       "Doc-oriented VQA": [
+         87,
+         200
+       ],
+       "Key Information Extraction": [
+         108,
+         200
+       ],
+       "Handwritten Mathematical Expression Recognition": [
+         22,
+         100
+       ],
+       "acc_stderr": 0,
+       "acc": 58.7
+     },
+     "MathVision": {
+       "accuracy": 13.29,
+       "acc_stderr": 0,
+       "acc": 13.29
+     },
+     "CII-Bench": {
+       "accuracy": 28.5,
+       "domain_score": {
+         "CTC": 33.33,
+         "Art": 32.35,
+         "Env.": 35.19,
+         "Life": 22.08,
+         "Society": 27.57,
+         "Politics": 33.33
+       },
+       "emotion_score": {
+         "Positive": 30.34,
+         "Negative": 28.68,
+         "Neutral": 26.69
+       },
+       "acc_stderr": 0,
+       "acc": 28.5
+     },
+     "Blink": {
+       "accuracy": 38.77,
+       "Art Style": 50.43,
+       "Counting": 33.33,
+       "Forensic Detection": 25.0,
+       "Functional Correspondence": 24.62,
+       "IQ Test": 22.0,
+       "Jigsaw": 51.33,
+       "Multi-view Reasoning": 44.36,
+       "Object Localization": 59.84,
+       "Relative Depth": 43.55,
+       "Relative Reflectance": 29.85,
+       "Semantic Correspondence": 34.53,
+       "Spatial Relation": 54.55,
+       "Visual Correspondence": 26.74,
+       "Visual Similarity": 48.15,
+       "acc_stderr": 0,
+       "acc": 38.77
+     }
+   }
+ }
LLaVA-OneVision-7B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
+ {
+   "config_general": {
+     "model_name": "LLaVA-OneVision-7B",
+     "model_dtype": "float16",
+     "model_size": 0
+   },
+   "results": {
+     "CMMMU": {
+       "accuracy": 37.11,
+       "acc_stderr": 0,
+       "acc": 37.11
+     },
+     "MMMU": {
+       "accuracy": 45.33,
+       "acc_stderr": 0,
+       "acc": 45.33
+     },
+     "MMMU_Pro_standard": {
+       "accuracy": 28.67,
+       "acc_stderr": 0,
+       "acc": 28.67
+     },
+     "MMMU_Pro_vision": {
+       "accuracy": 13.82,
+       "acc_stderr": 0,
+       "acc": 13.82
+     },
+     "OCRBench": {
+       "accuracy": 60.5,
+       "Regular Text Recognition": 45,
+       "Irregular Text Recognition": 39,
+       "Artistic Text Recognition": 47,
+       "Handwriting Recognition": 26,
+       "Digit String Recognition": 32,
+       "Non-Semantic Text Recognition": 22,
+       "Scene Text-centric VQA": 155,
+       "Doc-oriented VQA": 127,
+       "Key Information Extraction": 110,
+       "Handwritten Mathematical Expression Recognition": 2,
+       "acc_stderr": 0,
+       "acc": 60.5
+     },
+     "MathVision": {
+       "accuracy": 16.68,
+       "acc_stderr": 0,
+       "acc": 16.68
+     },
+     "CII-Bench": {
+       "accuracy": 42.88,
+       "domain_score": {
+         "CTC": 42.22,
+         "Society": 49.19,
+         "Art": 43.38,
+         "Life": 34.2,
+         "Env.": 53.7,
+         "Politics": 54.17
+       },
+       "emotion_score": {
+         "Positive": 37.18,
+         "Negative": 46.04,
+         "Neutral": 44.74
+       },
+       "acc_stderr": 0,
+       "acc": 42.88
+     },
+     "Blink": {
+       "accuracy": 49.03,
+       "Art Style": 58.12,
+       "Counting": 65.83,
+       "Forensic Detection": 25.0,
+       "Functional Correspondence": 33.08,
+       "IQ Test": 26.67,
+       "Jigsaw": 51.33,
+       "Multi-view Reasoning": 50.38,
+       "Object Localization": 56.56,
+       "Relative Depth": 75.81,
+       "Relative Reflectance": 23.88,
+       "Semantic Correspondence": 32.37,
+       "Spatial Relation": 79.72,
+       "Visual Correspondence": 38.37,
+       "Visual Similarity": 77.78,
+       "acc_stderr": 0,
+       "acc": 49.03
+     }
+   }
+ }
LLaVA-Onevision-72B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,136 @@
{
  "config_general": {
    "model_name": "LLaVA-Onevision-72B",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "艺术与设计": {
        "num": 88,
        "correct": 66,
        "accuracy": 75.0
      },
      "overall": {
        "num": 900,
        "correct": 430,
        "accuracy": 47.78
      },
      "商业": {
        "num": 126,
        "correct": 35,
        "accuracy": 27.78
      },
      "科学": {
        "num": 204,
        "correct": 80,
        "accuracy": 39.22
      },
      "健康与医学": {
        "num": 153,
        "correct": 86,
        "accuracy": 56.21
      },
      "人文社会科学": {
        "num": 85,
        "correct": 50,
        "accuracy": 58.82
      },
      "技术与工程": {
        "num": 244,
        "correct": 113,
        "accuracy": 46.31
      },
      "accuracy": 47.78,
      "acc_stderr": 0,
      "acc": 47.78
    },
    "MMMU": {
      "accuracy": 56.0,
      "acc_stderr": 0,
      "acc": 56.0
    },
    "MMMU_Pro_standard": {
      "reject_info": {
        "reject_rate": 0.12,
        "reject_number": 2,
        "total_question": 1730
      },
      "accuracy": 37.21,
      "acc_stderr": 0,
      "acc": 37.21
    },
    "MMMU_Pro_vision": {
      "reject_info": {
        "reject_rate": 0.12,
        "reject_number": 2,
        "total_question": 1730
      },
      "accuracy": 31.94,
      "acc_stderr": 0,
      "acc": 31.94
    },
    "OCRBench": {
      "accuracy": 74.4,
      "Regular Text Recognition": 45,
      "Irregular Text Recognition": 39,
      "Artistic Text Recognition": 46,
      "Handwriting Recognition": 20,
      "Digit String Recognition": 37,
      "Non-Semantic Text Recognition": 28,
      "Scene Text-centric VQA": 177,
      "Doc-oriented VQA": 151,
      "Key Information Extraction": 149,
      "Handwritten Mathematical Expression Recognition": 52,
      "acc_stderr": 0,
      "acc": 74.4
    },
    "MathVision": {
      "accuracy": 25.1,
      "acc_stderr": 0,
      "acc": 25.1
    },
    "CII-Bench": {
      "accuracy": 57.78,
      "domain_score": {
        "CTC": 53.33,
        "Society": 59.46,
        "Art": 60.29,
        "Env.": 68.52,
        "Life": 54.11,
        "Politics": 66.67
      },
      "emotion_score": {
        "Positive": 55.13,
        "Negative": 61.13,
        "Neutral": 56.77
      },
      "acc_stderr": 0,
      "acc": 57.78
    },
    "Blink": {
      "reject_info": {
        "reject_rate": 0.05,
        "reject_number": 1,
        "total_question": 1901
      },
      "accuracy": 56.95,
      "Art Style": 75.0,
      "Counting": 73.33,
      "Forensic Detection": 43.94,
      "Functional Correspondence": 31.54,
      "IQ Test": 20.67,
      "Jigsaw": 70.0,
      "Multi-view Reasoning": 38.35,
      "Object Localization": 63.11,
      "Relative Depth": 76.61,
      "Relative Reflectance": 38.81,
      "Semantic Correspondence": 43.17,
      "Spatial Relation": 84.62,
      "Visual Correspondence": 59.3,
      "Visual Similarity": 84.44,
      "acc_stderr": 0,
      "acc": 56.95
    }
  }
}
Llama-3.2-11B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Llama-3.2-11B-Vision-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 28.89,
      "acc_stderr": 0,
      "acc": 28.89
    },
    "MMMU": {
      "accuracy": 38.33,
      "acc_stderr": 0,
      "acc": 38.33
    },
    "MMMU_Pro_standard": {
      "accuracy": 26.53,
      "acc_stderr": 0,
      "acc": 26.53
    },
    "MMMU_Pro_vision": {
      "accuracy": 33.93,
      "acc_stderr": 0,
      "acc": 33.93
    },
    "OCRBench": {
      "accuracy": 62.2,
      "Regular Text Recognition": 46,
      "Irregular Text Recognition": 46,
      "Artistic Text Recognition": 46,
      "Handwriting Recognition": 25,
      "Digit String Recognition": 3,
      "Non-Semantic Text Recognition": 41,
      "Scene Text-centric VQA": 130,
      "Doc-oriented VQA": 142,
      "Key Information Extraction": 141,
      "Handwritten Mathematical Expression Recognition": 2,
      "acc_stderr": 0,
      "acc": 62.2
    },
    "MathVision": {
      "accuracy": 16.71,
      "acc_stderr": 0,
      "acc": 16.71
    },
    "CII-Bench": {
      "accuracy": 1.44,
      "domain_score": {
        "Life": 1.3,
        "Art": 0.74,
        "CTC": 1.48,
        "Society": 0.54,
        "Env.": 7.41,
        "Politics": 0.0
      },
      "emotion_score": {
        "Neutral": 1.88,
        "Negative": 1.13,
        "Positive": 1.28
      },
      "acc_stderr": 0,
      "acc": 1.44
    },
    "Blink": {
      "accuracy": 28.2,
      "Art Style": 46.15,
      "Counting": 44.17,
      "Forensic Detection": 15.91,
      "Functional Correspondence": 17.69,
      "IQ Test": 4.0,
      "Jigsaw": 24.0,
      "Multi-view Reasoning": 44.36,
      "Object Localization": 59.84,
      "Relative Depth": 37.9,
      "Relative Reflectance": 17.16,
      "Semantic Correspondence": 8.63,
      "Spatial Relation": 37.06,
      "Visual Correspondence": 13.95,
      "Visual Similarity": 38.52,
      "acc_stderr": 0,
      "acc": 28.2
    }
  }
}
Llama-3.2-90B-Vision-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,159 @@
{
  "config_general": {
    "model_name": "Llama-3.2-90B-Vision-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 41.78,
      "acc_stderr": 0,
      "acc": 41.78
    },
    "MMMU": {
      "accuracy": 54.67,
      "subject_score": {
        "Accounting": 50.0,
        "Agriculture": 56.67,
        "Architecture": 30.0,
        "Art": 80.0,
        "Basic": 56.67,
        "Biology": 40.0,
        "Chemistry": 30.0,
        "Clinical": 60.0,
        "Computer": 56.67,
        "Design": 83.33,
        "Diagnostics": 50.0,
        "Economics": 66.67,
        "Electronics": 43.33,
        "Energy": 60.0,
        "Finance": 43.33,
        "Geography": 53.33,
        "History": 76.67,
        "Literature": 76.67,
        "Manage": 40.0,
        "Marketing": 43.33,
        "Materials": 46.67,
        "Math": 30.0,
        "Mechanical": 46.67,
        "Music": 20.0,
        "Pharmacy": 60.0,
        "Physics": 60.0,
        "Psychology": 53.33,
        "Public": 76.67,
        "Sociology": 70.0
      },
      "difficulty_score": {
        "Medium": 53.3,
        "Easy": 64.75,
        "Hard": 41.44
      },
      "acc_stderr": 0,
      "acc": 54.67
    },
    "MMMU_Pro_standard": {
      "accuracy": 38.09,
      "subject_score": {
        "Art": 62.26,
        "History": 53.57,
        "Design": 70.0,
        "Literature": 59.62,
        "Agriculture": 35.0,
        "Sociology": 46.3,
        "Accounting": 39.66,
        "Finance": 40.0,
        "Pharmacy": 40.35,
        "Energy_and_Power": 22.41,
        "Clinical_Medicine": 44.07,
        "Architecture_and_Engineering": 11.67,
        "Physics": 31.67,
        "Art_Theory": 65.45,
        "Public_Health": 39.66,
        "Psychology": 36.67,
        "Electronics": 40.0,
        "Manage": 40.0,
        "Economics": 47.46,
        "Biology": 28.81,
        "Mechanical_Engineering": 23.73,
        "Diagnostics_and_Laboratory_Medicine": 35.0,
        "Basic_Medical_Science": 42.31,
        "Computer_Science": 36.67,
        "Math": 21.67,
        "Music": 23.33,
        "Materials": 15.0,
        "Marketing": 44.07,
        "Chemistry": 23.33,
        "Geography": 32.69
      },
      "difficulty_score": {
        "Easy": 51.52,
        "Medium": 36.58,
        "Hard": 23.44
      },
      "acc_stderr": 0,
      "acc": 38.09
    },
    "MMMU_Pro_vision": {
      "accuracy": 23.58,
      "acc_stderr": 0,
      "acc": 23.58
    },
    "OCRBench": {
      "accuracy": 71.1,
      "Regular Text Recognition": 46,
      "Irregular Text Recognition": 45,
      "Artistic Text Recognition": 32,
      "Handwriting Recognition": 27,
      "Digit String Recognition": 20,
      "Non-Semantic Text Recognition": 43,
      "Scene Text-centric VQA": 145,
      "Doc-oriented VQA": 172,
      "Key Information Extraction": 161,
      "Handwritten Mathematical Expression Recognition": 20,
      "acc_stderr": 0,
      "acc": 71.1
    },
    "MathVision": {
      "accuracy": 21.51,
      "acc_stderr": 0,
      "acc": 21.51
    },
    "CII-Bench": {
      "accuracy": 55.82,
      "domain_score": {
        "Life": 51.08,
        "Art": 55.88,
        "CTC": 54.07,
        "Society": 54.59,
        "Env.": 75.93,
        "Politics": 75.0
      },
      "emotion_score": {
        "Neutral": 54.89,
        "Negative": 58.11,
        "Positive": 54.27
      },
      "acc_stderr": 0,
      "acc": 55.82
    },
    "Blink": {
      "accuracy": 48.87,
      "Art Style": 47.01,
      "Counting": 64.17,
      "Forensic Detection": 24.24,
      "Functional Correspondence": 26.92,
      "IQ Test": 26.0,
      "Jigsaw": 44.0,
      "Multi-view Reasoning": 55.64,
      "Object Localization": 60.66,
      "Relative Depth": 73.39,
      "Relative Reflectance": 32.09,
      "Semantic Correspondence": 41.01,
      "Spatial Relation": 78.32,
      "Visual Correspondence": 62.21,
      "Visual Similarity": 49.63,
      "acc_stderr": 0,
      "acc": 48.87
    }
  }
}
MiniCPM-V-2.6/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "MiniCPM-V-2.6",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 38.89,
      "acc_stderr": 0,
      "acc": 38.89
    },
    "MMMU": {
      "accuracy": 45.11,
      "acc_stderr": 0,
      "acc": 45.11
    },
    "MMMU_Pro_standard": {
      "accuracy": 28.38,
      "acc_stderr": 0,
      "acc": 28.38
    },
    "MMMU_Pro_vision": {
      "accuracy": 23.01,
      "acc_stderr": 0,
      "acc": 23.01
    },
    "OCRBench": {
      "accuracy": 80.6,
      "Regular Text Recognition": 49,
      "Irregular Text Recognition": 43,
      "Artistic Text Recognition": 44,
      "Handwriting Recognition": 32,
      "Digit String Recognition": 38,
      "Non-Semantic Text Recognition": 46,
      "Scene Text-centric VQA": 176,
      "Doc-oriented VQA": 149,
      "Key Information Extraction": 163,
      "Handwritten Mathematical Expression Recognition": 66,
      "acc_stderr": 0,
      "acc": 80.6
    },
    "MathVision": {
      "accuracy": 15.1,
      "acc_stderr": 0,
      "acc": 15.1
    },
    "CII-Bench": {
      "accuracy": 47.58,
      "domain_score": {
        "CTC": 51.85,
        "Env.": 48.15,
        "Art": 51.47,
        "Society": 48.65,
        "Politics": 62.5,
        "Life": 40.26
      },
      "emotion_score": {
        "Positive": 46.15,
        "Negative": 47.55,
        "Neutral": 48.87
      },
      "acc_stderr": 0,
      "acc": 47.58
    },
    "Blink": {
      "accuracy": 49.5,
      "Art Style": 73.5,
      "Counting": 50.0,
      "Forensic Detection": 34.85,
      "Functional Correspondence": 27.69,
      "IQ Test": 21.33,
      "Jigsaw": 70.67,
      "Multi-view Reasoning": 47.37,
      "Object Localization": 44.26,
      "Relative Depth": 59.68,
      "Relative Reflectance": 31.34,
      "Semantic Correspondence": 35.25,
      "Spatial Relation": 79.72,
      "Visual Correspondence": 45.93,
      "Visual Similarity": 74.07,
      "acc_stderr": 0,
      "acc": 49.5
    }
  }
}
Molmo-72B-0924/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Molmo-72B-0924",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 48.33,
      "acc_stderr": 0,
      "acc": 48.33
    },
    "MMMU": {
      "accuracy": 51.89,
      "acc_stderr": 0,
      "acc": 51.89
    },
    "MMMU_Pro_standard": {
      "accuracy": 36.65,
      "acc_stderr": 0,
      "acc": 36.65
    },
    "MMMU_Pro_vision": {
      "accuracy": 60.69,
      "acc_stderr": 0,
      "acc": 60.69
    },
    "OCRBench": {
      "accuracy": 73.3,
      "Regular Text Recognition": 48,
      "Irregular Text Recognition": 45,
      "Artistic Text Recognition": 45,
      "Handwriting Recognition": 29,
      "Digit String Recognition": 29,
      "Non-Semantic Text Recognition": 33,
      "Scene Text-centric VQA": 184,
      "Doc-oriented VQA": 146,
      "Key Information Extraction": 165,
      "Handwritten Mathematical Expression Recognition": 9,
      "acc_stderr": 0,
      "acc": 73.3
    },
    "MathVision": {
      "accuracy": 24.38,
      "acc_stderr": 0,
      "acc": 24.38
    },
    "CII-Bench": {
      "accuracy": 52.55,
      "domain_score": {
        "Life": 53.25,
        "Art": 57.35,
        "CTC": 46.67,
        "Society": 48.11,
        "Env.": 62.96,
        "Politics": 62.5
      },
      "emotion_score": {
        "Neutral": 53.01,
        "Negative": 52.45,
        "Positive": 52.14
      },
      "acc_stderr": 0,
      "acc": 52.55
    },
    "Blink": {
      "accuracy": 49.03,
      "Art Style": 57.26,
      "Counting": 68.33,
      "Forensic Detection": 40.15,
      "Functional Correspondence": 23.85,
      "IQ Test": 27.33,
      "Jigsaw": 44.67,
      "Multi-view Reasoning": 50.38,
      "Object Localization": 54.1,
      "Relative Depth": 72.58,
      "Relative Reflectance": 34.33,
      "Semantic Correspondence": 36.69,
      "Spatial Relation": 75.52,
      "Visual Correspondence": 36.05,
      "Visual Similarity": 74.81,
      "acc_stderr": 0,
      "acc": 49.03
    }
  }
}
Molmo-7B-D/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Molmo-7B-D",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 39.78,
      "acc_stderr": 0,
      "acc": 39.78
    },
    "MMMU": {
      "accuracy": 43.0,
      "acc_stderr": 0,
      "acc": 43.0
    },
    "MMMU_Pro_standard": {
      "accuracy": 26.07,
      "acc_stderr": 0,
      "acc": 26.07
    },
    "MMMU_Pro_vision": {
      "accuracy": 54.97,
      "acc_stderr": 0,
      "acc": 54.97
    },
    "OCRBench": {
      "accuracy": 71.6,
      "Regular Text Recognition": 49,
      "Irregular Text Recognition": 44,
      "Artistic Text Recognition": 45,
      "Handwriting Recognition": 28,
      "Digit String Recognition": 29,
      "Non-Semantic Text Recognition": 34,
      "Scene Text-centric VQA": 176,
      "Doc-oriented VQA": 144,
      "Key Information Extraction": 161,
      "Handwritten Mathematical Expression Recognition": 6,
      "acc_stderr": 0,
      "acc": 71.6
    },
    "MathVision": {
      "accuracy": 17.43,
      "acc_stderr": 0,
      "acc": 17.43
    },
    "CII-Bench": {
      "accuracy": 40.13,
      "domain_score": {
        "Life": 33.77,
        "Art": 43.38,
        "CTC": 35.56,
        "Society": 42.7,
        "Env.": 57.41,
        "Politics": 50.0
      },
      "emotion_score": {
        "Neutral": 40.23,
        "Negative": 42.64,
        "Positive": 37.18
      },
      "acc_stderr": 0,
      "acc": 40.13
    },
    "Blink": {
      "accuracy": 43.56,
      "Art Style": 58.12,
      "Counting": 57.5,
      "Forensic Detection": 29.55,
      "Functional Correspondence": 30.0,
      "IQ Test": 22.67,
      "Jigsaw": 52.67,
      "Multi-view Reasoning": 45.86,
      "Object Localization": 56.56,
      "Relative Depth": 70.16,
      "Relative Reflectance": 26.87,
      "Semantic Correspondence": 23.02,
      "Spatial Relation": 65.03,
      "Visual Correspondence": 28.49,
      "Visual Similarity": 54.07,
      "acc_stderr": 0,
      "acc": 43.56
    }
  }
}
Mono-InternVL-2B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Mono-InternVL-2B",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 28.56,
      "acc_stderr": 0,
      "acc": 28.56
    },
    "MMMU": {
      "accuracy": 28.22,
      "acc_stderr": 0,
      "acc": 28.22
    },
    "MMMU_Pro_standard": {
      "accuracy": 16.53,
      "acc_stderr": 0,
      "acc": 16.53
    },
    "MMMU_Pro_vision": {
      "accuracy": 10.69,
      "acc_stderr": 0,
      "acc": 10.69
    },
    "OCRBench": {
      "accuracy": 69.9,
      "Regular Text Recognition": 47,
      "Irregular Text Recognition": 39,
      "Artistic Text Recognition": 39,
      "Handwriting Recognition": 19,
      "Digit String Recognition": 44,
      "Non-Semantic Text Recognition": 43,
      "Scene Text-centric VQA": 163,
      "Doc-oriented VQA": 110,
      "Key Information Extraction": 141,
      "Handwritten Mathematical Expression Recognition": 54,
      "acc_stderr": 0,
      "acc": 69.9
    },
    "MathVision": {
      "accuracy": 12.53,
      "acc_stderr": 0,
      "acc": 12.53
    },
    "CII-Bench": {
      "accuracy": 23.4,
      "domain_score": {
        "Life": 14.72,
        "Art": 27.94,
        "CTC": 28.89,
        "Society": 25.41,
        "Env.": 24.07,
        "Politics": 33.33
      },
      "emotion_score": {
        "Neutral": 27.82,
        "Negative": 21.89,
        "Positive": 20.09
      },
      "acc_stderr": 0,
      "acc": 23.4
    },
    "Blink": {
      "accuracy": 35.72,
      "Art Style": 43.59,
      "Counting": 25.83,
      "Forensic Detection": 31.06,
      "Functional Correspondence": 14.62,
      "IQ Test": 20.0,
      "Jigsaw": 46.0,
      "Multi-view Reasoning": 40.6,
      "Object Localization": 59.84,
      "Relative Depth": 54.84,
      "Relative Reflectance": 32.84,
      "Semantic Correspondence": 25.9,
      "Spatial Relation": 51.05,
      "Visual Correspondence": 18.6,
      "Visual Similarity": 42.96,
      "acc_stderr": 0,
      "acc": 35.72
    }
  }
}
NVLM-D-72B/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,91 @@
{
  "config_general": {
    "model_name": "NVLM-D-72B",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 50.22,
      "acc_stderr": 0,
      "acc": 50.22
    },
    "MMMU": {
      "accuracy": 58.22,
      "acc_stderr": 0,
      "acc": 58.22
    },
    "MMMU_Pro_standard": {
      "reject_info": {
        "reject_rate": 0.06,
        "reject_number": 1,
        "total_question": 1730
      },
      "accuracy": 36.84,
      "acc_stderr": 0,
      "acc": 36.84
    },
    "MMMU_Pro_vision": {
      "accuracy": 38.73,
      "acc_stderr": 0,
      "acc": 38.73
    },
    "OCRBench": {
      "accuracy": 79.5,
      "Regular Text Recognition": 46,
      "Irregular Text Recognition": 48,
      "Artistic Text Recognition": 47,
      "Handwriting Recognition": 38,
      "Digit String Recognition": 38,
      "Non-Semantic Text Recognition": 29,
      "Scene Text-centric VQA": 181,
      "Doc-oriented VQA": 144,
      "Key Information Extraction": 164,
      "Handwritten Mathematical Expression Recognition": 60,
      "acc_stderr": 0,
      "acc": 79.5
    },
    "MathVision": {
      "accuracy": 20.26,
      "acc_stderr": 0,
      "acc": 20.26
    },
    "CII-Bench": {
      "accuracy": 55.42,
      "domain_score": {
        "Env.": 74.07,
        "Art": 59.56,
        "Life": 46.75,
        "CTC": 52.59,
        "Society": 58.92,
        "Politics": 62.5
      },
      "emotion_score": {
        "Negative": 58.11,
        "Neutral": 54.89,
        "Positive": 52.99
      },
      "acc_stderr": 0,
      "acc": 55.42
    },
    "Blink": {
      "accuracy": 47.5,
      "Art Style": 52.99,
      "Counting": 66.67,
      "Forensic Detection": 43.18,
      "Functional Correspondence": 20.0,
      "IQ Test": 22.0,
      "Jigsaw": 64.67,
      "Multi-view Reasoning": 47.37,
      "Object Localization": 55.74,
      "Relative Depth": 54.84,
      "Relative Reflectance": 33.58,
      "Semantic Correspondence": 30.22,
      "Spatial Relation": 78.32,
      "Visual Correspondence": 34.3,
      "Visual Similarity": 67.41,
      "acc_stderr": 0,
      "acc": 47.5
    }
  }
}
Phi-3.5-Vision-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Phi-3.5-Vision-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 28.44,
      "acc_stderr": 0,
      "acc": 28.44
    },
    "MMMU": {
      "accuracy": 44.0,
      "acc_stderr": 0,
      "acc": 44.0
    },
    "MMMU_Pro_standard": {
      "accuracy": 24.28,
      "acc_stderr": 0,
      "acc": 24.28
    },
    "MMMU_Pro_vision": {
      "accuracy": 11.5,
      "acc_stderr": 0,
      "acc": 11.5
    },
    "OCRBench": {
      "accuracy": 60.9,
      "Regular Text Recognition": 45,
      "Irregular Text Recognition": 39,
      "Artistic Text Recognition": 45,
      "Handwriting Recognition": 23,
      "Digit String Recognition": 11,
      "Non-Semantic Text Recognition": 42,
      "Scene Text-centric VQA": 156,
      "Doc-oriented VQA": 123,
      "Key Information Extraction": 123,
      "Handwritten Mathematical Expression Recognition": 2,
      "acc_stderr": 0,
      "acc": 60.9
    },
    "MathVision": {
      "accuracy": 14.47,
      "acc_stderr": 0,
      "acc": 14.47
    },
    "CII-Bench": {
      "accuracy": 36.08,
      "domain_score": {
        "Life": 32.03,
        "Art": 39.71,
        "CTC": 36.3,
        "Society": 33.51,
        "Env.": 46.3,
        "Politics": 50.0
      },
      "emotion_score": {
        "Neutral": 39.47,
        "Negative": 36.23,
        "Positive": 32.05
      },
      "acc_stderr": 0,
      "acc": 36.08
    },
    "Blink": {
      "accuracy": 57.39,
      "Art Style": 88.89,
      "Counting": 53.33,
      "Forensic Detection": 92.42,
      "Functional Correspondence": 33.08,
      "IQ Test": 26.0,
      "Jigsaw": 71.33,
      "Multi-view Reasoning": 45.86,
      "Object Localization": 45.9,
      "Relative Depth": 70.16,
      "Relative Reflectance": 38.06,
      "Semantic Correspondence": 39.57,
      "Spatial Relation": 65.03,
      "Visual Correspondence": 55.23,
      "Visual Similarity": 84.44,
      "acc_stderr": 0,
      "acc": 57.39
    }
  }
}
Pixtral-12B-2409/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,120 @@
{
  "config_general": {
    "model_name": "Pixtral-12B-2409",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 35.89,
      "acc_stderr": 0,
      "acc": 35.89
    },
    "MMMU": {
      "accuracy": 48.67,
      "acc_stderr": 0,
      "acc": 48.67
    },
    "MMMU_Pro_standard": {
      "accuracy": 31.5,
      "acc_stderr": 0,
      "acc": 31.5
    },
    "MMMU_Pro_vision": {
      "accuracy": 57.51,
      "acc_stderr": 0,
      "acc": 57.51
    },
    "OCRBench": {
      "final_score": [
        681,
        1000
      ],
      "accuracy": 68.1,
      "Regular Text Recognition": [
        44,
        50
      ],
      "Irregular Text Recognition": [
        34,
        50
      ],
      "Artistic Text Recognition": [
        47,
        50
      ],
      "Handwriting Recognition": [
        34,
        50
      ],
      "Digit String Recognition": [
        23,
        50
      ],
      "Non-Semantic Text Recognition": [
        38,
        50
      ],
      "Scene Text-centric VQA": [
        165,
        200
      ],
      "Doc-oriented VQA": [
        155,
        200
      ],
      "Key Information Extraction": [
        140,
        200
      ],
      "Handwritten Mathematical Expression Recognition": [
        1,
        100
      ],
      "acc_stderr": 0,
      "acc": 68.1
    },
    "MathVision": {
      "accuracy": 21.55,
      "acc_stderr": 0,
      "acc": 21.55
    },
    "CII-Bench": {
      "accuracy": 31.63,
      "domain_score": {
        "Life": 27.27,
        "Art": 35.29,
        "CTC": 27.41,
        "Society": 35.14,
        "Env.": 31.48,
        "Politics": 50.0
      },
      "emotion_score": {
        "Neutral": 31.58,
        "Negative": 35.85,
        "Positive": 26.92
      },
      "acc_stderr": 0,
      "acc": 31.63
    },
    "Blink": {
      "accuracy": 52.13,
      "Art Style": 65.81,
      "Counting": 65.83,
      "Forensic Detection": 34.09,
      "Functional Correspondence": 31.54,
      "IQ Test": 20.0,
      "Jigsaw": 59.33,
      "Multi-view Reasoning": 49.62,
      "Object Localization": 46.72,
      "Relative Depth": 64.52,
      "Relative Reflectance": 26.87,
      "Semantic Correspondence": 43.17,
      "Spatial Relation": 79.72,
      "Visual Correspondence": 69.19,
      "Visual Similarity": 72.59,
      "acc_stderr": 0,
      "acc": 52.13
    }
  }
}
Qwen-VL-Max/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,116 @@
{
  "config_general": {
    "model_name": "Qwen-VL-Max",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "reject_info": {
        "reject_rate": 0.78,
        "reject_number": 7,
        "total_question": 900
      },
      "accuracy": 49.94,
      "acc_stderr": 0,
      "acc": 49.94
    },
    "MMMU": {
      "reject_info": {
        "reject_rate": 0.78,
        "reject_number": 7,
        "total_question": 900
      },
      "accuracy": 56.89,
      "acc_stderr": 0,
      "acc": 56.89
    },
    "MMMU_Pro_standard": {
      "reject_info": {
        "reject_rate": 0.29,
        "reject_number": 5,
        "total_question": 1730
      },
      "accuracy": 39.25,
      "acc_stderr": 0,
      "acc": 39.25
    },
    "MMMU_Pro_vision": {
      "accuracy": 31.79,
      "acc_stderr": 0,
      "acc": 31.79
    },
    "OCRBench": {
      "reject_info": {
        "reject_rate": 0.2,
        "reject_number": 2,
        "total_question": 1000
      },
      "accuracy": 84.569,
      "Regular Text Recognition": 49,
      "Irregular Text Recognition": 46,
      "Artistic Text Recognition": 48,
      "Handwriting Recognition": 41,
      "Digit String Recognition": 33,
      "Non-Semantic Text Recognition": 44,
      "Scene Text-centric VQA": 175,
      "Doc-oriented VQA": 171,
      "Key Information Extraction": 178,
      "Handwritten Mathematical Expression Recognition": 59,
      "acc_stderr": 0,
      "acc": 84.569
    },
    "MathVision": {
      "reject_info": {
        "reject_rate": 0.03,
        "reject_number": 1,
        "total_question": 3040
      },
      "accuracy": 26.88,
      "acc_stderr": 0,
      "acc": 26.88
    },
    "CII-Bench": {
      "accuracy": 58.76,
      "domain_score": {
        "Life": 56.39,
        "Art": 61.03,
        "CTC": 51.11,
        "Society": 61.41,
        "Env.": 70.37,
        "Politics": 65.22
      },
      "emotion_score": {
        "Neutral": 59.09,
        "Negative": 59.39,
        "Positive": 57.69
      },
      "acc_stderr": 0,
      "acc": 58.76
    },
    "Blink": {
      "reject_info": {
        "reject_rate": 0.11,
        "reject_number": 2,
        "total_question": 1901
      },
      "accuracy": 56.4,
      "Art Style": 68.1,
      "Counting": 63.33,
      "Forensic Detection": 59.85,
      "Functional Correspondence": 47.69,
      "IQ Test": 23.33,
      "Jigsaw": 52.35,
      "Multi-view Reasoning": 52.63,
      "Object Localization": 55.74,
      "Relative Depth": 65.32,
      "Relative Reflectance": 36.57,
      "Semantic Correspondence": 42.45,
      "Spatial Relation": 72.03,
      "Visual Correspondence": 72.09,
      "Visual Similarity": 80.0,
      "acc_stderr": 0,
      "acc": 56.4
    }
  }
}
Qwen2-VL-2B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Qwen2-VL-2B-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 33.89,
      "acc_stderr": 0,
      "acc": 33.89
    },
    "MMMU": {
      "accuracy": 41.44,
      "acc_stderr": 0,
      "acc": 41.44
    },
    "MMMU_Pro_standard": {
      "accuracy": 26.82,
      "acc_stderr": 0,
      "acc": 26.82
    },
    "MMMU_Pro_vision": {
      "accuracy": 13.58,
      "acc_stderr": 0,
      "acc": 13.58
    },
    "OCRBench": {
      "accuracy": 75.5,
      "Regular Text Recognition": 49,
      "Irregular Text Recognition": 45,
      "Artistic Text Recognition": 47,
      "Handwriting Recognition": 42,
      "Digit String Recognition": 33,
      "Non-Semantic Text Recognition": 44,
      "Scene Text-centric VQA": 172,
      "Doc-oriented VQA": 131,
      "Key Information Extraction": 162,
      "Handwritten Mathematical Expression Recognition": 30,
      "acc_stderr": 0,
      "acc": 75.5
    },
    "MathVision": {
      "accuracy": 14.34,
      "acc_stderr": 0,
      "acc": 14.34
    },
    "CII-Bench": {
      "accuracy": 39.48,
      "domain_score": {
        "Art": 42.65,
        "Env.": 50.0,
        "CTC": 46.67,
        "Society": 40.0,
        "Life": 31.17,
        "Politics": 33.33
      },
      "emotion_score": {
        "Negative": 38.49,
        "Positive": 39.74,
        "Neutral": 40.23
      },
      "acc_stderr": 0,
      "acc": 39.48
    },
    "Blink": {
      "accuracy": 40.45,
      "Art Style": 47.01,
      "Counting": 55.83,
      "Forensic Detection": 21.21,
      "Functional Correspondence": 32.31,
      "IQ Test": 16.0,
      "Jigsaw": 52.67,
      "Multi-view Reasoning": 33.08,
      "Object Localization": 50.0,
      "Relative Depth": 55.65,
      "Relative Reflectance": 31.34,
      "Semantic Correspondence": 24.46,
      "Spatial Relation": 69.93,
      "Visual Correspondence": 28.49,
      "Visual Similarity": 55.56,
      "acc_stderr": 0,
      "acc": 40.45
    }
  }
}
Qwen2-VL-72B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,91 @@
{
  "config_general": {
    "model_name": "Qwen2-VL-72B-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 52.78,
      "acc_stderr": 0,
      "acc": 52.78
    },
    "MMMU": {
      "accuracy": 60.89,
      "acc_stderr": 0,
      "acc": 60.89
    },
    "MMMU_Pro_standard": {
      "accuracy": 41.33,
      "acc_stderr": 0,
      "acc": 41.33
    },
    "MMMU_Pro_vision": {
      "reject_info": {
        "reject_rate": 0.23,
        "reject_number": 4,
        "total_question": 1730
      },
      "accuracy": 34.41,
      "acc_stderr": 0,
      "acc": 34.41
    },
    "OCRBench": {
      "accuracy": 83.2,
      "Regular Text Recognition": 48,
      "Irregular Text Recognition": 47,
      "Artistic Text Recognition": 48,
      "Handwriting Recognition": 29,
      "Digit String Recognition": 33,
      "Non-Semantic Text Recognition": 38,
      "Scene Text-centric VQA": 178,
      "Doc-oriented VQA": 172,
      "Key Information Extraction": 176,
      "Handwritten Mathematical Expression Recognition": 63,
      "acc_stderr": 0,
      "acc": 83.2
    },
    "MathVision": {
      "accuracy": 26.35,
      "acc_stderr": 0,
      "acc": 26.35
    },
    "CII-Bench": {
      "accuracy": 67.84,
      "domain_score": {
        "Society": 70.27,
        "CTC": 68.15,
        "Art": 63.24,
        "Env.": 79.63,
        "Life": 64.94,
        "Politics": 75.0
      },
      "emotion_score": {
        "Negative": 67.55,
        "Positive": 68.8,
        "Neutral": 67.29
      },
      "acc_stderr": 0,
      "acc": 67.84
    },
    "Blink": {
      "accuracy": 57.29,
      "Art Style": 81.2,
      "Counting": 77.5,
      "Forensic Detection": 61.36,
      "Functional Correspondence": 25.38,
      "IQ Test": 26.0,
      "Jigsaw": 63.33,
      "Multi-view Reasoning": 39.1,
      "Object Localization": 56.56,
      "Relative Depth": 68.55,
      "Relative Reflectance": 31.34,
      "Semantic Correspondence": 34.53,
      "Spatial Relation": 87.41,
      "Visual Correspondence": 72.09,
      "Visual Similarity": 80.0,
      "acc_stderr": 0,
      "acc": 57.29
    }
  }
}
Qwen2-VL-7B-Instruct/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "Qwen2-VL-7B-Instruct",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 44.78,
      "acc_stderr": 0,
      "acc": 44.78
    },
    "MMMU": {
      "accuracy": 50.44,
      "acc_stderr": 0,
      "acc": 50.44
    },
    "MMMU_Pro_standard": {
      "accuracy": 33.93,
      "acc_stderr": 0,
      "acc": 33.93
    },
    "MMMU_Pro_vision": {
      "accuracy": 17.69,
      "acc_stderr": 0,
      "acc": 17.69
    },
    "OCRBench": {
      "accuracy": 82.9,
      "Regular Text Recognition": 48,
      "Irregular Text Recognition": 46,
      "Artistic Text Recognition": 47,
      "Handwriting Recognition": 43,
      "Digit String Recognition": 39,
      "Non-Semantic Text Recognition": 47,
      "Scene Text-centric VQA": 177,
      "Doc-oriented VQA": 148,
      "Key Information Extraction": 175,
      "Handwritten Mathematical Expression Recognition": 59,
      "acc_stderr": 0,
      "acc": 82.9
    },
    "MathVision": {
      "accuracy": 17.34,
      "acc_stderr": 0,
      "acc": 17.34
    },
    "CII-Bench": {
      "accuracy": 51.24,
      "domain_score": {
        "Life": 41.99,
        "Art": 55.15,
        "CTC": 51.85,
        "Society": 51.89,
        "Env.": 74.07,
        "Politics": 58.33
      },
      "emotion_score": {
        "Neutral": 53.76,
        "Negative": 48.68,
        "Positive": 51.28
      },
      "acc_stderr": 0,
      "acc": 51.24
    },
    "Blink": {
      "accuracy": 50.08,
      "Art Style": 59.83,
      "Counting": 69.17,
      "Forensic Detection": 30.3,
      "Functional Correspondence": 21.54,
      "IQ Test": 28.0,
      "Jigsaw": 49.33,
      "Multi-view Reasoning": 42.86,
      "Object Localization": 54.92,
      "Relative Depth": 67.74,
      "Relative Reflectance": 39.55,
      "Semantic Correspondence": 31.65,
      "Spatial Relation": 83.92,
      "Visual Correspondence": 44.19,
      "Visual Similarity": 84.44,
      "acc_stderr": 0,
      "acc": 50.08
    }
  }
}
Step-1V-32k/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,123 @@
{
  "config_general": {
    "model_name": "Step-1V-32k",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 47.33,
      "acc_stderr": 0,
      "acc": 47.33
    },
    "MMMU": {
      "accuracy": 52.44,
      "acc_stderr": 0,
      "acc": 52.44
    },
    "MMMU_Pro_standard": {
      "accuracy": 35.66,
      "subject_score": {
        "History": 42.86,
        "Art": 64.15,
        "Design": 61.67,
        "Literature": 71.15,
        "Agriculture": 35.0,
        "Finance": 31.67,
        "Sociology": 42.59,
        "Accounting": 44.83,
        "Energy_and_Power": 15.52,
        "Pharmacy": 43.86,
        "Architecture_and_Engineering": 11.67,
        "Clinical_Medicine": 20.34,
        "Public_Health": 37.93,
        "Physics": 23.33,
        "Art_Theory": 67.27,
        "Electronics": 20.0,
        "Psychology": 33.33,
        "Biology": 35.59,
        "Manage": 28.0,
        "Economics": 57.63,
        "Mechanical_Engineering": 18.64,
        "Diagnostics_and_Laboratory_Medicine": 26.67,
        "Basic_Medical_Science": 40.38,
        "Computer_Science": 33.33,
        "Math": 26.67,
        "Music": 18.33,
        "Materials": 18.33,
        "Marketing": 45.76,
        "Chemistry": 28.33,
        "Geography": 36.54
      },
      "difficulty_score": {
        "Medium": 33.96,
        "Easy": 51.7,
        "Hard": 17.96
      },
      "acc_stderr": 0,
      "acc": 35.66
    },
    "MMMU_Pro_vision": {
      "accuracy": 59.08,
      "acc_stderr": 0,
      "acc": 59.08
    },
    "OCRBench": {
      "accuracy": 84.8,
      "Regular Text Recognition": 49,
      "Irregular Text Recognition": 49,
      "Artistic Text Recognition": 48,
      "Handwriting Recognition": 42,
      "Digit String Recognition": 38,
      "Non-Semantic Text Recognition": 44,
      "Scene Text-centric VQA": 185,
      "Doc-oriented VQA": 153,
      "Key Information Extraction": 178,
      "Handwritten Mathematical Expression Recognition": 62,
      "acc_stderr": 0,
      "acc": 84.8
    },
    "MathVision": {
      "accuracy": 25.86,
      "acc_stderr": 0,
      "acc": 25.86
    },
    "CII-Bench": {
      "accuracy": 58.82,
      "domain_score": {
        "Life": 61.04,
        "Art": 57.35,
        "CTC": 51.85,
        "Society": 55.68,
        "Env.": 72.22,
        "Politics": 79.17
      },
      "emotion_score": {
        "Neutral": 60.15,
        "Negative": 58.49,
        "Positive": 57.69
      },
      "acc_stderr": 0,
      "acc": 58.82
    },
    "Blink": {
      "accuracy": 58.13,
      "Art Style": 76.07,
      "Counting": 70.0,
      "Forensic Detection": 45.45,
      "Functional Correspondence": 33.08,
      "IQ Test": 24.67,
      "Jigsaw": 65.33,
      "Multi-view Reasoning": 51.13,
      "Object Localization": 57.38,
      "Relative Depth": 75.81,
      "Relative Reflectance": 36.57,
      "Semantic Correspondence": 46.04,
      "Spatial Relation": 74.83,
      "Visual Correspondence": 75.0,
      "Visual Similarity": 83.7,
      "acc_stderr": 0,
      "acc": 58.13
    }
  }
}
XGen-MM-Instruct-Interleave-v1.5/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,86 @@
{
  "config_general": {
    "model_name": "XGen-MM-Instruct-Interleave-v1.5",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 27.56,
      "acc_stderr": 0,
      "acc": 27.56
    },
    "MMMU": {
      "accuracy": 40.0,
      "acc_stderr": 0,
      "acc": 40.0
    },
    "MMMU_Pro_standard": {
      "accuracy": 25.49,
      "acc_stderr": 0,
      "acc": 25.49
    },
    "MMMU_Pro_vision": {
      "accuracy": 12.66,
      "acc_stderr": 0,
      "acc": 12.66
    },
    "OCRBench": {
      "accuracy": 55.5,
      "Regular Text Recognition": 48,
      "Irregular Text Recognition": 45,
      "Artistic Text Recognition": 45,
      "Handwriting Recognition": 29,
      "Digit String Recognition": 28,
      "Non-Semantic Text Recognition": 32,
      "Scene Text-centric VQA": 175,
      "Doc-oriented VQA": 84,
      "Key Information Extraction": 69,
      "Handwritten Mathematical Expression Recognition": 0,
      "acc_stderr": 0,
      "acc": 55.5
    },
    "MathVision": {
      "accuracy": 20.56,
      "acc_stderr": 0,
      "acc": 20.56
    },
    "CII-Bench": {
      "accuracy": 36.99,
      "domain_score": {
        "Life": 28.14,
        "Art": 40.44,
        "CTC": 37.04,
        "Society": 40.54,
        "Env.": 50.0,
        "Politics": 45.83
      },
      "emotion_score": {
        "Neutral": 37.59,
        "Negative": 39.62,
        "Positive": 33.33
      },
      "acc_stderr": 0,
      "acc": 36.99
    },
    "Blink": {
      "accuracy": 47.55,
      "Art Style": 40.17,
      "Counting": 55.0,
      "Forensic Detection": 31.82,
      "Functional Correspondence": 29.23,
      "IQ Test": 28.0,
      "Jigsaw": 40.0,
      "Multi-view Reasoning": 55.64,
      "Object Localization": 52.46,
      "Relative Depth": 73.39,
      "Relative Reflectance": 43.28,
      "Semantic Correspondence": 21.58,
      "Spatial Relation": 76.92,
      "Visual Correspondence": 44.77,
      "Visual Similarity": 77.78,
      "acc_stderr": 0,
      "acc": 47.55
    }
  }
}
Yi-Vision/results_2025-01-25T10-42-53.190540.json ADDED
@@ -0,0 +1,111 @@
{
  "config_general": {
    "model_name": "Yi-Vision",
    "model_dtype": "float16",
    "model_size": 0
  },
  "results": {
    "CMMMU": {
      "accuracy": 51.44,
      "acc_stderr": 0,
      "acc": 51.44
    },
    "MMMU": {
      "reject_info": {
        "reject_rate": 0.33,
        "reject_number": 3,
        "total_question": 900
      },
      "accuracy": 57.64,
      "acc_stderr": 0,
      "acc": 57.64
    },
    "MMMU_Pro_standard": {
      "reject_info": {
        "reject_rate": 0.06,
        "reject_number": 1,
        "total_question": 1730
      },
      "accuracy": 36.67,
      "acc_stderr": 0,
      "acc": 36.67
    },
    "MMMU_Pro_vision": {
      "reject_info": {
        "reject_rate": 0.06,
        "reject_number": 1,
        "total_question": 1730
      },
      "accuracy": 27.47,
      "acc_stderr": 0,
      "acc": 27.47
    },
    "OCRBench": {
      "accuracy": 81.8,
      "Regular Text Recognition": 50,
      "Irregular Text Recognition": 49,
      "Artistic Text Recognition": 47,
      "Handwriting Recognition": 37,
      "Digit String Recognition": 39,
      "Non-Semantic Text Recognition": 41,
      "Scene Text-centric VQA": 185,
      "Doc-oriented VQA": 165,
      "Key Information Extraction": 168,
      "Handwritten Mathematical Expression Recognition": 37,
      "acc_stderr": 0,
      "acc": 81.8
    },
    "MathVision": {
      "reject_info": {
        "reject_rate": 0.07,
        "reject_number": 2,
        "total_question": 3040
      },
      "accuracy": 22.19,
      "acc_stderr": 0,
      "acc": 22.19
    },
    "CII-Bench": {
      "accuracy": 54.51,
      "domain_score": {
        "Life": 49.35,
        "Art": 54.41,
        "CTC": 52.59,
        "Society": 60.54,
        "Env.": 61.11,
        "Politics": 54.17
      },
      "emotion_score": {
        "Neutral": 58.65,
        "Negative": 55.09,
        "Positive": 49.15
      },
      "acc_stderr": 0,
      "acc": 54.51
    },
    "Blink": {
      "reject_info": {
        "reject_rate": 0.11,
        "reject_number": 2,
        "total_question": 1901
      },
      "accuracy": 62.66,
      "Art Style": 76.07,
      "Counting": 70.83,
      "Forensic Detection": 71.21,
      "Functional Correspondence": 35.38,
      "IQ Test": 26.0,
      "Jigsaw": 84.0,
      "Multi-view Reasoning": 45.11,
      "Object Localization": 64.75,
      "Relative Depth": 75.81,
      "Relative Reflectance": 34.33,
      "Semantic Correspondence": 47.83,
      "Spatial Relation": 80.42,
      "Visual Correspondence": 79.53,
      "Visual Similarity": 85.19,
      "acc_stderr": 0,
      "acc": 62.66
    }
  }
}