Yirany committed on
Commit
9088c84
1 Parent(s): 6918ea2

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +5 -270
README.md CHANGED
@@ -50,276 +50,11 @@ datasets:
50
  ### Evaluation <!-- omit in toc -->
51
 
52
  Results on TextVQA, DocVQA, OCRBench, OpenCompass MultiModal Avg , MME, MMBench, MMMU, MathVista, LLaVA Bench, RealWorld QA, Object HalBench.
53
- <small>
54
- <table style="margin: 0px auto;">
55
- <thead>
56
- <tr>
57
- <th align="left">Model</th>
58
- <th>Size</th>
59
- <th>OCRBench</th>
60
- <th>TextVQA val</th>
61
- <th>DocVQA test</th>
62
- <th>Open-Compass</th>
63
- <th>MME</th>
64
- <th>MMB test (en)</th>
65
- <th>MMB test (cn)</th>
66
- <th>MMMU val</th>
67
- <th>Math-Vista</th>
68
- <th>LLaVA Bench</th>
69
- <th>RealWorld QA</th>
70
- <th>Object HalBench</th>
71
- </tr>
72
- </thead>
73
- <tbody align="center">
74
- <tr>
75
- <td colspan="14" align="left"><strong>Proprietary</strong></td>
76
- </tr>
77
- <tr>
78
- <td nowrap="nowrap" align="left">Gemini Pro</td>
79
- <td>-</td>
80
- <td>680</td>
81
- <td>74.6</td>
82
- <td>88.1</td>
83
- <td>62.9</td>
84
- <td>2148.9</td>
85
- <td>73.6</td>
86
- <td>74.3</td>
87
- <td>48.9</td>
88
- <td>45.8</td>
89
- <td>79.9</td>
90
- <td>60.4</td>
91
- <td>-</td>
92
- </tr>
93
- <tr>
94
- <td nowrap="nowrap" align="left">GPT-4V (2023.11.06)</td>
95
- <td>-</td>
96
- <td>645</td>
97
- <td>78.0</td>
98
- <td>88.4</td>
99
- <td>63.5</td>
100
- <td>1771.5</td>
101
- <td>77.0</td>
102
- <td>74.4</td>
103
- <td>53.8</td>
104
- <td>47.8</td>
105
- <td>93.1</td>
106
- <td>63.0</td>
107
- <td>86.4</td>
108
- </tr>
109
- <tr>
110
- <td colspan="14" align="left"><strong>Open-source</strong></td>
111
- </tr>
112
- <tr>
113
- <td nowrap="nowrap" align="left">Mini-Gemini</td>
114
- <td>2.2B</td>
115
- <td>-</td>
116
- <td>56.2</td>
117
- <td>34.2*</td>
118
- <td>-</td>
119
- <td>1653.0</td>
120
- <td>-</td>
121
- <td>-</td>
122
- <td>31.7</td>
123
- <td>-</td>
124
- <td>-</td>
125
- <td>-</td>
126
- <td>-</td>
127
- </tr>
128
- <tr>
129
- <td nowrap="nowrap" align="left">Qwen-VL-Chat</td>
130
- <td>9.6B</td>
131
- <td>488</td>
132
- <td>61.5</td>
133
- <td>62.6</td>
134
- <td>51.6</td>
135
- <td>1860.0</td>
136
- <td>61.8</td>
137
- <td>56.3</td>
138
- <td>37.0</td>
139
- <td>33.8</td>
140
- <td>67.7</td>
141
- <td>49.3</td>
142
- <td>56.2</td>
143
- </tr>
144
- <tr>
145
- <td nowrap="nowrap" align="left">DeepSeek-VL-7B</td>
146
- <td>7.3B</td>
147
- <td>435</td>
148
- <td>64.7*</td>
149
- <td>47.0*</td>
150
- <td>54.6</td>
151
- <td>1765.4</td>
152
- <td>73.8</td>
153
- <td>71.4</td>
154
- <td>38.3</td>
155
- <td>36.8</td>
156
- <td>77.8</td>
157
- <td>54.2</td>
158
- <td>-</td>
159
- </tr>
160
- <tr>
161
- <td nowrap="nowrap" align="left">Yi-VL-34B</td>
162
- <td>34B</td>
163
- <td>290</td>
164
- <td>43.4*</td>
165
- <td>16.9*</td>
166
- <td>52.2</td>
167
- <td><strong>2050.2</strong></td>
168
- <td>72.4</td>
169
- <td>70.7</td>
170
- <td>45.1</td>
171
- <td>30.7</td>
172
- <td>62.3</td>
173
- <td>54.8</td>
174
- <td>79.3</td>
175
- </tr>
176
- <tr>
177
- <td nowrap="nowrap" align="left">CogVLM-Chat</td>
178
- <td>17.4B</td>
179
- <td>590</td>
180
- <td>70.4</td>
181
- <td>33.3*</td>
182
- <td>54.2</td>
183
- <td>1736.6</td>
184
- <td>65.8</td>
185
- <td>55.9</td>
186
- <td>37.3</td>
187
- <td>34.7</td>
188
- <td>73.9</td>
189
- <td>60.3</td>
190
- <td>73.6</td>
191
- </tr>
192
- <tr>
193
- <td nowrap="nowrap" align="left">TextMonkey</td>
194
- <td>9.7B</td>
195
- <td>558</td>
196
- <td>64.3</td>
197
- <td>66.7</td>
198
- <td>-</td>
199
- <td>-</td>
200
- <td>-</td>
201
- <td>-</td>
202
- <td>-</td>
203
- <td>-</td>
204
- <td>-</td>
205
- <td>-</td>
206
- <td>-</td>
207
- </tr>
208
- <tr>
209
- <td nowrap="nowrap" align="left">Idefics2</td>
210
- <td>8.0B</td>
211
- <td>-</td>
212
- <td>73.0</td>
213
- <td>74.0</td>
214
- <td>57.2</td>
215
- <td>1847.6</td>
216
- <td>75.7</td>
217
- <td>68.6</td>
218
- <td>45.2</td>
219
- <td>52.2</td>
220
- <td>49.1</td>
221
- <td>60.7</td>
222
- <td>-</td>
223
- </tr>
224
- <tr>
225
- <td nowrap="nowrap" align="left">Bunny-LLama-3-8B</td>
226
- <td>8.4B</td>
227
- <td>-</td>
228
- <td>-</td>
229
- <td>-</td>
230
- <td>54.3</td>
231
- <td>1920.3</td>
232
- <td>77.0</td>
233
- <td>73.9</td>
234
- <td>41.3</td>
235
- <td>31.5</td>
236
- <td>61.2</td>
237
- <td>58.8</td>
238
- <td>-</td>
239
- </tr>
240
- <tr>
241
- <td nowrap="nowrap" align="left">LLaVA-NeXT Llama-3-8B</td>
242
- <td>8.4B</td>
243
- <td>-</td>
244
- <td>-</td>
245
- <td>78.2</td>
246
- <td>-</td>
247
- <td>1971.5</td>
248
- <td>-</td>
249
- <td>-</td>
250
- <td>41.7</td>
251
- <td>37.5</td>
252
- <td>80.1</td>
253
- <td>60.0</td>
254
- <td>-</td>
255
- </tr>
256
- <tr>
257
- <td nowrap="nowrap" align="left">Phi-3-vision-128k-instruct</td>
258
- <td>4.2B</td>
259
- <td>639*</td>
260
- <td>70.9</td>
261
- <td>-</td>
262
- <td>-</td>
263
- <td>1537.5*</td>
264
- <td>-</td>
265
- <td>-</td>
266
- <td>40.4</td>
267
- <td>44.5</td>
268
- <td>64.2*</td>
269
- <td>58.8*</td>
270
- <td>-</td>
271
- </tr>
272
- <tr style="background-color: #e6f2ff;">
273
- <td nowrap="nowrap" align="left">MiniCPM-V 1.0</td>
274
- <td>2.8B</td>
275
- <td>366</td>
276
- <td>60.6</td>
277
- <td>38.2</td>
278
- <td>47.5</td>
279
- <td>1650.2</td>
280
- <td>64.1</td>
281
- <td>62.6</td>
282
- <td>38.3</td>
283
- <td>28.9</td>
284
- <td>51.3</td>
285
- <td>51.2</td>
286
- <td>78.4</td>
287
- </tr>
288
- <tr style="background-color: #e6f2ff;">
289
- <td nowrap="nowrap" align="left">MiniCPM-V 2.0</td>
290
- <td>2.8B</td>
291
- <td>605</td>
292
- <td>74.1</td>
293
- <td>71.9</td>
294
- <td>54.5</td>
295
- <td>1808.6</td>
296
- <td>69.1</td>
297
- <td>66.5</td>
298
- <td>38.2</td>
299
- <td>38.7</td>
300
- <td>69.2</td>
301
- <td>55.8</td>
302
- <td>85.5</td>
303
- </tr>
304
- <tr style="background-color: #e6f2ff;">
305
- <td nowrap="nowrap" align="left">MiniCPM-Llama3-V 2.5</td>
306
- <td>8.5B</td>
307
- <td><strong>725</strong></td>
308
- <td><strong>76.6</strong></td>
309
- <td><strong>84.8</strong></td>
310
- <td><strong>65.1</strong></td>
311
- <td>2024.6</td>
312
- <td><strong>77.2</strong></td>
313
- <td><strong>74.2</strong></td>
314
- <td><strong>45.8</strong></td>
315
- <td><strong>54.3</strong></td>
316
- <td><strong>86.7</strong></td>
317
- <td><strong>63.5</strong></td>
318
- <td><strong>89.7</strong></td>
319
- </tr>
320
- </tbody>
321
- </table>
322
- </small>
323
 
324
  Evaluation results of multilingual LLaVA Bench
325
  <div align="center">
 
50
  ### Evaluation <!-- omit in toc -->
51
 
52
  Results on TextVQA, DocVQA, OCRBench, OpenCompass MultiModal Avg , MME, MMBench, MMMU, MathVista, LLaVA Bench, RealWorld QA, Object HalBench.
53
+
54
+ <div align="center">
55
+ <img src="https://cdn-uploads.huggingface.co/production/uploads/64abc4aa6cadc7aca585dddf/v2KE3wqQgM05ZW3dH2wbx.png" width="110%" />
56
+ </div>
57
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
 
59
  Evaluation results of multilingual LLaVA Bench
60
  <div align="center">