hf-transformers-bot committed on
Commit 3e15fde · verified · 1 Parent(s): 1d4ef54

Upload 2025-11-04/ci_results_run_models_gpu/new_failures_with_bad_commit.json with huggingface_hub
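
The uploaded report maps each model name to a device key ("single-gpu" in this run) and a list of failure records with "test", "commit", "status", "pr_number", "author", "merged_by", and "job_link" fields. Below is a minimal sketch of how one might summarize the report, assuming the JSON has already been downloaded locally to the path used in this commit; the variable names are illustrative only.

import json
from collections import Counter

# Local copy of the file added in this commit (hypothetical download location).
path = "2025-11-04/ci_results_run_models_gpu/new_failures_with_bad_commit.json"

with open(path) as f:
    report = json.load(f)  # {model: {device: [failure records]}}

failures_per_model = Counter()
status_kinds = Counter()
for model, devices in report.items():
    for device, records in devices.items():  # e.g. "single-gpu"
        failures_per_model[model] += len(records)
        for record in records:
            # Keep only the leading status keyword, e.g. "flaky".
            status_kinds[record["status"].split(":", 1)[0]] += 1

print(failures_per_model.most_common(10))
print(status_kinds)

Records with a null "commit" and a "flaky" status were not attributed to a specific PR; the "job_link" points to the GitHub Actions job that produced the failure.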

2025-11-04/ci_results_run_models_gpu/new_failures_with_bad_commit.json ADDED
@@ -0,0 +1,1548 @@
+ {
+ "bamba": {
+ "single-gpu": [
+ {
+ "test": "tests/models/bamba/test_modeling_bamba.py::BambaModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979005"
+ },
+ {
+ "test": "tests/models/bamba/test_modeling_bamba.py::BambaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979005"
+ }
+ ]
+ },
+ "bark": {
+ "single-gpu": [
+ {
+ "test": "tests/models/bark/test_modeling_bark.py::BarkSemanticModelTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979038"
+ },
+ {
+ "test": "tests/models/bark/test_modeling_bark.py::BarkSemanticModelTest::test_flash_attention_2_continue_generate_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979038"
+ },
+ {
+ "test": "tests/models/bark/test_modeling_bark.py::BarkCoarseModelTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979038"
+ },
+ {
+ "test": "tests/models/bark/test_modeling_bark.py::BarkCoarseModelTest::test_flash_attention_2_continue_generate_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979038"
+ }
+ ]
+ },
+ "blt": {
+ "single-gpu": [
+ {
+ "test": "tests/models/blt/test_modeling_blt.py::BltModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979529"
+ }
+ ]
+ },
+ "chameleon": {
+ "single-gpu": [
+ {
+ "test": "tests/models/chameleon/test_modeling_chameleon.py::ChameleonModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979630"
+ },
+ {
+ "test": "tests/models/chameleon/test_modeling_chameleon.py::ChameleonVision2SeqModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979630"
+ }
+ ]
+ },
+ "clap": {
+ "single-gpu": [
+ {
+ "test": "tests/models/clap/test_modeling_clap.py::ClapAudioModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979690"
+ },
+ {
+ "test": "tests/models/clap/test_modeling_clap.py::ClapAudioModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979690"
+ },
+ {
+ "test": "tests/models/clap/test_modeling_clap.py::ClapModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979690"
+ },
+ {
+ "test": "tests/models/clap/test_modeling_clap.py::ClapModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979690"
+ }
+ ]
+ },
+ "colpali": {
+ "single-gpu": [
+ {
+ "test": "tests/models/colpali/test_modeling_colpali.py::ColPaliForRetrievalModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979791"
+ },
+ {
+ "test": "tests/models/colpali/test_modeling_colpali.py::ColPaliForRetrievalModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979791"
+ }
+ ]
+ },
+ "deepseek_v2": {
+ "single-gpu": [
+ {
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980167"
+ },
+ {
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980167"
+ },
+ {
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980167"
+ }
+ ]
+ },
+ "deepseek_vl_hybrid": {
+ "single-gpu": [
+ {
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attention_2_continue_generate_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980245"
+ },
+ {
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980245"
+ },
+ {
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attn_2_from_config",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980245"
+ }
+ ]
+ },
+ "diffllama": {
+ "single-gpu": [
+ {
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_generate_padding_right",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980397"
+ },
+ {
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980397"
+ },
+ {
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980397"
+ }
+ ]
+ },
+ "donut": {
+ "single-gpu": [
+ {
+ "test": "tests/models/donut/test_modeling_donut_swin.py::DonutSwinModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980556"
+ },
+ {
+ "test": "tests/models/donut/test_modeling_donut_swin.py::DonutSwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980556"
+ }
+ ]
+ },
+ "efficientloftr": {
+ "single-gpu": [
+ {
+ "test": "tests/models/efficientloftr/test_modeling_efficientloftr.py::EfficientLoFTRModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980690"
+ },
+ {
+ "test": "tests/models/efficientloftr/test_modeling_efficientloftr.py::EfficientLoFTRModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980690"
+ }
+ ]
+ },
+ "electra": {
+ "single-gpu": [
+ {
+ "test": "tests/models/electra/test_modeling_electra.py::ElectraModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test fails on the current CI run (commit: 64397a830188a35b5e9056036e13bb6ccfe434d8) but passes during the check.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980701"
+ }
+ ]
+ },
+ "emu3": {
+ "single-gpu": [
+ {
+ "test": "tests/models/emu3/test_modeling_emu3.py::Emu3Vision2TextModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980751"
+ },
+ {
+ "test": "tests/models/emu3/test_modeling_emu3.py::Emu3Vision2TextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980751"
+ }
+ ]
+ },
+ "exaone4": {
+ "single-gpu": [
+ {
+ "test": "tests/models/exaone4/test_modeling_exaone4.py::Exaone4ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980911"
+ }
+ ]
+ },
+ "falcon": {
+ "single-gpu": [
+ {
+ "test": "tests/models/falcon/test_modeling_falcon.py::FalconModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980957"
+ },
+ {
+ "test": "tests/models/falcon/test_modeling_falcon.py::FalconModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980957"
+ }
+ ]
+ },
+ "flex_olmo": {
+ "single-gpu": [
+ {
+ "test": "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981028"
+ },
+ {
+ "test": "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981028"
+ }
+ ]
+ },
+ "gemma3n": {
+ "single-gpu": [
+ {
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981191"
+ },
+ {
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981191"
+ },
+ {
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981191"
+ }
+ ]
+ },
+ "glm4_moe": {
+ "single-gpu": [
+ {
+ "test": "tests/models/glm4_moe/test_modeling_glm4_moe.py::Glm4MoeModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981304"
+ }
+ ]
+ },
+ "gpt2": {
+ "single-gpu": [
+ {
+ "test": "tests/models/gpt2/test_modeling_gpt2.py::GPT2ModelLanguageGenerationTest::test_flash_attn_2_generate_padding_left",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981364"
+ }
+ ]
+ },
+ "gpt_oss": {
+ "single-gpu": [
+ {
+ "test": "tests/models/gpt_oss/test_modeling_gpt_oss.py::GptOssModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981452"
+ },
+ {
+ "test": "tests/models/gpt_oss/test_modeling_gpt_oss.py::GptOssModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981452"
+ }
+ ]
+ },
+ "granitemoe": {
+ "single-gpu": [
+ {
+ "test": "tests/models/granitemoe/test_modeling_granitemoe.py::GraniteMoeModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981564"
+ }
+ ]
+ },
+ "granitemoehybrid": {
+ "single-gpu": [
+ {
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::BambaModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981575"
+ },
+ {
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::BambaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981575"
+ },
+ {
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::GraniteMoeHybridModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981575"
+ },
+ {
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::GraniteMoeHybridModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981575"
+ }
+ ]
+ },
+ "granitemoeshared": {
+ "single-gpu": [
+ {
+ "test": "tests/models/granitemoeshared/test_modeling_granitemoeshared.py::GraniteMoeSharedModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981628"
+ }
+ ]
+ },
+ "grounding_dino": {
+ "single-gpu": [
+ {
+ "test": "tests/models/grounding_dino/test_modeling_grounding_dino.py::GroundingDinoModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981661"
+ },
+ {
+ "test": "tests/models/grounding_dino/test_modeling_grounding_dino.py::GroundingDinoModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981661"
+ }
+ ]
+ },
+ "instructblip": {
+ "single-gpu": [
+ {
+ "test": "tests/models/instructblip/test_modeling_instructblip.py::InstructBlipForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982031"
+ },
+ {
+ "test": "tests/models/instructblip/test_modeling_instructblip.py::InstructBlipForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_from_config",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982031"
+ }
+ ]
+ },
+ "instructblipvideo": {
+ "single-gpu": [
+ {
+ "test": "tests/models/instructblipvideo/test_modeling_instructblipvideo.py::InstructBlipVideoForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982101"
+ },
+ {
+ "test": "tests/models/instructblipvideo/test_modeling_instructblipvideo.py::InstructBlipVideoForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_from_config",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982101"
+ }
+ ]
+ },
+ "janus": {
+ "single-gpu": [
+ {
+ "test": "tests/models/janus/test_modeling_janus.py::JanusVisionText2TextModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982159"
+ },
+ {
+ "test": "tests/models/janus/test_modeling_janus.py::JanusVisionText2TextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982159"
+ }
+ ]
+ },
+ "jetmoe": {
+ "single-gpu": [
+ {
+ "test": "tests/models/jetmoe/test_modeling_jetmoe.py::JetMoeModelTest::test_flash_attn_2_equivalence",
+ "commit": null,
+ "status": "flaky: test fails on the current CI run (commit: 64397a830188a35b5e9056036e13bb6ccfe434d8) but passes during the check.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982182"
+ },
+ {
+ "test": "tests/models/jetmoe/test_modeling_jetmoe.py::JetMoeModelTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982182"
+ }
+ ]
+ },
+ "kosmos2": {
+ "single-gpu": [
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attention_2_continue_generate_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_can_dispatch_composite_models",
+ "commit": null,
+ "status": "flaky: test fails on the current CI run (commit: 64397a830188a35b5e9056036e13bb6ccfe434d8) but passes during the check.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_from_config",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ },
+ {
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982209"
+ }
+ ]
+ },
+ "kosmos2_5": {
+ "single-gpu": [
+ {
+ "test": "tests/models/kosmos2_5/test_modeling_kosmos2_5.py::Kosmos2_5ModelTest::test_flash_attn_2_can_dispatch_composite_models",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982199"
+ }
+ ]
+ },
+ "kyutai_speech_to_text": {
+ "single-gpu": [
+ {
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982201"
+ },
+ {
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982201"
+ },
+ {
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982201"
+ }
+ ]
+ },
+ "lfm2": {
+ "single-gpu": [
+ {
+ "test": "tests/models/lfm2/test_modeling_lfm2.py::Lfm2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982376"
+ },
+ {
+ "test": "tests/models/lfm2/test_modeling_lfm2.py::Lfm2ModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982376"
+ }
+ ]
+ },
+ "lfm2_moe": {
+ "single-gpu": [
+ {
+ "test": "tests/models/lfm2_moe/test_modeling_lfm2_moe.py::Lfm2MoeModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982369"
+ },
+ {
+ "test": "tests/models/lfm2_moe/test_modeling_lfm2_moe.py::Lfm2MoeModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982369"
+ }
+ ]
+ },
+ "lfm2_vl": {
+ "single-gpu": [
+ {
+ "test": "tests/models/lfm2_vl/test_modeling_lfm2_vl.py::Lfm2VlModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982367"
+ },
+ {
+ "test": "tests/models/lfm2_vl/test_modeling_lfm2_vl.py::Lfm2VlModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982367"
+ }
+ ]
+ },
+ "llava_next": {
+ "single-gpu": [
+ {
+ "test": "tests/models/llava_next/test_modeling_llava_next.py::LlavaNextForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982498"
+ },
+ {
+ "test": "tests/models/llava_next/test_modeling_llava_next.py::LlavaNextForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982498"
+ }
+ ]
+ },
+ "llava_next_video": {
+ "single-gpu": [
+ {
+ "test": "tests/models/llava_next_video/test_modeling_llava_next_video.py::LlavaNextVideoForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982511"
+ },
+ {
+ "test": "tests/models/llava_next_video/test_modeling_llava_next_video.py::LlavaNextVideoForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982511"
+ }
+ ]
+ },
+ "llava_onevision": {
+ "single-gpu": [
+ {
+ "test": "tests/models/llava_onevision/test_modeling_llava_onevision.py::LlavaOnevisionForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982547"
+ },
+ {
+ "test": "tests/models/llava_onevision/test_modeling_llava_onevision.py::LlavaOnevisionForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982547"
+ }
+ ]
+ },
+ "mask2former": {
+ "single-gpu": [
+ {
+ "test": "tests/models/mask2former/test_modeling_mask2former.py::Mask2FormerModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982762"
+ },
+ {
+ "test": "tests/models/mask2former/test_modeling_mask2former.py::Mask2FormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982762"
+ }
+ ]
+ },
+ "maskformer": {
+ "single-gpu": [
+ {
+ "test": "tests/models/maskformer/test_modeling_maskformer.py::MaskFormerModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982746"
+ },
+ {
+ "test": "tests/models/maskformer/test_modeling_maskformer.py::MaskFormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982746"
+ },
+ {
+ "test": "tests/models/maskformer/test_modeling_maskformer_swin.py::MaskFormerSwinModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982746"
+ },
+ {
+ "test": "tests/models/maskformer/test_modeling_maskformer_swin.py::MaskFormerSwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982746"
+ }
+ ]
+ },
+ "mixtral": {
+ "single-gpu": [
+ {
+ "test": "tests/models/mixtral/test_modeling_mixtral.py::MixtralModelTest::test_flash_attn_2_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978950"
+ }
+ ]
+ },
+ "mllama": {
+ "single-gpu": [
+ {
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForCausalLMModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978963"
+ },
+ {
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978963"
+ },
+ {
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978963"
+ },
+ {
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978963"
+ }
+ ]
+ },
+ "mm_grounding_dino": {
+ "single-gpu": [
+ {
+ "test": "tests/models/mm_grounding_dino/test_modeling_mm_grounding_dino.py::MMGroundingDinoModelTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978976"
+ },
+ {
+ "test": "tests/models/mm_grounding_dino/test_modeling_mm_grounding_dino.py::MMGroundingDinoModelTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426978976"
+ }
+ ]
+ },
+ "moshi": {
+ "single-gpu": [
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiDecoderTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiDecoderTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_eager_matches_fa2_generate",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_fp32_ln",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_from_config",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_inference_equivalence",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ },
+ {
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_inference_equivalence_right_padding",
+ "commit": null,
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
+ "pr_number": null,
+ "author": null,
+ "merged_by": null,
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979243"
+ }
+ ]
+ },
+ "nemotron": {
+ "single-gpu": [
+ {
+ "test": "tests/models/nemotron/test_modeling_nemotron.py::NemotronModelTest::test_flash_attn_2_equivalence",
1096
+ "commit": null,
1097
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1098
+ "pr_number": null,
1099
+ "author": null,
1100
+ "merged_by": null,
1101
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979489"
1102
+ }
1103
+ ]
1104
+ },
1105
+ "olmo": {
1106
+ "single-gpu": [
1107
+ {
1108
+ "test": "tests/models/olmo/test_modeling_olmo.py::OlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1109
+ "commit": null,
1110
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1111
+ "pr_number": null,
1112
+ "author": null,
1113
+ "merged_by": null,
1114
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979493"
1115
+ }
1116
+ ]
1117
+ },
1118
+ "olmo2": {
1119
+ "single-gpu": [
1120
+ {
1121
+ "test": "tests/models/olmo2/test_modeling_olmo2.py::Olmo2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1122
+ "commit": null,
1123
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1124
+ "pr_number": null,
1125
+ "author": null,
1126
+ "merged_by": null,
1127
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979496"
1128
+ }
1129
+ ]
1130
+ },
1131
+ "omdet_turbo": {
1132
+ "single-gpu": [
1133
+ {
1134
+ "test": "tests/models/omdet_turbo/test_modeling_omdet_turbo.py::OmDetTurboModelTest::test_flash_attn_2_inference_equivalence",
1135
+ "commit": null,
1136
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1137
+ "pr_number": null,
1138
+ "author": null,
1139
+ "merged_by": null,
1140
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979661"
1141
+ },
1142
+ {
1143
+ "test": "tests/models/omdet_turbo/test_modeling_omdet_turbo.py::OmDetTurboModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1144
+ "commit": null,
1145
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1146
+ "pr_number": null,
1147
+ "author": null,
1148
+ "merged_by": null,
1149
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979661"
1150
+ }
1151
+ ]
1152
+ },
1153
+ "oneformer": {
1154
+ "single-gpu": [
1155
+ {
1156
+ "test": "tests/models/oneformer/test_modeling_oneformer.py::OneFormerModelTest::test_flash_attn_2_inference_equivalence",
1157
+ "commit": null,
1158
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1159
+ "pr_number": null,
1160
+ "author": null,
1161
+ "merged_by": null,
1162
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979671"
1163
+ },
1164
+ {
1165
+ "test": "tests/models/oneformer/test_modeling_oneformer.py::OneFormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1166
+ "commit": null,
1167
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1168
+ "pr_number": null,
1169
+ "author": null,
1170
+ "merged_by": null,
1171
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979671"
1172
+ }
1173
+ ]
1174
+ },
1175
+ "paligemma": {
1176
+ "single-gpu": [
1177
+ {
1178
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_from_config",
1179
+ "commit": null,
1180
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1181
+ "pr_number": null,
1182
+ "author": null,
1183
+ "merged_by": null,
1184
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979663"
1185
+ },
1186
+ {
1187
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
1188
+ "commit": null,
1189
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1190
+ "pr_number": null,
1191
+ "author": null,
1192
+ "merged_by": null,
1193
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979663"
1194
+ },
1195
+ {
1196
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1197
+ "commit": null,
1198
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1199
+ "pr_number": null,
1200
+ "author": null,
1201
+ "merged_by": null,
1202
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979663"
1203
+ }
1204
+ ]
1205
+ },
1206
+ "paligemma2": {
1207
+ "single-gpu": [
1208
+ {
1209
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_from_config",
1210
+ "commit": null,
1211
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1212
+ "pr_number": null,
1213
+ "author": null,
1214
+ "merged_by": null,
1215
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979705"
1216
+ },
1217
+ {
1218
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
1219
+ "commit": null,
1220
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1221
+ "pr_number": null,
1222
+ "author": null,
1223
+ "merged_by": null,
1224
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979705"
1225
+ },
1226
+ {
1227
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1228
+ "commit": null,
1229
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1230
+ "pr_number": null,
1231
+ "author": null,
1232
+ "merged_by": null,
1233
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979705"
1234
+ }
1235
+ ]
1236
+ },
1237
+ "pegasus_x": {
1238
+ "single-gpu": [
1239
+ {
1240
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXModelTest::test_flash_attn_2_inference_equivalence",
1241
+ "commit": null,
1242
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1243
+ "pr_number": null,
1244
+ "author": null,
1245
+ "merged_by": null,
1246
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979825"
1247
+ },
1248
+ {
1249
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1250
+ "commit": null,
1251
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1252
+ "pr_number": null,
1253
+ "author": null,
1254
+ "merged_by": null,
1255
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979825"
1256
+ },
1257
+ {
1258
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXStandaloneDecoderModelTest::test_flash_attn_2_inference_equivalence",
1259
+ "commit": null,
1260
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1261
+ "pr_number": null,
1262
+ "author": null,
1263
+ "merged_by": null,
1264
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979825"
1265
+ },
1266
+ {
1267
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXStandaloneDecoderModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1268
+ "commit": null,
1269
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1270
+ "pr_number": null,
1271
+ "author": null,
1272
+ "merged_by": null,
1273
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979825"
1274
+ }
1275
+ ]
1276
+ },
1277
+ "perception_lm": {
1278
+ "single-gpu": [
1279
+ {
1280
+ "test": "tests/models/perception_lm/test_modeling_perception_lm.py::PerceptionLMForConditionalGenerationModelTest::test_flash_attention_2_continue_generate_with_position_ids",
1281
+ "commit": null,
1282
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1283
+ "pr_number": null,
1284
+ "author": null,
1285
+ "merged_by": null,
1286
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979850"
1287
+ }
1288
+ ]
1289
+ },
1290
+ "phi": {
1291
+ "single-gpu": [
1292
+ {
1293
+ "test": "tests/models/phi/test_modeling_phi.py::PhiModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1294
+ "commit": null,
1295
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1296
+ "pr_number": null,
1297
+ "author": null,
1298
+ "merged_by": null,
1299
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979858"
1300
+ }
1301
+ ]
1302
+ },
1303
+ "phimoe": {
1304
+ "single-gpu": [
1305
+ {
1306
+ "test": "tests/models/phimoe/test_modeling_phimoe.py::PhimoeModelTest::test_flash_attn_2_equivalence",
1307
+ "commit": null,
1308
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1309
+ "pr_number": null,
1310
+ "author": null,
1311
+ "merged_by": null,
1312
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426979979"
1313
+ }
1314
+ ]
1315
+ },
1316
+ "pixtral": {
1317
+ "single-gpu": [
1318
+ {
1319
+ "test": "tests/models/pixtral/test_modeling_pixtral.py::PixtralVisionModelModelTest::test_flash_attn_2_inference_equivalence",
1320
+ "commit": null,
1321
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1322
+ "pr_number": null,
1323
+ "author": null,
1324
+ "merged_by": null,
1325
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980007"
1326
+ },
1327
+ {
1328
+ "test": "tests/models/pixtral/test_modeling_pixtral.py::PixtralVisionModelModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1329
+ "commit": null,
1330
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1331
+ "pr_number": null,
1332
+ "author": null,
1333
+ "merged_by": null,
1334
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980007"
1335
+ }
1336
+ ]
1337
+ },
1338
+ "qwen2_5_vl": {
1339
+ "single-gpu": [
1340
+ {
1341
+ "test": "tests/models/qwen2_5_vl/test_modeling_qwen2_5_vl.py::Qwen2_5_VLIntegrationTest::test_small_model_integration_test_batch_wo_image_flashatt2",
1342
+ "commit": null,
1343
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1344
+ "pr_number": null,
1345
+ "author": null,
1346
+ "merged_by": null,
1347
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980179"
1348
+ }
1349
+ ]
1350
+ },
1351
+ "qwen3_omni_moe": {
1352
+ "single-gpu": [
1353
+ {
1354
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniThinkerForConditionalGenerationModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1355
+ "commit": null,
1356
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1357
+ "pr_number": null,
1358
+ "author": null,
1359
+ "merged_by": null,
1360
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980332"
1361
+ },
1362
+ {
1363
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniThinkerForConditionalGenerationModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
1364
+ "commit": null,
1365
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1366
+ "pr_number": null,
1367
+ "author": null,
1368
+ "merged_by": null,
1369
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980332"
1370
+ },
1371
+ {
1372
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniModelIntegrationTest::test_small_model_integration_test_batch_flashatt2",
1373
+ "commit": null,
1374
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1375
+ "pr_number": null,
1376
+ "author": null,
1377
+ "merged_by": null,
1378
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980332"
1379
+ }
1380
+ ]
1381
+ },
1382
+ "roberta_prelayernorm": {
1383
+ "single-gpu": [
1384
+ {
1385
+ "test": "tests/models/roberta_prelayernorm/test_modeling_roberta_prelayernorm.py::RobertaPreLayerNormModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1386
+ "commit": null,
1387
+ "status": "flaky: test fails on the current CI run (commit: 64397a830188a35b5e9056036e13bb6ccfe434d8) but passes during the check.",
1388
+ "pr_number": null,
1389
+ "author": null,
1390
+ "merged_by": null,
1391
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980538"
1392
+ }
1393
+ ]
1394
+ },
1395
+ "sam2": {
1396
+ "single-gpu": [
1397
+ {
1398
+ "test": "tests/models/sam2/test_modeling_sam2.py::Sam2ModelTest::test_flash_attn_2_can_dispatch_composite_models",
1399
+ "commit": null,
1400
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1401
+ "pr_number": null,
1402
+ "author": null,
1403
+ "merged_by": null,
1404
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980709"
1405
+ }
1406
+ ]
1407
+ },
1408
+ "smollm3": {
1409
+ "single-gpu": [
1410
+ {
1411
+ "test": "tests/models/smollm3/test_modeling_smollm3.py::SmolLM3IntegrationTest::test_model_3b_long_prompt",
1412
+ "commit": null,
1413
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1414
+ "pr_number": null,
1415
+ "author": null,
1416
+ "merged_by": null,
1417
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426980932"
1418
+ }
1419
+ ]
1420
+ },
1421
+ "squeezebert": {
1422
+ "single-gpu": [
1423
+ {
1424
+ "test": "tests/models/squeezebert/test_modeling_squeezebert.py::SqueezeBertModelTest::test_flash_attn_2_inference_equivalence",
1425
+ "commit": null,
1426
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1427
+ "pr_number": null,
1428
+ "author": null,
1429
+ "merged_by": null,
1430
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981010"
1431
+ },
1432
+ {
1433
+ "test": "tests/models/squeezebert/test_modeling_squeezebert.py::SqueezeBertModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1434
+ "commit": null,
1435
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1436
+ "pr_number": null,
1437
+ "author": null,
1438
+ "merged_by": null,
1439
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981010"
1440
+ }
1441
+ ]
1442
+ },
1443
+ "starcoder2": {
1444
+ "single-gpu": [
1445
+ {
1446
+ "test": "tests/models/starcoder2/test_modeling_starcoder2.py::Starcoder2IntegrationTest::test_starcoder2_batched_generation_fa2",
1447
+ "commit": null,
1448
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1449
+ "pr_number": null,
1450
+ "author": null,
1451
+ "merged_by": null,
1452
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981080"
1453
+ }
1454
+ ]
1455
+ },
1456
+ "swin": {
1457
+ "single-gpu": [
1458
+ {
1459
+ "test": "tests/models/swin/test_modeling_swin.py::SwinModelTest::test_flash_attn_2_inference_equivalence",
1460
+ "commit": null,
1461
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1462
+ "pr_number": null,
1463
+ "author": null,
1464
+ "merged_by": null,
1465
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981143"
1466
+ },
1467
+ {
1468
+ "test": "tests/models/swin/test_modeling_swin.py::SwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1469
+ "commit": null,
1470
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1471
+ "pr_number": null,
1472
+ "author": null,
1473
+ "merged_by": null,
1474
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981143"
1475
+ }
1476
+ ]
1477
+ },
1478
+ "swin2sr": {
1479
+ "single-gpu": [
1480
+ {
1481
+ "test": "tests/models/swin2sr/test_modeling_swin2sr.py::Swin2SRModelTest::test_flash_attn_2_inference_equivalence",
1482
+ "commit": null,
1483
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1484
+ "pr_number": null,
1485
+ "author": null,
1486
+ "merged_by": null,
1487
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981134"
1488
+ },
1489
+ {
1490
+ "test": "tests/models/swin2sr/test_modeling_swin2sr.py::Swin2SRModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1491
+ "commit": null,
1492
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1493
+ "pr_number": null,
1494
+ "author": null,
1495
+ "merged_by": null,
1496
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981134"
1497
+ }
1498
+ ]
1499
+ },
1500
+ "swinv2": {
1501
+ "single-gpu": [
1502
+ {
1503
+ "test": "tests/models/swinv2/test_modeling_swinv2.py::Swinv2ModelTest::test_flash_attn_2_inference_equivalence",
1504
+ "commit": null,
1505
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1506
+ "pr_number": null,
1507
+ "author": null,
1508
+ "merged_by": null,
1509
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981141"
1510
+ },
1511
+ {
1512
+ "test": "tests/models/swinv2/test_modeling_swinv2.py::Swinv2ModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1513
+ "commit": null,
1514
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1515
+ "pr_number": null,
1516
+ "author": null,
1517
+ "merged_by": null,
1518
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981141"
1519
+ }
1520
+ ]
1521
+ },
1522
+ "t5gemma": {
1523
+ "single-gpu": [
1524
+ {
1525
+ "test": "tests/models/t5gemma/test_modeling_t5gemma.py::T5GemmaModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
1526
+ "commit": null,
1527
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1528
+ "pr_number": null,
1529
+ "author": null,
1530
+ "merged_by": null,
1531
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426981212"
1532
+ }
1533
+ ]
1534
+ },
1535
+ "zamba": {
1536
+ "single-gpu": [
1537
+ {
1538
+ "test": "tests/models/zamba/test_modeling_zamba.py::ZambaModelTest::test_flash_attn_2_fp32_ln",
1539
+ "commit": null,
1540
+ "status": "flaky: test passed in the previous run (commit: b9f90dc388fd415a2ba2a6a31a372f451d4a4eed) but failed (on the same commit) during the check of the current run.",
1541
+ "pr_number": null,
1542
+ "author": null,
1543
+ "merged_by": null,
1544
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/19056156009/job/54426982378"
1545
+ }
1546
+ ]
1547
+ }
1548
+ }