hf-transformers-bot committed on
Commit 84b8127 · verified · 1 Parent(s): df650b8

Upload 2025-10-28/ci_results_run_models_gpu/new_failures_with_bad_commit_grouped_by_authors.json with huggingface_hub
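For reference, an upload like this is normally performed with the huggingface_hub Python client. The sketch below shows one way it could be done, using the path_in_repo from the commit message; the local file name, repo_id, and repo_type here are illustrative assumptions, not details taken from this commit.

from huggingface_hub import HfApi

# Authentication relies on a previously saved token or the HF_TOKEN environment variable.
api = HfApi()

api.upload_file(
    # Local report produced by the CI job (assumed file name).
    path_or_fileobj="new_failures_with_bad_commit_grouped_by_authors.json",
    # Destination path inside the repo, matching the commit message.
    path_in_repo="2025-10-28/ci_results_run_models_gpu/new_failures_with_bad_commit_grouped_by_authors.json",
    repo_id="hf-internal-testing/transformers_daily_ci",  # assumed repo id, for illustration only
    repo_type="dataset",  # assumed; CI reports are typically stored in a dataset repo
    commit_message=(
        "Upload 2025-10-28/ci_results_run_models_gpu/"
        "new_failures_with_bad_commit_grouped_by_authors.json with huggingface_hub"
    ),
)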

2025-10-28/ci_results_run_models_gpu/new_failures_with_bad_commit_grouped_by_authors.json ADDED
@@ -0,0 +1,1554 @@
1
+ {
2
+ "null": {
3
+ "bamba": {
4
+ "single-gpu": [
5
+ {
6
+ "test": "tests/models/bamba/test_modeling_bamba.py::BambaModelTest::test_flash_attn_2_inference_equivalence",
7
+ "commit": null,
8
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
9
+ "pr_number": null,
10
+ "author": null,
11
+ "merged_by": null,
12
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663714"
13
+ },
14
+ {
15
+ "test": "tests/models/bamba/test_modeling_bamba.py::BambaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
16
+ "commit": null,
17
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
18
+ "pr_number": null,
19
+ "author": null,
20
+ "merged_by": null,
21
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663714"
22
+ }
23
+ ]
24
+ },
25
+ "bark": {
26
+ "single-gpu": [
27
+ {
28
+ "test": "tests/models/bark/test_modeling_bark.py::BarkSemanticModelTest::test_eager_matches_fa2_generate",
29
+ "commit": null,
30
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
31
+ "pr_number": null,
32
+ "author": null,
33
+ "merged_by": null,
34
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663644"
35
+ },
36
+ {
37
+ "test": "tests/models/bark/test_modeling_bark.py::BarkSemanticModelTest::test_flash_attention_2_continue_generate_with_position_ids",
38
+ "commit": null,
39
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
40
+ "pr_number": null,
41
+ "author": null,
42
+ "merged_by": null,
43
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663644"
44
+ },
45
+ {
46
+ "test": "tests/models/bark/test_modeling_bark.py::BarkCoarseModelTest::test_eager_matches_fa2_generate",
47
+ "commit": null,
48
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
49
+ "pr_number": null,
50
+ "author": null,
51
+ "merged_by": null,
52
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663644"
53
+ },
54
+ {
55
+ "test": "tests/models/bark/test_modeling_bark.py::BarkCoarseModelTest::test_flash_attention_2_continue_generate_with_position_ids",
56
+ "commit": null,
57
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
58
+ "pr_number": null,
59
+ "author": null,
60
+ "merged_by": null,
61
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663644"
62
+ }
63
+ ]
64
+ },
65
+ "blt": {
66
+ "single-gpu": [
67
+ {
68
+ "test": "tests/models/blt/test_modeling_blt.py::BltModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
69
+ "commit": null,
70
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
71
+ "pr_number": null,
72
+ "author": null,
73
+ "merged_by": null,
74
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664661"
75
+ }
76
+ ]
77
+ },
78
+ "chameleon": {
79
+ "single-gpu": [
80
+ {
81
+ "test": "tests/models/chameleon/test_modeling_chameleon.py::ChameleonModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
82
+ "commit": null,
83
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
84
+ "pr_number": null,
85
+ "author": null,
86
+ "merged_by": null,
87
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664990"
88
+ }
89
+ ]
90
+ },
91
+ "clap": {
92
+ "single-gpu": [
93
+ {
94
+ "test": "tests/models/clap/test_modeling_clap.py::ClapAudioModelTest::test_flash_attn_2_inference_equivalence",
95
+ "commit": null,
96
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
97
+ "pr_number": null,
98
+ "author": null,
99
+ "merged_by": null,
100
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665287"
101
+ },
102
+ {
103
+ "test": "tests/models/clap/test_modeling_clap.py::ClapAudioModelTest::test_flash_attn_2_inference_equivalence_right_padding",
104
+ "commit": null,
105
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
106
+ "pr_number": null,
107
+ "author": null,
108
+ "merged_by": null,
109
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665287"
110
+ },
111
+ {
112
+ "test": "tests/models/clap/test_modeling_clap.py::ClapModelTest::test_flash_attn_2_inference_equivalence",
113
+ "commit": null,
114
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
115
+ "pr_number": null,
116
+ "author": null,
117
+ "merged_by": null,
118
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665287"
119
+ },
120
+ {
121
+ "test": "tests/models/clap/test_modeling_clap.py::ClapModelTest::test_flash_attn_2_inference_equivalence_right_padding",
122
+ "commit": null,
123
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
124
+ "pr_number": null,
125
+ "author": null,
126
+ "merged_by": null,
127
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665287"
128
+ }
129
+ ]
130
+ },
131
+ "colpali": {
132
+ "single-gpu": [
133
+ {
134
+ "test": "tests/models/colpali/test_modeling_colpali.py::ColPaliForRetrievalModelTest::test_flash_attn_2_inference_equivalence",
135
+ "commit": null,
136
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
137
+ "pr_number": null,
138
+ "author": null,
139
+ "merged_by": null,
140
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665562"
141
+ },
142
+ {
143
+ "test": "tests/models/colpali/test_modeling_colpali.py::ColPaliForRetrievalModelTest::test_flash_attn_2_inference_equivalence_right_padding",
144
+ "commit": null,
145
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
146
+ "pr_number": null,
147
+ "author": null,
148
+ "merged_by": null,
149
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665562"
150
+ }
151
+ ]
152
+ },
153
+ "data2vec": {
154
+ "single-gpu": [
155
+ {
156
+ "test": "tests/models/data2vec/test_modeling_data2vec_text.py::Data2VecTextModelTest::test_flash_attn_2_inference_equivalence",
157
+ "commit": null,
158
+ "status": "flaky: test fails on the current CI run (commit: 1f0b490a2c42eb129dccc69031ccb537058689c4) but passes during the check.",
159
+ "pr_number": null,
160
+ "author": null,
161
+ "merged_by": null,
162
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666183"
163
+ }
164
+ ]
165
+ },
166
+ "deepseek_v2": {
167
+ "single-gpu": [
168
+ {
169
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
170
+ "commit": null,
171
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
172
+ "pr_number": null,
173
+ "author": null,
174
+ "merged_by": null,
175
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666379"
176
+ },
177
+ {
178
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
179
+ "commit": null,
180
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
181
+ "pr_number": null,
182
+ "author": null,
183
+ "merged_by": null,
184
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666379"
185
+ },
186
+ {
187
+ "test": "tests/models/deepseek_v2/test_modeling_deepseek_v2.py::DeepseekV2ModelTest::test_flash_attn_2_fp32_ln",
188
+ "commit": null,
189
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
190
+ "pr_number": null,
191
+ "author": null,
192
+ "merged_by": null,
193
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666379"
194
+ }
195
+ ]
196
+ },
197
+ "deepseek_vl_hybrid": {
198
+ "single-gpu": [
199
+ {
200
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attention_2_continue_generate_with_position_ids",
201
+ "commit": null,
202
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
203
+ "pr_number": null,
204
+ "author": null,
205
+ "merged_by": null,
206
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666646"
207
+ },
208
+ {
209
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attn_2_fp32_ln",
210
+ "commit": null,
211
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
212
+ "pr_number": null,
213
+ "author": null,
214
+ "merged_by": null,
215
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666646"
216
+ },
217
+ {
218
+ "test": "tests/models/deepseek_vl_hybrid/test_modeling_deepseek_vl_hybrid.py::DeepseekVLHybridModelTest::test_flash_attn_2_from_config",
219
+ "commit": null,
220
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
221
+ "pr_number": null,
222
+ "author": null,
223
+ "merged_by": null,
224
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666646"
225
+ }
226
+ ]
227
+ },
228
+ "diffllama": {
229
+ "single-gpu": [
230
+ {
231
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_generate_padding_right",
232
+ "commit": null,
233
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
234
+ "pr_number": null,
235
+ "author": null,
236
+ "merged_by": null,
237
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666821"
238
+ },
239
+ {
240
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_inference_equivalence",
241
+ "commit": null,
242
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
243
+ "pr_number": null,
244
+ "author": null,
245
+ "merged_by": null,
246
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666821"
247
+ },
248
+ {
249
+ "test": "tests/models/diffllama/test_modeling_diffllama.py::DiffLlamaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
250
+ "commit": null,
251
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
252
+ "pr_number": null,
253
+ "author": null,
254
+ "merged_by": null,
255
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666821"
256
+ }
257
+ ]
258
+ },
259
+ "donut": {
260
+ "single-gpu": [
261
+ {
262
+ "test": "tests/models/donut/test_modeling_donut_swin.py::DonutSwinModelTest::test_flash_attn_2_inference_equivalence",
263
+ "commit": null,
264
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
265
+ "pr_number": null,
266
+ "author": null,
267
+ "merged_by": null,
268
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667514"
269
+ },
270
+ {
271
+ "test": "tests/models/donut/test_modeling_donut_swin.py::DonutSwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
272
+ "commit": null,
273
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
274
+ "pr_number": null,
275
+ "author": null,
276
+ "merged_by": null,
277
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667514"
278
+ }
279
+ ]
280
+ },
281
+ "efficientloftr": {
282
+ "single-gpu": [
283
+ {
284
+ "test": "tests/models/efficientloftr/test_modeling_efficientloftr.py::EfficientLoFTRModelTest::test_flash_attn_2_inference_equivalence",
285
+ "commit": null,
286
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
287
+ "pr_number": null,
288
+ "author": null,
289
+ "merged_by": null,
290
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667730"
291
+ },
292
+ {
293
+ "test": "tests/models/efficientloftr/test_modeling_efficientloftr.py::EfficientLoFTRModelTest::test_flash_attn_2_inference_equivalence_right_padding",
294
+ "commit": null,
295
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
296
+ "pr_number": null,
297
+ "author": null,
298
+ "merged_by": null,
299
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667730"
300
+ }
301
+ ]
302
+ },
303
+ "emu3": {
304
+ "single-gpu": [
305
+ {
306
+ "test": "tests/models/emu3/test_modeling_emu3.py::Emu3Vision2TextModelTest::test_flash_attn_2_inference_equivalence",
307
+ "commit": null,
308
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
309
+ "pr_number": null,
310
+ "author": null,
311
+ "merged_by": null,
312
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667759"
313
+ },
314
+ {
315
+ "test": "tests/models/emu3/test_modeling_emu3.py::Emu3Vision2TextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
316
+ "commit": null,
317
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
318
+ "pr_number": null,
319
+ "author": null,
320
+ "merged_by": null,
321
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667759"
322
+ }
323
+ ]
324
+ },
325
+ "exaone4": {
326
+ "single-gpu": [
327
+ {
328
+ "test": "tests/models/exaone4/test_modeling_exaone4.py::Exaone4ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
329
+ "commit": null,
330
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
331
+ "pr_number": null,
332
+ "author": null,
333
+ "merged_by": null,
334
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850669115"
335
+ }
336
+ ]
337
+ },
338
+ "falcon": {
339
+ "single-gpu": [
340
+ {
341
+ "test": "tests/models/falcon/test_modeling_falcon.py::FalconModelTest::test_flash_attn_2_inference_equivalence",
342
+ "commit": null,
343
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
344
+ "pr_number": null,
345
+ "author": null,
346
+ "merged_by": null,
347
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850669084"
348
+ },
349
+ {
350
+ "test": "tests/models/falcon/test_modeling_falcon.py::FalconModelTest::test_flash_attn_2_inference_equivalence_right_padding",
351
+ "commit": null,
352
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
353
+ "pr_number": null,
354
+ "author": null,
355
+ "merged_by": null,
356
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850669084"
357
+ }
358
+ ]
359
+ },
360
+ "flex_olmo": {
361
+ "single-gpu": [
362
+ {
363
+ "test": "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
364
+ "commit": null,
365
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
366
+ "pr_number": null,
367
+ "author": null,
368
+ "merged_by": null,
369
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850670812"
370
+ },
371
+ {
372
+ "test": "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
373
+ "commit": null,
374
+ "status": "flaky: test fails on the current CI run (commit: 1f0b490a2c42eb129dccc69031ccb537058689c4) but passes during the check.",
375
+ "pr_number": null,
376
+ "author": null,
377
+ "merged_by": null,
378
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850670812"
379
+ },
380
+ {
381
+ "test": "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
382
+ "commit": null,
383
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
384
+ "pr_number": null,
385
+ "author": null,
386
+ "merged_by": null,
387
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850670812"
388
+ }
389
+ ]
390
+ },
391
+ "gemma3n": {
392
+ "single-gpu": [
393
+ {
394
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_equivalence",
395
+ "commit": null,
396
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
397
+ "pr_number": null,
398
+ "author": null,
399
+ "merged_by": null,
400
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672783"
401
+ },
402
+ {
403
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_inference_equivalence",
404
+ "commit": null,
405
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
406
+ "pr_number": null,
407
+ "author": null,
408
+ "merged_by": null,
409
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672783"
410
+ },
411
+ {
412
+ "test": "tests/models/gemma3n/test_modeling_gemma3n.py::Gemma3nTextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
413
+ "commit": null,
414
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
415
+ "pr_number": null,
416
+ "author": null,
417
+ "merged_by": null,
418
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672783"
419
+ }
420
+ ]
421
+ },
422
+ "glm4": {
423
+ "single-gpu": [
424
+ {
425
+ "test": "tests/models/glm4/test_modeling_glm4.py::Glm4ModelTest::test_flash_attn_2_equivalence",
426
+ "commit": null,
427
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
428
+ "pr_number": null,
429
+ "author": null,
430
+ "merged_by": null,
431
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672808"
432
+ }
433
+ ]
434
+ },
435
+ "glm4_moe": {
436
+ "single-gpu": [
437
+ {
438
+ "test": "tests/models/glm4_moe/test_modeling_glm4_moe.py::Glm4MoeModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
439
+ "commit": null,
440
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
441
+ "pr_number": null,
442
+ "author": null,
443
+ "merged_by": null,
444
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672789"
445
+ }
446
+ ]
447
+ },
448
+ "gpt2": {
449
+ "single-gpu": [
450
+ {
451
+ "test": "tests/models/gpt2/test_modeling_gpt2.py::GPT2ModelLanguageGenerationTest::test_flash_attn_2_generate_padding_left",
452
+ "commit": null,
453
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
454
+ "pr_number": null,
455
+ "author": null,
456
+ "merged_by": null,
457
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674711"
458
+ }
459
+ ]
460
+ },
461
+ "gpt_oss": {
462
+ "single-gpu": [
463
+ {
464
+ "test": "tests/models/gpt_oss/test_modeling_gpt_oss.py::GptOssModelTest::test_flash_attn_2_inference_equivalence",
465
+ "commit": null,
466
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
467
+ "pr_number": null,
468
+ "author": null,
469
+ "merged_by": null,
470
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674760"
471
+ },
472
+ {
473
+ "test": "tests/models/gpt_oss/test_modeling_gpt_oss.py::GptOssModelTest::test_flash_attn_2_inference_equivalence_right_padding",
474
+ "commit": null,
475
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
476
+ "pr_number": null,
477
+ "author": null,
478
+ "merged_by": null,
479
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674760"
480
+ }
481
+ ]
482
+ },
483
+ "granitemoe": {
484
+ "single-gpu": [
485
+ {
486
+ "test": "tests/models/granitemoe/test_modeling_granitemoe.py::GraniteMoeModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
487
+ "commit": null,
488
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
489
+ "pr_number": null,
490
+ "author": null,
491
+ "merged_by": null,
492
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677268"
493
+ }
494
+ ]
495
+ },
496
+ "granitemoehybrid": {
497
+ "single-gpu": [
498
+ {
499
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::BambaModelTest::test_flash_attn_2_inference_equivalence",
500
+ "commit": null,
501
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
502
+ "pr_number": null,
503
+ "author": null,
504
+ "merged_by": null,
505
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677273"
506
+ },
507
+ {
508
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::BambaModelTest::test_flash_attn_2_inference_equivalence_right_padding",
509
+ "commit": null,
510
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
511
+ "pr_number": null,
512
+ "author": null,
513
+ "merged_by": null,
514
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677273"
515
+ },
516
+ {
517
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::GraniteMoeHybridModelTest::test_flash_attn_2_inference_equivalence",
518
+ "commit": null,
519
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
520
+ "pr_number": null,
521
+ "author": null,
522
+ "merged_by": null,
523
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677273"
524
+ },
525
+ {
526
+ "test": "tests/models/granitemoehybrid/test_modeling_granitemoehybrid.py::GraniteMoeHybridModelTest::test_flash_attn_2_inference_equivalence_right_padding",
527
+ "commit": null,
528
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
529
+ "pr_number": null,
530
+ "author": null,
531
+ "merged_by": null,
532
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677273"
533
+ }
534
+ ]
535
+ },
536
+ "granitemoeshared": {
537
+ "single-gpu": [
538
+ {
539
+ "test": "tests/models/granitemoeshared/test_modeling_granitemoeshared.py::GraniteMoeSharedModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
540
+ "commit": null,
541
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
542
+ "pr_number": null,
543
+ "author": null,
544
+ "merged_by": null,
545
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677318"
546
+ }
547
+ ]
548
+ },
549
+ "grounding_dino": {
550
+ "single-gpu": [
551
+ {
552
+ "test": "tests/models/grounding_dino/test_modeling_grounding_dino.py::GroundingDinoModelTest::test_flash_attn_2_inference_equivalence",
553
+ "commit": null,
554
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
555
+ "pr_number": null,
556
+ "author": null,
557
+ "merged_by": null,
558
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677267"
559
+ },
560
+ {
561
+ "test": "tests/models/grounding_dino/test_modeling_grounding_dino.py::GroundingDinoModelTest::test_flash_attn_2_inference_equivalence_right_padding",
562
+ "commit": null,
563
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
564
+ "pr_number": null,
565
+ "author": null,
566
+ "merged_by": null,
567
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850677267"
568
+ }
569
+ ]
570
+ },
571
+ "instructblip": {
572
+ "single-gpu": [
573
+ {
574
+ "test": "tests/models/instructblip/test_modeling_instructblip.py::InstructBlipForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_fp32_ln",
575
+ "commit": null,
576
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
577
+ "pr_number": null,
578
+ "author": null,
579
+ "merged_by": null,
580
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681250"
581
+ },
582
+ {
583
+ "test": "tests/models/instructblip/test_modeling_instructblip.py::InstructBlipForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_from_config",
584
+ "commit": null,
585
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
586
+ "pr_number": null,
587
+ "author": null,
588
+ "merged_by": null,
589
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681250"
590
+ }
591
+ ]
592
+ },
593
+ "instructblipvideo": {
594
+ "single-gpu": [
595
+ {
596
+ "test": "tests/models/instructblipvideo/test_modeling_instructblipvideo.py::InstructBlipVideoForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_fp32_ln",
597
+ "commit": null,
598
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
599
+ "pr_number": null,
600
+ "author": null,
601
+ "merged_by": null,
602
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681235"
603
+ },
604
+ {
605
+ "test": "tests/models/instructblipvideo/test_modeling_instructblipvideo.py::InstructBlipVideoForConditionalGenerationDecoderOnlyTest::test_flash_attn_2_from_config",
606
+ "commit": null,
607
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
608
+ "pr_number": null,
609
+ "author": null,
610
+ "merged_by": null,
611
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681235"
612
+ }
613
+ ]
614
+ },
615
+ "janus": {
616
+ "single-gpu": [
617
+ {
618
+ "test": "tests/models/janus/test_modeling_janus.py::JanusVisionText2TextModelTest::test_flash_attn_2_inference_equivalence",
619
+ "commit": null,
620
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
621
+ "pr_number": null,
622
+ "author": null,
623
+ "merged_by": null,
624
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681283"
625
+ },
626
+ {
627
+ "test": "tests/models/janus/test_modeling_janus.py::JanusVisionText2TextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
628
+ "commit": null,
629
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
630
+ "pr_number": null,
631
+ "author": null,
632
+ "merged_by": null,
633
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681283"
634
+ }
635
+ ]
636
+ },
637
+ "jetmoe": {
638
+ "single-gpu": [
639
+ {
640
+ "test": "tests/models/jetmoe/test_modeling_jetmoe.py::JetMoeModelTest::test_flash_attn_2_equivalence",
641
+ "commit": null,
642
+ "status": "flaky: test fails on the current CI run (commit: 1f0b490a2c42eb129dccc69031ccb537058689c4) but passes during the check.",
643
+ "pr_number": null,
644
+ "author": null,
645
+ "merged_by": null,
646
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681272"
647
+ },
648
+ {
649
+ "test": "tests/models/jetmoe/test_modeling_jetmoe.py::JetMoeModelTest::test_flash_attn_2_fp32_ln",
650
+ "commit": null,
651
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
652
+ "pr_number": null,
653
+ "author": null,
654
+ "merged_by": null,
655
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681272"
656
+ }
657
+ ]
658
+ },
659
+ "kosmos2": {
660
+ "single-gpu": [
661
+ {
662
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_eager_matches_fa2_generate",
663
+ "commit": null,
664
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
665
+ "pr_number": null,
666
+ "author": null,
667
+ "merged_by": null,
668
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
669
+ },
670
+ {
671
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attention_2_continue_generate_with_position_ids",
672
+ "commit": null,
673
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
674
+ "pr_number": null,
675
+ "author": null,
676
+ "merged_by": null,
677
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
678
+ },
679
+ {
680
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_can_dispatch_composite_models",
681
+ "commit": null,
682
+ "status": "flaky: test fails on the current CI run (commit: 1f0b490a2c42eb129dccc69031ccb537058689c4) but passes during the check.",
683
+ "pr_number": null,
684
+ "author": null,
685
+ "merged_by": null,
686
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
687
+ },
688
+ {
689
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_fp32_ln",
690
+ "commit": null,
691
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
692
+ "pr_number": null,
693
+ "author": null,
694
+ "merged_by": null,
695
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
696
+ },
697
+ {
698
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_from_config",
699
+ "commit": null,
700
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
701
+ "pr_number": null,
702
+ "author": null,
703
+ "merged_by": null,
704
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
705
+ },
706
+ {
707
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_inference_equivalence",
708
+ "commit": null,
709
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
710
+ "pr_number": null,
711
+ "author": null,
712
+ "merged_by": null,
713
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
714
+ },
715
+ {
716
+ "test": "tests/models/kosmos2/test_modeling_kosmos2.py::Kosmos2ModelTest::test_flash_attn_2_inference_equivalence_right_padding",
717
+ "commit": null,
718
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
719
+ "pr_number": null,
720
+ "author": null,
721
+ "merged_by": null,
722
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850681258"
723
+ }
724
+ ]
725
+ },
726
+ "kosmos2_5": {
727
+ "single-gpu": [
728
+ {
729
+ "test": "tests/models/kosmos2_5/test_modeling_kosmos2_5.py::Kosmos2_5ModelTest::test_flash_attn_2_can_dispatch_composite_models",
730
+ "commit": null,
731
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
732
+ "pr_number": null,
733
+ "author": null,
734
+ "merged_by": null,
735
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682119"
736
+ }
737
+ ]
738
+ },
739
+ "kyutai_speech_to_text": {
740
+ "single-gpu": [
741
+ {
742
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_eager_matches_fa2_generate",
743
+ "commit": null,
744
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
745
+ "pr_number": null,
746
+ "author": null,
747
+ "merged_by": null,
748
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682111"
749
+ },
750
+ {
751
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_flash_attn_2_inference_equivalence",
752
+ "commit": null,
753
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
754
+ "pr_number": null,
755
+ "author": null,
756
+ "merged_by": null,
757
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682111"
758
+ },
759
+ {
760
+ "test": "tests/models/kyutai_speech_to_text/test_modeling_kyutai_speech_to_text.py::KyutaiSpeechToTextModelTest::test_flash_attn_2_inference_equivalence_right_padding",
761
+ "commit": null,
762
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
763
+ "pr_number": null,
764
+ "author": null,
765
+ "merged_by": null,
766
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682111"
767
+ }
768
+ ]
769
+ },
770
+ "lfm2": {
771
+ "single-gpu": [
772
+ {
773
+ "test": "tests/models/lfm2/test_modeling_lfm2.py::Lfm2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
774
+ "commit": null,
775
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
776
+ "pr_number": null,
777
+ "author": null,
778
+ "merged_by": null,
779
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682140"
780
+ },
781
+ {
782
+ "test": "tests/models/lfm2/test_modeling_lfm2.py::Lfm2ModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
783
+ "commit": null,
784
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
785
+ "pr_number": null,
786
+ "author": null,
787
+ "merged_by": null,
788
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682140"
789
+ }
790
+ ]
791
+ },
792
+ "lfm2_moe": {
793
+ "single-gpu": [
794
+ {
795
+ "test": "tests/models/lfm2_moe/test_modeling_lfm2_moe.py::Lfm2MoeModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
796
+ "commit": null,
797
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
798
+ "pr_number": null,
799
+ "author": null,
800
+ "merged_by": null,
801
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682155"
802
+ },
803
+ {
804
+ "test": "tests/models/lfm2_moe/test_modeling_lfm2_moe.py::Lfm2MoeModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
805
+ "commit": null,
806
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
807
+ "pr_number": null,
808
+ "author": null,
809
+ "merged_by": null,
810
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682155"
811
+ }
812
+ ]
813
+ },
814
+ "lfm2_vl": {
815
+ "single-gpu": [
816
+ {
817
+ "test": "tests/models/lfm2_vl/test_modeling_lfm2_vl.py::Lfm2VlModelTest::test_flash_attn_2_inference_equivalence",
818
+ "commit": null,
819
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
820
+ "pr_number": null,
821
+ "author": null,
822
+ "merged_by": null,
823
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682785"
824
+ },
825
+ {
826
+ "test": "tests/models/lfm2_vl/test_modeling_lfm2_vl.py::Lfm2VlModelTest::test_flash_attn_2_inference_equivalence_right_padding",
827
+ "commit": null,
828
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
829
+ "pr_number": null,
830
+ "author": null,
831
+ "merged_by": null,
832
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682785"
833
+ }
834
+ ]
835
+ },
836
+ "llava_next": {
837
+ "single-gpu": [
838
+ {
839
+ "test": "tests/models/llava_next/test_modeling_llava_next.py::LlavaNextForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
840
+ "commit": null,
841
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
842
+ "pr_number": null,
843
+ "author": null,
844
+ "merged_by": null,
845
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682805"
846
+ },
847
+ {
848
+ "test": "tests/models/llava_next/test_modeling_llava_next.py::LlavaNextForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
849
+ "commit": null,
850
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
851
+ "pr_number": null,
852
+ "author": null,
853
+ "merged_by": null,
854
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682805"
855
+ }
856
+ ]
857
+ },
858
+ "llava_next_video": {
859
+ "single-gpu": [
860
+ {
861
+ "test": "tests/models/llava_next_video/test_modeling_llava_next_video.py::LlavaNextVideoForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
862
+ "commit": null,
863
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
864
+ "pr_number": null,
865
+ "author": null,
866
+ "merged_by": null,
867
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682799"
868
+ },
869
+ {
870
+ "test": "tests/models/llava_next_video/test_modeling_llava_next_video.py::LlavaNextVideoForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
871
+ "commit": null,
872
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
873
+ "pr_number": null,
874
+ "author": null,
875
+ "merged_by": null,
876
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682799"
877
+ }
878
+ ]
879
+ },
880
+ "llava_onevision": {
881
+ "single-gpu": [
882
+ {
883
+ "test": "tests/models/llava_onevision/test_modeling_llava_onevision.py::LlavaOnevisionForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
884
+ "commit": null,
885
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
886
+ "pr_number": null,
887
+ "author": null,
888
+ "merged_by": null,
889
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682814"
890
+ },
891
+ {
892
+ "test": "tests/models/llava_onevision/test_modeling_llava_onevision.py::LlavaOnevisionForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
893
+ "commit": null,
894
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
895
+ "pr_number": null,
896
+ "author": null,
897
+ "merged_by": null,
898
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850682814"
899
+ }
900
+ ]
901
+ },
902
+ "mask2former": {
903
+ "single-gpu": [
904
+ {
905
+ "test": "tests/models/mask2former/test_modeling_mask2former.py::Mask2FormerModelTest::test_flash_attn_2_inference_equivalence",
906
+ "commit": null,
907
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
908
+ "pr_number": null,
909
+ "author": null,
910
+ "merged_by": null,
911
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850683382"
912
+ },
913
+ {
914
+ "test": "tests/models/mask2former/test_modeling_mask2former.py::Mask2FormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
915
+ "commit": null,
916
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
917
+ "pr_number": null,
918
+ "author": null,
919
+ "merged_by": null,
920
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850683382"
921
+ }
922
+ ]
923
+ },
924
+ "maskformer": {
925
+ "single-gpu": [
926
+ {
927
+ "test": "tests/models/maskformer/test_modeling_maskformer.py::MaskFormerModelTest::test_flash_attn_2_inference_equivalence",
928
+ "commit": null,
929
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
930
+ "pr_number": null,
931
+ "author": null,
932
+ "merged_by": null,
933
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850684017"
934
+ },
935
+ {
936
+ "test": "tests/models/maskformer/test_modeling_maskformer.py::MaskFormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
937
+ "commit": null,
938
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
939
+ "pr_number": null,
940
+ "author": null,
941
+ "merged_by": null,
942
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850684017"
943
+ },
944
+ {
945
+ "test": "tests/models/maskformer/test_modeling_maskformer_swin.py::MaskFormerSwinModelTest::test_flash_attn_2_inference_equivalence",
946
+ "commit": null,
947
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
948
+ "pr_number": null,
949
+ "author": null,
950
+ "merged_by": null,
951
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850684017"
952
+ },
953
+ {
954
+ "test": "tests/models/maskformer/test_modeling_maskformer_swin.py::MaskFormerSwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
955
+ "commit": null,
956
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
957
+ "pr_number": null,
958
+ "author": null,
959
+ "merged_by": null,
960
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850684017"
961
+ }
962
+ ]
963
+ },
964
+ "mllama": {
965
+ "single-gpu": [
966
+ {
967
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_eager_matches_fa2_generate",
968
+ "commit": null,
969
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
970
+ "pr_number": null,
971
+ "author": null,
972
+ "merged_by": null,
973
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663778"
974
+ },
975
+ {
976
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
977
+ "commit": null,
978
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
979
+ "pr_number": null,
980
+ "author": null,
981
+ "merged_by": null,
982
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663778"
983
+ },
984
+ {
985
+ "test": "tests/models/mllama/test_modeling_mllama.py::MllamaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
986
+ "commit": null,
987
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
988
+ "pr_number": null,
989
+ "author": null,
990
+ "merged_by": null,
991
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663778"
992
+ }
993
+ ]
994
+ },
995
+ "mm_grounding_dino": {
996
+ "single-gpu": [
997
+ {
998
+ "test": "tests/models/mm_grounding_dino/test_modeling_mm_grounding_dino.py::MMGroundingDinoModelTest::test_flash_attn_2_inference_equivalence",
999
+ "commit": null,
1000
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1001
+ "pr_number": null,
1002
+ "author": null,
1003
+ "merged_by": null,
1004
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663824"
1005
+ },
1006
+ {
1007
+ "test": "tests/models/mm_grounding_dino/test_modeling_mm_grounding_dino.py::MMGroundingDinoModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1008
+ "commit": null,
1009
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1010
+ "pr_number": null,
1011
+ "author": null,
1012
+ "merged_by": null,
1013
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850663824"
1014
+ }
1015
+ ]
1016
+ },
1017
+ "modernbert": {
1018
+ "single-gpu": [
1019
+ {
1020
+ "test": "tests/models/modernbert/test_modeling_modernbert.py::ModernBertModelTest::test_flash_attn_2_inference_equivalence",
1021
+ "commit": null,
1022
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1023
+ "pr_number": null,
1024
+ "author": null,
1025
+ "merged_by": null,
1026
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664269"
1027
+ }
1028
+ ]
1029
+ },
1030
+ "moshi": {
1031
+ "single-gpu": [
1032
+ {
1033
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiDecoderTest::test_flash_attn_2_inference_equivalence",
1034
+ "commit": null,
1035
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1036
+ "pr_number": null,
1037
+ "author": null,
1038
+ "merged_by": null,
1039
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1040
+ },
1041
+ {
1042
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiDecoderTest::test_flash_attn_2_inference_equivalence_right_padding",
1043
+ "commit": null,
1044
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1045
+ "pr_number": null,
1046
+ "author": null,
1047
+ "merged_by": null,
1048
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1049
+ },
1050
+ {
1051
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_eager_matches_fa2_generate",
1052
+ "commit": null,
1053
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1054
+ "pr_number": null,
1055
+ "author": null,
1056
+ "merged_by": null,
1057
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1058
+ },
1059
+ {
1060
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_fp32_ln",
1061
+ "commit": null,
1062
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1063
+ "pr_number": null,
1064
+ "author": null,
1065
+ "merged_by": null,
1066
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1067
+ },
1068
+ {
1069
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_from_config",
1070
+ "commit": null,
1071
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1072
+ "pr_number": null,
1073
+ "author": null,
1074
+ "merged_by": null,
1075
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1076
+ },
1077
+ {
1078
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_inference_equivalence",
1079
+ "commit": null,
1080
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1081
+ "pr_number": null,
1082
+ "author": null,
1083
+ "merged_by": null,
1084
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1085
+ },
1086
+ {
1087
+ "test": "tests/models/moshi/test_modeling_moshi.py::MoshiTest::test_flash_attn_2_inference_equivalence_right_padding",
1088
+ "commit": null,
1089
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1090
+ "pr_number": null,
1091
+ "author": null,
1092
+ "merged_by": null,
1093
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664432"
1094
+ }
1095
+ ]
1096
+ },
1097
+ "nemotron": {
1098
+ "single-gpu": [
1099
+ {
1100
+ "test": "tests/models/nemotron/test_modeling_nemotron.py::NemotronModelTest::test_flash_attn_2_equivalence",
1101
+ "commit": null,
1102
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1103
+ "pr_number": null,
1104
+ "author": null,
1105
+ "merged_by": null,
1106
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664938"
1107
+ }
1108
+ ]
1109
+ },
1110
+ "olmo": {
1111
+ "single-gpu": [
1112
+ {
1113
+ "test": "tests/models/olmo/test_modeling_olmo.py::OlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1114
+ "commit": null,
1115
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1116
+ "pr_number": null,
1117
+ "author": null,
1118
+ "merged_by": null,
1119
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664957"
1120
+ }
1121
+ ]
1122
+ },
1123
+ "olmo2": {
1124
+ "single-gpu": [
1125
+ {
1126
+ "test": "tests/models/olmo2/test_modeling_olmo2.py::Olmo2ModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1127
+ "commit": null,
1128
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1129
+ "pr_number": null,
1130
+ "author": null,
1131
+ "merged_by": null,
1132
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850664956"
1133
+ }
1134
+ ]
1135
+ },
1136
+ "omdet_turbo": {
1137
+ "single-gpu": [
1138
+ {
1139
+ "test": "tests/models/omdet_turbo/test_modeling_omdet_turbo.py::OmDetTurboModelTest::test_flash_attn_2_inference_equivalence",
1140
+ "commit": null,
1141
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1142
+ "pr_number": null,
1143
+ "author": null,
1144
+ "merged_by": null,
1145
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665443"
1146
+ },
1147
+ {
1148
+ "test": "tests/models/omdet_turbo/test_modeling_omdet_turbo.py::OmDetTurboModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1149
+ "commit": null,
1150
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1151
+ "pr_number": null,
1152
+ "author": null,
1153
+ "merged_by": null,
1154
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665443"
1155
+ }
1156
+ ]
1157
+ },
1158
+ "oneformer": {
1159
+ "single-gpu": [
1160
+ {
1161
+ "test": "tests/models/oneformer/test_modeling_oneformer.py::OneFormerModelTest::test_flash_attn_2_inference_equivalence",
1162
+ "commit": null,
1163
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1164
+ "pr_number": null,
1165
+ "author": null,
1166
+ "merged_by": null,
1167
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665480"
1168
+ },
1169
+ {
1170
+ "test": "tests/models/oneformer/test_modeling_oneformer.py::OneFormerModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1171
+ "commit": null,
1172
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1173
+ "pr_number": null,
1174
+ "author": null,
1175
+ "merged_by": null,
1176
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665480"
1177
+ }
1178
+ ]
1179
+ },
1180
+ "paligemma": {
1181
+ "single-gpu": [
1182
+ {
1183
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_from_config",
1184
+ "commit": null,
1185
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1186
+ "pr_number": null,
1187
+ "author": null,
1188
+ "merged_by": null,
1189
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665455"
1190
+ },
1191
+ {
1192
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
1193
+ "commit": null,
1194
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1195
+ "pr_number": null,
1196
+ "author": null,
1197
+ "merged_by": null,
1198
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665455"
1199
+ },
1200
+ {
1201
+ "test": "tests/models/paligemma/test_modeling_paligemma.py::PaliGemmaForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1202
+ "commit": null,
1203
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1204
+ "pr_number": null,
1205
+ "author": null,
1206
+ "merged_by": null,
1207
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665455"
1208
+ }
1209
+ ]
1210
+ },
1211
+ "paligemma2": {
1212
+ "single-gpu": [
1213
+ {
1214
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_from_config",
1215
+ "commit": null,
1216
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1217
+ "pr_number": null,
1218
+ "author": null,
1219
+ "merged_by": null,
1220
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665476"
1221
+ },
1222
+ {
1223
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence",
1224
+ "commit": null,
1225
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1226
+ "pr_number": null,
1227
+ "author": null,
1228
+ "merged_by": null,
1229
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665476"
1230
+ },
1231
+ {
1232
+ "test": "tests/models/paligemma2/test_modeling_paligemma2.py::PaliGemma2ForConditionalGenerationModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1233
+ "commit": null,
1234
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1235
+ "pr_number": null,
1236
+ "author": null,
1237
+ "merged_by": null,
1238
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665476"
1239
+ }
1240
+ ]
1241
+ },
1242
+ "pegasus_x": {
1243
+ "single-gpu": [
1244
+ {
1245
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXModelTest::test_flash_attn_2_inference_equivalence",
1246
+ "commit": null,
1247
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1248
+ "pr_number": null,
1249
+ "author": null,
1250
+ "merged_by": null,
1251
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665928"
1252
+ },
1253
+ {
1254
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1255
+ "commit": null,
1256
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1257
+ "pr_number": null,
1258
+ "author": null,
1259
+ "merged_by": null,
1260
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665928"
1261
+ },
1262
+ {
1263
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXStandaloneDecoderModelTest::test_flash_attn_2_inference_equivalence",
1264
+ "commit": null,
1265
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1266
+ "pr_number": null,
1267
+ "author": null,
1268
+ "merged_by": null,
1269
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665928"
1270
+ },
1271
+ {
1272
+ "test": "tests/models/pegasus_x/test_modeling_pegasus_x.py::PegasusXStandaloneDecoderModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1273
+ "commit": null,
1274
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1275
+ "pr_number": null,
1276
+ "author": null,
1277
+ "merged_by": null,
1278
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665928"
1279
+ }
1280
+ ]
1281
+ },
1282
+ "perception_lm": {
1283
+ "single-gpu": [
1284
+ {
1285
+ "test": "tests/models/perception_lm/test_modeling_perception_lm.py::PerceptionLMForConditionalGenerationModelTest::test_flash_attention_2_continue_generate_with_position_ids",
1286
+ "commit": null,
1287
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1288
+ "pr_number": null,
1289
+ "author": null,
1290
+ "merged_by": null,
1291
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665955"
1292
+ }
1293
+ ]
1294
+ },
1295
+ "phi": {
1296
+ "single-gpu": [
1297
+ {
1298
+ "test": "tests/models/phi/test_modeling_phi.py::PhiModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1299
+ "commit": null,
1300
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1301
+ "pr_number": null,
1302
+ "author": null,
1303
+ "merged_by": null,
1304
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850665990"
1305
+ }
1306
+ ]
1307
+ },
1308
+ "phimoe": {
1309
+ "single-gpu": [
1310
+ {
1311
+ "test": "tests/models/phimoe/test_modeling_phimoe.py::PhimoeModelTest::test_flash_attn_2_equivalence",
1312
+ "commit": null,
1313
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1314
+ "pr_number": null,
1315
+ "author": null,
1316
+ "merged_by": null,
1317
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666289"
1318
+ }
1319
+ ]
1320
+ },
1321
+ "pixtral": {
1322
+ "single-gpu": [
1323
+ {
1324
+ "test": "tests/models/pixtral/test_modeling_pixtral.py::PixtralVisionModelModelTest::test_flash_attn_2_inference_equivalence",
1325
+ "commit": null,
1326
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1327
+ "pr_number": null,
1328
+ "author": null,
1329
+ "merged_by": null,
1330
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666312"
1331
+ },
1332
+ {
1333
+ "test": "tests/models/pixtral/test_modeling_pixtral.py::PixtralVisionModelModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1334
+ "commit": null,
1335
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1336
+ "pr_number": null,
1337
+ "author": null,
1338
+ "merged_by": null,
1339
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666312"
1340
+ }
1341
+ ]
1342
+ },
1343
+ "qwen2_5_vl": {
1344
+ "single-gpu": [
1345
+ {
1346
+ "test": "tests/models/qwen2_5_vl/test_modeling_qwen2_5_vl.py::Qwen2_5_VLIntegrationTest::test_small_model_integration_test_batch_wo_image_flashatt2",
1347
+ "commit": null,
1348
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1349
+ "pr_number": null,
1350
+ "author": null,
1351
+ "merged_by": null,
1352
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850666833"
1353
+ }
1354
+ ]
1355
+ },
1356
+ "qwen3_omni_moe": {
1357
+ "single-gpu": [
1358
+ {
1359
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniThinkerForConditionalGenerationModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids",
1360
+ "commit": null,
1361
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1362
+ "pr_number": null,
1363
+ "author": null,
1364
+ "merged_by": null,
1365
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667462"
1366
+ },
1367
+ {
1368
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniThinkerForConditionalGenerationModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs",
1369
+ "commit": null,
1370
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1371
+ "pr_number": null,
1372
+ "author": null,
1373
+ "merged_by": null,
1374
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667462"
1375
+ },
1376
+ {
1377
+ "test": "tests/models/qwen3_omni_moe/test_modeling_qwen3_omni_moe.py::Qwen2_5OmniModelIntegrationTest::test_small_model_integration_test_batch_flashatt2",
1378
+ "commit": null,
1379
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1380
+ "pr_number": null,
1381
+ "author": null,
1382
+ "merged_by": null,
1383
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850667462"
1384
+ }
1385
+ ]
1386
+ },
1387
+ "roberta_prelayernorm": {
1388
+ "single-gpu": [
1389
+ {
1390
+ "test": "tests/models/roberta_prelayernorm/test_modeling_roberta_prelayernorm.py::RobertaPreLayerNormModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1391
+ "commit": null,
1392
+ "status": "flaky: test fails on the current CI run (commit: 1f0b490a2c42eb129dccc69031ccb537058689c4) but passes during the check.",
1393
+ "pr_number": null,
1394
+ "author": null,
1395
+ "merged_by": null,
1396
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850668946"
1397
+ }
1398
+ ]
1399
+ },
1400
+ "sam2": {
1401
+ "single-gpu": [
1402
+ {
1403
+ "test": "tests/models/sam2/test_modeling_sam2.py::Sam2ModelTest::test_flash_attn_2_can_dispatch_composite_models",
1404
+ "commit": null,
1405
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1406
+ "pr_number": null,
1407
+ "author": null,
1408
+ "merged_by": null,
1409
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850668963"
1410
+ }
1411
+ ]
1412
+ },
1413
+ "smollm3": {
1414
+ "single-gpu": [
1415
+ {
1416
+ "test": "tests/models/smollm3/test_modeling_smollm3.py::SmolLM3IntegrationTest::test_model_3b_long_prompt",
1417
+ "commit": null,
1418
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1419
+ "pr_number": null,
1420
+ "author": null,
1421
+ "merged_by": null,
1422
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672670"
1423
+ }
1424
+ ]
1425
+ },
1426
+ "squeezebert": {
1427
+ "single-gpu": [
1428
+ {
1429
+ "test": "tests/models/squeezebert/test_modeling_squeezebert.py::SqueezeBertModelTest::test_flash_attn_2_inference_equivalence",
1430
+ "commit": null,
1431
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1432
+ "pr_number": null,
1433
+ "author": null,
1434
+ "merged_by": null,
1435
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672666"
1436
+ },
1437
+ {
1438
+ "test": "tests/models/squeezebert/test_modeling_squeezebert.py::SqueezeBertModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1439
+ "commit": null,
1440
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1441
+ "pr_number": null,
1442
+ "author": null,
1443
+ "merged_by": null,
1444
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672666"
1445
+ }
1446
+ ]
1447
+ },
1448
+ "starcoder2": {
1449
+ "single-gpu": [
1450
+ {
1451
+ "test": "tests/models/starcoder2/test_modeling_starcoder2.py::Starcoder2IntegrationTest::test_starcoder2_batched_generation_fa2",
1452
+ "commit": null,
1453
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1454
+ "pr_number": null,
1455
+ "author": null,
1456
+ "merged_by": null,
1457
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850672703"
1458
+ }
1459
+ ]
1460
+ },
1461
+ "swin": {
1462
+ "single-gpu": [
1463
+ {
1464
+ "test": "tests/models/swin/test_modeling_swin.py::SwinModelTest::test_flash_attn_2_inference_equivalence",
1465
+ "commit": null,
1466
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1467
+ "pr_number": null,
1468
+ "author": null,
1469
+ "merged_by": null,
1470
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674554"
1471
+ },
1472
+ {
1473
+ "test": "tests/models/swin/test_modeling_swin.py::SwinModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1474
+ "commit": null,
1475
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1476
+ "pr_number": null,
1477
+ "author": null,
1478
+ "merged_by": null,
1479
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674554"
1480
+ }
1481
+ ]
1482
+ },
1483
+ "swin2sr": {
1484
+ "single-gpu": [
1485
+ {
1486
+ "test": "tests/models/swin2sr/test_modeling_swin2sr.py::Swin2SRModelTest::test_flash_attn_2_inference_equivalence",
1487
+ "commit": null,
1488
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1489
+ "pr_number": null,
1490
+ "author": null,
1491
+ "merged_by": null,
1492
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674545"
1493
+ },
1494
+ {
1495
+ "test": "tests/models/swin2sr/test_modeling_swin2sr.py::Swin2SRModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1496
+ "commit": null,
1497
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1498
+ "pr_number": null,
1499
+ "author": null,
1500
+ "merged_by": null,
1501
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674545"
1502
+ }
1503
+ ]
1504
+ },
1505
+ "swinv2": {
1506
+ "single-gpu": [
1507
+ {
1508
+ "test": "tests/models/swinv2/test_modeling_swinv2.py::Swinv2ModelTest::test_flash_attn_2_inference_equivalence",
1509
+ "commit": null,
1510
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1511
+ "pr_number": null,
1512
+ "author": null,
1513
+ "merged_by": null,
1514
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674592"
1515
+ },
1516
+ {
1517
+ "test": "tests/models/swinv2/test_modeling_swinv2.py::Swinv2ModelTest::test_flash_attn_2_inference_equivalence_right_padding",
1518
+ "commit": null,
1519
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1520
+ "pr_number": null,
1521
+ "author": null,
1522
+ "merged_by": null,
1523
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674592"
1524
+ }
1525
+ ]
1526
+ },
1527
+ "t5gemma": {
1528
+ "single-gpu": [
1529
+ {
1530
+ "test": "tests/models/t5gemma/test_modeling_t5gemma.py::T5GemmaModelTest::test_flash_attn_2_can_compile_with_attention_mask_None_without_graph_break",
1531
+ "commit": null,
1532
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1533
+ "pr_number": null,
1534
+ "author": null,
1535
+ "merged_by": null,
1536
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850674560"
1537
+ }
1538
+ ]
1539
+ },
1540
+ "zamba": {
1541
+ "single-gpu": [
1542
+ {
1543
+ "test": "tests/models/zamba/test_modeling_zamba.py::ZambaModelTest::test_flash_attn_2_fp32_ln",
1544
+ "commit": null,
1545
+ "status": "flaky: test passed in the previous run (commit: 77e8b9f8dfc8e736ad2f603a5b2ae2b1076ed271) but failed (on the same commit) during the check of the current run.",
1546
+ "pr_number": null,
1547
+ "author": null,
1548
+ "merged_by": null,
1549
+ "job_link": "https://github.com/huggingface/transformers/actions/runs/18862420125/job/53850683161"
1550
+ }
1551
+ ]
1552
+ }
1553
+ }
1554
+ }
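
All records in the report above share one schema: author -> model -> device bucket (for example "single-gpu") -> list of entries with "test", "commit", "status", "pr_number", "author", "merged_by" and "job_link" fields. As a minimal sketch of how such a report could be summarised offline, assuming it has been downloaded to a local file named "report.json" (a hypothetical path, not part of the CI tooling), one could do:

```python
# Minimal sketch: count new failures per model and collect the job links of
# entries whose status is marked as flaky. Assumes the grouped-by-authors
# schema shown above; "report.json" is an assumed local path for illustration.
import json
from collections import Counter

with open("report.json", encoding="utf-8") as f:
    report = json.load(f)

per_model = Counter()
flaky_job_links = set()

for author, models in report.items():            # e.g. "null" when no author was resolved
    for model, devices in models.items():        # e.g. "moshi", "swin", "zamba", ...
        for device, records in devices.items():  # e.g. "single-gpu"
            per_model[model] += len(records)
            for record in records:
                if record.get("status", "").startswith("flaky"):
                    flaky_job_links.add(record["job_link"])

for model, count in per_model.most_common():
    print(f"{model}: {count} new failure(s)")
print(f"{len(flaky_job_links)} distinct job link(s) flagged as flaky")
```

Counter.most_common() sorts models by failure count, which makes the noisiest test suites (here, for instance, moshi with seven flaky entries) easy to spot at a glance.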