Skip to content

Commit fcd04f3

Browse files
committed
update ci
1 parent 300184e commit fcd04f3

4 files changed

Lines changed: 98 additions & 18 deletions

File tree

.github/workflows/ci.yml

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ jobs:
1818
os: [ubuntu-latest]
1919
python: ['3.10', '3.11', '3.12', '3.13']
2020
transformers: ['4.48.3', '4.51.3', '4.52.4', '4.55.4', '4.56.2', '4.57.1', 'main']
21-
torch: ['2.8', '2.9', 'main']
21+
torch: ['2.9', 'main']
2222
exclude:
2323
- python: '3.10' # 3.10
2424
torch: 'main'
@@ -36,8 +36,6 @@ jobs:
3636
transformers: '4.57.1'
3737
- python: '3.11' # 3.11
3838
torch: 'main'
39-
- python: '3.11'
40-
torch: '2.8'
4139
- python: '3.11'
4240
transformers: 'main'
4341
- python: '3.11'
@@ -46,9 +44,7 @@ jobs:
4644
transformers: '4.56.2'
4745
- python: '3.11'
4846
transformers: '4.57.1'
49-
- python: '3.13' # 3.13
50-
torch: '2.8'
51-
- python: '3.13'
47+
- python: '3.13' # 3.13
5248
torch: '2.9'
5349
- python: '3.13'
5450
transformers: '4.48.3'

.github/workflows/model.yml

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
name: ci
2+
3+
on:
4+
push:
5+
pull_request:
6+
types:
7+
- closed
8+
branches:
9+
- main
10+
11+
jobs:
12+
run:
13+
name: MODEL to-${{ matrix.torch }}-tr-${{ matrix.transformers }}-ci ${{ matrix.os }}-${{ matrix.python }}
14+
runs-on: ${{ matrix.os }}
15+
strategy:
16+
fail-fast: false
17+
matrix:
18+
os: [ubuntu-latest]
19+
python: ['3.12']
20+
transformers: ['main']
21+
torch: ['main']
22+
steps:
23+
- uses: actions/checkout@v3
24+
25+
- uses: actions/setup-python@v4
26+
with:
27+
python-version: ${{ matrix.python }}
28+
29+
- name: Install pytorch ${{ matrix.torch }}
30+
run: |
31+
if [[ "${{ matrix.torch }}" == "main" ]]; then
32+
python -m pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cpu
33+
else
34+
echo "install torch==${{ matrix.torch }} torchvision torchaudio"
35+
pip install torch==${{ matrix.torch }} torchvision torchaudio
36+
fi
37+
38+
- name: Install transformers ${{ matrix.transformers }}
39+
run: |
40+
if [[ "${{ matrix.transformers }}" == "main" ]]; then
41+
echo "install transformers from github"
42+
git clone https://github.com/huggingface/transformers.git
43+
cd transformers
44+
pip install -e .
45+
cd ..
46+
else
47+
echo "install transformers==${{ matrix.transformers }}"
48+
pip install transformers==${{ matrix.transformers }}
49+
fi
50+
51+
- name: Install requirements
52+
run: python -m pip install -r requirements.txt
53+
54+
- name: Install requirements dev
55+
run: python -m pip install -r requirements-dev.txt
56+
57+
- name: Uninstall onnx-diagnostic
58+
run: python -m pip uninstall -y onnx-diagnostic
59+
60+
- name: Cache pip
61+
uses: actions/cache@v4
62+
with:
63+
path: ~/.cache/pip
64+
key: ${{ runner.os }}-pip-${{ hashFiles('requirements-dev.txt') }}
65+
restore-keys: |
66+
${{ runner.os }}-pip-
67+
${{ runner.os }}-
68+
69+
- name: pip freeze
70+
run: python -m pip freeze
71+
72+
- name: qwen2.5
73+
run: PYTHONPATH=. UNITTEST_GOING=1 NEVERTEST=1 TESTDTYPE=float16 TESTDEVICE=cpu python _unittests/ut_tasks/try_export.py -f -k test_imagetext2text_qwen_2_5_vl_instruct_visual
74+

_unittests/ut_tasks/try_export.py

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -37,13 +37,12 @@ def test_imagetext2text_qwen_2_5_vl_instruct_visual(self):
3737
3838
.. code-block:: bash
3939
40+
NEVERTEST=1 \\
4041
QWEN25ATTENTION=BIGMASK \\
4142
PRETRAINED=1 \\
4243
TESTDEVICE=cuda \\
4344
TESTDTYPE=float16 \\
4445
EXPORTER=custom \\
45-
NEVERTEST=1 \\
46-
DROPPATTERN=SkipSimplifiedLayerNormalizationMulPattern,SkipSimplifiedLayerNormalizationPattern \\
4746
python _unittests/ut_tasks/try_export.py -k qwen_2_5_vl_instruct_visual
4847
"""
4948
device = os.environ.get("TESTDEVICE", "cpu")
@@ -111,7 +110,8 @@ def _config_reduction(config, task):
111110

112111
print(f"-- inputs: {self.string_type(inputs, with_shape=True)}")
113112
# this is too long
114-
expected = model.visual(**inputs)
113+
model_to_export = model.visual if hasattr(model, "visual") else model.model.visual
114+
expected = model_to_export(**inputs)
115115
print(f"-- expected: {self.string_type(expected, with_shape=True)}")
116116

117117
filename = self.get_dump_file(
@@ -136,13 +136,13 @@ def _config_reduction(config, task):
136136
stop_if_static=2,
137137
):
138138
to_onnx(
139-
model.visual,
139+
model_to_export,
140140
kwargs=export_inputs,
141141
dynamic_shapes=dynamic_shapes,
142142
filename=filename,
143143
exporter=exporter,
144144
verbose=1,
145-
save_ep=(fileep, 2**35),
145+
save_ep=None if self.unit_test_going() else (fileep, 2**35),
146146
target_opset=22,
147147
optimize=True,
148148
onnx_plugs=PLUGS,
@@ -159,7 +159,7 @@ def _config_reduction(config, task):
159159
self.assert_onnx_disc(
160160
f"test_imagetext2text_qwen_2_5_vl_instruct_visual.{device}.{dtype}.{exporter}",
161161
filename,
162-
model.visual,
162+
model_to_export,
163163
export_inputs,
164164
verbose=1,
165165
providers=(
@@ -171,8 +171,11 @@ def _config_reduction(config, task):
171171
atol=0.02,
172172
rtol=10,
173173
ort_optimized_graph=False,
174-
ep=pt2_file,
174+
# ep=pt2_file,
175+
expected=expected,
175176
)
177+
if self.unit_test_going():
178+
self.clean_dump()
176179

177180

178181
if __name__ == "__main__":

onnx_diagnostic/ext_test_case.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def skipif_ci_apple(msg) -> Callable:
6464
return lambda x: x
6565

6666

67-
def unit_test_going():
67+
def unit_test_going() -> bool:
6868
"""
6969
Enables a flag telling the script is running while testing it.
7070
Avoids unit tests being very long.
@@ -743,6 +743,13 @@ class ExtTestCase(unittest.TestCase):
743743
_warns: List[Tuple[str, int, Warning]] = []
744744
_todos: List[Tuple[Callable, str]] = []
745745

746+
def unit_test_going(self):
747+
"""
748+
Enables a flag telling the script is running while testing it.
749+
Avoids unit tests being very long.
750+
"""
751+
return unit_test_going()
752+
746753
@property
747754
def verbose(self):
748755
"Returns the the value of environment variable ``VERBOSE``."
@@ -1238,7 +1245,7 @@ def assert_onnx_disc(
12381245
if isinstance(proto, str):
12391246
name = proto
12401247
proto = onnx.load(name)
1241-
else:
1248+
elif not self.unit_test_going():
12421249
assert isinstance(
12431250
proto, onnx.ModelProto
12441251
), f"Unexpected type {type(proto)} for proto"
@@ -1309,7 +1316,7 @@ def assert_onnx_disc(
13091316
)
13101317
if verbose:
13111318
print(f"[{vname}] ep_expected {string_type(ep_expected, **kws)}")
1312-
ep_diff = max_diff(expected, ep_expected)
1319+
ep_diff = max_diff(expected, ep_expected, hist=[0.1, 0.01])
13131320
if verbose:
13141321
print(f"[{vname}] ep_diff {string_diff(ep_diff)}")
13151322
assert (
@@ -1323,11 +1330,11 @@ def assert_onnx_disc(
13231330
f"discrepancies in {test_name!r} between the model "
13241331
f"and the exported model diff={string_diff(ep_diff)}"
13251332
)
1326-
ep_nx_diff = max_diff(ep_expected, got, flatten=True)
1333+
ep_nx_diff = max_diff(ep_expected, got, flatten=True, hist=[0.1, 0.01])
13271334
if verbose:
13281335
print(f"[{vname}] ep_nx_diff {string_diff(ep_nx_diff)}")
13291336

1330-
diff = max_diff(expected, got, flatten=True)
1337+
diff = max_diff(expected, got, flatten=True, hist=[0.1, 0.01])
13311338
if verbose:
13321339
print(f"[{vname}] diff {string_diff(diff)}")
13331340
assert (

0 commit comments

Comments
 (0)