Skip to content

Commit cb87e85

Browse files
authored
use 4.57.6 in CI (#386)
* use transformers 4.57.6 in CI * add missing pad_token_id * fix unit test
1 parent f46a780 commit cb87e85

5 files changed

Lines changed: 20 additions & 17 deletions

File tree

.github/workflows/ci.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
matrix:
1818
os: [ubuntu-latest]
1919
python: ['3.10', '3.11', '3.12', '3.13']
20-
transformers: ['4.48.3', '4.51.3', '4.52.4', '4.55.4', '4.56.2', '4.57.3', 'main']
20+
transformers: ['4.48.3', '4.51.3', '4.52.4', '4.55.4', '4.56.2', '4.57.6', 'main']
2121
torch: ['2.9', 'main']
2222
exclude:
2323
- python: '3.10' # 3.10
@@ -33,7 +33,7 @@ jobs:
3333
- python: '3.10'
3434
transformers: '4.56.2'
3535
- python: '3.10'
36-
transformers: '4.57.3'
36+
transformers: '4.57.6'
3737
- python: '3.11' # 3.11
3838
torch: 'main'
3939
- python: '3.11'
@@ -43,7 +43,7 @@ jobs:
4343
- python: '3.11'
4444
transformers: '4.56.2'
4545
- python: '3.11'
46-
transformers: '4.57.3'
46+
transformers: '4.57.6'
4747
- python: '3.13' # 3.13
4848
torch: '2.9'
4949
- python: '3.13'

.github/workflows/models.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
matrix:
1818
os: [ubuntu-latest]
1919
python: ['3.12']
20-
transformers: ['4.57.3']
20+
transformers: ['4.57.6']
2121
torch: ['main']
2222
steps:
2323
- uses: actions/checkout@v3

onnx_diagnostic/torch_export_patches/patches/patch_torch.py

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -524,13 +524,16 @@ def compute_concrete_val() -> sympy.Basic:
524524

525525
transmute_into_runtime_assert = False
526526

527+
backed_var_to_val = getattr(
528+
self, "backed_var_to_val", getattr(self, "var_to_val", {})
529+
)
527530
concrete_val = None
528-
if not (expr.free_symbols <= self.var_to_val.keys()):
531+
if not (expr.free_symbols <= backed_var_to_val.keys()):
529532
# TODO: dedupe this with _maybe_evaluate_static
530533
# Attempt to eliminate the unbacked SymInt
531534
new_expr = self._maybe_evaluate_static(expr, unbacked_only=True)
532535
assert new_expr is not None
533-
if not (new_expr.free_symbols <= self.var_to_val.keys()):
536+
if not (new_expr.free_symbols <= backed_var_to_val.keys()):
534537
ok = False
535538

536539
# fallback_value is set when guard_or_true or guard_or_false are used.
@@ -541,17 +544,15 @@ def compute_concrete_val() -> sympy.Basic:
541544
# oblivious_var_to_val will be defined iff we have sizes
542545
# with DimDynamic.OBLIVIOUS_SIZE type.
543546
# See https://github.com/pytorch/pytorch/issues/137100#issuecomment-2495778113
544-
var_to_val = getattr(
545-
self,
546-
"unbacked_var_to_val",
547-
getattr(self, "oblivious_var_to_val", False),
548-
)
549547
if (
550-
var_to_val
551-
and not (correct_hint := orig_expr.xreplace(var_to_val)).free_symbols
548+
backed_var_to_val
549+
and getattr(self, "real_tensor_prop_unbacked_vals", True)
550+
and not (
551+
correct_hint := orig_expr.xreplace(backed_var_to_val)
552+
).free_symbols
552553
and not (
553554
counterfactual_hint := orig_expr.xreplace(
554-
{k: max(2, v) for k, v in var_to_val.items()}
555+
{k: max(2, v) for k, v in backed_var_to_val.items()}
555556
)
556557
).free_symbols
557558
and correct_hint == counterfactual_hint
@@ -574,10 +575,10 @@ def compute_concrete_val() -> sympy.Basic:
574575
# and if they pass we add a runtime assertions and continue.
575576
if (
576577
not ok
577-
and var_to_val
578+
and backed_var_to_val
578579
and not (
579-
unsound_result := orig_expr.xreplace(var_to_val).xreplace(
580-
var_to_val
580+
unsound_result := orig_expr.xreplace(backed_var_to_val).xreplace(
581+
backed_var_to_val
581582
)
582583
).free_symbols
583584
):

onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,7 @@ def _ccached_microsoft_phi2():
143143
"transformers_version": "4.51.0.dev0",
144144
"use_cache": true,
145145
"vocab_size": 51200,
146+
"pad_token_id": 0,
146147
}
147148
)
148149

onnx_diagnostic/torch_models/untrained/llm_phi2.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@ def get_phi2(
5353
"transformers_version": "4.37.0",
5454
"use_cache": True,
5555
"vocab_size": 51200,
56+
"pad_token_id": 0,
5657
}
5758
config.update(**kwargs)
5859
conf = transformers.PhiConfig(**config)

0 commit comments

Comments (0)