From 3530925e9014caa867fc3d1ce1d2b7d60a7d85a8 Mon Sep 17 00:00:00 2001
From: "iree-pr-automator[bot]"
Date: Mon, 25 Aug 2025 11:05:46 +0000
Subject: [PATCH 1/2] Bump IREE to 3.7.0rc20250825.

Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 requirements-iree-pinned.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements-iree-pinned.txt b/requirements-iree-pinned.txt
index d843308af..ecf4af6bf 100644
--- a/requirements-iree-pinned.txt
+++ b/requirements-iree-pinned.txt
@@ -7,5 +7,5 @@
 # Uncomment to skip versions from PyPI (so _only_ nightly versions).
 # --no-index
 
-iree-base-compiler==3.7.0rc20250822
-iree-base-runtime==3.7.0rc20250822
+iree-base-compiler==3.7.0rc20250825
+iree-base-runtime==3.7.0rc20250825

From 9999491a6b2e1d0c6d04e80683ea09aa1ab7796f Mon Sep 17 00:00:00 2001
From: zjgarvey
Date: Tue, 9 Sep 2025 08:52:18 -0700
Subject: [PATCH 2/2] update to 3.8.0rc20250909

Signed-off-by: zjgarvey
---
 requirements-iree-pinned.txt                      | 4 ++--
 tests/kernel/boo/modeling/boo_conv_module_test.py | 3 ---
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/requirements-iree-pinned.txt b/requirements-iree-pinned.txt
index ecf4af6bf..949ae9692 100644
--- a/requirements-iree-pinned.txt
+++ b/requirements-iree-pinned.txt
@@ -7,5 +7,5 @@
 # Uncomment to skip versions from PyPI (so _only_ nightly versions).
 # --no-index
 
-iree-base-compiler==3.7.0rc20250825
-iree-base-runtime==3.7.0rc20250825
+iree-base-compiler==3.8.0rc20250909
+iree-base-runtime==3.8.0rc20250909

diff --git a/tests/kernel/boo/modeling/boo_conv_module_test.py b/tests/kernel/boo/modeling/boo_conv_module_test.py
index 1925b4540..ce6364c3a 100644
--- a/tests/kernel/boo/modeling/boo_conv_module_test.py
+++ b/tests/kernel/boo/modeling/boo_conv_module_test.py
@@ -137,9 +137,6 @@ def testNoBatch(self, boo_cache_dir: Path):
             in [i.name for i in boo_cache_dir.glob("*")]
         )
 
-    @pytest.mark.xfail(
-        condition=torch.cuda.is_available(), reason="Compilation failure on GPU."
-    )
     def testReplacement(self, boo_cache_dir: Path):
         x = torch.ones([10, 3, 16, 16], device=self.device, dtype=torch.float32)
         model2 = replace_conv2d_with_boo_conv(self.model1)