diff --git a/.github/workflows/buildAndTest.yml b/.github/workflows/buildAndTest.yml
index 8e27b2d947c..c2ac7c59d55 100644
--- a/.github/workflows/buildAndTest.yml
+++ b/.github/workflows/buildAndTest.yml
@@ -27,15 +27,12 @@ jobs:
       matrix:
         os-arch: [ubuntu-x86_64, macos-arm64, windows-x86_64]
         llvm-build: [in-tree, out-of-tree]
-        torch-binary: [ON, OFF]
+        torch-binary: [ON]
         torch-version: [nightly, stable]
         exclude:
-          # Exclude llvm in-tree and pytorch source
-          - llvm-build: in-tree
-            torch-binary: OFF
-          # Exclude llvm out-of-tree and pytorch binary
+          # Exclude llvm out-of-tree and pytorch stable (to save resources)
           - llvm-build: out-of-tree
-            torch-binary: ON
+            torch-version: stable
           # Exclude macos-arm64 and llvm out-of-tree altogether
           - os-arch: macos-arm64
             llvm-build: out-of-tree
@@ -45,9 +42,6 @@ jobs:
             llvm-build: out-of-tree
           - os-arch: windows-x86_64
             torch-version: stable
-          # For PyTorch stable builds, we don't build PyTorch from source
-          - torch-version: stable
-            torch-binary: OFF
         include:
           # Specify OS versions
           - os-arch: ubuntu-x86_64
diff --git a/.gitignore b/.gitignore
index 6b76bc3eae0..5c407428929 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,7 +26,7 @@ __pycache__
 bazel-*
 
 # Autogenerated files
-/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/generated
+/projects/ltc/csrc/base_lazy_backend/generated
 
 #Docker builds
 build_oot/
diff --git a/CMakeLists.txt b/CMakeLists.txt
index cf33ccac140..1114643a593 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -149,10 +149,12 @@ endfunction()
 # Configure CMake.
 list(APPEND CMAKE_MODULE_PATH ${MLIR_MAIN_SRC_DIR}/cmake/modules)
 list(APPEND CMAKE_MODULE_PATH ${LLVM_MAIN_SRC_DIR}/cmake)
+list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/build_tools/cmake)
 
 include(TableGen)
 include(AddLLVM)
 include(AddMLIR)
+include(AddMLIRPython)
 
 ################################################################################
 # Setup python.
@@ -231,6 +233,4 @@ endif()
 # Sub-projects
 #-------------------------------------------------------------------------------
 
-if(TORCH_MLIR_ENABLE_PROJECT_PT1)
-  add_subdirectory(projects/pt1)
-endif()
+add_subdirectory(projects)
diff --git a/build_tools/autogen_ltc_backend.py b/build_tools/autogen_ltc_backend.py
index 02ac0eff09d..40a64c1c1c2 100644
--- a/build_tools/autogen_ltc_backend.py
+++ b/build_tools/autogen_ltc_backend.py
@@ -29,7 +29,6 @@
 TORCH_INCLUDE_DIR = TORCH_DIR
 TORCHGEN_DIR = Path(torchgen.__path__[0]).resolve()
 TORCH_MLIR_DIR = Path(__file__).resolve().parent.parent
-TORCH_MLIR_PT1_DIR = TORCH_MLIR_DIR / "projects" / "pt1"
 
 def reindent(text, prefix=""):
     return indent(dedent(text), prefix)
@@ -114,12 +113,12 @@ def __init__(self, binary_dir):
         self.binary_dir = Path(binary_dir)
         assert self.binary_dir.is_dir(), f"Binary directory not found: {self.binary_dir}"
         self.source_yaml = self.binary_dir.joinpath("generated_native_functions.yaml")
-        self.backend_path = TORCH_MLIR_PT1_DIR.joinpath(
-            "python", "torch_mlir", "csrc", "base_lazy_backend"
+        self.backend_path = TORCH_MLIR_DIR.joinpath(
+            "projects", "ltc", "csrc", "base_lazy_backend"
         )
         assert self.backend_path.is_dir(), f"Backend path not found: {self.backend_path}"
         self.generated_path = self.binary_dir.joinpath(
-            "projects", "pt1", "python", "torch_mlir", "csrc", "base_lazy_backend", "generated"
+            "projects", "ltc", "csrc", "base_lazy_backend", "generated"
         )
         self.generated_path.mkdir(parents=True, exist_ok=True)
 
@@ -415,7 +414,7 @@ def extract_signatures(text):
         // for ops that dont have a corresponding structured kernel or shape definition
         #include "shape_inference.h"
-        #include "torch_mlir/csrc/base_lazy_backend/utils/exception.h"
+        #include "base_lazy_backend/utils/exception.h"
 
         namespace torch {{
         namespace lazy {{
 {}
@@ -467,7 +466,7 @@ def gen_fallback_code(*args, **kwargs):
             node_base="torch::lazy::TorchMlirNode",
             node_base_hdr=str(self.backend_path.joinpath("mlir_node.h")),
             tensor_class=self.tensor_class,
-            tensor_class_hdr="torch_mlir/csrc/base_lazy_backend/tensor.h",
+            tensor_class_hdr="base_lazy_backend/tensor.h",
             create_aten_from_ltc_tensor="CreateFunctionalizedAtenFromLtcTensor",
             shape_inference_hdr=str(self.generated_path.joinpath("shape_inference.h")),
             lazy_ir_generator=GenMlirLazyIr,
diff --git a/projects/pt1/python/torch_mlir/cmake/modules/TorchMLIRPyTorch.cmake b/build_tools/cmake/TorchMLIRPyTorch.cmake
similarity index 100%
rename from projects/pt1/python/torch_mlir/cmake/modules/TorchMLIRPyTorch.cmake
rename to build_tools/cmake/TorchMLIRPyTorch.cmake
diff --git a/build_tools/python_deploy/build_linux_packages.sh b/build_tools/python_deploy/build_linux_packages.sh
index 8b8c7b0ebbc..2a909266f43 100755
--- a/build_tools/python_deploy/build_linux_packages.sh
+++ b/build_tools/python_deploy/build_linux_packages.sh
@@ -364,9 +364,9 @@ function setup_venv() {
 function build_out_of_tree() {
   local torch_from_bin="$1"
   local python_version="$2"
-  echo ":::: Build out-of-tree Torch from binary: $torch_from_bin with Python: $python_version"
   local torch_version="$3"
+  echo ":::: Build out-of-tree Torch from binary: $torch_from_bin with Python: $python_version ($torch_version)"
 
   local enable_ltc="ON"
   if [[ "${torch_version}" == "stable" ]]
   then
diff --git a/build_tools/update_abstract_interp_lib.sh b/build_tools/update_abstract_interp_lib.sh
index d33c6953685..cb44a4e8b27 100755
--- a/build_tools/update_abstract_interp_lib.sh
+++ b/build_tools/update_abstract_interp_lib.sh
@@ -42,6 +42,6 @@ if [ ! -z ${TORCH_MLIR_EXT_MODULES} ]; then
 fi
 
 PYTHONPATH="${pypath}" python \
-  -m torch_mlir.dialects.torch.importer.jit_ir.build_tools.abstract_interp_lib_gen \
+  -m torch_mlir.jit_ir_importer.build_tools.abstract_interp_lib_gen \
   --pytorch_op_extensions=${ext_module:-""} \
   --torch_transforms_cpp_dir="${torch_transforms_cpp_dir}"
diff --git a/build_tools/update_torch_ods.sh b/build_tools/update_torch_ods.sh
index e0564a62dff..cb0599f16f1 100755
--- a/build_tools/update_torch_ods.sh
+++ b/build_tools/update_torch_ods.sh
@@ -43,7 +43,7 @@ fi
 
 set +u
 PYTHONPATH="${PYTHONPATH}:${pypath}" python \
-  -m torch_mlir.dialects.torch.importer.jit_ir.build_tools.torch_ods_gen \
+  -m torch_mlir.jit_ir_importer.build_tools.torch_ods_gen \
   --torch_ir_include_dir="${torch_ir_include_dir}" \
   --pytorch_op_extensions="${ext_module}" \
   --debug_registry_dump="${torch_ir_include_dir}/JITOperatorRegistryDump.txt"
diff --git a/docs/Torch-ops-E2E-implementation.md b/docs/Torch-ops-E2E-implementation.md
index 153246f375b..53031c9ce1f 100644
--- a/docs/Torch-ops-E2E-implementation.md
+++ b/docs/Torch-ops-E2E-implementation.md
@@ -17,7 +17,7 @@ The end-to-end test is important to check the correctness of the other steps.
 ### Step 2. Update ods
 
-Update [torch_ods_gen.py](https://github.com/llvm/torch-mlir/blob/main/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/torch_ods_gen.py) with the new op and run [update_torch_ods.sh](https://github.com/llvm/torch-mlir/blob/main/build_tools/update_torch_ods.sh) to generate the ods. Running `update_torch_ods.sh` would dump all the operators with schema into `JITOperatorRegistryDump.txt`. It’s convenient to look for ops signatures and operands names in this file.
+Update [torch_ods_gen.py](https://github.com/llvm/torch-mlir/blob/main/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/torch_ods_gen.py) with the new op and run [update_torch_ods.sh](https://github.com/llvm/torch-mlir/blob/main/build_tools/update_torch_ods.sh) to generate the ods. Running `update_torch_ods.sh` would dump all the operators with schema into `JITOperatorRegistryDump.txt`. It’s convenient to look for ops signatures and operands names in this file.
 
 ### Step 3. Propagate types
 
 It’s essential to make sure the new op implements shape and dtype inference. See [abstract_interp_lib](https://github.com/llvm/torch-mlir/blob/main/docs/abstract_interp_lib.md) for information on adding shape and dtype inference.
diff --git a/docs/abstract_interp_lib.md b/docs/abstract_interp_lib.md
index 14ffc2181a6..eb862e6bb40 100644
--- a/docs/abstract_interp_lib.md
+++ b/docs/abstract_interp_lib.md
@@ -26,7 +26,7 @@ The two main use cases are:
 ## Architecture
 
 Functions are defined as TorchScript-able Python functions in
-`python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/abstract_interp_lib_gen.py`.
+`python/torch_mlir/jit_ir_importer/build_tools/abstract_interp_lib_gen.py`.
 The signatures of the functions are systematically derived from Torch JIT
 operator registry.
 Most shape functions are expected to reuse the upstream helper functions
diff --git a/docs/adding_an_e2e_test.md b/docs/adding_an_e2e_test.md
index 1c961c5c19f..61664c7dce2 100644
--- a/docs/adding_an_e2e_test.md
+++ b/docs/adding_an_e2e_test.md
@@ -87,7 +87,7 @@ following order:
 
 1. Shape of input tensor. Use `-1` for dynamic dimensions
 2. Dtype of the input tensor
-3. Boolean representing whether the input tensor [has value semantics](https://github.com/llvm/torch-mlir/blob/ba17a4d6c09b4bbb4ef21b1d8d4a93cb056be109/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator.h#L54-L67). This
+3. Boolean representing whether the input tensor [has value semantics](https://github.com/llvm/torch-mlir/blob/ba17a4d6c09b4bbb4ef21b1d8d4a93cb056be109/python/torch_mlir/jit_ir_importer/csrc/class_annotator.h#L54-L67). This
    will always be true for E2E tests, since the [Torch-MLIR backend contract](architecture.md#the-backend-contract)
    requires all tensors in the IR to eventually have value semantics.
 
diff --git a/docs/architecture.md b/docs/architecture.md
index e503ba40d93..8ee6bfda8a0 100644
--- a/docs/architecture.md
+++ b/docs/architecture.md
@@ -55,14 +55,14 @@ factored such that we can handle this with one core import path, which is
 through the PyTorch
 "[JIT IR](https://github.com/pytorch/pytorch/blob/78c8a0d75220bdd4955415b5f81509e005af4232/torch/csrc/jit/OVERVIEW.md)",
 and lives in
-[torch-mlir/python/torch_mlir/dialects/torch/importer/jit_ir](https://github.com/llvm/torch-mlir/tree/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/dialects/torch/importer/jit_ir).
+[torch-mlir/python/torch_mlir/jit_ir_importer](https://github.com/llvm/torch-mlir/tree/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/dialects/torch/importer/jit_ir).
 The JIT IR is a highly principled IR that faithfully models a Python subset (+
 tensors, the PyTorch op registry, and a few other things). All the other
 PyTorch program representations can eventually bottom-out on the JIT IR via
 some path provided by PyTorch. The `torch` dialect is almost entirely in 1:1
 correspondence with the JIT IR -- this allows the importer to be extremely
 small (the core is
-[under 500 lines of code](https://github.com/llvm/torch-mlir/blob/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.cpp#L1)).
+[under 500 lines of code](https://github.com/llvm/torch-mlir/blob/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/jit_ir_importer/csrc/node_importer.cpp#L1)).
 
 ### Ops
 
@@ -70,7 +70,7 @@ See [TorchOps.td](https://github.com/llvm/torch-mlir/blob/114f48e96c578ee76a6f83
 The ops in the `torch` dialect are almost entirely generated based on the
 PyTorch JIT IR operator registry via the script
-[torch_ods_gen.py](https://github.com/llvm/torch-mlir/blob/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/torch_ods_gen.py#L1) (invoked via [update_torch_ods.sh](https://github.com/llvm/torch-mlir/blob/main/build_tools/update_torch_ods.sh)).
+[torch_ods_gen.py](https://github.com/llvm/torch-mlir/blob/e322f6a8784009b37aa354abfa9a40a80f30877d/python/torch_mlir/jit_ir_importer/build_tools/torch_ods_gen.py#L1) (invoked via [update_torch_ods.sh](https://github.com/llvm/torch-mlir/blob/main/build_tools/update_torch_ods.sh)).
 This script queries the registry and generates MLIR
 [ODS](https://mlir.llvm.org/docs/OpDefinitions/) in
 [GeneratedTorchOps.td](https://github.com/llvm/torch-mlir/blob/e322f6a8784009b37aa354abfa9a40a80f30877d/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td#L1). We have a guide for
 [adding a new op end-to-end](https://github.com/llvm/torch-mlir/wiki/Torch-ops-E2E-implementation).
@@ -195,7 +195,7 @@ values.
 When one `torch.jit.script`'s a `torch.nn.Module`, the result is actually an
 `IValue` that represents the module, with a hierarchy of children `IValue`'s.
 Strictly speaking, JIT IR `torch::jit::Graph`'s are only used to represent the
 bodies of methods on the modules. So in addition to importing the
-JIT IR, we also need to import the `IValue`'s. This happens inside [ivalue_importer.cpp](https://github.com/llvm/torch-mlir/blob/fde390c7669e29362b18388448ef2b188713383f/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.cpp#L1).
+JIT IR, we also need to import the `IValue`'s. This happens inside [ivalue_importer.cpp](https://github.com/llvm/torch-mlir/blob/fde390c7669e29362b18388448ef2b188713383f/python/torch_mlir/jit_ir_importer/csrc/ivalue_importer.cpp#L1).
 Most of the IValue modeling can reuse `torch` dialect ops that already exist
 otherwise, such as `torch.constant.int` to represent an int in the object graph.
diff --git a/docs/ltc_backend.md b/docs/ltc_backend.md
index ae3cc887c7d..b0177542899 100644
--- a/docs/ltc_backend.md
+++ b/docs/ltc_backend.md
@@ -12,7 +12,7 @@
 [Lazy Tensor Core](https://github.com/pytorch/pytorch/blob/master/torch/csrc/lazy/tutorial.md) is a tracing system in PyTorch which is supported as an entry point to Torch-MLIR.
 After registering an LTC backend, all operations performed on lazy tensors are recorded and handed off to the backend implementation.
 
-LTC support is provided through an abstract [`TorchMlirBackendImpl`](../python/torch_mlir/csrc/base_lazy_backend/backend_impl.h) class, which handles the conversion to MLIR.
+LTC support is provided through an abstract [`TorchMlirBackendImpl`](../projects/ltc/csrc/base_lazy_backend/backend_impl.h) class, which handles the conversion to MLIR.
 Implementations based on this abstract class will be able to specify their own compile and execution workflows.
 Additional details about how to implement a custom backend is available [below](#Implementing-a-custom-backend).
 
@@ -27,7 +27,7 @@ View examples [here](ltc_examples.md).
 - The [autogen files](#autogen-files) are generated by this script based on the list of supported ops, which includes all ops from [`GeneratedTorchOps.td`](https://github.com/llvm/torch-mlir/blob/main/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td),
   excluding those explicitly blacklisted in the YAML file
 
-### Autogen Files ([`python/torch_mlir/csrc/base_lazy_backend/generated`](../python/torch_mlir/csrc/base_lazy_backend/generated))
+### Autogen Files ([`projects/ltc/csrc/base_lazy_backend/generated`](../projects/ltc/csrc/base_lazy_backend/generated))
 
 Generated files are created in this directory, which is ignored by version control.
 - `LazyIr.h`
@@ -41,7 +41,7 @@ Generated files are created in this directory, which is ignored by version contr
 - `shape_inference.{cpp,h}`
   - Shape inference headers for supported ops and autogen'd placeholders for unimplemented functions
 
-### Base Backend ([`python/torch_mlir/csrc/base_lazy_backend`](../python/torch_mlir/csrc/base_lazy_backend))
+### Base Backend ([`projects/ltc/csrc/base_lazy_backend`](../projects/ltc/csrc/base_lazy_backend))
 
 - `backend_impl.{cpp,h}`
   - Base LTC backend to setup Torch-MLIR lowering context
diff --git a/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td b/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td
index 3cacd78a230..0c3efd6ce7e 100644
--- a/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td
+++ b/include/torch-mlir/Dialect/Torch/IR/GeneratedTorchOps.td
@@ -13,7 +13,7 @@
 // This file is automatically generated. Please do not edit.
// Generated via: // ``` -// python -m torch_mlir.dialects.torch.importer.jit_ir.build_tools.torch_ods_gen +// python -m torch_mlir.jit_ir_importer.build_tools.torch_ods_gen // ``` // //===----------------------------------------------------------------------===// diff --git a/lib/Dialect/Torch/Transforms/AbstractInterpLibrary.cpp b/lib/Dialect/Torch/Transforms/AbstractInterpLibrary.cpp index 92f8c8006e2..eed24195c87 100644 --- a/lib/Dialect/Torch/Transforms/AbstractInterpLibrary.cpp +++ b/lib/Dialect/Torch/Transforms/AbstractInterpLibrary.cpp @@ -6227,7 +6227,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %3 = torch.prim.ListConstruct %1, %2 : (!torch.int, !torch.int) -> !torch.list\n" " return %3 : !torch.list\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.testing_framework._convert_dtype_to_int(%arg0: !torch.int) -> !torch.int {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.testing_framework._convert_dtype_to_int(%arg0: !torch.int) -> !torch.int {\n" " return %arg0 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_shape_fn.aten.triu\"(%arg0: !torch.list, %arg1: !torch.int) -> !torch.list {\n" @@ -7924,7 +7924,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " torch.prim.If %1 -> () {\n" " torch.prim.If.yield\n" " } else {\n" @@ -7939,12 +7939,12 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " return %3 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%arg0: !torch.int) -> !torch.bool {\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_complex_dtypes() : () -> !torch.list\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%arg0: !torch.int) -> !torch.bool {\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_complex_dtypes() : () -> !torch.list\n" " %1 = torch.aten.__contains__.int_list %0, %arg0 : !torch.list, !torch.int -> !torch.bool\n" " return %1 : !torch.bool\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_complex_dtypes() -> !torch.list {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_complex_dtypes() -> !torch.list {\n" " %int10 = torch.constant.int 10\n" " %int9 = torch.constant.int 9\n" " %0 = torch.prim.ListConstruct %int9, %int10 : (!torch.int, !torch.int) -> !torch.list\n" @@ -8424,7 +8424,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %true = torch.constant.bool true\n" " %false = torch.constant.bool false\n" " %int6 = torch.constant.int 6\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%arg0) : (!torch.int) -> !torch.bool\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%arg0) : (!torch.int) -> !torch.bool\n" " %1 = torch.prim.If %0 -> (!torch.bool) {\n" 
" %4 = torch.aten.ne.int %arg0, %int6 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If.yield %4 : !torch.bool\n" @@ -8434,7 +8434,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %2 = torch.prim.If %1 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %4 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%arg0) : (!torch.int) -> !torch.bool\n" +" %4 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%arg0) : (!torch.int) -> !torch.bool\n" " torch.prim.If.yield %4 : !torch.bool\n" " }\n" " %3 = torch.prim.If %2 -> (!torch.int) {\n" @@ -8444,12 +8444,12 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " return %3 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%arg0: !torch.int) -> !torch.bool {\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_float_dtypes() : () -> !torch.list\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%arg0: !torch.int) -> !torch.bool {\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_float_dtypes() : () -> !torch.list\n" " %1 = torch.aten.__contains__.int_list %0, %arg0 : !torch.list, !torch.int -> !torch.bool\n" " return %1 : !torch.bool\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_float_dtypes() -> !torch.list {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_float_dtypes() -> !torch.list {\n" " %int7 = torch.constant.int 7\n" " %int6 = torch.constant.int 6\n" " %int15 = torch.constant.int 15\n" @@ -8524,7 +8524,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.softplus\"(%arg0: !torch.tuple, %arg1: !torch.number, %arg2: !torch.number) -> !torch.int {\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.int) {\n" " torch.prim.If.yield %0#1 : !torch.int\n" " } else {\n" @@ -8533,12 +8533,12 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " return %2 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%arg0: !torch.int) -> !torch.bool {\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_integer_dtypes() : () -> !torch.list\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%arg0: !torch.int) -> !torch.bool {\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_integer_dtypes() : () -> !torch.list\n" " %1 = torch.aten.__contains__.int_list %0, %arg0 : !torch.list, !torch.int -> !torch.bool\n" " return %1 : !torch.bool\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.all_integer_dtypes() -> !torch.list {\n" +" func.func 
@__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.all_integer_dtypes() -> !torch.list {\n" " %int4 = torch.constant.int 4\n" " %int3 = torch.constant.int 3\n" " %int2 = torch.constant.int 2\n" @@ -8559,7 +8559,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %true = torch.constant.bool true\n" " %0 = torch.prim.Uninitialized : !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %3 = torch.aten.__not__ %2 : !torch.bool -> !torch.bool\n" " torch.prim.If %3 -> () {\n" " torch.prim.If.yield\n" @@ -8589,7 +8589,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " func.func @\"__torch_mlir_dtype_fn.prims.sqrt\"(%arg0: !torch.tuple) -> !torch.int {\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.int) {\n" " torch.prim.If.yield %0#1 : !torch.int\n" " } else {\n" @@ -8767,7 +8767,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" " %2:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " torch.prim.If.yield %int4 : !torch.int\n" " } else {\n" @@ -8836,10 +8836,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%arg0: !torch.list>, %arg1: !torch.list) -> !torch.int {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%arg0: !torch.list>, %arg1: !torch.list) -> !torch.int {\n" " %0 = torch.promote_dtypes %arg0, %arg1 : (!torch.list>, !torch.list) -> !torch.int\n" " return %0 : !torch.int\n" " }\n" @@ -8858,7 +8858,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " func.func @\"__torch_mlir_dtype_fn.aten.hardtanh_backward\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.number, %arg3: !torch.number) -> !torch.int {\n" " %int6 = 
torch.constant.int 6\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -8915,7 +8915,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -8930,7 +8930,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.lift_fresh_copy\"(%arg0: !torch.tuple) -> !torch.int {\n" @@ -9011,7 +9011,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " %5 = torch.aten.eq.int %4, %int11 : !torch.int, !torch.int -> !torch.bool\n" " %6 = torch.prim.If %5 -> (!torch.int) {\n" " torch.prim.If.yield %int4 : !torch.int\n" @@ -9152,7 +9152,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.threshold\"(%arg0: !torch.tuple, %arg1: 
!torch.number, %arg2: !torch.number) -> !torch.int {\n" @@ -9224,10 +9224,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " return %0#1 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.prim.abs.Scalar\"(%arg0: !torch.number) -> !torch.int {\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" " return %0 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0: !torch.number) -> !torch.int {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0: !torch.number) -> !torch.int {\n" " %0 = torch.prim.NumToTensor.Scalar %arg0 : !torch.number -> !torch.tensor\n" " %1 = torch.prim.dtype %0 : !torch.tensor -> !torch.int\n" " return %1 : !torch.int\n" @@ -9239,7 +9239,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " %5 = torch.aten.eq.int %4, %int11 : !torch.int, !torch.int -> !torch.bool\n" " %6 = torch.prim.If %5 -> (!torch.int) {\n" " torch.prim.If.yield %int4 : !torch.int\n" @@ -9369,10 +9369,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " func.func @\"__torch_mlir_dtype_fn.aten.add\"(%arg0: !torch.number, %arg1: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %0 = torch.prim.ListConstruct %none, %none : (!torch.none, !torch.none) -> !torch.list>\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%0, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%0, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.fft_fft\"(%arg0: !torch.tuple, %arg1: !torch.optional, %arg2: !torch.int, %arg3: !torch.optional) -> !torch.int {\n" @@ -9386,7 +9386,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %int5 = torch.constant.int 5\n" " %0 = 
torch.prim.Uninitialized : !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %3 = torch.prim.If %2 -> (!torch.int) {\n" " torch.prim.If.yield %1#1 : !torch.int\n" " } else {\n" @@ -9402,7 +9402,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %9 = torch.prim.If %8 -> (!torch.int) {\n" " torch.prim.If.yield %int10 : !torch.int\n" " } else {\n" -" %10 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %10 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %11 = torch.prim.If %10 -> (!torch.int) {\n" " torch.prim.If.yield %int9 : !torch.int\n" " } else {\n" @@ -9423,9 +9423,9 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.__and__.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9433,7 +9433,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.__or__.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9441,7 +9441,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> 
!torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.add.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.number) -> !torch.int {\n" @@ -9449,7 +9449,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bitwise_and.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9457,16 +9457,16 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bitwise_and.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bitwise_or.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9474,7 +9474,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = 
call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bitwise_xor.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9482,7 +9482,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bitwise_right_shift.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9490,14 +9490,14 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.bmm\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_priority_of_dtype(%0#1) : (!torch.int) -> !torch.int\n" -" %3 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_priority_of_dtype(%1#1) : (!torch.int) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_priority_of_dtype(%0#1) : (!torch.int) -> !torch.int\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_priority_of_dtype(%1#1) : (!torch.int) -> !torch.int\n" " %4 = torch.aten.lt.int %2, %3 : !torch.int, !torch.int -> !torch.bool\n" " %5 = torch.prim.If %4 -> (!torch.int) {\n" " torch.prim.If.yield %0#1 : !torch.int\n" @@ -9506,7 +9506,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " return %5 : !torch.int\n" " }\n" -" func.func @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_priority_of_dtype(%arg0: !torch.int) -> !torch.int {\n" +" func.func @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_priority_of_dtype(%arg0: !torch.int) -> !torch.int {\n" " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: Cannot determine priority of dtype\"\n" " %int15 = torch.constant.int 15\n" @@ -9606,7 +9606,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %str_1 = torch.constant.str \"AssertionError: 
`self` cannot be complex\"\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %3 = torch.aten.__not__ %2 : !torch.bool -> !torch.bool\n" " torch.prim.If %3 -> () {\n" " torch.prim.If.yield\n" @@ -9614,7 +9614,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str_1, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %5 = torch.aten.__not__ %4 : !torch.bool -> !torch.bool\n" " torch.prim.If %5 -> () {\n" " torch.prim.If.yield\n" @@ -9624,7 +9624,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %6 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %7 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %8 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" +" %8 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" " %9 = torch.aten.ne.int %8, %int11 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If %9 -> () {\n" " torch.prim.If.yield\n" @@ -9642,12 +9642,12 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%4) : (!torch.int) -> !torch.bool\n" " %6 = torch.prim.If %5 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%4) : (!torch.int) -> !torch.bool\n" " %9 = torch.prim.If %8 -> (!torch.bool) {\n" " %10 = torch.aten.ne.int %4, %int6 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If.yield %10 : !torch.bool\n" @@ -9686,12 +9686,12 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %4:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> 
!torch.int, !torch.int\n" " %5 = torch.prim.ListConstruct %4#0, %3#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %6 = torch.prim.ListConstruct %4#1, %3#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%5, %6) : (!torch.list>, !torch.list) -> !torch.int\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%7) : (!torch.int) -> !torch.bool\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%5, %6) : (!torch.list>, !torch.list) -> !torch.int\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%7) : (!torch.int) -> !torch.bool\n" " %9 = torch.prim.If %8 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %12 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" +" %12 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" " %13 = torch.prim.If %12 -> (!torch.bool) {\n" " %14 = torch.aten.ne.int %7, %int6 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If.yield %14 : !torch.bool\n" @@ -9725,8 +9725,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " func.func @\"__torch_mlir_dtype_fn.aten.matmul\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_priority_of_dtype(%0#1) : (!torch.int) -> !torch.int\n" -" %3 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_priority_of_dtype(%1#1) : (!torch.int) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_priority_of_dtype(%0#1) : (!torch.int) -> !torch.int\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_priority_of_dtype(%1#1) : (!torch.int) -> !torch.int\n" " %4 = torch.aten.lt.int %2, %3 : !torch.int, !torch.int -> !torch.bool\n" " %5 = torch.prim.If %4 -> (!torch.int) {\n" " torch.prim.If.yield %0#1 : !torch.int\n" @@ -9740,7 +9740,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.minimum\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9748,7 +9748,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> 
!torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.mm\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9776,7 +9776,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " } else {\n" " %7 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %8 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %9 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%7, %8) : (!torch.list>, !torch.list) -> !torch.int\n" +" %9 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%7, %8) : (!torch.list>, !torch.list) -> !torch.int\n" " torch.prim.If.yield %9 : !torch.int\n" " }\n" " return %6 : !torch.int\n" @@ -9788,8 +9788,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" " %6 = torch.aten.__not__ %5 : !torch.bool -> !torch.bool\n" " torch.prim.If %6 -> () {\n" " torch.prim.If.yield\n" @@ -9804,7 +9804,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.mv\"(%arg0: !torch.tuple, %arg1: !torch.tuple) -> !torch.int {\n" @@ -9812,7 +9812,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, 
%3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.sub.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.number) -> !torch.int {\n" @@ -9820,7 +9820,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.threshold_backward\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.number) -> !torch.int {\n" @@ -9831,7 +9831,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %str_1 = torch.constant.str \"AssertionError: `grad_output` cannot be complex\"\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %3 = torch.aten.__not__ %2 : !torch.bool -> !torch.bool\n" " torch.prim.If %3 -> () {\n" " torch.prim.If.yield\n" @@ -9839,7 +9839,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str_1, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %5 = torch.aten.__not__ %4 : !torch.bool -> !torch.bool\n" " torch.prim.If %5 -> () {\n" " torch.prim.If.yield\n" @@ -9849,7 +9849,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %6 = torch.prim.ListConstruct %1#0, %0#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %7 = torch.prim.ListConstruct %1#1, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %8 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" +" %8 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" " %9 = torch.prim.ListConstruct %int11 : (!torch.int) -> !torch.list\n" " %10 = torch.aten.__contains__.int_list %9, %8 : !torch.list, !torch.int -> !torch.bool\n" " %11 = torch.aten.__not__ %10 : !torch.bool -> !torch.bool\n" @@ -9875,7 +9875,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %3 = call 
@__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " %5 = torch.prim.If %4 -> (!torch.bool) {\n" " %12 = torch.aten.__isnot__ %0#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" @@ -9889,7 +9889,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %6 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %6 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %7 = torch.aten.__not__ %6 : !torch.bool -> !torch.bool\n" " %8 = torch.prim.If %7 -> (!torch.bool) {\n" " %12 = torch.aten.__isnot__ %1#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" @@ -9905,7 +9905,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %9 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %10 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %11 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%9, %10) : (!torch.list>, !torch.list) -> !torch.int\n" +" %11 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%9, %10) : (!torch.list>, !torch.list) -> !torch.int\n" " return %11 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten._convolution.deprecated\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.optional>, %arg3: !torch.list, %arg4: !torch.list, %arg5: !torch.list, %arg6: !torch.bool, %arg7: !torch.list, %arg8: !torch.int, %arg9: !torch.bool, %arg10: !torch.bool, %arg11: !torch.bool) -> !torch.int {\n" @@ -9922,7 +9922,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %3 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " %5 = torch.prim.If %4 -> (!torch.bool) {\n" " %12 = torch.aten.__isnot__ %0#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" @@ -9936,7 +9936,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %6 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %6 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %7 = torch.aten.__not__ %6 : !torch.bool -> !torch.bool\n" " %8 = torch.prim.If %7 -> (!torch.bool) {\n" " %12 = torch.aten.__isnot__ %1#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" @@ -9952,7 +9952,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %9 = torch.prim.ListConstruct %0#0, %1#0 : 
(!torch.int, !torch.int) -> !torch.list>\n" " %10 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %11 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%9, %10) : (!torch.list>, !torch.list) -> !torch.int\n" +" %11 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%9, %10) : (!torch.list>, !torch.list) -> !torch.int\n" " return %11 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.conv2d\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.optional>, %arg3: !torch.list, %arg4: !torch.list, %arg5: !torch.list, %arg6: !torch.int) -> !torch.int {\n" @@ -9980,7 +9980,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %false = torch.constant.bool false\n" " %int11 = torch.constant.int 11\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.bool) {\n" " %5 = torch.aten.ne.int %0#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If.yield %5 : !torch.bool\n" @@ -10015,7 +10015,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %2:2 = torch.prim.TupleUnpack %arg2 : !torch.tuple -> !torch.int, !torch.int\n" " %3 = torch.prim.ListConstruct %0#0, %1#0, %2#0 : (!torch.int, !torch.int, !torch.int) -> !torch.list>\n" " %4 = torch.prim.ListConstruct %0#1, %1#1, %2#1 : (!torch.int, !torch.int, !torch.int) -> !torch.list\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" " return %5 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.lerp.Tensor\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.tuple) -> !torch.int {\n" @@ -10024,7 +10024,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %2:2 = torch.prim.TupleUnpack %arg2 : !torch.tuple -> !torch.int, !torch.int\n" " %3 = torch.prim.ListConstruct %0#0, %1#0, %2#0 : (!torch.int, !torch.int, !torch.int) -> !torch.list>\n" " %4 = torch.prim.ListConstruct %0#1, %1#1, %2#1 : (!torch.int, !torch.int, !torch.int) -> !torch.list\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" " return %5 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.addcmul\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.tuple, %arg3: !torch.number) -> !torch.int {\n" @@ -10057,7 +10057,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %6 = torch.prim.ListConstruct %0#0, %1#0, %2#0 : (!torch.int, !torch.int, !torch.int) -> !torch.list>\n" " %7 = torch.prim.ListConstruct %0#1, %1#1, %2#1 : (!torch.int, !torch.int, !torch.int) -> !torch.list\n" -" %8 = call 
@__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" +" %8 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%6, %7) : (!torch.list>, !torch.list) -> !torch.int\n" " return %8 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.addcdiv\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.tuple, %arg3: !torch.number) -> !torch.int {\n" @@ -10067,8 +10067,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %2:2 = torch.prim.TupleUnpack %arg2 : !torch.tuple -> !torch.int, !torch.int\n" " %3 = torch.prim.ListConstruct %0#0, %1#0, %2#0 : (!torch.int, !torch.int, !torch.int) -> !torch.list>\n" " %4 = torch.prim.ListConstruct %0#1, %1#1, %2#1 : (!torch.int, !torch.int, !torch.int) -> !torch.list\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" -" %6 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%5) : (!torch.int) -> !torch.bool\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%3, %4) : (!torch.list>, !torch.list) -> !torch.int\n" +" %6 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%5) : (!torch.int) -> !torch.bool\n" " %7 = torch.prim.If %6 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -10080,27 +10080,27 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.sub.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number, %arg2: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call 
@__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.mul.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.div.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" @@ -10108,10 +10108,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" " %6 = torch.prim.If %5 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -10123,16 +10123,16 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, 
!torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.floor_divide.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -10141,27 +10141,27 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield\n" " }\n" " %3 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %5 = torch.prim.ListConstruct %0#1, %4 : (!torch.int, !torch.int) -> !torch.list\n" -" %6 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%3, %5) : (!torch.list>, !torch.list) -> !torch.int\n" +" %6 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%3, %5) : (!torch.list>, !torch.list) -> !torch.int\n" " return %6 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.pow.Scalar\"(%arg0: !torch.number, %arg1: !torch.tuple) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %none, %0#0 : (!torch.none, !torch.int) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %2, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.pow.Tensor_Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" 
" %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.leaky_relu\"(%arg0: !torch.tuple, %arg1: !torch.number) -> !torch.int {\n" @@ -10177,10 +10177,10 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield\n" " }\n" " %2 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %3 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%3) : (!torch.int) -> !torch.bool\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%3) : (!torch.int) -> !torch.bool\n" " torch.prim.If %4 -> () {\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %8 = torch.aten.__not__ %7 : !torch.bool -> !torch.bool\n" " torch.prim.If %8 -> () {\n" " torch.prim.If.yield\n" @@ -10193,7 +10193,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield\n" " }\n" " %5 = torch.prim.ListConstruct %0#1, %3 : (!torch.int, !torch.int) -> !torch.list\n" -" %6 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %5) : (!torch.list>, !torch.list) -> !torch.int\n" +" %6 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %5) : (!torch.list>, !torch.list) -> !torch.int\n" " return %6 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.elu\"(%arg0: !torch.tuple, %arg1: !torch.number, %arg2: !torch.number, %arg3: !torch.number) -> !torch.int {\n" @@ -10215,7 +10215,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.Loop %int3, %true, init() {\n" " ^bb0(%arg4: !torch.int):\n" " %7 = torch.aten.__getitem__.t %3, %arg4 : !torch.list, !torch.int -> !torch.number\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%7) : (!torch.number) -> !torch.int\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%7) : (!torch.number) -> !torch.int\n" " %9 = torch.aten.append.t %2, %8 : !torch.list, !torch.int -> !torch.list\n" " torch.prim.Loop.condition %true, iter()\n" " } : (!torch.int, !torch.bool) -> ()\n" @@ -10224,13 +10224,13 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.Loop %5, %true, init() {\n" " ^bb0(%arg4: !torch.int):\n" " %7 = torch.aten.__getitem__.t %2, %arg4 : !torch.list, !torch.int -> !torch.int\n" -" %8 = func.call 
@__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" " %9 = torch.aten.append.t %4, %8 : !torch.list, !torch.bool -> !torch.list\n" " torch.prim.Loop.condition %true, iter()\n" " } : (!torch.int, !torch.bool) -> ()\n" " %6 = torch.aten.any.bool %4 : !torch.list -> !torch.bool\n" " torch.prim.If %6 -> () {\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %8 = torch.aten.__not__ %7 : !torch.bool -> !torch.bool\n" " torch.prim.If %8 -> () {\n" " torch.prim.If.yield\n" @@ -10248,9 +10248,9 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.baddbmm\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.tuple, %arg3: !torch.number, %arg4: !torch.number) -> !torch.int {\n" @@ -10282,7 +10282,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " %5 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %6 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %7 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%5, %6) : (!torch.list>, !torch.list) -> !torch.int\n" +" %7 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%5, %6) : (!torch.list>, !torch.list) -> !torch.int\n" " return %7 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.where.self\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.tuple) -> !torch.int {\n" @@ -10290,18 +10290,18 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg2 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> 
!torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.where.Scalar\"(%arg0: !torch.tuple, %arg1: !torch.number, %arg2: !torch.number) -> !torch.int {\n" " %int6 = torch.constant.int 6\n" " %int4 = torch.constant.int 4\n" " %false = torch.constant.bool false\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0) : (!torch.int) -> !torch.bool\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.bool) {\n" -" %4 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" -" %5 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %4 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" +" %5 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" " torch.prim.If.yield %5 : !torch.bool\n" " } else {\n" " torch.prim.If.yield %false : !torch.bool\n" @@ -10317,18 +10317,18 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %0#0, %none : (!torch.int, !torch.none) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %0#1, %2 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.where.ScalarSelf\"(%arg0: !torch.tuple, %arg1: !torch.number, %arg2: !torch.tuple) -> !torch.int {\n" " %none = torch.constant.none\n" " %0:2 = torch.prim.TupleUnpack %arg2 : !torch.tuple -> !torch.int, !torch.int\n" " %1 = torch.prim.ListConstruct %none, %0#0 : (!torch.none, !torch.int) -> !torch.list>\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" " %3 = torch.prim.ListConstruct %2, %0#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) 
-> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%1, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.nll_loss_forward\"(%arg0: !torch.tuple, %arg1: !torch.tuple, %arg2: !torch.optional>, %arg3: !torch.int, %arg4: !torch.int) -> !torch.tuple {\n" @@ -10355,7 +10355,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -10395,7 +10395,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " func.func @\"__torch_mlir_dtype_fn.aten.native_batch_norm\"(%arg0: !torch.tuple, %arg1: !torch.optional>, %arg2: !torch.optional>, %arg3: !torch.optional>, %arg4: !torch.optional>, %arg5: !torch.bool, %arg6: !torch.float, %arg7: !torch.float) -> !torch.tuple {\n" " %int6 = torch.constant.int 6\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -10412,7 +10412,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %0 = torch.aten.__isnot__ %arg1, %none : !torch.optional, !torch.none -> !torch.bool\n" " %1 = torch.prim.If %0 -> (!torch.int) {\n" " %2 = torch.prim.unchecked_cast %arg1 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -10422,8 +10422,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" -" %2 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %2 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -10442,7 +10442,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %0 = 
torch.aten.__isnot__ %arg2, %none : !torch.optional, !torch.none -> !torch.bool\n" " %1 = torch.prim.If %0 -> (!torch.int) {\n" " %2 = torch.prim.unchecked_cast %arg2 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -10452,13 +10452,13 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" -" %2 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %2 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %6 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%6) : (!torch.int) -> !torch.bool\n" +" %6 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%6) : (!torch.int) -> !torch.bool\n" " torch.prim.If.yield %7 : !torch.bool\n" " }\n" " %5 = torch.prim.If %4 -> (!torch.int) {\n" @@ -10479,7 +10479,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %0 = torch.aten.__isnot__ %arg3, %none : !torch.optional, !torch.none -> !torch.bool\n" " %1 = torch.prim.If %0 -> (!torch.int) {\n" " %2 = torch.prim.unchecked_cast %arg3 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -10489,20 +10489,20 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " }\n" " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" -" %2 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %2 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %3 = func.call 
@__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" " torch.prim.If.yield %8 : !torch.bool\n" " }\n" " %5 = torch.prim.If %4 -> (!torch.bool) {\n" " torch.prim.If.yield %true : !torch.bool\n" " } else {\n" -" %7 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" +" %7 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg2) : (!torch.number) -> !torch.int\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%7) : (!torch.int) -> !torch.bool\n" " torch.prim.If.yield %8 : !torch.bool\n" " }\n" " %6 = torch.prim.If %5 -> (!torch.int) {\n" @@ -10523,7 +10523,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" " %2:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " torch.prim.If.yield %int4 : !torch.int\n" " } else {\n" @@ -10546,7 +10546,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" " %2:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2#1) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " torch.prim.If.yield %int4 : !torch.int\n" " } else {\n" @@ -10560,7 +10560,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0 = call @\"__torch_mlir_dtype_fn.aten.sum\"(%arg0, %arg3) : (!torch.tuple, !torch.optional) -> !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " 
torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -10674,7 +10674,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -10685,7 +10685,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %3 = torch.aten.__isnot__ %arg4, %none : !torch.optional, !torch.none -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " %5 = torch.prim.unchecked_cast %arg4 : !torch.optional -> !torch.int\n" -" %6 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%5) : (!torch.int) -> !torch.bool\n" +" %6 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%5) : (!torch.int) -> !torch.bool\n" " %7 = torch.aten.__not__ %6 : !torch.bool -> !torch.bool\n" " torch.prim.If %7 -> () {\n" " torch.prim.If.yield\n" @@ -10693,9 +10693,9 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %8 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %8 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %9 = torch.prim.If %8 -> (!torch.int) {\n" -" %10 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%5) : (!torch.int) -> !torch.bool\n" +" %10 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%5) : (!torch.int) -> !torch.bool\n" " torch.prim.If %10 -> () {\n" " torch.prim.If.yield\n" " } else {\n" @@ -10706,7 +10706,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %12 = func.call @\"__torch_mlir_dtype_fn.aten.std\"(%11, %true) : (!torch.tuple, !torch.bool) -> !torch.int\n" " torch.prim.If.yield %12 : !torch.int\n" " } else {\n" -" %10 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%5) : (!torch.int) -> !torch.bool\n" +" %10 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%5) : (!torch.int) -> !torch.bool\n" " %11 = torch.aten.__not__ %10 : !torch.bool -> !torch.bool\n" " torch.prim.If %11 -> () {\n" " torch.prim.If.yield\n" @@ -10827,8 +10827,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %2 = torch.prim.unchecked_cast %arg2 : !torch.optional -> !torch.int\n" " torch.prim.If.yield %2 : !torch.int\n" " } else {\n" -" %2 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %2 = func.call 
@__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg1) : (!torch.number) -> !torch.int\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.prim.If %3 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -10981,7 +10981,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %5 = torch.prim.unchecked_cast %arg1 : !torch.optional -> !torch.int\n" " torch.prim.If.yield %5 : !torch.int\n" " }\n" -" %3 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -11041,7 +11041,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %int4 : !torch.int\n" " } else {\n" " %2 = torch.prim.unchecked_cast %arg3 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -11062,7 +11062,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" " %2 = torch.prim.unchecked_cast %arg1 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -11083,7 +11083,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" " %2 = torch.prim.unchecked_cast %arg2 : !torch.optional -> !torch.int\n" -" %3 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" +" %3 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%2) : (!torch.int) -> !torch.bool\n" " %4 = torch.aten.__not__ %3 : !torch.bool -> !torch.bool\n" " torch.prim.If %4 -> () {\n" " torch.prim.If.yield\n" @@ -11103,7 +11103,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -11136,7 +11136,7 @@ StringRef 
mlir::torch::Torch::getAbstractInterpLibrary() { " %none = torch.constant.none\n" " %str = torch.constant.str \"AssertionError: \"\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.aten.__not__ %1 : !torch.bool -> !torch.bool\n" " torch.prim.If %2 -> () {\n" " torch.prim.If.yield\n" @@ -11167,8 +11167,8 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%4) : (!torch.int) -> !torch.bool\n" " %6 = torch.prim.If %5 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -11179,7 +11179,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " func.func @\"__torch_mlir_dtype_fn.aten.atan\"(%arg0: !torch.tuple) -> !torch.int {\n" " %int6 = torch.constant.int 6\n" " %0:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %1 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" +" %1 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%0#1) : (!torch.int) -> !torch.bool\n" " %2 = torch.prim.If %1 -> (!torch.int) {\n" " torch.prim.If.yield %int6 : !torch.int\n" " } else {\n" @@ -11192,7 +11192,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %1:2 = torch.prim.TupleUnpack %arg1 : !torch.tuple -> !torch.int, !torch.int\n" " %2 = torch.prim.ListConstruct %0#0, %1#0 : (!torch.int, !torch.int) -> !torch.list>\n" " %3 = torch.prim.ListConstruct %0#1, %1#1 : (!torch.int, !torch.int) -> !torch.list\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%2, %3) : (!torch.list>, !torch.list) -> !torch.int\n" " return %4 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.cat\"(%arg0: !torch.list>, %arg1: !torch.int) -> !torch.int {\n" @@ -11219,7 +11219,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %9 = torch.aten.append.t %1, %7#1 : !torch.list, !torch.int -> !torch.list\n" " torch.prim.Loop.condition %true, iter()\n" " } : (!torch.int, !torch.bool) -> ()\n" -" %5 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(%0, %1) : (!torch.list>, !torch.list) -> 
!torch.int\n" +" %5 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(%0, %1) : (!torch.list>, !torch.list) -> !torch.int\n" " return %5 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten._shape_as_tensor\"(%arg0: !torch.tuple) -> !torch.int {\n" @@ -11236,7 +11236,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " %str_0 = torch.constant.str \"AssertionError: \"\n" " %0 = torch.prim.Uninitialized : !torch.int\n" " %1:2 = torch.prim.TupleUnpack %arg0 : !torch.tuple -> !torch.int, !torch.int\n" -" %2 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %2 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_complex_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %3 = torch.aten.__not__ %2 : !torch.bool -> !torch.bool\n" " torch.prim.If %3 -> () {\n" " torch.prim.If.yield\n" @@ -11244,11 +11244,11 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " torch.prim.RaiseException %str_0, %none : !torch.str, !torch.none\n" " torch.prim.If.yield\n" " }\n" -" %4 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_float_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %4 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_float_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %5 = torch.prim.If %4 -> (!torch.int) {\n" " torch.prim.If.yield %int7 : !torch.int\n" " } else {\n" -" %6 = func.call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" +" %6 = func.call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.is_integer_dtype(%1#1) : (!torch.int) -> !torch.bool\n" " %7 = torch.prim.If %6 -> (!torch.bool) {\n" " %9 = torch.aten.ne.int %1#1, %int11 : !torch.int, !torch.int -> !torch.bool\n" " torch.prim.If.yield %9 : !torch.bool\n" @@ -11272,7 +11272,7 @@ StringRef mlir::torch::Torch::getAbstractInterpLibrary() { " return %5 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.prim.NumToTensor.Scalar\"(%arg0: !torch.number) -> !torch.int {\n" -" %0 = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" +" %0 = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.get_dtype_of_scalar(%arg0) : (!torch.number) -> !torch.int\n" " return %0 : !torch.int\n" " }\n" " func.func @\"__torch_mlir_dtype_fn.aten.softmax.int\"(%arg0: !torch.tuple, %arg1: !torch.int, %arg2: !torch.optional) -> !torch.int {\n" diff --git a/projects/CMakeLists.txt b/projects/CMakeLists.txt new file mode 100644 index 00000000000..2838570a879 --- /dev/null +++ b/projects/CMakeLists.txt @@ -0,0 +1,44 @@ +include(AddMLIRPython) + +# Configure PyTorch if we have any features enabled which require it. +if(TORCH_MLIR_ENABLE_JIT_IR_IMPORTER OR TORCH_MLIR_ENABLE_LTC) + message(STATUS "Enabling PyTorch C++ dep (features depend on it)") + include(TorchMLIRPyTorch) + + TorchMLIRProbeForPyTorchInstall() + if(TORCH_MLIR_USE_INSTALLED_PYTORCH) + TorchMLIRConfigurePyTorch() + else() + # Assume it is a sibling to the overall project. 
+ set(Torch_DIR "${PROJECT_SOURCE_DIR}/../libtorch/share/cmake/Torch") + message(STATUS "Attempting to locate libtorch as a sibling to the project: ${Torch_DIR}") + endif() + + find_package(Torch 1.11 REQUIRED) + + set(TORCHGEN_DIR ${Torch_ROOT}/../../../torchgen) + + include_directories(BEFORE + ${TORCH_INCLUDE_DIRS} + ${Python3_INCLUDE_DIRS} + ) + link_directories("${TORCH_INSTALL_PREFIX}/lib") + message(STATUS "libtorch_python CXXFLAGS is ...${TORCH_CXXFLAGS}") + message(STATUS "TORCH_LIBRARIES = ${TORCH_LIBRARIES}") +endif() + +# Include jit_ir_common if the jit_ir importer or LTC is enabled, +# since they both require it. +if(TORCH_MLIR_ENABLE_JIT_IR_IMPORTER OR TORCH_MLIR_ENABLE_LTC) + add_subdirectory(jit_ir_common) +endif() + +# Include LTC. +if(TORCH_MLIR_ENABLE_LTC) + add_subdirectory(ltc) +endif() + +# Include overall PT1 project. +if(TORCH_MLIR_ENABLE_PROJECT_PT1) + add_subdirectory(pt1) +endif() diff --git a/projects/jit_ir_common/CMakeLists.txt b/projects/jit_ir_common/CMakeLists.txt new file mode 100644 index 00000000000..f0a3ff59674 --- /dev/null +++ b/projects/jit_ir_common/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(csrc/jit_ir_importer) diff --git a/projects/jit_ir_common/csrc/jit_ir_importer/CMakeLists.txt b/projects/jit_ir_common/csrc/jit_ir_importer/CMakeLists.txt new file mode 100644 index 00000000000..b5f24fb80e8 --- /dev/null +++ b/projects/jit_ir_common/csrc/jit_ir_importer/CMakeLists.txt @@ -0,0 +1,27 @@ +# Static library with core functionality. +# We can't use a shared library here, due to issues with linking on macOS-arm64 (the library itself won't build) +# For details, see: https://github.com/llvm/torch-mlir/runs/7919012376 +add_library(TorchMLIRJITIRImporter STATIC + class_annotator.cpp + function_importer.cpp + node_importer.cpp + ivalue_importer.cpp + torch_to_mlir_utils.cpp + ) +message(STATUS "Linking TorchMLIRJITImporter with ${TORCH_LIBRARIES}") +target_link_libraries(TorchMLIRJITIRImporter + TorchMLIRAggregateCAPI + ${TORCH_LIBRARIES} + ) +# Includes are relative to the csrc dir (i.e. #include "jit_ir_importer/...") +target_include_directories(TorchMLIRJITIRImporter PUBLIC + ${CMAKE_CURRENT_SOURCE_DIR}/.. 
+) +set_target_properties(TorchMLIRJITIRImporter PROPERTIES + LIBRARY_OUTPUT_DIRECTORY "${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/_mlir_libs" + OUTPUT_NAME lib_jit_ir_importer + PREFIX "" + SUFFIX ".a" + CXX_VISIBILITY_PRESET "default" + COMPILE_FLAGS "${TORCH_CXXFLAGS}" + ) diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator.cpp b/projects/jit_ir_common/csrc/jit_ir_importer/class_annotator.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator.cpp rename to projects/jit_ir_common/csrc/jit_ir_importer/class_annotator.cpp diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator.h b/projects/jit_ir_common/csrc/jit_ir_importer/class_annotator.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator.h rename to projects/jit_ir_common/csrc/jit_ir_importer/class_annotator.h diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/function_importer.cpp b/projects/jit_ir_common/csrc/jit_ir_importer/function_importer.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/function_importer.cpp rename to projects/jit_ir_common/csrc/jit_ir_importer/function_importer.cpp diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/function_importer.h b/projects/jit_ir_common/csrc/jit_ir_importer/function_importer.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/function_importer.h rename to projects/jit_ir_common/csrc/jit_ir_importer/function_importer.h diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options.h b/projects/jit_ir_common/csrc/jit_ir_importer/import_options.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options.h rename to projects/jit_ir_common/csrc/jit_ir_importer/import_options.h diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.cpp b/projects/jit_ir_common/csrc/jit_ir_importer/ivalue_importer.cpp similarity index 98% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.cpp rename to projects/jit_ir_common/csrc/jit_ir_importer/ivalue_importer.cpp index 75013d5ee9a..ef02096eb34 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.cpp +++ b/projects/jit_ir_common/csrc/jit_ir_importer/ivalue_importer.cpp @@ -190,7 +190,8 @@ MlirValue IValueImporter::importModule(torch::jit::Module currentModule) { torchMlirTorchNnModuleTypeGet(context, toMlirStringRef(moduleTypeName)), mlirRegionCreate()); MlirRegion nnModuleRegion = mlirOperationGetRegion(nnModule, 0); - mlirRegionAppendOwnedBlock(nnModuleRegion, mlirBlockCreate(0, nullptr, nullptr)); + mlirRegionAppendOwnedBlock(nnModuleRegion, + mlirBlockCreate(0, nullptr, nullptr)); MlirBlock nnModuleBody = mlirRegionGetFirstBlock(nnModuleRegion); InserterGuard inserterGuard(importBlock, nnModule); @@ -491,8 +492,9 @@ void IValueImporter::importClassType(c10::ClassType *classType) { toMlirNamedAttribute( "name", mlirStringAttrGet( context, toMlirStringRef(classAttribute.getName()))), - toMlirNamedAttribute("type", mlirTypeAttrGet(getMlirTypeFromTorchType( - loc, classAttribute.getType(), importOptions))), + toMlirNamedAttribute( + "type", 
mlirTypeAttrGet(getMlirTypeFromTorchType( + loc, classAttribute.getType(), importOptions))), isPrivate); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.h b/projects/jit_ir_common/csrc/jit_ir_importer/ivalue_importer.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/ivalue_importer.h rename to projects/jit_ir_common/csrc/jit_ir_importer/ivalue_importer.h diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/mlir_utils.h b/projects/jit_ir_common/csrc/jit_ir_importer/mlir_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/mlir_utils.h rename to projects/jit_ir_common/csrc/jit_ir_importer/mlir_utils.h diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.cpp b/projects/jit_ir_common/csrc/jit_ir_importer/node_importer.cpp similarity index 93% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.cpp rename to projects/jit_ir_common/csrc/jit_ir_importer/node_importer.cpp index 15cffedbe83..0bb4722fcf7 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.cpp +++ b/projects/jit_ir_common/csrc/jit_ir_importer/node_importer.cpp @@ -41,10 +41,9 @@ class NodeImporter { const ImportOptions &importOptions = {}); private: - MlirBlock - createBlockFor(Block *jitBlock, - c10::optional> blockArgTypes, - const ImportOptions &importOptions = {}); + MlirBlock createBlockFor(Block *jitBlock, + c10::optional> blockArgTypes, + const ImportOptions &importOptions = {}); void mapValue(Value *jitValue, MlirValue value); void mapResults(Node *node, MlirOperation operation); MlirValue lookupMappedValue(Value *jitValue); @@ -269,9 +268,9 @@ void NodeImporter::importNode(Node *node, MlirBlock appendToBlock, terminatorOperandTypes, /*userAllowsRefinement=*/false)); }; - mlirRegionAppendOwnedBlock( - mlirOperationGetRegion(operation, 0), - importBlock(node->blocks()[0], createTerminator, c10::nullopt, importOptions)); + mlirRegionAppendOwnedBlock(mlirOperationGetRegion(operation, 0), + importBlock(node->blocks()[0], createTerminator, + c10::nullopt, importOptions)); return; } @@ -290,12 +289,12 @@ void NodeImporter::importNode(Node *node, MlirBlock appendToBlock, resultTypes, /*userAllowsRefinement=*/false)); }; - mlirRegionAppendOwnedBlock( - mlirOperationGetRegion(operation, 0), - importBlock(node->blocks()[0], createTerminator, c10::nullopt, importOptions)); - mlirRegionAppendOwnedBlock( - mlirOperationGetRegion(operation, 1), - importBlock(node->blocks()[1], createTerminator, c10::nullopt, importOptions)); + mlirRegionAppendOwnedBlock(mlirOperationGetRegion(operation, 0), + importBlock(node->blocks()[0], createTerminator, + c10::nullopt, importOptions)); + mlirRegionAppendOwnedBlock(mlirOperationGetRegion(operation, 1), + importBlock(node->blocks()[1], createTerminator, + c10::nullopt, importOptions)); return; } @@ -303,8 +302,8 @@ void NodeImporter::importNode(Node *node, MlirBlock appendToBlock, auto classType = node->input(0)->type()->cast(); auto methodName = node->s(c10::attr::name); torch::jit::Function *function = classType->findMethod(methodName); - MlirType calleeType = - getFunctionTypeFromSchema(context, function->getSchema(), importOptions); + MlirType calleeType = getFunctionTypeFromSchema( + context, function->getSchema(), importOptions); std::vector expectedTypes; for (int i = 0, e = 
mlirFunctionTypeGetNumInputs(calleeType); i < e; ++i) { expectedTypes.push_back(mlirFunctionTypeGetInput(calleeType, i)); @@ -361,10 +360,10 @@ void NodeImporter::importNode(Node *node, MlirBlock appendToBlock, } } -MlirBlock NodeImporter::importBlock( - Block *jitBlock, CreateTerminatorFn createTerminator, - c10::optional> blockArgTypes, - const ImportOptions &importOptions) { +MlirBlock +NodeImporter::importBlock(Block *jitBlock, CreateTerminatorFn createTerminator, + c10::optional> blockArgTypes, + const ImportOptions &importOptions) { MlirBlock block = createBlockFor(jitBlock, blockArgTypes, importOptions); for (Node *node : jitBlock->nodes()) { importNode(node, block, importOptions); @@ -434,5 +433,6 @@ torch_mlir::importBlock(MlirContext context, Block *jitBlock, c10::optional> blockArgTypes, const ImportOptions &importOptions) { NodeImporter importer(context); - return importer.importBlock(jitBlock, createTerminator, blockArgTypes, importOptions); + return importer.importBlock(jitBlock, createTerminator, blockArgTypes, + importOptions); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.h b/projects/jit_ir_common/csrc/jit_ir_importer/node_importer.h similarity index 85% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.h rename to projects/jit_ir_common/csrc/jit_ir_importer/node_importer.h index dd01444f415..7fce8b988c4 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/node_importer.h +++ b/projects/jit_ir_common/csrc/jit_ir_importer/node_importer.h @@ -36,11 +36,11 @@ using CreateTerminatorFn = /// are required to be for correctness. The code will internally attempt to /// adjust the types to the block argument types. /// TODO: Formalize what type conversions are allowed here. 
-MlirBlock importBlock( - MlirContext context, torch::jit::Block *jitBlock, - CreateTerminatorFn createTerminator, - c10::optional> blockArgTypes = c10::nullopt, - const ImportOptions &importOptions = {}); +MlirBlock +importBlock(MlirContext context, torch::jit::Block *jitBlock, + CreateTerminatorFn createTerminator, + c10::optional> blockArgTypes = c10::nullopt, + const ImportOptions &importOptions = {}); } // namespace torch_mlir diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/torch_to_mlir_utils.cpp b/projects/jit_ir_common/csrc/jit_ir_importer/torch_to_mlir_utils.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/torch_to_mlir_utils.cpp rename to projects/jit_ir_common/csrc/jit_ir_importer/torch_to_mlir_utils.cpp diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/torch_to_mlir_utils.h b/projects/jit_ir_common/csrc/jit_ir_importer/torch_to_mlir_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/torch_to_mlir_utils.h rename to projects/jit_ir_common/csrc/jit_ir_importer/torch_to_mlir_utils.h diff --git a/projects/ltc/CMakeLists.txt b/projects/ltc/CMakeLists.txt new file mode 100644 index 00000000000..892faabd7eb --- /dev/null +++ b/projects/ltc/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(csrc/base_lazy_backend) diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/CMakeLists.txt b/projects/ltc/csrc/base_lazy_backend/CMakeLists.txt similarity index 76% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/CMakeLists.txt rename to projects/ltc/csrc/base_lazy_backend/CMakeLists.txt index 2087f99eb53..eee3044f0fc 100644 --- a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/CMakeLists.txt +++ b/projects/ltc/csrc/base_lazy_backend/CMakeLists.txt @@ -2,30 +2,6 @@ # Setup PyTorch/LTC #------------------------------------------------------------------------------- -include(TorchMLIRPyTorch) - -TorchMLIRProbeForPyTorchInstall() -if(TORCH_MLIR_USE_INSTALLED_PYTORCH) - TorchMLIRConfigurePyTorch() -else() - # Assume it is a sibling to the overall project. - set(Torch_DIR "${PROJECT_SOURCE_DIR}/../libtorch/share/cmake/Torch") - message(STATUS "Attempting to locate libtorch as a sibling to the project: ${Torch_DIR}") -endif() - -find_package(Torch 1.11 REQUIRED) - -set(TORCHGEN_DIR ${Torch_ROOT}/../../../torchgen) - -include_directories(BEFORE - ${TORCH_INCLUDE_DIRS} - ${CMAKE_CURRENT_SOURCE_DIR} - ${CMAKE_CURRENT_BINARY_DIR} - ${Python3_INCLUDE_DIRS} - ${PROJECT_SOURCE_DIR}/projects/pt1/python -) -link_directories("${TORCH_INSTALL_PREFIX}/lib") - set(LTC_GENERATED generated/LazyNativeFunctions.cpp generated/RegisterLazy.cpp @@ -80,6 +56,12 @@ add_library(torch_mlir_ltc_backend SHARED utils/tensor_utils.cpp ) target_compile_features(torch_mlir_ltc_backend PRIVATE cxx_std_17) +# Includes are resolved relative to csrc (i.e. #include "base_lazy_backend/..."). +# Add both the source and generated include directories. +target_include_directories(torch_mlir_ltc_backend PUBLIC + ${CMAKE_CURRENT_SOURCE_DIR}/.. + ${CMAKE_CURRENT_BINARY_DIR}/.. 
+) add_dependencies(torch_mlir_ltc_backend TorchMLIRJITIRImporter @@ -112,13 +94,13 @@ add_custom_command( add_custom_command( TARGET torch_mlir_ltc_backend POST_BUILD COMMAND cp - ${PROJECT_SOURCE_DIR}/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/*.h + ${PROJECT_SOURCE_DIR}/projects/ltc/csrc/base_lazy_backend/*.h ${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/base_lazy_backend/) add_custom_command( TARGET torch_mlir_ltc_backend POST_BUILD COMMAND cp - ${PROJECT_SOURCE_DIR}/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/generated/*.h + ${PROJECT_SOURCE_DIR}/projects/ltc/csrc/base_lazy_backend/generated/*.h ${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/base_lazy_backend/generated/) add_custom_command( @@ -129,7 +111,7 @@ add_custom_command( add_custom_command( TARGET torch_mlir_ltc_backend POST_BUILD COMMAND cp - ${PROJECT_SOURCE_DIR}/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/*.h + ${PROJECT_SOURCE_DIR}/projects/ltc/csrc/base_lazy_backend/ops/*.h ${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/base_lazy_backend/ops/) add_custom_command( @@ -140,5 +122,5 @@ add_custom_command( add_custom_command( TARGET torch_mlir_ltc_backend POST_BUILD COMMAND cp - ${PROJECT_SOURCE_DIR}/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/*.h + ${PROJECT_SOURCE_DIR}/projects/ltc/csrc/base_lazy_backend/utils/*.h ${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/base_lazy_backend/utils/) diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/README.md b/projects/ltc/csrc/base_lazy_backend/README.md similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/README.md rename to projects/ltc/csrc/base_lazy_backend/README.md diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/backend_impl.cpp b/projects/ltc/csrc/base_lazy_backend/backend_impl.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/backend_impl.cpp rename to projects/ltc/csrc/base_lazy_backend/backend_impl.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/backend_impl.h b/projects/ltc/csrc/base_lazy_backend/backend_impl.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/backend_impl.h rename to projects/ltc/csrc/base_lazy_backend/backend_impl.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/dynamic_ir.cpp b/projects/ltc/csrc/base_lazy_backend/dynamic_ir.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/dynamic_ir.cpp rename to projects/ltc/csrc/base_lazy_backend/dynamic_ir.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/dynamic_ir.h b/projects/ltc/csrc/base_lazy_backend/dynamic_ir.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/dynamic_ir.h rename to projects/ltc/csrc/base_lazy_backend/dynamic_ir.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ir_builder.h b/projects/ltc/csrc/base_lazy_backend/ir_builder.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ir_builder.h rename to projects/ltc/csrc/base_lazy_backend/ir_builder.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_lowering_context.cpp b/projects/ltc/csrc/base_lazy_backend/mlir_lowering_context.cpp similarity index 99% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_lowering_context.cpp rename to 
projects/ltc/csrc/base_lazy_backend/mlir_lowering_context.cpp index 4823b4929ab..7e6f40c5c2e 100644 --- a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_lowering_context.cpp +++ b/projects/ltc/csrc/base_lazy_backend/mlir_lowering_context.cpp @@ -21,8 +21,8 @@ #include "mlir-c/IR.h" #include "mlir-c/Pass.h" -#include "../../dialects/torch/importer/jit_ir/csrc/function_importer.h" #include "backend_impl.h" +#include "jit_ir_importer/function_importer.h" #include "mlir_lowering_context.h" #include "mlir_node.h" #include "utils/debug.h" diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_lowering_context.h b/projects/ltc/csrc/base_lazy_backend/mlir_lowering_context.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_lowering_context.h rename to projects/ltc/csrc/base_lazy_backend/mlir_lowering_context.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_native_functions.cpp b/projects/ltc/csrc/base_lazy_backend/mlir_native_functions.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_native_functions.cpp rename to projects/ltc/csrc/base_lazy_backend/mlir_native_functions.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node.cpp b/projects/ltc/csrc/base_lazy_backend/mlir_node.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node.cpp rename to projects/ltc/csrc/base_lazy_backend/mlir_node.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node.h b/projects/ltc/csrc/base_lazy_backend/mlir_node.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node.h rename to projects/ltc/csrc/base_lazy_backend/mlir_node.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node_lowering.cpp b/projects/ltc/csrc/base_lazy_backend/mlir_node_lowering.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node_lowering.cpp rename to projects/ltc/csrc/base_lazy_backend/mlir_node_lowering.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node_lowering.h b/projects/ltc/csrc/base_lazy_backend/mlir_node_lowering.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/mlir_node_lowering.h rename to projects/ltc/csrc/base_lazy_backend/mlir_node_lowering.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/device_data.cpp b/projects/ltc/csrc/base_lazy_backend/ops/device_data.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/device_data.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/device_data.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/device_data.h b/projects/ltc/csrc/base_lazy_backend/ops/device_data.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/device_data.h rename to projects/ltc/csrc/base_lazy_backend/ops/device_data.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/generic.cpp b/projects/ltc/csrc/base_lazy_backend/ops/generic.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/generic.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/generic.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/generic.h b/projects/ltc/csrc/base_lazy_backend/ops/generic.h similarity index 100% 
rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/generic.h rename to projects/ltc/csrc/base_lazy_backend/ops/generic.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/index.cpp b/projects/ltc/csrc/base_lazy_backend/ops/index.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/index.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/index.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/index.h b/projects/ltc/csrc/base_lazy_backend/ops/index.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/index.h rename to projects/ltc/csrc/base_lazy_backend/ops/index.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/ivalue.cpp b/projects/ltc/csrc/base_lazy_backend/ops/ivalue.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/ivalue.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/ivalue.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/ivalue.h b/projects/ltc/csrc/base_lazy_backend/ops/ivalue.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/ivalue.h rename to projects/ltc/csrc/base_lazy_backend/ops/ivalue.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/split.cpp b/projects/ltc/csrc/base_lazy_backend/ops/split.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/split.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/split.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/split.h b/projects/ltc/csrc/base_lazy_backend/ops/split.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/split.h rename to projects/ltc/csrc/base_lazy_backend/ops/split.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/to_copy.h b/projects/ltc/csrc/base_lazy_backend/ops/to_copy.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/to_copy.h rename to projects/ltc/csrc/base_lazy_backend/ops/to_copy.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/unbind_int.cpp b/projects/ltc/csrc/base_lazy_backend/ops/unbind_int.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/unbind_int.cpp rename to projects/ltc/csrc/base_lazy_backend/ops/unbind_int.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/unbind_int.h b/projects/ltc/csrc/base_lazy_backend/ops/unbind_int.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/ops/unbind_int.h rename to projects/ltc/csrc/base_lazy_backend/ops/unbind_int.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/shape_inference.cpp b/projects/ltc/csrc/base_lazy_backend/shape_inference.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/shape_inference.cpp rename to projects/ltc/csrc/base_lazy_backend/shape_inference.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/tensor.cpp b/projects/ltc/csrc/base_lazy_backend/tensor.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/tensor.cpp rename to projects/ltc/csrc/base_lazy_backend/tensor.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/tensor.h b/projects/ltc/csrc/base_lazy_backend/tensor.h similarity index 
100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/tensor.h rename to projects/ltc/csrc/base_lazy_backend/tensor.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/debug.h b/projects/ltc/csrc/base_lazy_backend/utils/debug.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/debug.h rename to projects/ltc/csrc/base_lazy_backend/utils/debug.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/exception.h b/projects/ltc/csrc/base_lazy_backend/utils/exception.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/exception.h rename to projects/ltc/csrc/base_lazy_backend/utils/exception.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/jit_utils.cpp b/projects/ltc/csrc/base_lazy_backend/utils/jit_utils.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/jit_utils.cpp rename to projects/ltc/csrc/base_lazy_backend/utils/jit_utils.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/jit_utils.h b/projects/ltc/csrc/base_lazy_backend/utils/jit_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/jit_utils.h rename to projects/ltc/csrc/base_lazy_backend/utils/jit_utils.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/string_utils.h b/projects/ltc/csrc/base_lazy_backend/utils/string_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/string_utils.h rename to projects/ltc/csrc/base_lazy_backend/utils/string_utils.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/sys_utils.h b/projects/ltc/csrc/base_lazy_backend/utils/sys_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/sys_utils.h rename to projects/ltc/csrc/base_lazy_backend/utils/sys_utils.h diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/tensor_utils.cpp b/projects/ltc/csrc/base_lazy_backend/utils/tensor_utils.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/tensor_utils.cpp rename to projects/ltc/csrc/base_lazy_backend/utils/tensor_utils.cpp diff --git a/projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/tensor_utils.h b/projects/ltc/csrc/base_lazy_backend/utils/tensor_utils.h similarity index 100% rename from projects/pt1/python/torch_mlir/csrc/base_lazy_backend/utils/tensor_utils.h rename to projects/ltc/csrc/base_lazy_backend/utils/tensor_utils.h diff --git a/projects/pt1/examples/torchscript_resnet_inference.ipynb b/projects/pt1/examples/torchscript_resnet_inference.ipynb index 82258fd3927..3ab7cc64dad 100644 --- a/projects/pt1/examples/torchscript_resnet_inference.ipynb +++ b/projects/pt1/examples/torchscript_resnet_inference.ipynb @@ -92,8 +92,8 @@ "import torchvision\n", "\n", "import torch_mlir\n", - "from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder\n", - "from torch_mlir.dialects.torch.importer.jit_ir.torchscript_annotations import extract_annotations\n", + "from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder\n", + "from torch_mlir.jit_ir_importer.torchscript_annotations import extract_annotations\n", "\n", "from torch_mlir.passmanager import PassManager\n", "from torch_mlir_e2e_test.linalg_on_tensors_backends.refbackend import RefBackendLinalgOnTensorsBackend" diff 
--git a/projects/pt1/python/CMakeLists.txt b/projects/pt1/python/CMakeLists.txt index dd286401384..23dd5be4071 100644 --- a/projects/pt1/python/CMakeLists.txt +++ b/projects/pt1/python/CMakeLists.txt @@ -1,5 +1,3 @@ -include(AddMLIRPython) - # Disables generation of "version soname" (i.e. libFoo.so.), which # causes pure duplication as part of Python wheels. set(CMAKE_PLATFORM_NO_VERSIONED_SONAME ON) @@ -90,9 +88,6 @@ declare_mlir_python_extension(TorchMLIRPythonExtensions.Main # Lazy Tensor Core ################################################################################ -if(TORCH_MLIR_ENABLE_LTC) - add_subdirectory(torch_mlir/csrc/base_lazy_backend) -endif() # Reference backend has a separate check for TORCH_MLIR_ENABLE_LTC, since it # generates a dummy Python library when disabled. if(NOT TORCH_MLIR_ENABLE_ONLY_MLIR_PYTHON_BINDINGS) @@ -104,7 +99,8 @@ endif() ################################################################################ if(TORCH_MLIR_ENABLE_JIT_IR_IMPORTER) - add_subdirectory(torch_mlir/dialects/torch/importer/jit_ir) + add_subdirectory(torch_mlir/jit_ir_importer) + add_subdirectory(torch_mlir/csrc/jit_ir_importer) add_subdirectory(torch_mlir_e2e_test) endif() diff --git a/projects/pt1/python/test/annotations-sugar.py b/projects/pt1/python/test/annotations-sugar.py index 98cbec74d1c..e540e84b9e1 100644 --- a/projects/pt1/python/test/annotations-sugar.py +++ b/projects/pt1/python/test/annotations-sugar.py @@ -8,8 +8,8 @@ import torch from torch_mlir_e2e_test.annotations import annotate_args, export -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator -from torch_mlir.dialects.torch.importer.jit_ir.torchscript_annotations import extract_annotations +from torch_mlir.jit_ir_importer import ClassAnnotator +from torch_mlir.jit_ir_importer.torchscript_annotations import extract_annotations class MmModule(torch.nn.Module): def __init__(self): diff --git a/projects/pt1/python/torch_mlir/__init__.py b/projects/pt1/python/torch_mlir/__init__.py index 8de6cc1a14f..8bbcce9943d 100644 --- a/projects/pt1/python/torch_mlir/__init__.py +++ b/projects/pt1/python/torch_mlir/__init__.py @@ -17,8 +17,8 @@ from torch.fx.experimental.proxy_tensor import make_fx from .compiler_utils import run_pipeline_with_repro_report -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ImportOptions, ModuleBuilder -from torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator import generate_library +from torch_mlir.jit_ir_importer import ClassAnnotator, ImportOptions, ModuleBuilder +from torch_mlir.jit_ir_importer.build_tools.library_generator import generate_library class OutputType(Enum): diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/CMakeLists.txt b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/CMakeLists.txt similarity index 50% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/CMakeLists.txt rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/CMakeLists.txt index 287e9a20c87..5ae5ddf0a48 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/CMakeLists.txt +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/CMakeLists.txt @@ -1,39 +1,3 @@ -# Sharp edge: Torch extensions need to use the same pybind11 that torch -# was compiled with, or else there will be issues in cross module exception -# handling (which will abort instead of raise). We circumvent the possibility -# by forcing the torch directories first. 
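The Python-level effect of the renames above is that the JIT IR importer now lives at torch_mlir.jit_ir_importer instead of torch_mlir.dialects.torch.importer.jit_ir, with the same ClassAnnotator / ImportOptions / ModuleBuilder surface. A minimal sketch of the updated import pattern, assuming a build with TORCH_MLIR_ENABLE_JIT_IR_IMPORTER=ON (the MyModule class below is a made-up example, not part of this patch):

    import torch
    # Old path (removed by this patch):
    #   from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder
    # New path (same bindings, only the package location changed):
    from torch_mlir.jit_ir_importer import ModuleBuilder

    class MyModule(torch.nn.Module):  # hypothetical module for illustration
        def forward(self, x):
            return x + x

    scripted = torch.jit.script(MyModule())
    mb = ModuleBuilder()
    mb.import_module(scripted._c)  # same API as before; only the import path moved
    print(mb.module)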
-include_directories(BEFORE - ${TORCH_INCLUDE_DIRS} - ${CMAKE_CURRENT_SOURCE_DIR} - ${CMAKE_CURRENT_BINARY_DIR} - ${Python3_INCLUDE_DIRS} - ) -link_directories("${TORCH_INSTALL_PREFIX}/lib") - -# Static library with core functionality. -# We can't use a shared library here, due to issues with linking on macOS-arm64 (the library itself won't build) -# For details, see: https://github.com/llvm/torch-mlir/runs/7919012376 -add_library(TorchMLIRJITIRImporter STATIC - class_annotator.cpp - function_importer.cpp - node_importer.cpp - ivalue_importer.cpp - torch_to_mlir_utils.cpp - ) -target_link_libraries(TorchMLIRJITIRImporter - TorchMLIRAggregateCAPI - ${TORCH_LIBRARIES} - ) -message(STATUS "TORCH_CXXFLAGS=${TORCH_CXXFLAGS}") -set_target_properties(TorchMLIRJITIRImporter PROPERTIES - LIBRARY_OUTPUT_DIRECTORY "${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/_mlir_libs" - OUTPUT_NAME lib_jit_ir_importer - PREFIX "" - SUFFIX ".a" - CXX_VISIBILITY_PRESET "default" - COMPILE_FLAGS "${TORCH_CXXFLAGS}" - ) - # Separate Pybind MODULE due to issues with a SHARED library. # https://github.com/llvm/torch-mlir/issues/1154 add_library(TorchMLIRJITIRImporterPybind MODULE @@ -62,7 +26,6 @@ if(Python3_LIBRARIES) ) endif() -message(STATUS "TORCH_CXXFLAGS=${TORCH_CXXFLAGS}") set_target_properties(TorchMLIRJITIRImporterPybind PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${TORCH_MLIR_PYTHON_PACKAGES_DIR}/torch_mlir/torch_mlir/_mlir_libs" OUTPUT_NAME _jit_ir_importer diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.cpp b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.cpp similarity index 79% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.cpp rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.cpp index 7d8525209d4..c1219d48d4d 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.cpp +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.cpp @@ -8,7 +8,7 @@ //===----------------------------------------------------------------------===// #include "class_annotator_pybind.h" -#include "class_annotator.h" +#include "jit_ir_importer/class_annotator.h" #include #include @@ -18,7 +18,7 @@ using namespace torch_mlir; static c10::ScalarType convertToC10ScalarType(py::object obj) { if (THPDtype_Check(obj.ptr())) { // Need reinterpret_cast, since no C++-level inheritance is involved. 
- THPDtype *dtype = reinterpret_cast<THPDtype *>(obj.ptr()); + THPDtype* dtype = reinterpret_cast<THPDtype*>(obj.ptr()); return dtype->scalar_type; } std::stringstream ss; @@ -48,16 +48,17 @@ static std::vector<ArgAnnotation> getArgAnnotations(py::list pyArgAnnotations) { return argAnnotations; } -void torch_mlir::initClassAnnotatorBindings(py::module &m) { +void torch_mlir::initClassAnnotatorBindings(py::module& m) { py::class_<ClassAnnotator>(m, "ClassAnnotator") .def(py::init<>()) .def("exportPath", &ClassAnnotator::exportPath) .def("exportNone", &ClassAnnotator::exportNone) - .def("annotateArgs", - [&](ClassAnnotator &cls_annotator, c10::ClassType &rootClassType, - std::vector<std::string> path, py::list argAnnotations) { - cls_annotator.annotateArgs(rootClassType, path, - getArgAnnotations(argAnnotations)); - }) + .def( + "annotateArgs", + [&](ClassAnnotator& cls_annotator, c10::ClassType& rootClassType, + std::vector<std::string> path, py::list argAnnotations) { + cls_annotator.annotateArgs( + rootClassType, path, getArgAnnotations(argAnnotations)); + }) .def("__repr__", &ClassAnnotator::toString); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.h b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.h similarity index 95% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.h rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.h index a0d1a75817a..4eb170b8ba9 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/class_annotator_pybind.h +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/class_annotator_pybind.h @@ -18,7 +18,7 @@ namespace py = pybind11; namespace torch_mlir { -void initClassAnnotatorBindings(py::module &m); +void initClassAnnotatorBindings(py::module& m); } // namespace torch_mlir #endif // TORCHMLIRJITIRIMPORTER_CSRC_CLASS_ANNOTATOR_PYBIND_H diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.cpp b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.cpp similarity index 89% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.cpp rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.cpp index 2b90b3b65bf..a168ca1c05d 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.cpp +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.cpp @@ -50,9 +50,9 @@ static py::list getRegisteredOps() { // since the JIT has its own dispatch mechanism that it uses to implement // "prim" ops and a handful of "aten" ops that are effectively prim ops, such // as `aten::__is__`.
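The hunk continues below with the per-op record construction (argument name/type plus aliasing info). For orientation, a sketch of how these records can be inspected from Python, assuming get_registered_ops is re-exported from torch_mlir.jit_ir_importer in the same way as the other bindings touched by this patch:

    # Hedged sketch: the exact re-export path is an assumption.
    from torch_mlir.jit_ir_importer import get_registered_ops

    ops = get_registered_ops()      # list of per-op dicts built by getRegisteredOps()
    print(len(ops), "ops visible to the importer")
    sample = ops[0]
    print(sorted(sample.keys()))    # e.g. the 'arguments' / 'returns' lists built below
    for arg in sample["arguments"]:
        # per-argument fields assembled in addArgument() below
        print(arg["name"], arg["type"])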
- for (const std::shared_ptr &op : + for (const std::shared_ptr& op : torch::jit::getAllOperators()) { - const c10::FunctionSchema &schema = op->schema(); + const c10::FunctionSchema& schema = op->schema(); py::dict record; { @@ -69,7 +69,7 @@ static py::list getRegisteredOps() { py::list arguments; py::list returns; - auto addArgument = [](py::list &container, const c10::Argument &arg) { + auto addArgument = [](py::list& container, const c10::Argument& arg) { py::dict argRecord; argRecord["name"] = arg.name(); argRecord["type"] = arg.type()->str(); @@ -87,10 +87,10 @@ static py::list getRegisteredOps() { py::dict aliasInfo; py::list before; py::list after; - for (auto &symbol : arg.alias_info()->beforeSets()) { + for (auto& symbol : arg.alias_info()->beforeSets()) { before.append(std::string(symbol.toQualString())); } - for (auto &symbol : arg.alias_info()->afterSets()) { + for (auto& symbol : arg.alias_info()->afterSets()) { after.append(std::string(symbol.toQualString())); } aliasInfo["is_write"] = arg.alias_info()->isWrite(); @@ -101,10 +101,10 @@ static py::list getRegisteredOps() { container.append(std::move(argRecord)); }; - for (auto &argument : schema.arguments()) { + for (auto& argument : schema.arguments()) { addArgument(arguments, argument); } - for (auto &returnArg : schema.returns()) { + for (auto& returnArg : schema.returns()) { addArgument(returns, returnArg); } record["arguments"] = std::move(arguments); @@ -115,6 +115,6 @@ static py::list getRegisteredOps() { return results; } -void torch_mlir::initGetRegisteredOpsBindings(py::module &m) { +void torch_mlir::initGetRegisteredOpsBindings(py::module& m) { m.def("get_registered_ops", &getRegisteredOps, kGetRegisteredOpsDocstring); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.h b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.h similarity index 94% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.h rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.h index ec336878c3c..b2851e6a420 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/get_registered_ops.h +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/get_registered_ops.h @@ -19,7 +19,7 @@ namespace torch_mlir { -void initGetRegisteredOpsBindings(py::module &m); +void initGetRegisteredOpsBindings(py::module& m); } // namespace torch_mlir diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.cpp b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.cpp similarity index 61% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.cpp rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.cpp index b072b0ed922..94a47229dda 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.cpp +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.cpp @@ -8,17 +8,19 @@ //===----------------------------------------------------------------------===// #include "import_options_pybind.h" -#include "import_options.h" +#include "jit_ir_importer/import_options.h" namespace py = pybind11; using namespace torch_mlir; -void torch_mlir::initImportOptionsBindings(py::module &m) { +void torch_mlir::initImportOptionsBindings(py::module& m) { py::class_(m, "ImportOptions") .def(py::init<>()) - 
.def_readwrite("assumeTensorsHaveValueSemantics", - &ImportOptions::assumeTensorsHaveValueSemantics) - .def_readwrite("ignoreExistingTensorShapesAndDtypes", - &ImportOptions::ignoreExistingTensorShapesAndDtypes); + .def_readwrite( + "assumeTensorsHaveValueSemantics", + &ImportOptions::assumeTensorsHaveValueSemantics) + .def_readwrite( + "ignoreExistingTensorShapesAndDtypes", + &ImportOptions::ignoreExistingTensorShapesAndDtypes); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.h b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.h similarity index 92% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.h rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.h index 6e8e1389ca3..4ca27a21858 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/import_options_pybind.h +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/import_options_pybind.h @@ -13,7 +13,7 @@ #include namespace torch_mlir { -void initImportOptionsBindings(pybind11::module &m); +void initImportOptionsBindings(pybind11::module& m); } // namespace torch_mlir #endif // TORCHMLIRJITIRIMPORTER_CSRC_IMPORT_OPTIONS_PYBIND_H diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/init_python_bindings.cpp b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/init_python_bindings.cpp similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/init_python_bindings.cpp rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/init_python_bindings.cpp diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.cpp b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.cpp similarity index 75% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.cpp rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.cpp index ca4bd600f5a..92f131b0d73 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.cpp +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.cpp @@ -9,9 +9,9 @@ #include "module_builder.h" -#include "function_importer.h" -#include "ivalue_importer.h" -#include "mlir_utils.h" +#include "jit_ir_importer/function_importer.h" +#include "jit_ir_importer/ivalue_importer.h" +#include "jit_ir_importer/mlir_utils.h" #include "mlir-c/Bindings/Python/Interop.h" #include "mlir-c/BuiltinAttributes.h" @@ -22,7 +22,7 @@ namespace py = pybind11; using namespace torch_mlir; -static py::object getMlirIrClass(const char *className) { +static py::object getMlirIrClass(const char* className) { return py::module::import(MAKE_MLIR_PYTHON_QUALNAME("ir")).attr(className); } @@ -33,7 +33,7 @@ static py::object createPythonContextIfNone(py::object contextObj) { return contextObj; } -static MlirContext castPythonObjectToMlirContext(py::object &contextObj) { +static MlirContext castPythonObjectToMlirContext(py::object& contextObj) { assert(!contextObj.is_none() && "context cannot be None"); auto contextCapsule = contextObj.attr(MLIR_PYTHON_CAPI_PTR_ATTR); MlirContext context = mlirPythonCapsuleToContext(contextCapsule.ptr()); @@ -77,15 +77,15 @@ static void printDiagnostic(MlirDiagnostic diagnostic) { std::stringstream ss; ss << stringifyMlirDiagnosticSeverity(mlirDiagnosticGetSeverity(diagnostic)) << ": "; - auto stringCallback = 
[](MlirStringRef s, void *stringCallbackUserData) { - auto *ssp = static_cast<std::stringstream *>(stringCallbackUserData); + auto stringCallback = [](MlirStringRef s, void* stringCallbackUserData) { + auto* ssp = static_cast<std::stringstream*>(stringCallbackUserData); ssp->write(s.data, s.length); }; - mlirDiagnosticPrint(diagnostic, stringCallback, static_cast<void *>(&ss)); + mlirDiagnosticPrint(diagnostic, stringCallback, static_cast<void*>(&ss)); // Use pybind11's print: // https://pybind11.readthedocs.io/en/stable/advanced/pycpp/utilities.html - py::print(ss.str(), - py::arg("file") = py::module_::import("sys").attr("stderr")); + py::print( + ss.str(), py::arg("file") = py::module_::import("sys").attr("stderr")); } // Register a diagnostic handler that will redirect output to `sys.stderr` @@ -93,7 +93,7 @@ static void printDiagnostic(MlirDiagnostic diagnostic) { // that mlir diagnostics emitted are correctly routed in Jupyter notebooks. static void registerPythonSysStderrDiagnosticHandler(MlirContext context) { auto diagnosticHandler = [](MlirDiagnostic diagnostic, - void *) -> MlirLogicalResult { + void*) -> MlirLogicalResult { printDiagnostic(diagnostic); for (int i = 0, e = mlirDiagnosticGetNumNotes(diagnostic); i != e; i++) { printDiagnostic(mlirDiagnosticGetNote(diagnostic, i)); @@ -101,7 +101,7 @@ static void registerPythonSysStderrDiagnosticHandler(MlirContext context) { return mlirLogicalResultSuccess(); }; MlirDiagnosticHandlerID id = mlirContextAttachDiagnosticHandler( - context, diagnosticHandler, nullptr, [](void *) { return; }); + context, diagnosticHandler, nullptr, [](void*) { return; }); // Ignore the ID. We intend to keep this handler for the entire lifetime // of this context. (void)id; @@ -123,28 +123,28 @@ ModuleBuilder::ModuleBuilder(pybind11::object contextObj) terminator = mlirBlockGetFirstOperation(getBodyBlock()); } -torch::jit::StrongFunctionPtr -ModuleBuilder::importFunction(torch::jit::StrongFunctionPtr function, - py::object maybeImportOptions) { +torch::jit::StrongFunctionPtr ModuleBuilder::importFunction( + torch::jit::StrongFunctionPtr function, py::object maybeImportOptions) { ImportOptions importOptions; if (!maybeImportOptions.is_none()) { importOptions = py::cast<ImportOptions>(maybeImportOptions); } MlirBlock block = getBodyBlock(); MlirOperation terminator = this->terminator; - MlirOperation func = importJitFunctionAsFuncOp(context, function.function_, - [](int) -> MlirAttribute { return {nullptr}; }, importOptions); + MlirOperation func = importJitFunctionAsFuncOp( + context, function.function_, + [](int) -> MlirAttribute { return {nullptr}; }, importOptions); mlirBlockInsertOwnedOperationBefore(block, terminator, func); return function; } -void ModuleBuilder::importModule(torch::jit::Module jitModule, - py::object maybeClassAnnotator, - py::object maybeImportOptions) { +void ModuleBuilder::importModule( + torch::jit::Module jitModule, py::object maybeClassAnnotator, + py::object maybeImportOptions) { ClassAnnotator dummyAnnotator; - ClassAnnotator *classAnnotator = &dummyAnnotator; + ClassAnnotator* classAnnotator = &dummyAnnotator; if (!maybeClassAnnotator.is_none()) { - classAnnotator = py::cast<ClassAnnotator *>(maybeClassAnnotator); + classAnnotator = py::cast<ClassAnnotator*>(maybeClassAnnotator); } ImportOptions importOptions; if (!maybeImportOptions.is_none()) { @@ -168,14 +168,15 @@ void ModuleBuilder::importModule(torch::jit::Module jitModule, // precise `torch.class_type` names. // // This name is not semantically load-bearing!!!
- auto &name = *jitModule.type()->name(); + auto& name = *jitModule.type()->name(); auto debugModuleNameAttr = mlirStringAttrGet( context, toMlirStringRef(name.atoms()[name.atoms().size() - 1])); - mlirOperationSetAttributeByName(mlirModuleGetOperation(module), - toMlirStringRef("torch.debug_module_name"), - debugModuleNameAttr); - importIValue(jitModule._ivalue(), mlirModuleGetBody(module), - mlirModuleGetContext(module), *classAnnotator, importOptions); + mlirOperationSetAttributeByName( + mlirModuleGetOperation(module), + toMlirStringRef("torch.debug_module_name"), debugModuleNameAttr); + importIValue( + jitModule._ivalue(), mlirModuleGetBody(module), + mlirModuleGetContext(module), *classAnnotator, importOptions); } MlirBlock ModuleBuilder::getBodyBlock() { @@ -183,14 +184,16 @@ MlirBlock ModuleBuilder::getBodyBlock() { return mlirRegionGetFirstBlock(mlirOperationGetRegion(moduleOp, 0)); } -void ModuleBuilder::bind(py::module &m) { +void ModuleBuilder::bind(py::module& m) { py::class_(m, "ModuleBuilder") .def(py::init(), py::arg("context") = py::none()) .def_property_readonly("context", &ModuleBuilder::getContextObj) .def_property_readonly("module", &ModuleBuilder::getModuleObj) - .def("import_function", &ModuleBuilder::importFunction, py::arg("function"), - py::arg("importOptions") = py::none()) - .def("import_module", &ModuleBuilder::importModule, py::arg("module"), - py::arg("classAnnotator") = py::none(), - py::arg("importOptions") = py::none()); + .def( + "import_function", &ModuleBuilder::importFunction, + py::arg("function"), py::arg("importOptions") = py::none()) + .def( + "import_module", &ModuleBuilder::importModule, py::arg("module"), + py::arg("classAnnotator") = py::none(), + py::arg("importOptions") = py::none()); } diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.h b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.h similarity index 84% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.h rename to projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.h index 08695e15faf..cff2200d365 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/csrc/module_builder.h +++ b/projects/pt1/python/torch_mlir/csrc/jit_ir_importer/module_builder.h @@ -10,7 +10,7 @@ #ifndef TORCHMLIRJITIRIMPORTER_CSRC_BUILDER_H #define TORCHMLIRJITIRIMPORTER_CSRC_BUILDER_H -#include "class_annotator.h" +#include "jit_ir_importer/class_annotator.h" #include "mlir-c/IR.h" @@ -29,7 +29,7 @@ class ModuleBuilder { ModuleBuilder(pybind11::object contextObj); /// Creates Python bindings for the class. - static void bind(pybind11::module &m); + static void bind(pybind11::module& m); pybind11::object getContextObj() { return contextObj; } pybind11::object getModuleObj() { return moduleObj; } @@ -38,16 +38,15 @@ class ModuleBuilder { // torch.jit.ScriptFunction is the C++ type torch::jit::StrongFunctionPtr. // Just a bit of naming cruft. // Returns the same function, making it suitable as a nested decorator. - torch::jit::StrongFunctionPtr - importFunction(torch::jit::StrongFunctionPtr function, - py::object maybeImportOptions); + torch::jit::StrongFunctionPtr importFunction( + torch::jit::StrongFunctionPtr function, py::object maybeImportOptions); // Imports a torch::jit::Module into the current module, using the // annotations, if not none, provided in `maybeClassAnnotator` which should be // a ClassAnnotator. 
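Tying the moved pybind pieces together: the annotateArgs binding earlier in this patch plus import_module is what the relocated tests exercise. A hedged sketch of that flow, assuming the usual torch-mlir annotation format of one entry per argument, each either None or a (shape, dtype, has-value-semantics) tuple; the example module is made up for illustration:

    import torch
    from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder

    class TwoInputs(torch.nn.Module):   # hypothetical example module
        def forward(self, lhs, rhs):
            return torch.mm(lhs, rhs)

    scripted = torch.jit.script(TwoInputs())
    annotator = ClassAnnotator()
    class_type = scripted._c._type()
    annotator.exportNone(class_type)                 # start with nothing exported
    annotator.exportPath(class_type, ["forward"])    # export just forward()
    annotator.annotateArgs(
        class_type, ["forward"],
        [None, ([2, 3], torch.float32, True), ([3, 4], torch.float32, True)])
    print(annotator)                                 # uses the __repr__ binding
    mb = ModuleBuilder()
    mb.import_module(scripted._c, annotator)
    print(mb.module)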
- void importModule(torch::jit::Module jitModule, - py::object maybeClassAnnotator, - py::object maybeImportOptions); + void importModule( + torch::jit::Module jitModule, py::object maybeClassAnnotator, + py::object maybeImportOptions); private: MlirBlock getBodyBlock(); diff --git a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/CMakeLists.txt b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/CMakeLists.txt index fdef2714372..1c1f2fa2a43 100644 --- a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/CMakeLists.txt +++ b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/CMakeLists.txt @@ -1,28 +1,3 @@ -########################################################################### -# Setup PyTorch -########################################################################### - -include(TorchMLIRPyTorch) - -TorchMLIRProbeForPyTorchInstall() -if(TORCH_MLIR_USE_INSTALLED_PYTORCH) - TorchMLIRConfigurePyTorch() -else() - # Assume it is a sibling to the overall project. - set(Torch_DIR "${PROJECT_SOURCE_DIR}/../libtorch/share/cmake/Torch") - message(STATUS "Attempting to locate libtorch as a sibling to the project: ${Torch_DIR}") -endif() - -find_package(Torch 1.11 REQUIRED) - -########################################################################### -# Setup Python development -########################################################################### - -list(APPEND CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/externals/llvm-project/mlir/cmake/modules") -include(MLIRDetectPythonEnv) -mlir_configure_python_dev_packages() - ########################################################################### # Library definition ########################################################################### diff --git a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/backend_impl.cpp b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/backend_impl.cpp index 1064a3d1e1a..4bcb9347b5a 100644 --- a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/backend_impl.cpp +++ b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/backend_impl.cpp @@ -14,12 +14,12 @@ #include #include -#include -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include +#include #include "backend_impl.h" diff --git a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/reference_lazy_backend_pybind.cpp b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/reference_lazy_backend_pybind.cpp index c575d9dd299..f4b8cd9ba57 100644 --- a/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/reference_lazy_backend_pybind.cpp +++ b/projects/pt1/python/torch_mlir/csrc/reference_lazy_backend/reference_lazy_backend_pybind.cpp @@ -11,10 +11,10 @@ #include "torch/csrc/lazy/core/config.h" #include "torch/csrc/lazy/backend/backend_interface.h" -#include -#include -#include -#include +#include +#include +#include +#include #include #include diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/CMakeLists.txt b/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/CMakeLists.txt deleted file mode 100644 index 30bb4cb3151..00000000000 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/CMakeLists.txt +++ /dev/null @@ -1,32 +0,0 @@ -#------------------------------------------------------------------------------- -# Setup PyTorch -#------------------------------------------------------------------------------- - -include(TorchMLIRPyTorch) - -TorchMLIRProbeForPyTorchInstall() 
-if(TORCH_MLIR_USE_INSTALLED_PYTORCH) - TorchMLIRConfigurePyTorch() -else() - # Assume it is a sibling to the overall project. - set(Torch_DIR "${PROJECT_SOURCE_DIR}/../libtorch/share/cmake/Torch") - message(STATUS "Attempting to locate libtorch as a sibling to the project: ${Torch_DIR}") -endif() - -find_package(Torch 1.11 REQUIRED) - -message(STATUS "libtorch_python CXXFLAGS is ...${TORCH_CXXFLAGS}") -#------------------------------------------------------------------------------- -# Subdirectories -#------------------------------------------------------------------------------- - -add_subdirectory(csrc) - -## Declare the sources of the Python module. - -declare_mlir_python_sources(TorchMLIRPythonSources.JitIRImporter - ROOT_DIR "${TORCH_MLIR_PYTHON_ROOT_DIR}" - ADD_TO_PARENT TorchMLIRPythonSources - SOURCES_GLOB - dialects/torch/importer/jit_ir/*.py -) diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/__init__.py b/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/projects/pt1/python/torch_mlir/jit_ir_importer/CMakeLists.txt b/projects/pt1/python/torch_mlir/jit_ir_importer/CMakeLists.txt new file mode 100644 index 00000000000..c2883b3dca8 --- /dev/null +++ b/projects/pt1/python/torch_mlir/jit_ir_importer/CMakeLists.txt @@ -0,0 +1,12 @@ +#------------------------------------------------------------------------------- +# Subdirectories +#------------------------------------------------------------------------------- + +## Declare the sources of the Python module. + +declare_mlir_python_sources(TorchMLIRPythonSources.JitIRImporter + ROOT_DIR "${TORCH_MLIR_PYTHON_ROOT_DIR}" + ADD_TO_PARENT TorchMLIRPythonSources + SOURCES_GLOB + jit_ir_importer/*.py +) diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/__init__.py b/projects/pt1/python/torch_mlir/jit_ir_importer/__init__.py similarity index 75% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/__init__.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/__init__.py index ead98dd5c6d..b5a49561ade 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/__init__.py +++ b/projects/pt1/python/torch_mlir/jit_ir_importer/__init__.py @@ -11,8 +11,11 @@ # Our native extension is not self-contained. It references libraries which # must come in via the above first. -from ....._mlir_libs._jit_ir_importer import * +from .._mlir_libs._jit_ir_importer import * +# Ensure that the torch dialect has been loaded as it registers passes +# and other things the jit_ir_importer needs. 
+from ..dialects import torch as _unused_torch_dialect __all__ = [ "debug_trace_to_stderr", diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/__init__.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/__init__.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/__init__.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/__init__.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/abstract_interp_lib_gen.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/abstract_interp_lib_gen.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/abstract_interp_lib_gen.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/abstract_interp_lib_gen.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/library_generator.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/library_generator.py similarity index 99% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/library_generator.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/library_generator.py index 74eb520e22d..6cd19643a5f 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/library_generator.py +++ b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/library_generator.py @@ -10,7 +10,7 @@ import torch -from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder +from torch_mlir.jit_ir_importer import ModuleBuilder from torch_mlir.passmanager import PassManager from .registry import Registry diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/registry.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/registry.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/registry.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/registry.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/testing_framework.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/testing_framework.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/testing_framework.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/testing_framework.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/torch_ods_gen.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/torch_ods_gen.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/torch_ods_gen.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/torch_ods_gen.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/utils.py b/projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/utils.py similarity index 100% rename from projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/build_tools/utils.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/build_tools/utils.py diff --git a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/torchscript_annotations.py b/projects/pt1/python/torch_mlir/jit_ir_importer/torchscript_annotations.py similarity index 97% rename from 
projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/torchscript_annotations.py rename to projects/pt1/python/torch_mlir/jit_ir_importer/torchscript_annotations.py index d495dda4836..a6541b6503b 100644 --- a/projects/pt1/python/torch_mlir/dialects/torch/importer/jit_ir/torchscript_annotations.py +++ b/projects/pt1/python/torch_mlir/jit_ir_importer/torchscript_annotations.py @@ -8,7 +8,7 @@ import torch import torch_mlir -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator +from torch_mlir.jit_ir_importer import ClassAnnotator # Decorators diff --git a/projects/pt1/test/CMakeLists.txt b/projects/pt1/test/CMakeLists.txt index 1419b245ecb..b750c984519 100644 --- a/projects/pt1/test/CMakeLists.txt +++ b/projects/pt1/test/CMakeLists.txt @@ -23,4 +23,4 @@ add_lit_testsuite(check-torch-mlir-pt1 "Running the torch-mlir PT1 regression te ) set_target_properties(check-torch-mlir-pt1 PROPERTIES FOLDER "Tests") -add_lit_testsuites(TORCH_MLIR ${CMAKE_CURRENT_SOURCE_DIR} DEPENDS ${TORCH_MLIR_TEST_DEPENDS}) +add_lit_testsuites(TORCH_MLIR_PT1 ${CMAKE_CURRENT_SOURCE_DIR} DEPENDS ${TORCH_MLIR_TEST_DEPENDS}) diff --git a/projects/pt1/test/lit.cfg.py b/projects/pt1/test/lit.cfg.py index a9753bf2271..31e3ee388f3 100644 --- a/projects/pt1/test/lit.cfg.py +++ b/projects/pt1/test/lit.cfg.py @@ -19,7 +19,7 @@ # Configuration file for the 'lit' test runner. # name: The name of this test suite. -config.name = 'TORCH_MLIR' +config.name = 'TORCH_MLIR_PT1' config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell) diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-error.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-error.py index 7c448f6e3bb..26eaa5bd0cb 100644 --- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-error.py +++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-error.py @@ -5,7 +5,7 @@ import typing import torch -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder +from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder # RUN: %PYTHON %s | FileCheck %s mb = ModuleBuilder() diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-tensor-type-bound.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-tensor-type-bound.py index e8bcd4864f1..6cc2d57b1ca 100644 --- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-tensor-type-bound.py +++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/arg-tensor-type-bound.py @@ -5,7 +5,7 @@ import typing import torch -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder +from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s mb = ModuleBuilder() diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/class-annotator-repr.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/class-annotator-repr.py index ce235a6bf03..3a2ed4319d2 100644 --- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/class-annotator-repr.py +++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/class-annotator-repr.py @@ -5,7 +5,7 @@ import typing import torch -from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder +from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder # RUN: %PYTHON %s | FileCheck %s mb = 
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-error.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-error.py
index cc4b5656b13..2a0806f6fff 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-error.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-error.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder
+from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder
 # RUN: %PYTHON %s | FileCheck %s
 mb = ModuleBuilder()
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-recursive.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-recursive.py
index cc2963d4678..79b4dccd208 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-recursive.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export-recursive.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder
+from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
 mb = ModuleBuilder()
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export.py
index 37b5d48ad52..433f8249b1e 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/annotations/export.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ModuleBuilder
+from torch_mlir.jit_ir_importer import ClassAnnotator, ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
 mb = ModuleBuilder()
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/debug-module-name.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/debug-module-name.py
index f4ad4dd3a3c..399b45f7335 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/debug-module-name.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/debug-module-name.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/dict.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/dict.py
index 0a9e7f9265f..117b0cff958 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/dict.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/dict.py
@@ -5,7 +5,7 @@
 from typing import Dict, Optional
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions-that-call-methods.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions-that-call-methods.py
index ade43aca0ad..318e099758c 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions-that-call-methods.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions-that-call-methods.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions.py
index 48426061757..ee22a495efa 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/functions.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/list.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/list.py
index 2e8765be40a..0c1b8f2ffdd 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/list.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/list.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-derefine.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-derefine.py
index 6a941330d03..fee1b2922f0 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-derefine.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-derefine.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-locations.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-locations.py
index 7eb98beb9c1..5d38d6e3a11 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-locations.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods-locations.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods.py
index fc246c458e8..0143012bf2b 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/methods.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error-submodule.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error-submodule.py
index 9bd66c97c12..eae86ec1c94 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error-submodule.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error-submodule.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: not %PYTHON %s 2>&1 | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error.py
index a3ce3440c88..968509accea 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-error.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: not %PYTHON %s 2>&1 | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-torch-bug.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-torch-bug.py
index 25d65101486..4c323ec01e4 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-torch-bug.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity-torch-bug.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity.py
index 253bdfcec3e..0f6516a2734 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/object-identity.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/prim.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/prim.py
index 55fed3299e9..e48c327ed2f 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/prim.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/prim.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/primitives.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/primitives.py
index 3bcfb07173f..3cb8cf992d3 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/primitives.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/primitives.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/quantization.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/quantization.py
index f05cf434f83..d77b98323e2 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/quantization.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/quantization.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # UNSUPPORTED: system-darwin
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/strings.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/strings.py
index d7d94bd9031..b65d6f5ca03 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/strings.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/strings.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules-select.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules-select.py
index b0834691e74..5b2cf04b554 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules-select.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules-select.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules.py
index 92333d20e1d..d9983628d92 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/submodules.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors-value-semantics.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors-value-semantics.py
index e57c20fe59e..36dfa32f036 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors-value-semantics.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors-value-semantics.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ImportOptions, ModuleBuilder
+from torch_mlir.jit_ir_importer import ClassAnnotator, ImportOptions, ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors.py
index 831c619adc5..31a89e3e1e4 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tensors.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tuple.py b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tuple.py
index 3b0bf2d4ea6..7bed706ac60 100644
--- a/projects/pt1/test/python/importer/jit_ir/ivalue_import/tuple.py
+++ b/projects/pt1/test/python/importer/jit_ir/ivalue_import/tuple.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/classes.py b/projects/pt1/test/python/importer/jit_ir/node_import/classes.py
index 511aac69027..09e2b1b0b4a 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/classes.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/classes.py
@@ -6,7 +6,7 @@
 import torch
 from torch._C import CompilationUnit
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 import typing
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/debug-info.py b/projects/pt1/test/python/importer/jit_ir/node_import/debug-info.py
index f7b441a12da..bb6ab4ce4da 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/debug-info.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/debug-info.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/dict.py b/projects/pt1/test/python/importer/jit_ir/node_import/dict.py
index ed4371bb014..0060357b4fc 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/dict.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/dict.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 import collections
 from typing import Tuple, Optional, List, NamedTuple, Dict
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/elif.py b/projects/pt1/test/python/importer/jit_ir/node_import/elif.py
index 3a9d3a3211e..71853b0c0b0 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/elif.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/elif.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/errors.py b/projects/pt1/test/python/importer/jit_ir/node_import/errors.py
index be0479dcd8a..2ac801bddea 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/errors.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/errors.py
@@ -5,7 +5,7 @@
 import enum
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 class Color(enum.Enum):
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/function-block-arg-adjustment.py b/projects/pt1/test/python/importer/jit_ir/node_import/function-block-arg-adjustment.py
index e245ec870b5..a724f118547 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/function-block-arg-adjustment.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/function-block-arg-adjustment.py
@@ -2,7 +2,7 @@
 # This file is licensed under a pytorch-style license
 # See LICENSE.pytorch for license information.
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 from utils import create_script_function
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/function-derefine.py b/projects/pt1/test/python/importer/jit_ir/node_import/function-derefine.py
index 94eed3cefdb..89f5604bf75 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/function-derefine.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/function-derefine.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 import typing
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/if.py b/projects/pt1/test/python/importer/jit_ir/node_import/if.py
index fd8a7267e46..8289e05031c 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/if.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/if.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/list.py b/projects/pt1/test/python/importer/jit_ir/node_import/list.py
index 9a09914e3ed..2b30d545b4c 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/list.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/list.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/loop.py b/projects/pt1/test/python/importer/jit_ir/node_import/loop.py
index e21f4c8c0b5..d6bb141f25d 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/loop.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/loop.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 import typing
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/prim.py b/projects/pt1/test/python/importer/jit_ir/node_import/prim.py
index 2565c6c4186..07a56616efa 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/prim.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/prim.py
@@ -5,7 +5,7 @@
 import typing
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ClassAnnotator, ImportOptions, ModuleBuilder
+from torch_mlir.jit_ir_importer import ClassAnnotator, ImportOptions, ModuleBuilder
 from utils import create_script_function
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/tuple.py b/projects/pt1/test/python/importer/jit_ir/node_import/tuple.py
index 8e14b677f23..2dff435cd42 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/tuple.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/tuple.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 from typing import Tuple, Optional, NamedTuple
 from utils import create_script_function
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/types-bool.py b/projects/pt1/test/python/importer/jit_ir/node_import/types-bool.py
index f08fba24c40..8da5e0e2cc1 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/types-bool.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/types-bool.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/types-none.py b/projects/pt1/test/python/importer/jit_ir/node_import/types-none.py
index eae6b4578ce..a0e86a66ae2 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/types-none.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/types-none.py
@@ -3,7 +3,7 @@
 # See LICENSE.pytorch for license information.
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/projects/pt1/test/python/importer/jit_ir/node_import/union.py b/projects/pt1/test/python/importer/jit_ir/node_import/union.py
index 691a8e41344..14eb41a217c 100644
--- a/projects/pt1/test/python/importer/jit_ir/node_import/union.py
+++ b/projects/pt1/test/python/importer/jit_ir/node_import/union.py
@@ -5,7 +5,7 @@
 from typing import Union
 import torch
-from torch_mlir.dialects.torch.importer.jit_ir import ModuleBuilder
+from torch_mlir.jit_ir_importer import ModuleBuilder
 # RUN: %PYTHON %s | torch-mlir-opt | FileCheck %s
diff --git a/test/Dialect/Torch/reify-dtype-calculations.mlir b/test/Dialect/Torch/reify-dtype-calculations.mlir
index 9aec26662b6..3fe94d0417e 100644
--- a/test/Dialect/Torch/reify-dtype-calculations.mlir
+++ b/test/Dialect/Torch/reify-dtype-calculations.mlir
@@ -24,11 +24,11 @@ func.func @basic(%arg0: !torch.vtensor) -> !torch.vtensor {
 // -----
-// CHECK-LABEL: func.func private @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes(
+// CHECK-LABEL: func.func private @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes(
 // CHECK: {{.*}} = torch.promote_dtypes {{.*}} : (!torch.list<optional<int>>, !torch.list<int>) -> !torch.int
 // CHECK-LABEL: func.func private @__torch_mlir_dtype_fn.aten.floor_divide(
-// CHECK: {{.*}} = call @__torch__.torch_mlir.dialects.torch.importer.jit_ir.build_tools.library_generator.promote_dtypes({{.*}}
+// CHECK: {{.*}} = call @__torch__.torch_mlir.jit_ir_importer.build_tools.library_generator.promote_dtypes({{.*}}
 // CHECK-LABEL: func.func @op_with_dtype_promotion(
 // CHECK: {{.*}} = func.call @__torch_mlir_dtype_fn.aten.floor_divide({{.*}}
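# Illustrative sketch (not part of this patch): the CHECK lines above track the new
# mangled symbol for promote_dtypes from build_tools/library_generator. A dtype rule
# in the abstract interpretation library calls it roughly as below; the function name
# and values are examples, not the real aten.floor_divide rule, and the promote_dtypes
# signature is assumed from its use here.
from typing import List, Optional, Tuple
from torch_mlir.jit_ir_importer.build_tools.library_generator import promote_dtypes

def example_binary_op_dtype(self_rank_dtype: Tuple[int, int],
                            other_rank_dtype: Tuple[int, int]) -> int:
    self_rank, self_dtype = self_rank_dtype
    other_rank, other_dtype = other_rank_dtype
    ranks: List[Optional[int]] = [self_rank, other_rank]
    dtypes: List[int] = [self_dtype, other_dtype]
    # When imported through the JIT IR importer, this call lowers to the
    # torch.promote_dtypes op matched by the CHECK line above.
    return promote_dtypes(ranks, dtypes)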