load(
    "@tsl//tsl/platform/default:cuda_build_defs.bzl",
    "if_cuda_is_configured",
)
load("@python//:defs.bzl", "compile_pip_requirements")
load("@python_version_repo//:py_version.bzl", "REQUIREMENTS")
compile_pip_requirements(
    name = "requirements",
    extra_args = [
        "--allow-unsafe",
        "--build-isolation",
        "--rebuild",
    ],
    requirements_in = "requirements.in",
    requirements_txt = REQUIREMENTS,
    generate_hashes = True,
)
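
# _XLAC.so is the torch_xla C++ Python extension (TORCH_EXTENSION_NAME=_XLAC).
# It links against the PyTorch libraries and, when CUDA is configured, the XLA
# CUDA platform; the rpath entry lets it locate libtpu under torch_xla/lib.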
cc_binary(
    name = "_XLAC.so",
    copts = [
        "-DTORCH_API_INCLUDE_EXTENSION_H",
        "-DTORCH_EXTENSION_NAME=_XLAC",
        "-fopenmp",
        "-fPIC",
        "-fwrapv",
    ],
    linkopts = [
        "-Wl,-rpath,$$ORIGIN/torch_xla/lib",  # for libtpu
        "-Wl,-soname,_XLAC.so",
        "-lstdc++fs",  # For std::filesystem
    ],
    linkshared = 1,
    visibility = ["//visibility:public"],
    deps = [
        "//torch_xla/csrc:init_python_bindings",
        "@torch//:headers",
        "@torch//:libc10",
        "@torch//:libtorch",
        "@torch//:libtorch_cpu",
        "@torch//:libtorch_python",
    ] + if_cuda_is_configured([
        "@xla//xla/stream_executor:cuda_platform",
    ]),
)
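
# Separate shared object wrapping the ATen CUDA functions
# (//torch_xla/csrc:aten_cuda_functions), built apart from the main _XLAC
# extension.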
cc_binary(
    name = "_XLAC_cuda_functions.so",
    copts = [
        "-fopenmp",
        "-fPIC",
    ],
    linkopts = [
        "-Wl,-soname,_XLAC_cuda_functions.so",
    ],
    linkshared = 1,
    visibility = ["//visibility:public"],
    deps = [
        "//torch_xla/csrc:aten_cuda_functions",
    ],
)
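
# Aggregates the C++ test targets so they can be run as one suite, e.g.
# `bazel test //:cpp_tests` from the repository root.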
test_suite(
    name = "cpp_tests",
    # testonly = True,
    tests = [
        "//test/cpp:test_aten_xla_tensor_1",
        "//test/cpp:test_aten_xla_tensor_2",
        "//test/cpp:test_aten_xla_tensor_3",
        "//test/cpp:test_aten_xla_tensor_4",
        "//test/cpp:test_aten_xla_tensor_5",
        "//test/cpp:test_aten_xla_tensor_6",
        "//test/cpp:test_ir",
        "//test/cpp:test_lazy",
        "//test/cpp:test_replication",
        "//test/cpp:test_tensor",
        "//test/cpp:test_xla_sharding",
        "//torch_xla/csrc/runtime:pjrt_computation_client_test",
        "//torch_xla/csrc/runtime:ifrt_computation_client_test",
    ],
)