forked from pytorch/pytorch
-
Notifications
You must be signed in to change notification settings - Fork 0
/
dispatch.cpp
62 lines (56 loc) · 1.74 KB
/
dispatch.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
#include <gtest/gtest.h>
#include <torch/torch.h>
#include <ATen/native/Pow.h>
#include <torch/types.h>
#include <torch/utils.h>
#include <test/cpp/api/support.h>
#include <iostream>
#include <vector>
#include <type_traits>
#include <cstdlib>
// Test fixture for CPU-capability dispatch tests. Inherits SeedingFixture
// (declared in test/cpp/api/support.h) — presumably to reseed the RNG before
// each test for determinism; confirm against the fixture's definition.
struct DispatchTest : torch::test::SeedingFixture {};
// Verifies elementwise integer pow() when the CPU kernel selection is forced
// to AVX2 via the ATEN_CPU_CAPABILITY environment variable. Expected values
// are ints[i] ** ints[i] (1^1, 2^2, 3^3, 4^4).
TEST_F(DispatchTest, TestAVX2) {
  const std::vector<int> ints{1, 2, 3, 4};
  const std::vector<int> result{1, 4, 27, 256};
  const auto vals_tensor = torch::tensor(ints);
  const auto pows_tensor = torch::tensor(ints);
  // Force the AVX2 code path. This must happen before the pow() call below,
  // since the dispatcher reads ATEN_CPU_CAPABILITY when picking the kernel.
#ifdef _WIN32
  _putenv("ATEN_CPU_CAPABILITY=avx2");
#else
  setenv("ATEN_CPU_CAPABILITY", "avx2", 1);
#endif
  const auto actual_pow_avx2 = vals_tensor.pow(pows_tensor);
  // Keep the expectation table in lockstep with the inputs, and derive the
  // loop bound from the data instead of hard-coding 4.
  ASSERT_EQ(ints.size(), result.size());
  for (size_t i = 0; i < result.size(); ++i) {
    ASSERT_EQ(result[i], actual_pow_avx2[static_cast<int64_t>(i)].item<int>());
  }
}
// Verifies elementwise integer pow() when the CPU kernel selection is forced
// to AVX via the ATEN_CPU_CAPABILITY environment variable. Expected values
// are ints[i] ** ints[i] (1^1, 2^2, 3^3, 4^4).
TEST_F(DispatchTest, TestAVX) {
  const std::vector<int> ints{1, 2, 3, 4};
  const std::vector<int> result{1, 4, 27, 256};
  const auto vals_tensor = torch::tensor(ints);
  const auto pows_tensor = torch::tensor(ints);
  // Force the AVX code path. This must happen before the pow() call below,
  // since the dispatcher reads ATEN_CPU_CAPABILITY when picking the kernel.
#ifdef _WIN32
  _putenv("ATEN_CPU_CAPABILITY=avx");
#else
  setenv("ATEN_CPU_CAPABILITY", "avx", 1);
#endif
  const auto actual_pow_avx = vals_tensor.pow(pows_tensor);
  // Keep the expectation table in lockstep with the inputs, and derive the
  // loop bound from the data instead of hard-coding 4.
  ASSERT_EQ(ints.size(), result.size());
  for (size_t i = 0; i < result.size(); ++i) {
    ASSERT_EQ(result[i], actual_pow_avx[static_cast<int64_t>(i)].item<int>());
  }
}
// Verifies elementwise integer pow() when the CPU kernel selection is forced
// to the scalar/default implementation via ATEN_CPU_CAPABILITY. Expected
// values are ints[i] ** ints[i] (1^1, 2^2, 3^3, 4^4).
TEST_F(DispatchTest, TestDefault) {
  const std::vector<int> ints{1, 2, 3, 4};
  const std::vector<int> result{1, 4, 27, 256};
  const auto vals_tensor = torch::tensor(ints);
  const auto pows_tensor = torch::tensor(ints);
  // Force the default code path. This must happen before the pow() call
  // below, since the dispatcher reads ATEN_CPU_CAPABILITY when picking the
  // kernel.
#ifdef _WIN32
  _putenv("ATEN_CPU_CAPABILITY=default");
#else
  setenv("ATEN_CPU_CAPABILITY", "default", 1);
#endif
  const auto actual_pow_default = vals_tensor.pow(pows_tensor);
  // Keep the expectation table in lockstep with the inputs, and derive the
  // loop bound from the data instead of hard-coding 4.
  ASSERT_EQ(ints.size(), result.size());
  for (size_t i = 0; i < result.size(); ++i) {
    ASSERT_EQ(
        result[i], actual_pow_default[static_cast<int64_t>(i)].item<int>());
  }
}