# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
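"""Test code for TOPI elementwise math operators and dtype casting."""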
import numpy as np
18
import scipy.special
import tvm
import topi
import topi.testing
from topi import util
from common import get_all_backend


def test_util():
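    """Check the constant helpers util.get_const_int and util.get_const_tuple."""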
    x = tvm.const(100, "int32")
    assert util.get_const_int(x) == 100
    assert util.get_const_tuple((x, x)) == (100, 100)


def test_ewise():
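    """Test elementwise TOPI operators against their NumPy references."""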
    def test_apply(
        func,
        name,
        f_numpy,
        low,
        high,
        shape=(20, 3),
        dtype=tvm.float32,
        check_round=False,
        skip_name_check=False,
    ):
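        """Build func(A), run it on each enabled target, and compare the result
        against f_numpy.  check_round nudges inputs away from .5 boundaries and
        skip_name_check disables matching the intrinsic name in the op body.
        """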
        m = tvm.var("m")
        l = tvm.var("l")
        A = tvm.placeholder((m, l), dtype=dtype, name="A")

        B = func(A)
        assert tuple(B.shape) == tuple(A.shape)
        if not skip_name_check:
            assert B.op.body[0].name == name
        a_np = np.random.uniform(low=low, high=high, size=shape).astype(A.dtype) * 10
        # avoid round check too close to boundary
        if check_round:
            a_np += ((np.abs(np.fmod(a_np, 1)) - 0.5) < 1e-6) * 1e-5
        b_np = f_numpy(a_np)

        def check_device(device):
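            # Skip targets that are not enabled in this build; otherwise
            # schedule, compile, and run B and compare with the NumPy result.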
            ctx = tvm.context(device, 0)
            if not ctx.exist:
                print("Skip because %s is not enabled" % device)
                return
            print("Running on target: %s" % device)
            with tvm.target.create(device):
                s = topi.generic.schedule_injective(B)
            foo = tvm.build(s, [A, B], device, name=name)
            a = tvm.nd.array(a_np, ctx)
            b = tvm.nd.array(np.zeros_like(b_np), ctx)
            foo(a, b)
            tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5, atol=1e-5)

        check_device('llvm')
        check_device('cuda')
        check_device('opencl')
        check_device('metal')
        check_device('rocm')
        check_device('vulkan')
        check_device('nvptx')
        check_device('llvm -device=arm-cpu')
        check_device('opencl -device=mali')
        check_device('aocl_sw_emu')

    def test_isnan(
        low,
        high,
        shape=(20, 3),
        dtype=tvm.float32,
        check_round=False,
        skip_name_check=False,
    ):
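        """Same flow as test_apply, but injects NaNs into the input and checks
        topi.isnan against np.isnan.
        """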
        m = tvm.var("m")
        l = tvm.var("l")
        A = tvm.placeholder((m, l), dtype=dtype, name="A")

        B = topi.isnan(A)
        assert tuple(B.shape) == tuple(A.shape)
        if not skip_name_check:
            assert B.op.body[0].name == "isnan"
        a_np = np.random.uniform(low=low, high=high, size=shape).astype(A.dtype) * 10
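        # Randomly replace half of the elements with NaN so both outcomes are covered.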
        a_np.ravel()[np.random.choice(a_np.size, int(a_np.size * 0.5), replace=False)] = np.nan
        # avoid round check too close to boundary
        if check_round:
            a_np += ((np.abs(np.fmod(a_np, 1)) - 0.5) < 1e-6) * 1e-5
        b_np = np.isnan(a_np)

        def check_device(device):
            ctx = tvm.context(device, 0)
            if not ctx.exist:
                print("Skip because %s is not enabled" % device)
                return
            print("Running on target: %s" % device)
            with tvm.target.create(device):
                s = topi.generic.schedule_injective(B)
            foo = tvm.build(s, [A, B], device, name="isnan")
            a = tvm.nd.array(a_np, ctx)
            b = tvm.nd.array(np.zeros_like(b_np), ctx)
            foo(a, b)
            tvm.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5, atol=1e-5)

        check_device('llvm')
        check_device('cuda')
        check_device('opencl')
        check_device('metal')
        check_device('rocm')
        check_device('vulkan')
        check_device('nvptx')
        check_device('llvm -device=arm-cpu')
        check_device('opencl -device=mali')
        check_device('aocl_sw_emu')

    test_apply(topi.floor, "floor", np.floor, -100, 100)
    test_apply(topi.ceil, "ceil", np.ceil, -100, 100)
    test_apply(topi.sign, "sign", np.sign, -100, 100, skip_name_check=True)
    test_apply(topi.trunc, "trunc", np.trunc, -100, 100)
    test_apply(topi.abs, "fabs", np.abs, -100, 100)
    test_apply(topi.round, "round", np.round, -100, 100, check_round=True)
    test_apply(topi.exp, "exp", np.exp, -1, 1)
    test_apply(topi.tanh, "tanh", np.tanh, -10, 10, shape=(128, 128))
    test_apply(topi.tanh, "tanh", np.tanh, -10, 10, shape=(128, 128), dtype="float64")
    test_apply(topi.sigmoid, "sigmoid", lambda x: 1 / (1 + np.exp(-x)), -1, 1)
    test_apply(topi.log, "log", np.log, 0, 100)
    test_apply(topi.sqrt, "sqrt", np.sqrt, 0, 100)
    test_apply(topi.rsqrt, "rsqrt", lambda x: np.ones_like(x) / np.sqrt(x), 0, 100, skip_name_check=True)
    test_apply(topi.cos, "cos", np.cos, -2.0*np.pi, 2.0*np.pi)
    test_apply(topi.sin, "sin", np.sin, -2.0*np.pi, 2.0*np.pi)
    test_apply(topi.erf, "erf", scipy.special.erf, -.1, .1, dtype="float32")
    test_isnan(-100, 100)


def test_cast():
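    """Check topi.cast between int, float, and bool dtypes on all backends."""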
    def verify(from_dtype, to_dtype, low=-100, high=100):
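        """Cast a random (5, 4) tensor from from_dtype to to_dtype and compare with NumPy."""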
        shape = (5, 4)
        A = tvm.placeholder(shape, dtype=from_dtype, name="A")
        B = topi.cast(A, to_dtype)

        if from_dtype == "bool":
            a_np = np.random.choice([True, False], size=shape)
        else:
            a_np = np.random.uniform(low, high, size=shape).astype(from_dtype)
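        # Zero out one element so that casting to bool yields both True and False.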
        if to_dtype == "bool":
            a_np = a_np - a_np[2, 3]
        b_np = a_np.astype(to_dtype)

        for device in get_all_backend():
            ctx = tvm.context(device, 0)
            if not ctx.exist:
                print("Skip because %s is not enabled" % device)
                continue
            print("Running on target: %s" % device)
            with tvm.target.create(device):
                s = topi.generic.schedule_injective(B)
            foo = tvm.build(s, [A, B], device)
            a = tvm.nd.array(a_np, ctx)
            b = tvm.nd.empty(shape=shape, dtype=to_dtype, ctx=ctx)
            foo(a, b)
            tvm.testing.assert_allclose(b.asnumpy(), b_np)

    verify("int32", "float32")
    verify("int32", "float64")
    verify("int32", "bool")
    verify("float32", "int32")
    verify("float32", "float64")
    verify("float32", "bool")
    verify("bool", "float32")
    verify("bool", "int32")


if __name__ == "__main__":
    test_util()
    test_ewise()
    test_cast()