Clean up stale examples.

They were confusing users, and most didn't even work anymore.
pull/300/head
Sean Silva 2021-09-03 22:13:06 +00:00
parent 1dec561cfd
commit 7a3570e881
10 changed files with 1 addition and 352 deletions

@@ -1,32 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

torch.manual_seed(0)
input = torch.rand(2, 3)

mb = torch_mlir.ModuleBuilder()
with mb.capture_function("cos", [input]) as f:
  result = torch.cos(input)
  f.returns([result])

backend = iree.IreeNpcompBackend()
jit_module = backend.load(backend.compile(frontend_lowering.lower_module(mb.module)))

logging.debug(f"Executing jit_module.cos")
test_utils.compare_outputs(torch.cos, jit_module.cos, input)

# This fails because ModuleBuilder represents torch.cos with a constant:
# https://github.com/llvm/mlir-npcomp/issues/135
test_utils.compare_outputs(torch.cos, jit_module.cos, input + 1)

@@ -1,32 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

torch.manual_seed(0)

arg0 = torch.ones(2, 2)
arg1 = torch.ones(2, 2)

def fun(a, b):
  return a.div_(b)

mb = torch_mlir.ModuleBuilder()
with mb.capture_function("test", [arg0, arg1]) as f:
  f.returns([fun(arg0, arg1)])

backend = iree.IreeNpcompBackend()
jit_module = backend.load(backend.compile(frontend_lowering.lower_module(mb.module)))

test_utils.compare_outputs(torch.mm, jit_module.test, arg0, arg1)
test_utils.compare_outputs(torch.mm, jit_module.test, arg0 + 1, arg1 + 1)

@@ -1,29 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

torch.manual_seed(0)
lhs = torch.rand(2, 3)
rhs = torch.rand(3, 4)

mb = torch_mlir.ModuleBuilder()
with mb.capture_function("mm", [lhs, rhs]) as f:
  result = torch.mm(lhs, rhs)
  f.returns([result])

backend = iree.IreeNpcompBackend()
jit_module = backend.load(backend.compile(frontend_lowering.lower_module(mb.module)))

test_utils.compare_outputs(torch.mm, jit_module.mm, lhs, rhs)
test_utils.compare_outputs(torch.mm, jit_module.mm, lhs + 1, rhs - 1)

@@ -1,37 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

lhs = torch.ones((4, 6, 1))
rhs = torch.ones((1, 1, 3)) * 0.6
bias = torch.ones((1, 1, 3)) * 0.2
threshold = torch.tensor((0.75, 0.25, 0.10))

def mul_maximum(lhs, rhs, threshold, bias):
  return torch.maximum(lhs * rhs, threshold) + bias

mb = torch_mlir.ModuleBuilder()
with mb.capture_function("mul_maximum", [lhs, rhs, threshold, bias]) as f:
  result = mul_maximum(lhs, rhs, threshold, bias)
  f.returns([result])

backend = iree.IreeNpcompBackend()
jit_module = backend.load(backend.compile(frontend_lowering.lower_module(mb.module)))

test_utils.compare_outputs(mul_maximum, jit_module.mul_maximum, lhs, rhs,
                           threshold, bias)
test_utils.compare_outputs(mul_maximum, jit_module.mul_maximum, lhs + 1,
                           rhs + 2, threshold, bias)

@@ -94,9 +94,7 @@
     "from mlir.passmanager import PassManager\n",
     "\n",
     "from torch_mlir_torchscript.annotations import annotate_args, export\n",
-    "from torch_mlir.torchscript_annotations import extract_annotations\n",
-    "\n",
-    "from npcomp.compiler.pytorch.backend.iree import IreeNpcompBackend"
+    "from torch_mlir.torchscript_annotations import extract_annotations"
    ]
   },
   {

@@ -1,33 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

torch.manual_seed(0)

arg0 = torch.ones(2, 2)

def fun(a):
  z = torch.zeros(2, 2)
  torch.tanh(a, out=z)
  return z

mb = torch_mlir.ModuleBuilder()
with mb.capture_function("test", [arg0]) as f:
  f.returns([fun(arg0)])

backend = iree.IreeNpcompBackend()
jit_module = backend.load(backend.compile(frontend_lowering.lower_module(mb.module)))

test_utils.compare_outputs(torch.mm, jit_module.test, arg0, arg1)
test_utils.compare_outputs(torch.mm, jit_module.test, arg0 + 1, arg1 + 1)

@@ -1,30 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import sys
import textwrap

import numpy as np

INDENT = " "

def _indent(value):
  return textwrap.indent(str(value), INDENT)

def compare_outputs(torch_func, jit_func, *args):
  print('-' * 80)
  print(f"Input args:\n{_indent(args)}", file=sys.stderr)
  result = torch_func(*args)
  jit_result = jit_func(*args)
  np.testing.assert_allclose(result.numpy(), jit_result, rtol=1e-05, atol=1e-08)
  # Only print these if the test passes, as np.testing will print them if it
  # fails.
  print("SUCCESS", file=sys.stderr)
  print(f"PyTorch Result:\n{_indent(result.numpy())}", file=sys.stderr)
  print(f"JIT Result:\n{_indent(jit_result)}", file=sys.stderr)

@@ -1,58 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import typing

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

# RUN: %PYTHON %s | npcomp-opt | FileCheck %s

mb = torch_mlir.ModuleBuilder()

class Submodule(torch.nn.Module):
  def __init__(self):
    super().__init__()
  def forward(self, lhs, rhs):
    return torch.mm(lhs, rhs)

class TestModule(torch.nn.Module):
  def __init__(self):
    super().__init__()
    self.s = Submodule()
  def forward(self, lhs, rhs):
    return self.s.forward(lhs, rhs)

test_module = TestModule()
class_annotator = torch_mlir.ClassAnnotator()
recursivescriptmodule = torch.jit.script(test_module)
torch.jit.save(recursivescriptmodule, '/tmp/foo.pt')

class_annotator.exportNone(recursivescriptmodule._c._type())
class_annotator.exportPath(recursivescriptmodule._c._type(), ['forward'])
class_annotator.annotateArgs(recursivescriptmodule._c._type(), ['forward'], [
    None,
    ([-1, -1], torch.float32),
    ([-1, -1], torch.float32),
])

# TODO: Automatically handle unpacking Python class RecursiveScriptModule into the underlying ScriptModule.
mb.import_module(recursivescriptmodule._c, class_annotator)
#mb.module.operation.print()

backend = iree.IreeNpcompBackend()
compiled = backend.compile(frontend_lowering.lower_object_graph(mb.module))
jit_module = backend.load(compiled)

torch.manual_seed(0)
lhs = torch.rand(2, 3)
rhs = torch.rand(3, 4)
test_utils.compare_outputs(test_module.forward, jit_module.forward, lhs, rhs)

@@ -1,49 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import typing

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import refjit, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

#logging.enable()

# RUN: %PYTHON %s | npcomp-opt | FileCheck %s

mb = torch_mlir.ModuleBuilder()

class TestModule(torch.nn.Module):
  def __init__(self):
    super().__init__()
  def forward(self, x):
    return torch.tanh(x)

test_module = TestModule()
class_annotator = torch_mlir.ClassAnnotator()
recursivescriptmodule = torch.jit.script(test_module)
torch.jit.save(recursivescriptmodule, '/tmp/foo.pt')

class_annotator.exportNone(recursivescriptmodule._c._type())
class_annotator.exportPath(recursivescriptmodule._c._type(), ['forward'])
class_annotator.annotateArgs(recursivescriptmodule._c._type(), ['forward'], [
    None,
    ([2, 3, -1], torch.float32)
])

# TODO: Automatically handle unpacking Python class RecursiveScriptModule into the underlying ScriptModule.
mb.import_module(recursivescriptmodule._c, class_annotator)
#mb.module.operation.print()

backend = iree.IreeNpcompBackend()
compiled = backend.compile(frontend_lowering.lower_object_graph(mb.module))
jit_module = backend.load(compiled)

torch.manual_seed(0)
input = torch.rand(2, 3, 1)
test_utils.compare_outputs(test_module.forward, jit_module.forward, input)

@@ -1,49 +0,0 @@
# -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.

import typing

import torch
import torch_mlir

import npcomp
from npcomp.compiler.pytorch.backend import iree, frontend_lowering
from npcomp.compiler.utils import logging

import test_utils

logging.enable()

# RUN: %PYTHON %s | npcomp-opt | FileCheck %s

mb = torch_mlir.ModuleBuilder()

class TestModule(torch.nn.Module):
  def __init__(self):
    super().__init__()
  def forward(self, x):
    return torch.tanh(x)

test_module = TestModule()
class_annotator = torch_mlir.ClassAnnotator()
recursivescriptmodule = torch.jit.script(test_module)
torch.jit.save(recursivescriptmodule, '/tmp/foo.pt')

class_annotator.exportNone(recursivescriptmodule._c._type())
class_annotator.exportPath(recursivescriptmodule._c._type(), ['forward'])
class_annotator.annotateArgs(recursivescriptmodule._c._type(), ['forward'], [
    None,
    ([2, 3, -1], torch.float32, True)
])

# TODO: Automatically handle unpacking Python class RecursiveScriptModule into the underlying ScriptModule.
mb.import_module(recursivescriptmodule._c, class_annotator)
#mb.module.operation.print()

backend = iree.IreeNpcompBackend()
compiled = backend.compile(frontend_lowering.lower_object_graph(mb.module))
jit_module = backend.load(compiled)

torch.manual_seed(0)
input = torch.rand(2, 3, 1)
test_utils.compare_outputs(test_module.forward, jit_module.forward, input)