Commit d1eb1229 by 雾雨魔理沙 Committed by Wuwei Lin

[Relay] Continuation Passing Style (#3456)

* save

add

me find type checker problem

save

save

lint

do

lint

reset ti

add some doc

add failed test case

add recursion for cps

add recursion for cps

fix pytest

lint

save

fix test error

lint

save

fix error

* fix rebase

* fix

* fix test

* lint

* lint

* restore rewriteannotationops

* do
parent 988ea2ac
......@@ -252,17 +252,6 @@ TVM_DLL tvm::Array<TypeVar> AllTypeVars(const Expr& expr, const Module& mod);
TVM_DLL tvm::Array<TypeVar> AllTypeVars(const Type& t, const Module& mod);
/*!
* \brief Rewrite the annotated program.
*
* \param expr The expression.
* \param fallback_device The fallback device which is the default device for
* operators without annotation.
*
* \return The updated program.
*/
TVM_DLL Expr RewriteAnnotatedOps(const Expr& expr, int fallback_device);
/*!
* \brief Collect the device mapping information of each expression.
*
* \param expr The expression.
......
......@@ -405,6 +405,22 @@ TVM_DLL Pass RewriteAnnotatedOps(int fallback_device);
TVM_DLL Pass ToANormalForm();
/*!
* \brief Turn an expression into continuation passing style(CPS).
*
* CPS means that every function will, instead of returning the result directly,
* be passed an extra function (called the continuation) as an argument,
* and pass the result to the continuation instead.
*
* Thus, every function call has to be passed an extra argument
* that represents the rest of the computation (hence the name continuation).
*
* Similarly, all other compute will be wrapped and call the continuation as well.
*
* \return the pass.
*/
TVM_DLL Pass ToCPS();
/*!
* \brief Remove let binding and directly share via pointer instead.
*
* It will remove all let binding,
......@@ -586,6 +602,57 @@ TVM_DLL Expr ForwardRewrite(const Expr& expr,
std::function<NodeRef(const Call&)> fcontext = nullptr,
std::function<Expr(const Expr&)> fmulti_ref_trigger = nullptr);
/*!
* \brief Rewrite the annotated program.
*
* \param expr The expression.
* \param fallback_device The fallback device which is the default device for
* operators without annotation.
*
* \return The updated program.
*/
TVM_DLL Expr RewriteAnnotatedOps(const Expr& expr, int fallback_device);
/*!
* \brief Turn an expression into continuation passing style(CPS).
*
* CPS means that every function will, instead of returning the result directly,
* be passed an extra function (called the continuation) as an argument,
* and pass the result to the continuation instead.
*
* Thus, every function call has to be passed an extra argument
* that represents the rest of the computation (hence the name continuation).
*
* Similarly, all other compute will be wrapped and call the continuation as well.
*
* \param f the function.
* \param mod the module.
*
* \return the converted Function.
*/
TVM_DLL Function ToCPS(const Function& f, const Module& mod);
/*!
* \brief Remove the continuation argument of a CPS function.
*
* Note that this only transform the type back into un-CPS form
* when there is no higher order input/output.
*
* \param f the function.
*
* \return the converted Function.
*/
TVM_DLL Function UnCPS(const Function& f);
/*!
* \brief Deduplicate the bound variables and type variables in the expression.
*
* \param e the expression.
*
* \return the deduplicated expression.
*/
TVM_DLL Expr DeDup(const Expr& e);
} // namespace relay
} // namespace tvm
......
......@@ -17,6 +17,9 @@
"""Utilities for testing and benchmarks"""
from __future__ import absolute_import as _abs
import tvm.relay as relay
from tvm.relay import transform
from . import mlp
from . import resnet
from . import dqn
......@@ -32,3 +35,15 @@ from . import yolo_detection
from .config import ctx_list
from .init import create_workload
from .nat import add_nat_definitions, count, make_nat_value, make_nat_expr
def run_opt_pass(expr, opt_pass):
    """Apply a single optimization pass to a Relay expression.

    The expression is wrapped into a fresh module, the pass is executed on
    that module, and the transformed entry is returned — as a Function when
    a Function was supplied, otherwise as the entry function's body.
    """
    assert isinstance(opt_pass, transform.Pass)
    optimized = opt_pass(relay.Module.from_expr(expr))
    updated = optimized[optimized.entry_func]
    if isinstance(expr, relay.Function):
        return updated
    return updated.body
def run_infer_type(expr):
    """Run type inference on ``expr`` and return the typed result."""
    infer = transform.InferType()
    return run_opt_pass(expr, infer)
......@@ -446,6 +446,20 @@ def ToANormalForm():
return _transform.ToANormalForm()
def ToCPS(expr, mod=None):
    """
    Turn an expression into continuation passing style (CPS).

    Every intermediate compute will be passed to a continuation.

    Parameters
    ----------
    expr: tvm.relay.Function
        The input function.

    mod: Optional[tvm.relay.Module]
        The global module.

    Returns
    -------
    result: tvm.relay.Function
        The function transformed into CPS.
    """
    # Delegate to the C++ implementation registered in relay._transform.
    # The previous body referenced `_ir_pass.to_cps`, but `_ir_pass` is not
    # imported in this module (only `_transform` is), so calling this
    # function raised NameError.
    return _transform.to_cps(expr, mod)
def EtaExpand():
"""Add abstraction over a function
......@@ -495,14 +509,6 @@ def PartialEvaluate():
expression is provided. Otherwise, it will rely on the pass manager to
carry out transformation.
Parameters
----------
expr : Optional[tvm.relay.Expr]
The input expression.
mod : Optional[tvm.relay.Module]
The global module.
Returns
-------
ret: tvm.relay.Pass
......@@ -554,6 +560,48 @@ def gradient(expr, mod=None, mode='higher_order'):
raise Exception('unknown mode')
def to_cps(func, mod=None):
    """Convert a function into continuation passing style (CPS).

    Every intermediate computation is handed to an explicit continuation.

    Parameters
    ----------
    func: tvm.relay.Function
        The input function.

    mod: Optional[tvm.relay.Module]
        The global module.

    Returns
    -------
    result: tvm.relay.Function
        The output function.
    """
    return _transform.to_cps(func, mod)
def un_cps(func):
    """Turn a CPS function into a Function without the continuation argument.

    Note that this does not restore exactly the pre-CPS interface:
    higher-order inputs/outputs remain in CPS form.

    Parameters
    ----------
    func: tvm.relay.Function
        The input function.

    Returns
    -------
    result: tvm.relay.Function
        The output function.
    """
    return _transform.un_cps(func)
def _wrap_class_module_pass(pass_cls, pass_info):
"""Wrap a python class as function pass"""
class PyModulePass(ModulePass):
......
......@@ -18,7 +18,7 @@
*/
/*!
* Copyright (c) 2018 by Contributors
* Copyright (c) 2019 by Contributors
* \file src/tvm/ir/adt.cc
* \brief AST nodes for Relay algebraic data types (ADTs).
*/
......
......@@ -89,8 +89,9 @@ GlobalTypeVar ModuleNode::GetGlobalTypeVar(const std::string& name) const {
}
void ModuleNode::Add(const GlobalVar& var,
const Function& func,
const Function& f,
bool update) {
Function func = Downcast<Function>(DeDup(f));
// Type check the item before we add it to the module.
auto mod = GetRef<Module>(this);
Function checked_func = InferType(func, mod, var);
......
......@@ -645,11 +645,21 @@ class PrettyPrinter :
Doc VisitType_(const FuncTypeNode* node) final {
Doc doc;
doc << "fn ";
if (node->type_params.size() != 0) {
doc << "<";
std::vector<Doc> type_params;
for (Type type_param : node->type_params) {
type_params.push_back(Print(type_param));
}
doc << PrintVec(type_params);
doc << ">";
}
std::vector<Doc> arg_types;
for (Type arg_type : node->arg_types) {
arg_types.push_back(Print(arg_type));
}
return doc << "fn (" << PrintVec(arg_types) << ") -> " << Print(node->ret_type);
return doc << "(" << PrintVec(arg_types) << ") -> " << Print(node->ret_type);
}
Doc VisitType_(const RefTypeNode* node) final {
......
......@@ -221,7 +221,7 @@ class TypeBinder : public TypeMutator {
};
Type Bind(const Type& type, const tvm::Map<TypeVar, Type>& args_map) {
return TypeBinder(args_map).VisitType(type);
return type.defined() ? TypeBinder(args_map).VisitType(type) : type;
}
} // namespace relay
......
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* Copyright (c) 2019 by Contributors
*
* \file de_duplicate.cc
* \brief Use a fresh Id for every Var to make the result well-formed.
*/
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/analysis.h>
#include <tvm/relay/pattern_functor.h>
#include "../ir/type_functor.h"
namespace tvm {
namespace relay {
Expr DeDup(const Expr& e) {
  /*!
   * \brief Mutator that gives every bound Var and TypeVar a fresh identity,
   *  leaving free variables untouched, so the result is well-formed even if
   *  the input shares binders (e.g. after inlining or substitution).
   */
  class DeDupMutator : public TypeMutator,
                       public ExprMutator,
                       public PatternMutator {
   public:
    // Allocate a fresh TypeVar for a binder and record the mapping.
    TypeVar Fresh(const TypeVar& tv) {
      TypeVar ret = TypeVarNode::make(tv->var->name_hint, tv->kind);
      type_rename_[tv] = ret;
      return ret;
    }

    // Allocate a fresh Var for a binder. The type annotation is visited
    // first so any TypeVars inside it are renamed consistently.
    Var Fresh(const Var& v) {
      Var ret = VarNode::make(v->name_hint(), VisitType(v->type_annotation));
      rename_[v] = ret;
      return ret;
    }

    Expr VisitExpr(const Expr& e) final {
      return ExprMutator::VisitExpr(e);
    }

    // Free variables are not in rename_ and pass through unchanged.
    Expr VisitExpr_(const VarNode* op) final {
      Var v = GetRef<Var>(op);
      return rename_.count(v) != 0 ? rename_.at(v) : v;
    }

    // Let introduces a binder: rename it before visiting value and body so
    // references to op->var inside either are remapped.
    Expr VisitExpr_(const LetNode* op) final {
      Var v = Fresh(op->var);
      return LetNode::make(v, VisitExpr(op->value), VisitExpr(op->body));
    }

    // Guard against undefined (null) types, which TypeMutator cannot visit.
    Type VisitType(const Type& t) final {
      return t.defined() ? TypeMutator::VisitType(t) : t;
    }

    // Functions bind both type parameters and value parameters; refresh all
    // of them before visiting the body and return type.
    Expr VisitExpr_(const FunctionNode* op) final {
      tvm::Array<TypeVar> type_params;
      for (const TypeVar& type_param : op->type_params) {
        type_params.push_back(Fresh(type_param));
      }
      tvm::Array<Var> params;
      for (const Var& param : op->params) {
        params.push_back(Fresh(param));
      }
      return FunctionNode::make(params,
                                VisitExpr(op->body),
                                VisitType(op->ret_type),
                                type_params,
                                op->attrs);
    }

    Pattern VisitPattern(const Pattern& p) final {
      return PatternMutator::VisitPattern(p);
    }

    // Pattern variables are binders as well: always fresh.
    Pattern VisitPattern_(const PatternVarNode* op) final {
      return PatternVarNode::make(Fresh(op->var));
    }

    // Visit the pattern (binding side) before the clause body so the body
    // sees the renamed pattern variables.
    Clause VisitClause(const Clause& c) final {
      Pattern pat = VisitPattern(c->lhs);
      return ClauseNode::make(pat, VisitExpr(c->rhs));
    }

    // Free type variables pass through unchanged.
    Type VisitType_(const TypeVarNode* op) final {
      TypeVar v = GetRef<TypeVar>(op);
      return type_rename_.count(v) != 0 ? type_rename_.at(v) : v;
    }

    Var VisitVar(const Var& v) final {
      return Fresh(v);
    }

   private:
    std::unordered_map<Var, Var, NodeHash, NodeEqual> rename_;
    std::unordered_map<TypeVar, TypeVar, NodeHash, NodeEqual> type_rename_;
  };

  Expr ret = DeDupMutator().VisitExpr(e);
  // Sanity check: deduplication must neither capture nor drop free variables.
  CHECK_EQ(FreeVars(ret).size(), FreeVars(e).size());
  return ret;
}
// Expose DeDup to the frontend as relay._transform.dedup.
TVM_REGISTER_API("relay._transform.dedup")
.set_body_typed(DeDup);
} // namespace relay
} // namespace tvm
......@@ -20,7 +20,7 @@
/*!
* Copyright (c) 2019 by Contributors.
* \file tvm/relay/pass/dependency_graph.h
* \brief
* \brief create a dependency graph.
*/
#ifndef TVM_RELAY_PASS_DEPENDENCY_GRAPH_H_
#define TVM_RELAY_PASS_DEPENDENCY_GRAPH_H_
......
......@@ -18,7 +18,7 @@
*/
/*!
* Copyright (c) 2018 by Contributors
* Copyright (c) 2019 by Contributors
* \file let_list.h
* \brief LetList record let binding and insert let expression implicitly.
* using it, one can treat AST as value instead of expression,
......@@ -46,6 +46,11 @@ namespace relay {
*/
class LetList {
public:
~LetList() {
  // A LetList that accumulated bindings but was never consumed via Get()
  // almost certainly indicates a bug in the pass that built it.
  // Emit the diagnostic on stderr: warnings must not pollute stdout,
  // which callers may be using for real output.
  if (lets_.size() > 0 && !used_) {
    std::cerr << "Warning: letlist not used" << std::endl;
  }
}
/*!
* \brief insert a binding.
*
......@@ -64,13 +69,13 @@ class LetList {
/*!
* \brief insert a binding.
*
* \param ty the type of the binding.
*
* \param expr the value of the binding.
*
* \param ty the type of the binding.
*
* \return a Var that hold the inserted expr.
*/
Var Push(Type ty, Expr expr) {
Var Push(Expr expr, Type ty) {
return Push(VarNode::make("x", ty), expr);
}
......@@ -82,7 +87,7 @@ class LetList {
* \return a Var that hold the inserted expr.
*/
Var Push(Expr expr) {
return Push(Type(), expr);
return Push(expr, Type());
}
/*!
......@@ -129,6 +134,12 @@ class LetList {
return ll.Get(f(&ll));
}
static Expr Let(const Expr& e, const std::function<Expr(const Var&)>& f) {
return With([&](LetList* ll) {
return f(ll->Push(e));
});
}
private:
std::vector<std::pair<Var, Expr> > lets_;
bool used_ = false;
......
......@@ -18,7 +18,7 @@
*/
/*!
* Copyright (c) 2018 by Contributors
* Copyright (c) 2019 by Contributors
*
* \file partial_eval.cc
*
......@@ -426,8 +426,6 @@ TVM_ADD_FILELINE)
Expr StripWithFuncId(const Expr& e);
Expr DeDup(const Expr& e);
Function AsFunc(const Expr& e) {
if (e.as<FunctionNode>()) {
return Downcast<Function>(e);
......@@ -963,86 +961,6 @@ class PartialEvaluator : public ExprFunctor<PStatic(const Expr& e, LetList* ll)>
FInterpreter executor_ = CPUInterpreter();
};
/*! \brief Use a fresh Id for every Var to make the result well-formed. */
Expr DeDup(const Expr& e) {
class DeDupMutator : public TypeMutator,
public ExprMutator,
public PatternMutator {
public:
TypeVar Fresh(const TypeVar& tv) {
TypeVar ret = TypeVarNode::make(tv->var->name_hint, tv->kind);
type_rename_[tv] = ret;
return ret;
}
Var Fresh(const Var& v) {
Var ret = VarNode::make(v->name_hint(), VisitType(v->type_annotation));
rename_[v] = ret;
return ret;
}
Expr VisitExpr(const Expr& e) final {
return ExprMutator::VisitExpr(e);
}
Expr VisitExpr_(const VarNode* op) final {
Var v = GetRef<Var>(op);
return rename_.count(v) != 0 ? rename_.at(v) : v;
}
Expr VisitExpr_(const LetNode* op) final {
Var v = Fresh(op->var);
return LetNode::make(v, VisitExpr(op->value), VisitExpr(op->body));
}
Type VisitType(const Type& t) final {
return t.defined() ? TypeMutator::VisitType(t) : t;
}
Expr VisitExpr_(const FunctionNode* op) final {
tvm::Array<TypeVar> type_params;
for (const TypeVar& type_param : op->type_params) {
type_params.push_back(Fresh(type_param));
}
tvm::Array<Var> params;
for (const Var& param : op->params) {
params.push_back(Fresh(param));
}
return FunctionNode::make(params,
VisitExpr(op->body),
VisitType(op->ret_type),
type_params,
op->attrs);
}
Pattern VisitPattern(const Pattern& p) final {
return PatternMutator::VisitPattern(p);
}
Clause VisitClause(const Clause& c) final {
Pattern pat = VisitPattern(c->lhs);
return ClauseNode::make(pat, VisitExpr(c->rhs));
}
Type VisitType_(const TypeVarNode* op) final {
TypeVar v = GetRef<TypeVar>(op);
return type_rename_.count(v) != 0 ? type_rename_.at(v) : v;
}
Var VisitVar(const Var& v) final {
return Fresh(v);
}
private:
std::unordered_map<Var, Var, NodeHash, NodeEqual> rename_;
std::unordered_map<TypeVar, TypeVar, NodeHash, NodeEqual> type_rename_;
};
Expr ret = DeDupMutator().VisitExpr(e);
CHECK_EQ(FreeVars(ret).size(), FreeVars(e).size());
return ret;
}
/*! \brief Remap multiple Var sharing the same Id into the same Var. */
Expr Remap(const Expr& e) {
class RemapMutator : public ExprMutator, public PatternMutator {
......
......@@ -18,9 +18,9 @@
*/
/*!
* Copyright (c) 2018 by Contributors
* Copyright (c) 2019 by Contributors
*
* \file to_anf.cc
* \file to_a_normal_form.cc
*
* \brief Turn implicit sharing into observable sharing.
*/
......@@ -72,13 +72,16 @@ Scope LCA(Scope lhs, Scope rhs) {
std::unordered_map<DependencyGraph::Node*, Scope> CalcScope(const DependencyGraph& dg) {
std::unordered_map<DependencyGraph::Node*, Scope> expr_scope;
bool global_scope_used = false;
Scope global_scope = std::make_shared<ScopeNode>();
for (auto it = dg.post_dfs_order.rbegin(); it != dg.post_dfs_order.rend(); ++it) {
DependencyGraph::Node* n = *it;
auto iit = n->parents.head;
Scope s;
if (iit == nullptr) {
CHECK(!global_scope_used);
s = global_scope;
global_scope_used = true;
} else {
s = expr_scope.at(iit->value);
iit = iit->next;
......@@ -88,13 +91,10 @@ std::unordered_map<DependencyGraph::Node*, Scope> CalcScope(const DependencyGrap
}
expr_scope.insert({n, n->new_scope ? ChildScope(s) : s});
}
CHECK(global_scope_used);
return expr_scope;
}
bool IsPrimitiveFunction(const Expr& e) {
return e.as<FunctionNode>() && Downcast<Function>(e)->IsPrimitive();
}
/* Special care is needed to handle local recursion.
* Fill additionally take a (possibly null) Var argument,
* If it is not null, Fill is required to bind the transformed result to that var.
......@@ -137,22 +137,26 @@ class Fill : ExprFunctor<Expr(const Expr&, const Var&)> {
Expr VisitExpr(const Expr& e, const Var& v) final {
if (memo.count(e) == 0) {
memo.insert({e, ExprFunctor<Expr(const Expr&, const Var&)>::VisitExpr(e, v)});
} else if (v.defined()) {
GetScope(e)->ll->Push(v, memo.at(e));
}
return memo.at(e);
auto ret = memo.at(e);
CHECK(IsAtomic(ret));
return ret;
}
Expr VisitExpr(const Expr& e) {
return this->VisitExpr(e, Var());
}
Expr Atomic(const Expr& orig, const Expr& now, const Var& v) {
return v.defined() ? GetScope(orig)->ll->Push(v, now) : now;
Expr Atomic(const Expr& e, const Var& v) {
return v.defined() ? GetScope(e)->ll->Push(v, e) : e;
}
Expr Compound(const Expr& orig, const Expr& now, const Var& v) {
Var var = v.defined() ?
v :
VarNode::make(std::string("x"), IncompleteTypeNode::make(Kind::kType));
VarNode::make(std::string("x"), Type());
return GetScope(orig)->ll->Push(var, now);
}
......@@ -205,7 +209,7 @@ class Fill : ExprFunctor<Expr(const Expr&, const Var&)> {
Expr VisitExpr_(const FunctionNode* f, const Var& v) final {
Expr e = GetRef<Expr>(f);
Expr ret;
if (IsPrimitiveFunction(e)) {
if (f->IsPrimitive()) {
ret = e;
} else {
ret = FunctionNode::make(f->params,
......@@ -231,22 +235,22 @@ class Fill : ExprFunctor<Expr(const Expr&, const Var&)> {
Expr VisitExpr_(const VarNode* vn, const Var& v) final {
Expr e = GetRef<Expr>(vn);
return Atomic(e, e, v);
return Atomic(e, v);
}
Expr VisitExpr_(const GlobalVarNode* gvn, const Var& v) final {
GlobalVar gv = GetRef<GlobalVar>(gvn);
return Atomic(gv, gv, v);
return Atomic(gv, v);
}
Expr VisitExpr_(const OpNode* op, const Var& v) final {
Expr e = GetRef<Expr>(op);
return Atomic(e, e, v);
return Atomic(e, v);
}
Expr VisitExpr_(const ConstructorNode* c, const Var& v) final {
Expr e = GetRef<Expr>(c);
return Atomic(e, e, v);
return Atomic(e, v);
}
Expr VisitExpr_(const MatchNode* m, const Var& v) final {
......@@ -294,11 +298,15 @@ Module ToANormalForm(const Module& m) {
tvm::Map<GlobalVar, Function> updates;
auto funcs = m->functions;
for (const auto& it : funcs) {
CHECK_EQ(FreeVars(it.second).size(), 0);
Expr ret =
TransformF([&](const Expr& e) {
return ToANormalFormAux(e);
}, it.second);
CHECK_EQ(FreeVars(ret).size(), 0);
CHECK_EQ(FreeVars(ret).size(), 0)
<< AsText(ret)
<< "should not has free vars: "
<< FreeVars(ret);
updates.Set(it.first, Downcast<Function>(ret));
}
......
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* Copyright (c) 2019 by Contributors
*
* \file to_cps.cc
*
* \brief Turn a program to continuation passing style.
*
* Given a fresh type variable 'answer',
* continuation passing style (CPS) converts every function of type a -> b to a -> (b -> answer) -> answer.
*
* That is, instead of returning the result directly,
* the function will now call another function (called the continuation)
* and return that value as a result instead.
*
* Continuation passing style turns every function call into a tail call,
* which bounds the stack size, prevents the stack from overflowing during recursion,
* and allows tail call optimization.
*
* In relay, as tensor operation is the bottleneck,
* CPS is currently intended to transform the program before partial eval (PE),
* as it reifies the control flow and enables PE to handle control flow joins more aggressively.
*
* For example, given 'let a = if b then c else d in e', it will transform the code into
* 'let f a = e in if b then f c else f d'.
* This allows f to be optimized individually in both branches.
*
* We implement CPS conversion by higher order transform
* (see http://matt.might.net/articles/cps-conversion/).
* The basic idea is that we will recursively traverse the AST.
* During the traversal, there is an extra parameter, mcont, of type expr -> expr.
* It is basically a continuation at the meta level.
* All cases in the transform must return via the mcont,
* whether directly invoking it, or indirectly by recursion.
*/
#include <tvm/relay/transform.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/pattern_functor.h>
#include "../ir/type_functor.h"
#include "let_list.h"
#include "pass_util.h"
namespace tvm {
namespace relay {
// we assume the data type has no closure - no idea how to look into datatype right now.
/*!
 * \brief Build the one-argument function type l -> r.
 */
Type Arrow(const Type& l, const Type& r) {
  tvm::Array<Type> arg_types;
  arg_types.push_back(l);
  return FuncTypeNode::make(arg_types, r, {}, {});
}
Type CPSType(const Type& t, const TypeVar& answer);
/*!
 * \brief CPS-convert a function type:
 *  (a...) -> b becomes (cps(a)..., cps(b) -> answer) -> answer.
 */
FuncType CPSFuncType(const FuncType& f, const TypeVar& answer) {
  tvm::Array<Type> converted_args;
  for (size_t i = 0; i < f->arg_types.size(); ++i) {
    converted_args.push_back(CPSType(f->arg_types[i], answer));
  }
  // The extra trailing argument is the continuation.
  converted_args.push_back(Arrow(CPSType(f->ret_type, answer), answer));
  return FuncTypeNode::make(converted_args, answer, f->type_params, f->type_constraints);
}
/*!
 * \brief Recursively rewrite every function type inside t into CPS form.
 */
Type CPSType(const Type& t, const TypeVar& answer) {
  struct Converter : TypeMutator {
    explicit Converter(const TypeVar& answer) : answer(answer) { }
    TypeVar answer;
    // Function types are the only place CPS conversion changes anything.
    Type VisitType_(const FuncTypeNode* t) final {
      return CPSFuncType(GetRef<FuncType>(t), answer);
    }
  };
  Converter converter(answer);
  return converter(t);
}
// transform global functions into cps form.
using CPSMap = std::unordered_map<GlobalVar, GlobalVar, NodeHash, NodeEqual>;
// transform vars from the original program into new vars, so their type will be correct.
using VarMap = std::unordered_map<Var, Var, NodeHash, NodeEqual>;
/*
* The meta continuation.
* There are 3 rules on the metacontinuation:
* 0: It can only use the argument once.
*    The argument is code, and using it twice will duplicate code.
*    Bind the argument via let instead.
* 1: If the size of the metacontinuation is unbounded, it can only be called once.
*    It contains code, so calling it twice duplicates code.
*    Reify the continuation and bind it instead.
*    See the function 'reify' and the if case for more detail.
* 2: The argument must be effect free.
*    It might be reordered or dropped.
*    Again, bind the argument via let instead.
*    See the call case for more detail.
*/
using MCont = std::function<Expr(const Expr&)>;
Function ToCPS(const Function& f, const Module& m, CPSMap* cm);
Function ToCPS(const Function& f, const Module& m, CPSMap* cm, VarMap* vm, const TypeVar& answer) {
  // Map an original Var to its CPS-typed replacement; vars not in vm pass through.
  std::function<Var(Var)> remap = [&](const Var& v) { return vm->count(v) == 0 ? v : vm->at(v); };
  auto function_type = Downcast<FuncType>(f->checked_type());
  // Each MCont can be used at most once.
  struct CPSFunctor : ExprFunctor<Expr(const Expr&, const MCont&)>, PatternMutator {
    CPSFunctor(const std::function<Var(Var)>& remap,
               const TypeVar& answer,
               const Module& m,
               VarMap* vm,
               CPSMap* cm) : remap(remap), answer(answer), m(m), vm(vm), cm(cm) { }
    const std::function<Var(Var)>& remap;
    TypeVar answer;
    Module m;
    VarMap* vm;
    CPSMap* cm;

    // let: convert the bound value first, rebuild the let, then convert the
    // body under the outer continuation.
    Expr VisitExpr_(const LetNode* op, const MCont& k) final {
      return VisitExpr(op->value, [&](const Expr& v) {
        return LetNode::make(remap(op->var), v, VisitExpr(op->body, k));
      });
    }

    // A nested function is CPS-converted itself and handed to the continuation.
    Expr VisitExpr_(const FunctionNode* op, const MCont& k) final {
      CHECK(!op->IsPrimitive()) << "primitive func not supported yet.";
      return k(ToCPS(GetRef<Function>(op), m, cm, vm, answer));
    }

    // Atomic expressions go straight to the continuation.
    Expr VisitExpr_(const ConstantNode* op, const MCont& k) final {
      return k(GetRef<Constant>(op));
    }

    Expr VisitExpr_(const VarNode* op, const MCont& k) final {
      return k(remap(GetRef<Var>(op)));
    }

    Pattern VisitPattern_(const PatternVarNode* op) final {
      return PatternVarNode::make(remap(op->var));
    }

    // Global functions are converted on demand and cached in cm, so
    // (mutually) recursive references resolve to the "_cps" counterpart
    // instead of looping forever.
    Expr VisitExpr_(const GlobalVarNode* op, const MCont& k) final {
      auto gv = GetRef<GlobalVar>(op);
      if (cm->count(gv) == 0) {
        auto cps_gv = GlobalVarNode::make(gv->name_hint + "_cps");
        cm->insert({gv, cps_gv});
        m->Add(cps_gv, ToCPS(m->Lookup(gv), m, cm));
      }
      return k(cm->at(gv));
    }

    Expr VisitExpr_(const RefCreateNode* op, const MCont& k) final {
      return VisitExpr(op->value, [&](const Expr& v) { return k(RefCreateNode::make(v)); });
    }

    // Turn a meta continuation into an object-level function \arg -> k(arg).
    Expr reify(const MCont& k) {
      Var arg = VarNode::make("arg", Type());
      return FunctionNode::make({arg}, k(arg), Type(), {}, {});
    }

    // Reify k, bind it to a fresh var via let, and run cont with a new meta
    // continuation that merely calls that var. This makes it safe for cont
    // to use the (now constant-size) continuation more than once (rule 1).
    Expr reify(const MCont& k, const std::function<Expr(MCont)>& cont) {
      return LetList::Let(reify(k),
                          [&](const Var& f) {
                            return cont([&](const Expr& e) { return CallNode::make(f, {e}); });
                          });
    }

    // if: both branches share the continuation, so it must be reified to
    // avoid duplicating the code that follows the if.
    Expr VisitExpr_(const IfNode* op, const MCont& k) final {
      return reify(k, [&](const MCont& kf) {
        return VisitExpr(op->cond,
                         [&](const Expr& v) {
          return IfNode::make(v, VisitExpr(op->true_branch, kf), VisitExpr(op->false_branch, kf));
        });
      });
    }

    // match: same sharing concern as if — one continuation, many clauses.
    Expr VisitExpr_(const MatchNode* op, const MCont& k) final {
      return reify(k, [&](const MCont& kf) {
        return VisitExpr(op->data, [&](const Expr& v) {
          tvm::Array<Clause> clauses;
          for (const auto& c : op->clauses) {
            clauses.push_back(ClauseNode::make(VisitPattern(c->lhs), VisitExpr(c->rhs, kf)));
          }
          return MatchNode::make(v, clauses);
        });
      });
    }

    // Effectful expressions are let-bound before being passed on (rule 2),
    // so the continuation cannot reorder or drop them.
    Expr VisitExpr_(const RefReadNode* op, const MCont& k) final {
      return VisitExpr(op->ref,
                       [&](const Expr& r) {
                         return LetList::Let(RefReadNode::make(r), k);
                       });
    }

    Expr VisitExpr_(const RefWriteNode* op, const MCont& k) final {
      return VisitExpr(op->ref,
                       [&](const Expr& r) {
                         return VisitExpr(op->value,
                                          [&](const Expr& v) {
                                            return LetList::Let(RefWriteNode::make(r, v), k);
                                          });
                       });
    }

    // Tuple: convert fields left to right, accumulating converted values,
    // then hand the rebuilt tuple to the continuation.
    Expr VisitExpr_(const TupleNode* op, const MCont& k) final {
      tvm::Array<Expr> fields;
      std::function<Expr()> next;
      next = [&]() {
        return (fields.size() == op->fields.size()) ?
          k(TupleNode::make(fields)) :
          VisitExpr(op->fields[fields.size()], [&](const Expr& v) {
            fields.push_back(v);
            return next();
          });
      };
      return next();
    }

    Expr VisitExpr_(const TupleGetItemNode* op, const MCont& k) final {
      return VisitExpr(op->tuple, [&](const Expr& v) {
        return k(TupleGetItemNode::make(v, op->index));
      });
    }

    // Calls: operator/constructor calls are not CPS functions — evaluate
    // their arguments, let-bind the direct call, and continue. All other
    // calls receive the reified continuation as an extra trailing argument
    // and thereby become tail calls.
    Expr VisitExpr_(const CallNode* op, const MCont& k) final {
      if (op->op.as<OpNode>() || op->op.as<ConstructorNode>()) {
        tvm::Array<Expr> args;
        std::function<Expr()> next;
        next = [&]() {
          if (args.size() == op->args.size()) {
            return LetList::Let(CallNode::make(op->op, args, op->attrs, op->type_args), k);
          } else {
            return VisitExpr(op->args[args.size()], [&](const Expr& v) {
              args.push_back(v);
              return next();
            });
          }
        };
        return next();
      } else {
        Expr f;
        tvm::Array<Expr> args;
        std::function<Expr()> next;
        next = [&]() {
          if (args.size() == op->args.size()) {
            args.push_back(reify(k));
            return Expr(CallNode::make(f, args, op->attrs, op->type_args));
          } else {
            return VisitExpr(op->args[args.size()], [&](const Expr& v) {
              args.push_back(v);
              return next();
            });
          }
        };
        return VisitExpr(op->op, [&](const Expr& v) {
          f = v;
          return next();
        });
      }
    }
  } mut(remap, answer, m, vm, cm);
  // The converted function gains an explicit continuation parameter k;
  // its body is converted under a top-level meta continuation that simply
  // calls k with the final result.
  Var k = VarNode::make("k", Arrow(CPSType(function_type->ret_type, answer), answer));
  tvm::Array<Var> new_params;
  for (const Var& v : f->params) {
    new_params.push_back(remap(v));
  }
  new_params.push_back(k);
  return FunctionNode::make(new_params,
                            mut.VisitExpr(f->body,
                                          [&](const Expr& e) { return CallNode::make(k, {e}); }),
                            answer,
                            f->type_params,
                            f->attrs);
}
// CPS-convert f under a fresh 'answer' type variable, which is appended to
// the converted function's type parameter list so callers can instantiate it.
Function ToCPS(const Function& f, const Module& m, CPSMap* cm) {
  TypeVar answer = TypeVarNode::make("answer", kType);
  VarMap var;
  // Pre-populate the VarMap: every Var occurring in f (including pattern
  // variables) gets a replacement whose annotation is the CPS form of its
  // checked type, so the transformed body is well-typed.
  struct Remapper : ExprVisitor, PatternVisitor {
    Remapper(const TypeVar& answer, VarMap* vm) : answer(answer), vm(vm) { }
    TypeVar answer;
    VarMap* vm;
    void VisitExpr_(const VarNode* vn) final {
      Var v = GetRef<Var>(vn);
      if (vm->count(v) == 0) {
        auto ret = VarNode::make(v->name_hint(), CPSType(v->checked_type(), answer));
        vm->insert({v, ret});
      }
    }
    void VisitPattern(const Pattern& p) final {
      PatternVisitor::VisitPattern(p);
    }
    // Pattern variables are remapped through the same Var path.
    void VisitPattern_(const PatternVarNode* op) final {
      VisitExpr(op->var);
    }
  } remap(answer, &var);
  remap.VisitExpr(f);
  Function ret = ToCPS(f, m, cm, &var, answer);
  auto new_type_params = ret->type_params;
  new_type_params.push_back(answer);
  return FunctionNode::make(ret->params, ret->body, ret->ret_type, new_type_params, ret->attrs);
}
/*!
 * \brief Entry point of CPS conversion: start with an empty cache of
 *  already-converted global functions.
 */
Function ToCPS(const Function& f, const Module& m) {
  CPSMap converted_globals;
  return ToCPS(f, m, &converted_globals);
}
// Wrap a CPS function so callers can use it without supplying a continuation.
// Only the type is restored to un-CPS form; see the header doc for the
// higher-order input/output caveat.
Function UnCPS(const Function& f) {
  CHECK_GT(f->params.size(), 0);
  std::vector<Var> new_params;
  for (const auto& p : f->params) {
    new_params.push_back(VarNode::make(p->name_hint(), p->checked_type()));
  }
  // The last parameter of a CPS function is the continuation k : ret -> answer.
  auto cont_type = Downcast<FuncType>(new_params.back()->type_annotation);
  new_params.pop_back();
  CHECK_EQ(cont_type->arg_types.size(), 1);
  // The continuation's sole argument type is the original return type.
  auto new_ret_type = Type(cont_type->arg_types[0]);
  std::vector<TypeVar> new_type_params;
  for (const auto& tp : f->type_params) {
    new_type_params.push_back(TypeVarNode::make(tp->var->name_hint, tp->kind));
  }
  // The last type parameter is the polymorphic 'answer' type; it gets
  // instantiated with the concrete return type below.
  auto answer_type = new_type_params.back();
  new_type_params.pop_back();
  // TODO(@M.K.): make alphaequal work on free term
  // CHECK(AlphaEqual(cont_type, Arrow(new_ret_type, answer_type)));
  // The identity function serves as the continuation.
  auto x = VarNode::make("x", new_ret_type);
  auto cont = FunctionNode::make({x}, x, new_ret_type, {}, {});
  tvm::Array<Expr> args;
  for (const auto& p : new_params) {
    args.push_back(p);
  }
  args.push_back(cont);
  tvm::Array<Type> type_args;
  for (const auto& tp : new_type_params) {
    type_args.push_back(tp);
  }
  type_args.push_back(new_ret_type);
  return FunctionNode::make(new_params,
                            CallNode::make(f, args, {}, type_args),
                            new_ret_type,
                            new_type_params,
                            f->attrs);
}
// Expose the Function-level conversions to the frontend. The static_cast
// selects the two-argument ToCPS overload.
TVM_REGISTER_API("relay._transform.to_cps")
.set_body_typed(static_cast<Function (*)(const Function&, const Module&)>(ToCPS));

TVM_REGISTER_API("relay._transform.un_cps")
.set_body_typed(UnCPS);
namespace transform {
// Module-level pass wrapping the Function -> Function CPS conversion.
Pass ToCPS() {
  runtime::TypedPackedFunc<Function(Function, Module, PassContext)> transform_func =
    [=](Function func, Module mod, PassContext ctx) {
      return Function(ToCPS(func, mod));
    };
  return CreateFunctionPass(transform_func, 1, "ToCPS", {});
}

TVM_REGISTER_API("relay._transform.ToCPS")
.set_body_typed(ToCPS);
// Module-level pass wrapping the Function -> Function un-CPS conversion.
Pass UnCPS() {
  runtime::TypedPackedFunc<Function(Function, Module, PassContext)> transform_func =
    [=](Function func, Module mod, PassContext ctx) {
      return Function(UnCPS(func));
    };
  return CreateFunctionPass(transform_func, 1, "UnCPS", {});
}

TVM_REGISTER_API("relay._transform.UnCPS")
.set_body_typed(UnCPS);
} // namespace transform
} // namespace relay
} // namespace tvm
......@@ -368,10 +368,14 @@ class TypeInferencer : private ExprFunctor<Type(const Expr&)>,
// Build a subsitituion map up from the function type and type arguments.
// Eventually allow the type vars to be passed in.
for (size_t i = 0; i < fn_ty->type_params.size(); i++) {
for (size_t i = 0; i < ty_args.size(); ++i) {
subst_map.Set(fn_ty->type_params[i], ty_args[i]);
}
for (size_t i = ty_args.size(); i < fn_ty->type_params.size(); ++i) {
subst_map.Set(fn_ty->type_params[i], IncompleteTypeNode::make(Kind::kType));
}
Type ret_type = fn_ty->ret_type;
// If the function type is incomplete, place a new IncompleteType
......@@ -437,13 +441,7 @@ class TypeInferencer : private ExprFunctor<Type(const Expr&)>,
}
Array<Type> type_args = call->type_args;
if (type_args.size() == 0) {
for (size_t i = 0; i < fn_ty_node->type_params.size(); i++) {
type_args.push_back(IncompleteTypeNode::make(Kind::kType));
}
}
if (type_args.size() != fn_ty_node->type_params.size()) {
if (type_args.size() > fn_ty_node->type_params.size()) {
this->ReportFatalError(GetRef<Call>(call),
RELAY_ERROR("Incorrect number of type args in "
<< call->span << ": "
......
......@@ -17,14 +17,7 @@
import tvm
from tvm import relay
from tvm.relay import transform
def run_opt_pass(expr, opt_pass):
assert isinstance(opt_pass, transform.Pass)
mod = relay.Module.from_expr(expr)
mod = opt_pass(mod)
entry = mod[mod.entry_func]
return entry if isinstance(expr, relay.Function) else entry.body
from tvm.relay.testing import run_opt_pass
def test_fuse_simple():
......
......@@ -22,15 +22,7 @@ from tvm.relay.analysis import free_vars, free_type_vars
from tvm.relay import create_executor, transform
from tvm.relay.transform import gradient
from tvm.relay.prelude import Prelude
from tvm.relay.testing import add_nat_definitions, make_nat_expr
def run_infer_type(expr):
mod = relay.Module.from_expr(expr)
mod = relay.Module.from_expr(expr)
mod = transform.InferType()(mod)
entry = mod[mod.entry_func]
return entry if isinstance(expr, relay.Function) else entry.body
from tvm.relay.testing import add_nat_definitions, make_nat_expr, run_infer_type
def rand(dtype='float32', *shape):
......
......@@ -186,6 +186,19 @@ def test_function():
check_eval(anf_f(d), 8)
def test_gradient_if():
    """ANF + higher-order gradient must handle a program containing an If node."""
    x = relay.var("a", shape=(1, 16))
    y = relay.var("y", shape=(1, 16))
    cond = relay.var("cond", shape=(), dtype='uint1')
    branch = relay.If(cond, x, x)
    body = relay.add(x, branch)
    func = relay.Function([cond, x, y], body)
    mod = relay.Module.from_expr(func)
    mod = relay.transform.ToANormalForm()(mod)
    grad = relay.transform.gradient(mod[mod.entry_func], mode='higher_order')
    mod[mod.entry_func] = grad
    mod = relay.transform.ToANormalForm()(mod)
if __name__ == '__main__':
test_explicit_bound()
test_order()
......@@ -195,3 +208,4 @@ if __name__ == '__main__':
test_let()
test_nat_add()
test_function()
test_gradient_if()
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import relay
from tvm.relay.analysis import alpha_equal, detect_feature
from tvm.relay.transform import to_cps, un_cps
from tvm.relay.feature import Feature
from tvm.relay.prelude import Prelude
from tvm.relay.testing import add_nat_definitions, make_nat_expr, run_infer_type, run_opt_pass
from tvm.relay import create_executor
from tvm.relay import Function, transform
def rand(dtype='float32', *shape):
    """Return a TVM NDArray of the given shape, filled with uniform random
    values cast to `dtype`."""
    data = np.random.rand(*shape).astype(dtype)
    return tvm.nd.array(data)
# Make sure CPS works for recursive programs.
def test_recursion():
    """Round-trip a recursive program through to_cps/un_cps and check it still
    evaluates correctly: iterating `double` three times multiplies by 8."""
    mod = relay.Module()
    prelude = Prelude(mod)
    add_nat_definitions(prelude)
    shape = (10, 10)
    dtype = 'float32'
    tensor_type = relay.TensorType(shape, dtype)
    x = relay.var("x", tensor_type)
    double = relay.Function([x], x + x)
    arg = relay.var("i", tensor_type)
    body = prelude.nat_iterate(double, make_nat_expr(prelude, 3))(arg)
    mod[mod.entry_func] = relay.Function([arg], body)
    # Convert to CPS (recursion included) and back again.
    mod[mod.entry_func] = to_cps(mod[mod.entry_func], mod=mod)
    mod[mod.entry_func] = un_cps(mod[mod.entry_func])
    executor = create_executor(mod=mod)
    data = rand(dtype, *shape)
    result = executor.evaluate(mod.entry_func)(data)
    tvm.testing.assert_allclose(result.asnumpy(), 8 * data.asnumpy())
# This serves as an integration test.
# It tests that, given a program with references,
# CPS and partial evaluation can completely eliminate the reference allocations.
def test_cps_pe():
    def destroy_ref(x):
        # Type-check, convert to CPS, and make sure the CPS form still type-checks.
        x = run_infer_type(x)
        x = to_cps(x)
        x = run_infer_type(x)
        # Smoke check: un_cps round-trips and type-checks; result intentionally unused.
        y = un_cps(x)
        y = run_infer_type(y)
        # Partial evaluation + DCE on the CPS'd program must remove
        # every reference allocation.
        x = run_opt_pass(x, transform.Sequential([transform.PartialEvaluate(), transform.DeadCodeElimination(inline_once=True)]))
        assert Feature.fRefCreate not in detect_feature(x)
    # Case 1: a reference cell holding a closure, written by either branch
    # of an If and then read back and called.
    unit = relay.Function([], relay.const(0., dtype='float32'))
    f_ref = relay.Var("f_ref")
    one = relay.const(1., dtype='float32')
    two = relay.const(2., dtype='float32')
    cond = relay.var(shape=(), dtype='uint1', name_hint='cond')
    true_branch = relay.RefWrite(f_ref, relay.Function([], one))
    false_branch = relay.RefWrite(f_ref, relay.Function([], two))
    if_expr = relay.If(cond, true_branch, false_branch)
    stmt = relay.Let(f_ref, relay.RefCreate(unit),
                     relay.Let(relay.Var("x"), if_expr,
                               relay.Call(relay.RefRead(f_ref), [])))
    F = relay.Function([cond], stmt)
    destroy_ref(F)
    # Case 2: the gradient transform introduces references internally;
    # they must also be eliminable.
    G = relay.Function([cond], relay.If(cond, one, two))
    G = relay.transform.gradient(G)
    destroy_ref(G)
    # Case 3: gradient of a program with an If over tensor inputs.
    x = relay.var("x", shape=(1, 16))
    y = relay.var("y", shape=(1, 16))
    z = relay.var("z", shape=(1, 16))
    cond = relay.var("cond", shape=(), dtype='uint1')
    H = relay.If(cond, x, y)
    H = relay.add(H, z)
    H = relay.Function([cond,x,y,z], H)
    H = relay.transform.gradient(H)
    destroy_ref(H)
if __name__ == '__main__':
    # Run the CPS tests directly when invoked as a script.
    test_recursion()
    test_cps_pe()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment