Commit 4ba30478 by Junru Shao, committed by Tianqi Chen

Fix -Wreturn-std-move and -Wself-assign-overloaded (#2669)

parent 16b009b2
@@ -199,7 +199,7 @@ Graph InferAttr(Graph &&ret,
   ret.attrs[attr_name] = std::make_shared<any>(std::move(rshape));
   // number of nodes who knows the shape.
   ret.attrs[unknown_name] = std::make_shared<any>(num_unknown);
-  return ret;
+  return std::move(ret);
 }
 NNVM_REGISTER_PASS(InferShape)
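For context (not part of the commit itself): clang's -Wreturn-std-move fires when a `return` statement copies a value that an explicit `std::move` would let it move. The hunk above is the rvalue-reference-parameter case: a named rvalue reference is an lvalue, so before C++20 `return ret;` selects the copy constructor. A minimal sketch, using a hypothetical `Graph` stand-in rather than the real `nnvm::Graph`:

```cpp
#include <utility>
#include <vector>

// Hypothetical stand-in for nnvm::Graph; any type with a non-trivial
// copy constructor exhibits the same behavior.
struct Graph {
  std::vector<int> nodes;
};

Graph InferAttr(Graph&& ret) {
  ret.nodes.push_back(1);  // mutate the graph in place
  // `ret` is an lvalue even though its type is Graph&&, so before C++20
  // `return ret;` copy-constructs the result; clang suggests std::move.
  return std::move(ret);
}
```

Under C++20's extended implicit-move rules (P1825R0) the `std::move` becomes redundant; the warning matters for the pre-C++20 language modes the project was built with at the time.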
@@ -46,7 +46,7 @@ class StageInputReplacer : public IRMutator {
     Var new_var(it->second->var->name_hint + ".sync", op->type);
     inputs_.Set(new_var, it->second);
     replace_[op] = new_var;
-    return new_var;
+    return std::move(new_var);
   }
   Expr Mutate_(const Load* op, const Expr& e) final {
     CHECK(is_zero(op->index))
@@ -60,7 +60,7 @@ class StageInputReplacer : public IRMutator {
     Var data(it->second->var->name_hint + ".load.sync", op->type);
     inputs_.Set(data, it->second);
     replace_[op->buffer_var.get()] = data;
-    return data;
+    return std::move(data);
   }
   // inputs that get replaced.
   Map<Var, StageInput> inputs_;
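The remaining return-statement hunks are the other -Wreturn-std-move case: the local is a `Var` (or `Call`, `GlobalVar`, `Function`) while the enclosing function returns the base handle type `Expr`. Pre-C++20 implicit move does not apply when the returned local's type differs from the return type, so the return copies. A minimal sketch, assuming simplified stand-ins for TVM's reference-counted handle types:

```cpp
#include <memory>
#include <utility>

// Simplified stand-ins: in TVM, Var derives from Expr, and both are thin
// handles around a reference-counted node pointer.
struct Expr {
  std::shared_ptr<int> node;
};
struct Var : Expr {};

Expr MakeVar() {
  Var v;
  v.node = std::make_shared<int>(42);
  // Before C++20, implicit move requires the returned local's type to
  // match the return type, so `return v;` copy-constructs the Expr base
  // (an atomic refcount bump); std::move moves the handle instead.
  return std::move(v);
}
```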
@@ -39,7 +39,7 @@ class ContextCallCombiner final : public IRMutator {
         }
         Var ctx_var(name, ctx.type());
         ctx_map_[ctx] = ctx_var;
-        return ctx_var;
+        return std::move(ctx_var);
       }
     } else {
       return IRMutator::Mutate_(op, e);
@@ -364,7 +364,7 @@ class ExprBinder : public ExprMutator {
     if (it != args_map_.end()) {
       return (*it).second;
     } else {
-      return id;
+      return std::move(id);
     }
   }
@@ -192,7 +192,7 @@ class TypeBinder : public TypeMutator {
     if (it != args_map_.end()) {
       return (*it).second;
     } else {
-      return id;
+      return std::move(id);
     }
   }
@@ -34,7 +34,7 @@ Expr TransformLayout(Expr raw, Layout src_layout, Layout dst_layout) {
   attrs->src_layout = src_layout.name();
   attrs->dst_layout = dst_layout.name();
   Call transform = CallNode::make(transform_op, {raw}, Attrs{attrs});
-  return transform;
+  return std::move(transform);
 }
 // Memorize layout transform so we can reuse internal transformed nodes
@@ -773,7 +773,7 @@ class FuseMutator : private ExprMutator {
       } else {
         // This is an intermediate node of a fused function
        // simply return the new call.
-        return new_call;
+        return std::move(new_call);
       }
     } else {
       return ExprMutator::VisitExpr_(call);
@@ -398,7 +398,7 @@ class Fill : ExprFunctor<Expr(const Expr&, const Var&)> {
       visited_->insert(gv);
       mod_->Update(gv, Downcast<Function>(relay::ToANormalForm(mod_->Lookup(gv), mod_, visited_)));
     }
-    return gv;
+    return std::move(gv);
   }
   Expr VisitExpr_(const OpNode* op, const Var& v) final {
@@ -724,7 +724,7 @@ Expr InferType(const Expr& expr, const Module& mod_ref) {
   // FromExpr wraps a naked expression as a function, we will unbox
   // it here.
   if (expr.as<FunctionNode>()) {
-    return func;
+    return std::move(func);
   } else {
     return func->body;
   }
@@ -39,7 +39,7 @@ inline Tensor l2_normalize(const Tensor& data,
   topi::sqrt(tvm::compute(expand_sum->shape,
                           [&](const Array<Var>& i){
                             return (max(expand_sum(i), eps));
-                          }, name = name, tag = tag)));
+                          }, name, tag)));
 }
 }  // namespace nn
 }  // namespace topi
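The last hunk fixes -Wself-assign-overloaded rather than a move issue. `name = name` was presumably meant as a keyword argument, but C++ has none: each `x = x` is a self-assignment through `std::string`'s overloaded `operator=`, whose result is then passed to the call. A minimal sketch, with a hypothetical `compute()` helper standing in for `tvm::compute`:

```cpp
#include <iostream>
#include <string>

// Hypothetical helper standing in for tvm::compute, whose trailing
// parameters happen to share the caller's names `name` and `tag`.
void compute(const std::string& name, const std::string& tag) {
  std::cout << name << ":" << tag << "\n";
}

void caller(std::string name, std::string tag) {
  // compute(name = name, tag = tag);  // compiles, but each argument is a
  //                                   // self-assignment: clang emits
  //                                   // -Wself-assign-overloaded
  compute(name, tag);  // the fix: plain positional arguments
}
```

The original call already passed the right values; the warning flags the pointless (and confusing) self-assignment, not wrong behavior.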