[Torch] decompose AtenLerpTensorOp (#3251)
as title
Xinyu Yang authored Jun 3, 2024
1 parent 23b5305 commit 267052d
Showing 4 changed files with 59 additions and 1 deletion.
32 changes: 31 additions & 1 deletion lib/Dialect/Torch/Transforms/DecomposeComplexOps.cpp
@@ -2585,7 +2585,36 @@ class DecomposeAtenLerpScalarOp : public OpRewritePattern<AtenLerpScalarOp> {

    auto weightedDelta =
        rewriter.create<AtenMulScalarOp>(loc, inputType, delta, op.getWeight());
-    auto lerp = rewriter.create<AtenAddTensorOp>(loc, inputType, start,
+    auto lerp = rewriter.create<AtenAddTensorOp>(loc, resType, start,
                                                 weightedDelta, cstOne);
    rewriter.replaceOp(op, lerp);
    return success();
  }
};
} // namespace

+namespace {
+class DecomposeAtenLerpTensorOp : public OpRewritePattern<AtenLerpTensorOp> {
+public:
+  using OpRewritePattern::OpRewritePattern;
+  LogicalResult matchAndRewrite(AtenLerpTensorOp op,
+                                PatternRewriter &rewriter) const override {
+    Location loc = op.getLoc();
+    auto resType = cast<BaseTensorType>(op.getType());
+    if (!resType.hasDtype()) {
+      return rewriter.notifyMatchFailure(op, "result should have dtype");
+    }
+    Value cstOne =
+        rewriter.create<ConstantIntOp>(loc, rewriter.getI64IntegerAttr(1));
+    auto start = op.getSelf();
+    auto inputType = cast<BaseTensorType>(start.getType());
+
+    auto delta = rewriter.create<AtenSubTensorOp>(loc, inputType, op.getEnd(),
+                                                  start, cstOne);
+
+    auto weightedDelta =
+        rewriter.create<AtenMulTensorOp>(loc, inputType, delta, op.getWeight());
+    auto lerp = rewriter.create<AtenAddTensorOp>(loc, resType, start,
+                                                 weightedDelta, cstOne);
+    rewriter.replaceOp(op, lerp);
+    return success();
+  }
+};
+} // namespace
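For reference, the new pattern mirrors, op for op, the plain-PyTorch sketch below (not part of the patch; the alpha=1 arguments play the role of the cstOne operand):

import torch

def decomposed_lerp(start, end, weight):
    # delta = end - start              -> AtenSubTensorOp (alpha = cstOne)
    delta = torch.sub(end, start, alpha=1)
    # weightedDelta = delta * weight   -> AtenMulTensorOp
    weighted_delta = torch.mul(delta, weight)
    # lerp = start + weightedDelta     -> AtenAddTensorOp (alpha = cstOne)
    return torch.add(start, weighted_delta, alpha=1)

# Quick numerical check against the reference op, including broadcasting:
s, e, w = torch.rand(5, 4, 3), torch.rand(4, 3), torch.rand(3)
assert torch.allclose(torch.lerp(s, e, w), decomposed_lerp(s, e, w))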
@@ -8114,6 +8143,7 @@ class DecomposeComplexOpsPass
    addPatternIfTargetOpIsIllegal<DecomposeAtenLeakyReluOp>(patterns);
    addPatternIfTargetOpIsIllegal<DecomposeAtenLeakyReluBackwardOp>(patterns);
    addPatternIfTargetOpIsIllegal<DecomposeAtenLerpScalarOp>(patterns);
+    addPatternIfTargetOpIsIllegal<DecomposeAtenLerpTensorOp>(patterns);
    addPatternIfTargetOpIsIllegal<DecomposeAtenNewEmptyStridedOp>(patterns);
    addPatternIfTargetOpIsIllegal<DecomposeAtenEmptyStridedOp>(patterns);
    addPatternIfTargetOpIsIllegal<DecomposeAtenBucketizeTensorOp>(patterns);
1 change: 1 addition & 0 deletions lib/Dialect/Torch/Transforms/LowerToBackendContract.cpp
@@ -507,6 +507,7 @@ static void markDecomposedOpsAsIllegal(MLIRContext *context,
  target.addIllegalOp<Aten_EmbeddingBagOp>();
  target.addIllegalOp<AtenLiftFreshCopyOp>();
  target.addIllegalOp<AtenLerpScalarOp>();
+  target.addIllegalOp<AtenLerpTensorOp>();
  target.addIllegalOp<AtenMseLossOp>();
  target.addIllegalOp<AtenRandintLowOp>();
  target.addIllegalOp<AtenRandintOp>();
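Marking AtenLerpTensorOp illegal here means the backend contract now guarantees no aten.lerp.Tensor survives to backend lowering. A hedged end-to-end sketch via the pt1 TorchScript frontend (assumption: the compile entry point is torch_mlir.compile, which has moved between releases; adjust for your checkout):

import torch
import torch_mlir  # pt1-era Python package (assumption)

class Lerp(torch.nn.Module):
    def forward(self, a, b, c):
        return torch.lerp(a, b, c)

# With this patch, the emitted IR contains sub/mul/add chains rather than
# aten.lerp.Tensor, so backends without a native lerp lowering still work.
compiled = torch_mlir.compile(
    Lerp(),
    [torch.rand(5, 4, 3), torch.rand(4, 3), torch.rand(3)],
    output_type="linalg-on-tensors",
)
print(compiled)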
2 changes: 2 additions & 0 deletions projects/pt1/e2e_testing/xfail_sets.py
@@ -1020,6 +1020,7 @@
"ElementwiseSqrtModule_basic",
"ElementwiseTanIntModule_basic",
"ElementwiseTanModule_basic",
"ElementwiseTernaryStaticShapeModule_basic",
"ElementwiseToDtypeF32ToI64Module_basic",
"ElementwiseToDtypeI64ToI8Module_basic",
"ElementwiseToDtypeIdentityModule_basic",
@@ -1475,6 +1476,7 @@
"AtenDotModule_basic",
"ElementwiseFloatTensorGtIntScalarModule_basic",
"ElementwiseLogSigmoidModule_basic",
"ElementwiseTernaryStaticShapeModule_basic",
"ElementwiseTruncModule_basic",
"ElementwiseTruncIntModule_basic",
"ElementwiseSgnModule_basic",
25 changes: 25 additions & 0 deletions projects/pt1/python/torch_mlir_e2e_test/test_suite/elementwise.py
@@ -414,6 +414,31 @@ def ElementwiseTernaryModule_basic(module, tu: TestUtils):
# ==============================================================================


+class ElementwiseTernaryStaticShapeModule(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+
+    @export
+    @annotate_args(
+        [
+            None,
+            ([5, 4, 3], torch.float32, True),
+            ([4, 3], torch.float32, True),
+            ([3], torch.float32, True),
+        ]
+    )
+    def forward(self, a, b, c):
+        return torch.lerp(a, b, c)
+
+
+@register_test_case(module_factory=lambda: ElementwiseTernaryStaticShapeModule())
+def ElementwiseTernaryStaticShapeModule_basic(module, tu: TestUtils):
+    module.forward(tu.rand(5, 4, 3), tu.rand(4, 3), tu.rand(3))
+
+
+# ==============================================================================
+
+
class ElementwiseAtenWhereSelfModule(torch.nn.Module):
    def __init__(self):
        super().__init__()
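The ElementwiseTernaryStaticShapeModule test above pins down broadcast behavior with fully static shapes: end ([4, 3]) and weight ([3]) broadcast right-aligned against self ([5, 4, 3]). A one-line PyTorch check of the shape contract the e2e comparison relies on:

import torch

a, b, c = torch.rand(5, 4, 3), torch.rand(4, 3), torch.rand(3)
# end and weight broadcast up to self's shape; the result stays [5, 4, 3].
assert torch.lerp(a, b, c).shape == torch.Size([5, 4, 3])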
