Skip to content

Commit

Permalink
remove op.py in fluid (#52248)
Browse files Browse the repository at this point in the history
* remove op.py

* [Zero-Dim] change Tensor.numpy() usage to other equivalent usage, avoid hack (#52197)

* [BugFix] fix compute error in fused_dropout_add (#52261)

* fix bg

* add utest

* add utest

* [CodeStyle][UP034] remove (()) cases (#52060)

* add up34

* modify var name in loop

* revert changes in test_slice

* Revert "modify var name in loop"

This reverts commit 6d748e3.

* temporarily ignore test_slice.py

* add comment

* empty commit, re-trigger all ci

* fix inc

---------

Co-authored-by: SigureMo <sigure.qaq@gmail.com>

* [AMP OP&Test] add unittest for log_softmax (#52264)

* Fix_Linux_[-Wterminate]warning (#52186)

* [CustomOP Inplace] Automap inplace dtype and shape, prepare for vector<Tensor> output (#52214)

* [CustomOP Inplace] Automap inplace dtype and shape, prepare for vector<Tensor> output

* delete dtype,shape func of multi_inplace op

* [CustomOP Inplace] Automap inplace dtype and shape, support vector<Tensor> output

* [CustomOP Inplace] Auto-generate python API for inplace vector<Tensor> output

* [AMP OP&Test] add float16 optest for reshape_op (#51678)

* [AMP OP&Test] add float16 optest for reshape_op

* add public_python_api

* [AMP OP&Test] Add fp16/bf16 to clip op (#52158)

* add fp16/bf16 to clip op

* fix as reviewed

* update test_clip_op.py

* update test_clip_op.py

* fix bug

* fix code style

* fix bug

* fix bug

---------

Co-authored-by: Zhou Wei <1183042833@qq.com>
Co-authored-by: ShenLiang <1422485404@qq.com>
Co-authored-by: 张春乔 <83450930+Liyulingyue@users.noreply.github.com>
Co-authored-by: SigureMo <sigure.qaq@gmail.com>
Co-authored-by: Ccc <52520497+juncaipeng@users.noreply.github.com>
Co-authored-by: Galaxy1458 <55453380+Galaxy1458@users.noreply.github.com>
Co-authored-by: HongyuJia <jiahongyu@baidu.com>
Co-authored-by: zhaoyingli <86812880+zhaoyinglia@users.noreply.github.com>
Co-authored-by: wuyefeilin <30919197+wuyefeilin@users.noreply.github.com>
  • Loading branch information
10 people authored Apr 4, 2023
1 parent c85a0c5 commit 273783b
Show file tree
Hide file tree
Showing 43 changed files with 51 additions and 115 deletions.
3 changes: 1 addition & 2 deletions python/paddle/fluid/tests/unittests/benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@

import numpy as np
from eager_op_test import OpTest

from paddle.fluid.op import Operator
from op import Operator


class BenchmarkSuite(OpTest):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

from paddle import fluid
from paddle.fluid import core
from paddle.fluid.op import Operator
from paddle.fluid.tests.unittests.op import Operator


class TestDGCMomentumOp1(unittest.TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

from paddle import fluid
from paddle.fluid import core
from paddle.fluid.op import Operator
from paddle.fluid.tests.unittests.op import Operator

g_array_size = 102400

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from paddle import fluid
from paddle.fluid import core
from paddle.fluid.framework import Program, program_guard
from paddle.fluid.op import Operator
from paddle.fluid.tests.unittests.op import Operator
from paddle.incubate.distributed.fleet.parameter_server.mode import (
DistributedMode,
)
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/eager_op_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from copy import copy

import numpy as np
from op import Operator

import paddle
from paddle import fluid
Expand All @@ -35,7 +36,6 @@
_current_expected_place,
canonicalize_attrs,
)
from paddle.fluid.op import Operator

sys.path.append(os.path.abspath(os.path.dirname(__file__)))
from prim_op_test import OpTestUtils, PrimForwardChecker, PrimGradChecker
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,9 @@

import numpy as np

import paddle.fluid.core as core
import paddle.fluid.proto.framework_pb2 as framework_pb2
from paddle.fluid import core
from paddle.fluid.proto import framework_pb2


# NOTE: this is added to support creating a Scalar message
# from a python number
Expand Down Expand Up @@ -256,13 +257,13 @@ def __impl__(*args, **kwargs):
inputs=[(var.name, var.duplicable) for var in op_proto.inputs],
outputs=[(var.name, var.duplicable) for var in op_proto.outputs],
attrs=[attr.name for attr in op_proto.attrs],
extra_attrs=[item for item in extra_attrs_map.keys()],
extra_attrs=list(extra_attrs_map.keys()),
)


class OperatorFactory:
def __init__(self):
self.op_methods = dict()
self.op_methods = {}

for op_proto in get_all_op_protos():
method = create_op_creation_method(op_proto)
Expand Down Expand Up @@ -313,70 +314,4 @@ def get_op_extra_attr_names(self, type):
return self.get_op_info(type).extra_attrs


class __RecurrentOp__:
    """Callable factory that builds a C++ RecurrentOp from op arguments."""

    __proto__ = None  # lazily resolved OpProto for the "recurrent" op type
    type = "recurrent"

    def __init__(self):
        # Resolve and memoize the matching proto the first time an
        # instance is built (last matching proto wins, as before).
        if self.__proto__ is None:
            for candidate in get_all_op_protos():
                if candidate.type == self.type:
                    self.__proto__ = candidate

    def __call__(self, *args, **kwargs):
        # Inject the op type unless the caller already supplied it.
        if "type" not in kwargs and self.type not in args:
            kwargs["type"] = self.type
        # Build the op description proto, then hand it to the C++ core.
        desc = OpDescCreationMethod(self.__proto__)(*args, **kwargs)
        return core.RecurrentOp.create(desc.SerializeToString())


class __DynamicRecurrentOp__:
    """Callable factory that builds a C++ DynamicRecurrentOp from op arguments."""

    __proto__ = None  # lazily resolved OpProto for the "dynamic_recurrent" op type
    type = "dynamic_recurrent"

    def __init__(self):
        # Resolve and memoize the matching proto the first time an
        # instance is built (last matching proto wins, as before).
        if self.__proto__ is None:
            for candidate in get_all_op_protos():
                if candidate.type == self.type:
                    self.__proto__ = candidate

    def __call__(self, *args, **kwargs):
        # Inject the op type unless the caller already supplied it.
        if "type" not in kwargs and self.type not in args:
            kwargs["type"] = self.type
        # Build the op description proto, then hand it to the C++ core.
        desc = OpDescCreationMethod(self.__proto__)(*args, **kwargs)
        return core.DynamicRecurrentOp.create(desc.SerializeToString())


class __CondOp__:
    """Callable factory that builds a C++ CondOp from op arguments."""

    __proto__ = None  # lazily resolved OpProto for the "cond" op type
    type = "cond"

    def __init__(self):
        # Resolve and memoize the matching proto the first time an
        # instance is built (last matching proto wins, as before).
        if self.__proto__ is None:
            for candidate in get_all_op_protos():
                if candidate.type == self.type:
                    self.__proto__ = candidate

    def __call__(self, *args, **kwargs):
        # Inject the op type unless the caller already supplied it.
        if "type" not in kwargs and self.type not in args:
            kwargs["type"] = self.type
        # Build the op description proto, then hand it to the C++ core.
        desc = OpDescCreationMethod(self.__proto__)(*args, **kwargs)
        return core.CondOp.create(desc.SerializeToString())


# Module-level singletons: most callers import `Operator` and construct
# ops by type name; the remaining three wrap specific C++ op kinds.
Operator = OperatorFactory()  # The default global factory
RecurrentOp = __RecurrentOp__()  # factory for the "recurrent" op
DynamicRecurrentOp = __DynamicRecurrentOp__()  # factory for the "dynamic_recurrent" op
CondOp = __CondOp__()  # factory for the "cond" op
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@
import unittest

import numpy as np
from op import Operator

import paddle
from paddle.fluid import core
from paddle.fluid.op import Operator


class TestSparseSquareOp(unittest.TestCase):
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_adagrad_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,10 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle.fluid import core
from paddle.fluid.op import Operator


def adamgrad_wrapper(
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_adam_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import core
from paddle.fluid.op import Operator


def adam_wrapper(
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_batch_norm_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@

import numpy as np
from eager_op_test import OpTest, _set_use_system_allocator
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import Program, core, program_guard
from paddle.fluid.framework import grad_var_name
from paddle.fluid.op import Operator

_set_use_system_allocator(True)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@
import unittest

import numpy as np
from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


class TestBeamSearchDecodeOp(unittest.TestCase):
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_beam_search_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@
import unittest

import numpy as np
from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


def create_tensor(scope, name, np_data):
Expand Down
4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/unittests/test_clip_by_norm_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import core
from paddle.nn import clip

Expand Down Expand Up @@ -119,7 +119,7 @@ def check_with_place(self, place):
out_selected_rows = scope.var('Out').get_selected_rows()

# run clip_by_norm_op
clip_by_norm_op = fluid.op.Operator(
clip_by_norm_op = Operator(
"clip_by_norm", max_norm=self.max_norm, X='X', Out='Out'
)
clip_by_norm_op.run(scope, place)
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_data_norm_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import Program, core, program_guard
from paddle.fluid.op import Operator


def _reference_testing(x, batch_size, batch_sum, batch_square_sum, slot_dim=-1):
Expand Down
3 changes: 2 additions & 1 deletion python/paddle/fluid/tests/unittests/test_fake_init_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,9 @@

import unittest

from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


class TestFakeInitOpSelectedRows(unittest.TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@

import numpy as np
from eager_op_test import OpTest, convert_float_to_uint16
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import Program, core, program_guard
from paddle.fluid.op import Operator


def fill_wrapper(shape, value=0.0):
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_fill_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@

import numpy as np
from eager_op_test import OpTest, convert_float_to_uint16
from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


class TestFillOp1(OpTest):
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_ftrl_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


def ftrl_step(param, grad, rows, sq_accum, lin_accum, lr, l1, l2, lr_power):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@
import unittest

import numpy as np
from op import Operator

import paddle
from paddle.fluid import Program, core, program_guard
from paddle.fluid.op import Operator
from paddle.nn import clip


Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_lamb_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@

import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle.fluid import core
from paddle.fluid.op import Operator

paddle.enable_static()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,11 @@
import unittest

import numpy as np
from op import Operator

import paddle
from paddle import enable_static, fluid
from paddle.fluid import core
from paddle.fluid.op import Operator
from paddle.fluid.tests.unittests.eager_op_test import (
OpTest,
convert_float_to_uint16,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@
paddle_static_guard,
skip_check_grad_ci,
)
from op import Operator

import paddle
import paddle.nn.functional as F
from paddle import fluid
from paddle.fluid import Program, core, program_guard
from paddle.fluid.op import Operator


class TestLookupTableOp(OpTest):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@

import numpy as np
from eager_op_test import OpTest, skip_check_grad_ci
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import Program, core, program_guard
from paddle.fluid.op import Operator


class TestStaticGraphSupportMultipleInt(unittest.TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@
import unittest

import numpy as np
from op import Operator

from paddle.fluid import core
from paddle.fluid.op import Operator


class TestMergeSelectedRows(unittest.TestCase):
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/unittests/test_momentum_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@
import numpy
import numpy as np
from eager_op_test import OpTest
from op import Operator

import paddle
from paddle import fluid
from paddle.fluid import core
from paddle.fluid.op import Operator


def calculate_momentum_by_numpy(
Expand Down
Loading

0 comments on commit 273783b

Please sign in to comment.