Fix distribuions distributions, etc #62161

Merged
merged 1 commit on Feb 29, 2024
2 changes: 1 addition & 1 deletion test/distribution/test_distribution_categorical.py
@@ -313,7 +313,7 @@ def get_numpy_selected_probs(self, probability):
class CategoricalTest7(CategoricalTest):
def init_numpy_data(self, batch_size, dims):
# input logtis is 3-D Tensor
-# value used in probs and log_prob method has the same number of distribuions with input
+# value used in probs and log_prob method has the same number of distributions with input
self.logits_np = np.random.rand(3, 2, 5).astype('float32')
self.other_logits_np = np.random.rand(3, 2, 5).astype('float32')
self.value_np = np.array([2, 1, 3]).astype('int64')
2 changes: 1 addition & 1 deletion test/xpu/test_adamw_fp16_xpu.py
@@ -59,7 +59,7 @@ def test_state_dict(self):
state_dict_1["linear_0.b_0_moment1_0.SCALE_VALUE"] = 12.3125
adam.set_state_dict(state_dict_1)

-# check overwrited value
+# check overwritten value
state_dict_2 = adam.state_dict()
self.assertTrue("linear_0.w_0_moment1_0.SCALE_VALUE" in state_dict_2)
self.assertTrue("linear_0.b_0_moment1_0.SCALE_VALUE" in state_dict_2)
4 changes: 2 additions & 2 deletions test/xpu/test_argsort_op_xpu.py
@@ -165,7 +165,7 @@ def init_test_case(self):
2,
8732,
1,
-] # test for 8192 < n <= 10240 + nees_transpose
+] # test for 8192 < n <= 10240 + need_transpose
self.axis = 1

class TestArgsortOpCase4(TestArgsortOpCase1):
@@ -174,7 +174,7 @@ def init_test_case(self):
2,
10241,
1,
-] # test for 10240 < n <= 16384 + nees_transpose
+] # test for 10240 < n <= 16384 + need_transpose
self.axis = 1


4 changes: 2 additions & 2 deletions test/xpu/test_collective_allgather_xpu.py
@@ -29,7 +29,7 @@ def _setup_config(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_allgather(self):
support_types = get_xpu_op_support_types('c_allgather')
@@ -40,7 +40,7 @@ def test_allgather(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_allgather_dygraph(self):
support_types = get_xpu_op_support_types('c_allgather')
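For context, a minimal sketch of the skip pattern these XPU collective tests share: the case only runs when the wheel is compiled with XPU support and at least two XPU devices are visible. The class name, test name, and placeholder body below are illustrative, not the test's actual code.

```python
import unittest

import paddle


class TestNeedsTwoXPUs(unittest.TestCase):
    @unittest.skipIf(
        # `or` short-circuits, so device_count() is only queried on XPU builds
        not paddle.is_compiled_with_xpu()
        or paddle.device.xpu.device_count() < 2,
        "run test when having at least 2 XPUs.",
    )
    def test_collective_on_two_cards(self):
        # Placeholder body: the real tests launch a multi-process
        # collective (allgather / allreduce / broadcast) from here.
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()
```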
4 changes: 2 additions & 2 deletions test/xpu/test_collective_allreduce_xpu.py
@@ -29,7 +29,7 @@ def _setup_config(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_allreduce(self):
support_types = get_xpu_op_support_types('c_allreduce_sum')
@@ -42,7 +42,7 @@ def test_allreduce(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_allreduce_dygraph(self):
support_types = get_xpu_op_support_types('c_allreduce_sum')
4 changes: 2 additions & 2 deletions test/xpu/test_collective_broadcast_xpu.py
@@ -29,7 +29,7 @@ def _setup_config(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_broadcast(self):
support_types = get_xpu_op_support_types('c_broadcast')
@@ -42,7 +42,7 @@ def test_broadcast(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_broadcast_dygraph(self):
support_types = get_xpu_op_support_types('c_broadcast')
2 changes: 1 addition & 1 deletion test/xpu/test_collective_process_group_xpu.py
@@ -23,7 +23,7 @@
class TestProcessGroup(TestMultipleXpus):
@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_process_group_bkcl(self):
self.run_mnist_2xpu('process_group_bkcl.py')
4 changes: 2 additions & 2 deletions test/xpu/test_collective_reduce_xpu.py
@@ -29,7 +29,7 @@ def _setup_config(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_reduce(self):
support_types = get_xpu_op_support_types('c_reduce_sum')
@@ -42,7 +42,7 @@ def test_reduce(self):

@unittest.skipIf(
not core.is_compiled_with_xpu() or paddle.device.xpu.device_count() < 2,
-"run test when having at leaset 2 XPUs.",
+"run test when having at least 2 XPUs.",
)
def test_reduce_dygraph(self):
support_types = get_xpu_op_support_types('c_reduce_sum')
4 changes: 2 additions & 2 deletions test/xpu/test_device_guard_xpu.py
@@ -31,7 +31,7 @@ def execute(main_program, startup_program):
exe.run(main_program)


-def get_vaild_warning_num(warning, w):
+def get_valid_warning_num(warning, w):
num = 0
for i in range(len(w)):
if warning in str(w[i].message):
@@ -160,7 +160,7 @@ def test_without_kernel_op(self):
paddle.assign(paddle.less_than(x=i, y=loop_len), cond)

warning = "The Op(while) is not support to set device."
-warning_num = get_vaild_warning_num(warning, w)
+warning_num = get_valid_warning_num(warning, w)
assert warning_num == 1

all_ops = main_program.global_block().ops
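The renamed helper is just a message-matching counter over recorded warnings. A self-contained sketch of the same idea follows; only the loop shown in the hunk is taken from the test, while the increment/return lines and the warnings.catch_warnings harness are assumed for illustration.

```python
import warnings


def get_valid_warning_num(warning, w):
    # Count captured warnings whose message contains the given fragment.
    num = 0
    for i in range(len(w)):
        if warning in str(w[i].message):
            num += 1
    return num


with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")
    warnings.warn("The Op(while) is not support to set device.")
    assert get_valid_warning_num("is not support to set device", w) == 1
```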
6 changes: 3 additions & 3 deletions test/xpu/test_scatter_nd_add_op_xpu.py
@@ -34,11 +34,11 @@ def numpy_scatter_nd(ref, index, updates, fun):
end_size = index_shape[-1]

# as type int32, flat_index or flat_updates can't reshape to int64
-remain_numl = np.prod(index_shape[:-1]).astype("int32")
+remain_numel = np.prod(index_shape[:-1]).astype("int32")
slice_size = np.prod(ref_shape[end_size : len(ref_shape)]).astype("int32")

-flat_index = index.reshape([remain_numl] + list(index_shape[-1:]))
-flat_updates = updates.reshape((remain_numl, slice_size))
+flat_index = index.reshape([remain_numel] + list(index_shape[-1:]))
+flat_updates = updates.reshape((remain_numel, slice_size))
flat_output = ref.reshape(list(ref_shape[:end_size]) + [slice_size])

for i_up, i_out in enumerate(flat_index):
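For reference, a hedged NumPy sketch of what this scatter_nd reference computes, using the corrected remain_numel name. Everything outside the shown hunk (the add-only accumulation, the final reshape, the example call) is an assumption rather than the test's exact code.

```python
import numpy as np


def numpy_scatter_nd_add(ref, index, updates):
    """Pure-NumPy reference: accumulate `updates` into `ref` at `index` rows."""
    ref = ref.copy()
    ref_shape = ref.shape
    index_shape = index.shape
    end_size = index_shape[-1]

    # number of index rows, and size of each updated slice
    remain_numel = int(np.prod(index_shape[:-1]))
    slice_size = int(np.prod(ref_shape[end_size:]))

    flat_index = index.reshape([remain_numel] + list(index_shape[-1:]))
    flat_updates = updates.reshape((remain_numel, slice_size))
    flat_output = ref.reshape(list(ref_shape[:end_size]) + [slice_size])

    for i_up, i_out in enumerate(flat_index):
        # each index row addresses one output slice; add the matching update
        flat_output[tuple(i_out)] += flat_updates[i_up]

    return flat_output.reshape(ref_shape)


out = numpy_scatter_nd_add(
    np.zeros((3, 4), dtype="float32"),
    np.array([[1], [1], [2]], dtype="int64"),
    np.ones((3, 4), dtype="float32"),
)
# row 1 accumulates two update slices, row 2 one, row 0 none
```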