Skip to content

Commit 7f22708

Browse files
committed
Fix
1 parent d16b9b4 commit 7f22708

File tree

2 files changed

+31
-0
lines changed

2 files changed

+31
-0
lines changed

python/paddle/tensor/math.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4012,6 +4012,9 @@ def clip(
40124012
elif x_dtype == 'paddle.float16':
40134013
min_ = float(np.finfo(np.float16).min)
40144014
max_ = float(np.finfo(np.float16).max)
4015+
elif x_dtype == 'paddle.float64':
4016+
min_ = float(np.finfo(np.float64).min)
4017+
max_ = float(np.finfo(np.float64).max)
40154018
else:
40164019
min_ = float(np.finfo(np.float32).min)
40174020
max_ = float(np.finfo(np.float32).max)

test/legacy_test/test_clip_op.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -525,5 +525,33 @@ def _executed_api(self, x, min=None, max=None):
525525
return x.clip_(min, max)
526526

527527

528+
class TestClipOp_FP64(OpTest):
    """Clip operator test for float64 inputs.

    Clips a random (8, 16, 8) float64 tensor to an upper bound of 0.7 with
    no effective lower bound, and checks forward output and gradients.
    """

    def setUp(self):
        # Python-level API under test.
        self.python_api = paddle.clip
        self.public_python_api = paddle.clip

        self.dtype = np.float64
        self.shape = (8, 16, 8)
        self.max = 0.7
        # No lower bound for the reference result.
        self.min = None

        self.op_type = "clip"
        # Operator attrs: 'min' is the most negative float64, which is a
        # no-op for inputs drawn from [0, 1), so it matches min=None above.
        self.attrs = {
            'min': float(np.finfo(np.float64).min),
            'max': self.max,
        }

        data = np.random.random(self.shape).astype(self.dtype)
        self.inputs = {'X': data}
        self.outputs = {'Out': np.clip(data, self.min, self.max)}

    def test_check_output(self):
        # Verify forward output (PIR mode enabled).
        self.check_output(check_pir=True)

    def test_check_grad_normal(self):
        # Verify gradient of Out w.r.t. X (PIR mode enabled).
        self.check_grad(['X'], 'Out', check_pir=True)
554+
555+
528556
if __name__ == '__main__':
529557
unittest.main()

0 commit comments

Comments
 (0)