Skip to content

Commit

Permalink
[CodeStyle] Cleanup some useless eager Tensor usage (PaddlePaddle#66303)
Browse files Browse the repository at this point in the history
  • Loading branch information
SigureMo authored and lixcli committed Jul 22, 2024
1 parent f013c36 commit 8e87f4d
Show file tree
Hide file tree
Showing 11 changed files with 19 additions and 22 deletions.
2 changes: 1 addition & 1 deletion python/paddle/amp/grad_scaler.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ def scale(self, var: Tensor) -> Tensor:
check_type(
var,
"var",
(core.eager.Tensor, paddle.pir.Value),
(paddle.Tensor, paddle.pir.Value),
'AmpScaler.scale()',
)

Expand Down
2 changes: 1 addition & 1 deletion python/paddle/base/dygraph/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ def _convert_into_variable(tensor):
"""
if paddle.framework.use_pir_api():
return paddle.pir.core._convert_into_value(tensor)
if isinstance(tensor, core.eager.Tensor):
if isinstance(tensor, paddle.Tensor):
# Check whether has been created before.
new_var = tensor.block._find_var_recursive(tensor.name)
if new_var is not None:
Expand Down
4 changes: 2 additions & 2 deletions python/paddle/distributed/fleet/recompute/recompute.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,12 +79,12 @@ def detach_variable(inputs):
def check_recompute_necessary(inputs):
necessary_for_each_input = []
for input_ in inputs:
if isinstance(input_, (core.eager.Tensor, paddle.Tensor)):
if isinstance(input_, paddle.Tensor):
necessary_for_each_input.append(input_.stop_gradient)
elif type(input_) is tuple:
for i in input_:
# traverse all tensors in the tuple
if isinstance(i, (core.eager.Tensor, paddle.Tensor)):
if isinstance(i, paddle.Tensor):
necessary_for_each_input.append(i.stop_gradient)
if all(necessary_for_each_input):
logger.warning(
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/io/dataloader/collate.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def default_collate_fn(batch):
if isinstance(sample, np.ndarray):
batch = np.stack(batch, axis=0)
return batch
elif isinstance(sample, (paddle.Tensor, core.eager.Tensor)):
elif isinstance(sample, paddle.Tensor):
return paddle.stack(batch, axis=0)
elif isinstance(sample, numbers.Number):
batch = np.array(batch)
Expand Down
6 changes: 2 additions & 4 deletions python/paddle/io/dataloader/dataloader_iter.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ def _thread_loop(self, legacy_expected_place):
# pack as LoDTensorArray
array = core.LoDTensorArray()
for slot in batch:
if isinstance(slot, (paddle.Tensor, core.eager.Tensor)):
if isinstance(slot, paddle.Tensor):
slot = slot.value().get_tensor()
elif not isinstance(slot, core.LoDTensor):
tmp = core.LoDTensor()
Expand Down Expand Up @@ -633,9 +633,7 @@ def _thread_loop(self, legacy_expected_place):
# LoDTensor not in shared memory is not
# serializable, cannot be create in workers
for slot in batch:
if isinstance(
slot, (paddle.Tensor, core.eager.Tensor)
):
if isinstance(slot, paddle.Tensor):
slot = slot.get_tensor()
elif not isinstance(slot, core.LoDTensor):
tmp = core.LoDTensor()
Expand Down
14 changes: 7 additions & 7 deletions python/paddle/metric/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ def update(self, correct: Tensor, *args: Any) -> Tensor:
Return:
Tensor: the accuracy of current step.
"""
if isinstance(correct, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(correct, paddle.Tensor):
correct = np.array(correct)
num_samples = np.prod(np.array(correct.shape[:-1]))
accs = []
Expand Down Expand Up @@ -442,12 +442,12 @@ def update(
the shape should keep the same as preds.
The data type is 'int32' or 'int64'.
"""
if isinstance(preds, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(preds, paddle.Tensor):
preds = np.array(preds)
elif not _is_numpy_(preds):
raise ValueError("The 'preds' must be a numpy ndarray or Tensor.")

if isinstance(labels, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(labels, paddle.Tensor):
labels = np.array(labels)
elif not _is_numpy_(labels):
raise ValueError("The 'labels' must be a numpy ndarray or Tensor.")
Expand Down Expand Up @@ -581,12 +581,12 @@ def update(
the shape should keep the same as preds.
Shape: [batch_size, 1], Dtype: 'int32' or 'int64'.
"""
if isinstance(preds, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(preds, paddle.Tensor):
preds = np.array(preds)
elif not _is_numpy_(preds):
raise ValueError("The 'preds' must be a numpy ndarray or Tensor.")

if isinstance(labels, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(labels, paddle.Tensor):
labels = np.array(labels)
elif not _is_numpy_(labels):
raise ValueError("The 'labels' must be a numpy ndarray or Tensor.")
Expand Down Expand Up @@ -740,12 +740,12 @@ def update(
(batch_size, 1), labels[i] is either 0 or 1,
representing the label of the instance i.
"""
if isinstance(labels, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(labels, paddle.Tensor):
labels = np.array(labels)
elif not _is_numpy_(labels):
raise ValueError("The 'labels' must be a numpy ndarray or Tensor.")

if isinstance(preds, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if isinstance(preds, paddle.Tensor):
preds = np.array(preds)
elif not _is_numpy_(preds):
raise ValueError("The 'preds' must be a numpy ndarray or Tensor.")
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/optimizer/adamw.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ def __init__(
# paddle.Tensor is also iterable, so here we don't check whether
# the input is iterable, if the input is paddle.Tensor, the
# list(paddle.Tensor) will be an error value
if isinstance(parameters, (paddle.Tensor, core.eager.Tensor)):
if isinstance(parameters, paddle.Tensor):
raise TypeError(
"`parameters` argument given to the optimizer should be "
f"an iterable of paddle Tensors, but got argument type is `{type(parameters)}`."
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/optimizer/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ def __init__(
# paddle.Tensor is also iterable, so here we don't check whether
# the input is iterable, if the input is paddle.Tensor, the
# list(paddle.Tensor) will be an error value
if isinstance(parameters, (paddle.Tensor, core.eager.Tensor)):
if isinstance(parameters, paddle.Tensor):
raise TypeError(
"`parameters` argument given to the optimizer should be "
f"an iterable of paddle Tensors, but got argument type is `{type(parameters)}`."
Expand Down
3 changes: 1 addition & 2 deletions python/paddle/static/nn/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -2815,8 +2815,7 @@ def batch_norm(
if in_dygraph_mode():
inputs_has_MomentumTensor = False
attrs_has_momentum = False
tmp_tensor_type = core.eager.Tensor
if isinstance(momentum, tmp_tensor_type):
if isinstance(momentum, paddle.Tensor):
inputs_has_MomentumTensor = True
else:
attrs_has_momentum = True
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/tensor/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -1270,7 +1270,7 @@ def eye(
"""

def _check_attr(attr, message):
if isinstance(attr, ((Variable, core.eager.Tensor, paddle.pir.Value))):
if isinstance(attr, ((Variable, paddle.Tensor, paddle.pir.Value))):
assert len(attr.shape) == 1 and attr.shape[0] in [1, -1]
elif not isinstance(attr, int) or attr < 0:
raise TypeError(f"{message} should be a non-negative int.")
Expand Down
2 changes: 1 addition & 1 deletion python/paddle/utils/dlpack.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def to_dlpack(x: Tensor) -> CapsuleType:
"""

if in_dygraph_mode():
if not isinstance(x, (paddle.Tensor, paddle.base.core.eager.Tensor)):
if not isinstance(x, paddle.Tensor):
raise TypeError(
"The type of 'x' in to_dlpack must be paddle.Tensor,"
f" but received {type(x)}."
Expand Down

0 comments on commit 8e87f4d

Please sign in to comment.