2020-10-13 19:50:54 +08:00
|
|
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
|
|
|
|
|
#
|
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
|
#
|
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
#
|
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
2024-06-12 11:59:21 +08:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
2024-09-26 16:01:14 +08:00
|
|
|
import os
|
|
|
|
|
|
2020-10-13 19:50:54 +08:00
|
|
|
import numpy as np
|
2022-11-28 11:52:40 +08:00
|
|
|
|
2024-02-06 18:50:21 +08:00
|
|
|
import paddle
|
2023-09-07 17:26:19 +08:00
|
|
|
from paddle.base.data_feeder import check_type, convert_dtype
|
2020-10-13 19:50:54 +08:00
|
|
|
|
2022-11-28 11:52:40 +08:00
|
|
|
from ..framework import core
|
|
|
|
|
|
2021-04-29 19:31:40 +08:00
|
|
|
__all__ = []
|
|
|
|
|
|
2020-10-13 19:50:54 +08:00
|
|
|
|
2022-11-08 11:29:41 +08:00
|
|
|
class PrintOptions:
    """Global configuration for how Tensors are rendered by ``print``.

    The attributes mirror the keyword arguments of
    ``paddle.set_printoptions`` and are read by the formatting helpers
    in this module.
    """

    # Number of digits shown for floating-point elements.
    precision = 8
    # Summarize with "..." once the element count exceeds this value.
    threshold = 1000
    # Elements kept at the head/tail of each dimension when summarizing.
    edgeitems = 3
    # Maximum number of characters per printed line.
    linewidth = 80
    # Whether floats are rendered in scientific notation.
    sci_mode = False


# Process-wide options instance consulted by the helpers below.
DEFAULT_PRINT_OPTIONS = PrintOptions()
|
|
|
|
|
|
|
|
|
|
|
2022-10-23 20:01:27 +08:00
|
|
|
def set_printoptions(
    precision: int | None = None,
    threshold: int | None = None,
    edgeitems: int | None = None,
    sci_mode: bool | None = None,
    linewidth: int | None = None,
) -> None:
    """Set the printing options for Tensor.

    Args:
        precision (int|None, optional): Number of digits of the floating number, default 8.
        threshold (int|None, optional): Total number of elements printed, default 1000.
        edgeitems (int|None, optional): Number of elements in summary at the beginning and ending of each dimension, default 3.
        sci_mode (bool|None, optional): Format the floating number with scientific notation or not, default False.
        linewidth (int|None, optional): Number of characters each line, default 80.

    Returns:
        None.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> paddle.seed(10)
            >>> a = paddle.rand([10, 20])
            >>> paddle.set_printoptions(4, 100, 3)
            >>> print(a)
            Tensor(shape=[10, 20], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.2727, 0.5489, 0.8655, ..., 0.2916, 0.8525, 0.9000],
             [0.3806, 0.8996, 0.0928, ..., 0.9535, 0.8378, 0.6409],
             [0.1484, 0.4038, 0.8294, ..., 0.0148, 0.6520, 0.4250],
             ...,
             [0.3426, 0.1909, 0.7240, ..., 0.4218, 0.2676, 0.5679],
             [0.5561, 0.2081, 0.0676, ..., 0.9778, 0.3302, 0.9559],
             [0.2665, 0.8483, 0.5389, ..., 0.4956, 0.6862, 0.9178]])
    """
    kwargs = {}

    # Each supplied option is validated, recorded on the module-wide
    # Python-side options object, and forwarded to the C++ printing
    # options via ``core.set_printoptions``. The previous implementation
    # repeated this stanza five times; a data-driven loop keeps the
    # validation, storage and forwarding in lock-step for every option.
    # NOTE: the old code wrote ``(int)`` / ``(bool)`` — those parens did
    # not create tuples, so plain type objects are equivalent.
    options = (
        ('precision', precision, int),
        ('threshold', threshold, int),
        ('edgeitems', edgeitems, int),
        ('linewidth', linewidth, int),
        ('sci_mode', sci_mode, bool),
    )
    for name, value, expected_type in options:
        if value is not None:
            check_type(value, name, expected_type, 'set_printoptions')
            setattr(DEFAULT_PRINT_OPTIONS, name, value)
            kwargs[name] = value

    core.set_printoptions(**kwargs)
|
|
|
|
|
|
|
|
|
|
|
2021-09-01 16:18:02 +08:00
|
|
|
def _to_summary(var):
    """Shrink ``var`` to just the elements that will actually be printed.

    Keeps ``edgeitems`` entries from the head and the tail of every
    dimension, so that later width measurement only inspects visible
    elements.
    """
    k = DEFAULT_PRINT_OPTIONS.edgeitems

    # Tensors with a zero-sized dimension, like [0, 2] or [3, 0, 3],
    # have nothing to show.
    if np.prod(var.shape) == 0:
        return np.array([])

    ndim = len(var.shape)
    if ndim == 0:
        return var

    needs_trim = var.shape[0] > 2 * k
    if ndim == 1:
        if not needs_trim:
            return var
        return np.concatenate([var[:k], var[(-1 * k):]])

    # Higher rank: trim the leading axis, then recurse into each slice.
    if needs_trim:
        rows = list(var[:k]) + list(var[(-1 * k):])
    else:
        rows = list(var)
    return np.stack([_to_summary(row) for row in rows])
|
2020-10-13 19:50:54 +08:00
|
|
|
|
|
|
|
|
|
2020-10-25 22:09:34 -05:00
|
|
|
def _format_item(np_var, max_width=0, signed=False):
    """Render one scalar element as a string, optionally padded.

    Args:
        np_var: A numpy scalar to format.
        max_width(int): Pad the result to this width when it is larger
            than the natural width; 0 (the default) disables padding so
            that ``_get_max_width`` can probe natural widths.
        signed(bool): Reserve a leading column on non-negative items so
            they line up with negative neighbours.
    """
    prec = DEFAULT_PRINT_OPTIONS.precision
    dtype = np_var.dtype

    if dtype in (np.float32, np.float64, np.float16):
        if DEFAULT_PRINT_OPTIONS.sci_mode:
            item_str = f'{np_var:.{prec}e}'
        elif np.ceil(np_var) == np_var:
            # Integer-valued floats print as e.g. "3." regardless of
            # the configured precision.
            item_str = f'{np_var:.0f}.'
        else:
            item_str = f'{np_var:.{prec}f}'
    elif dtype == np.complex64 or dtype == np.complex128:
        real_part = np.real(np_var)
        imag_part = np.imag(np_var)
        spec = 'e' if DEFAULT_PRINT_OPTIONS.sci_mode else 'f'
        # A negative imaginary part already carries its own '-' sign,
        # so only non-negative parts need an explicit '+'.
        joiner = '+' if imag_part >= 0 else ''
        item_str = (
            f'({real_part:.{prec}{spec}}{joiner}{imag_part:.{prec}{spec}}j)'
        )
    else:
        # Integers, bools, etc. use numpy's default rendering.
        item_str = f'{np_var}'

    if max_width <= len(item_str):
        # Unpadded form, used by _get_max_width to measure widths.
        return item_str
    if signed and not np_var < 0:
        # Leave room for the sign column occupied by negative items.
        return ' ' + item_str.ljust(max_width - 1)
    return item_str.ljust(max_width)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _get_max_width(var):
    """Measure the widest formatted element in ``var``.

    Args:
        var: A numpy array (typically already passed through
            ``_to_summary``) whose elements are about to be printed.

    Returns:
        tuple(int, bool): ``(max_width, signed)`` where ``max_width`` is
        the length of the longest formatted element and ``signed`` is
        True when at least one element is negative, so a sign column
        must be reserved during alignment.
    """
    max_width = 0
    signed = False
    # Iterate the flattened view directly — the previous list(...) copy
    # was an unnecessary materialization of every scalar.
    for item in var.flatten():
        if (not signed) and (item < 0):
            signed = True
        # _format_item with default max_width returns the unpadded form.
        max_width = max(max_width, len(_format_item(item)))

    return max_width, signed
|
2020-10-13 19:50:54 +08:00
|
|
|
|
2020-10-25 22:09:34 -05:00
|
|
|
|
2021-09-01 16:18:02 +08:00
|
|
|
def _format_tensor(var, summary, indent=0, max_width=0, signed=False):
    """
    Format a tensor

    Args:
        var(Tensor): The tensor to be formatted.
        summary(bool): Do summary or not. If true, some elements will not be printed, and be replaced with "...".
        indent(int): The indent of each line.
        max_width(int): The max width of each elements in var.
        signed(bool): Print +/- or not.
    """
    edgeitems = DEFAULT_PRINT_OPTIONS.edgeitems
    linewidth = DEFAULT_PRINT_OPTIONS.linewidth

    if len(var.shape) == 0:
        # 0-D Tensor, whose shape = [], should be formatted like this.
        return _format_item(var, max_width, signed)
    elif len(var.shape) == 1:
        # 1-D: lay the formatted items out in rows, wrapping at linewidth.
        # Each item occupies max_width characters plus ", " separation.
        item_length = max_width + 2
        # Guarantee at least one item per line even when a single item is
        # wider than the remaining line budget.
        items_per_line = max(1, (linewidth - indent) // item_length)

        if summary and var.shape[0] > 2 * edgeitems:
            # Keep only edgeitems elements from each end, with "..."
            # standing in for everything omitted in the middle.
            items = (
                [
                    _format_item(var[i], max_width, signed)
                    for i in range(edgeitems)
                ]
                + ['...']
                + [
                    _format_item(var[i], max_width, signed)
                    for i in range(var.shape[0] - edgeitems, var.shape[0])
                ]
            )
        else:
            items = [
                _format_item(var[i], max_width, signed)
                for i in range(var.shape[0])
            ]
        # Chunk the items into physical lines; continuation lines are
        # indented one extra column so they align inside the brackets.
        lines = [
            items[i : i + items_per_line]
            for i in range(0, len(items), items_per_line)
        ]
        s = (',\n' + ' ' * (indent + 1)).join(
            [', '.join(line) for line in lines]
        )
        return '[' + s + ']'
    else:
        # recursively handle all dimensions
        if summary and var.shape[0] > 2 * edgeitems:
            # Summarize the leading axis the same way as the 1-D case;
            # inner axes are summarized by the recursive calls.
            vars = (
                [
                    _format_tensor(
                        var[i], summary, indent + 1, max_width, signed
                    )
                    for i in range(edgeitems)
                ]
                + ['...']
                + [
                    _format_tensor(
                        var[i], summary, indent + 1, max_width, signed
                    )
                    for i in range(var.shape[0] - edgeitems, var.shape[0])
                ]
            )
        else:
            vars = [
                _format_tensor(var[i], summary, indent + 1, max_width, signed)
                for i in range(var.shape[0])
            ]

        # Separate slices with (ndim - 1) blank lines, mirroring numpy's
        # convention of one extra blank line per extra dimension.
        s = (',' + '\n' * (len(var.shape) - 1) + ' ' * (indent + 1)).join(vars)

        return '[' + s + ']'
|
2020-10-13 19:50:54 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def to_string(var, prefix='Tensor'):
    """Build the printable representation of a dense Tensor.

    Args:
        var: The tensor to render.
        prefix(str): Leading label of the representation; the data block
            is indented to align after it.
    """
    indent = len(prefix) + 1

    dtype = convert_dtype(var.dtype)
    if var.dtype == paddle.bfloat16:
        dtype = 'bfloat16'

    _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient},\n{indent}{data})"

    tensor = var.value().get_tensor()
    if not tensor._is_initialized():
        return "Tensor(Not initialized)"

    if var.dtype == paddle.bfloat16:
        # bf16 data is printed through a float32 copy; sync the device
        # first when the tensor does not live on the CPU.
        if not var.place.is_cpu_place():
            paddle.device.synchronize()
        var = var.astype('float32')
    np_var = var.numpy(False)

    # Element count. A 0-D tensor is given size 0, so it never triggers
    # summarization below.
    size = 0
    if len(var.shape) != 0:
        size = 1
        for extent in var.shape:
            size *= extent

    summary = size > DEFAULT_PRINT_OPTIONS.threshold

    max_width, signed = _get_max_width(_to_summary(np_var))

    data = _format_tensor(
        np_var, summary, indent=indent, max_width=max_width, signed=signed
    )

    return _template.format(
        prefix=prefix,
        shape=var.shape,
        dtype=dtype,
        place=var._place_str,
        stop_gradient=var.stop_gradient,
        indent=' ' * indent,
        data=data,
    )
|
[Eager] publish python c api for eager (#37550)
* refine a test case, test=develop
* publish python c api for eager, test=develop
* revert modify about test_allclose_layer.py, test=develop
* refine, test=develop
* refine, test=develop
* refine, test=develop
* refine, test=develop
* refine, test=develop
* refine, test=develop
* delete numpy includes, use pybind11 numpy.h, test=develop
* refine, test=develop
* refine, test=develop
* refine, test=develop
* suport eager error msg, and add grad test case, test=develop
* refine, test=develop
* refine, test=develop
2021-12-03 10:14:18 +08:00
|
|
|
|
|
|
|
|
|
2024-09-26 16:01:14 +08:00
|
|
|
def mask_xpu_bf16_tensor(np_tensor):
    """Zero the low 16 bits of every element of a float32 array.

    For XPU, bf16→fp32 conversion can add 0x8000 to the tail of each
    value; masking the lower half-word removes that artifact before
    printing.
    """
    keep_high_bits = np.array(0xFFFF0000, dtype='uint32')
    as_bits = np_tensor.view('uint32')
    return (as_bits & keep_high_bits).view('float32')
|
|
|
|
|
|
|
|
|
|
|
2022-03-19 18:05:19 +08:00
|
|
|
def _format_dense_tensor(tensor, indent):
    """Format the data block of a dense tensor (no prefix/metadata).

    Args:
        tensor: The dense tensor whose values are rendered.
        indent(int): Indent applied to continuation lines.
    """
    dtype = tensor.dtype

    # These low-precision dtypes are converted to float32 before calling
    # .numpy(); sync first when the tensor is not on the CPU.
    low_precision = {
        paddle.bfloat16,
        paddle.float8_e4m3fn,
        paddle.float8_e5m2,
    }
    if dtype in low_precision:
        if not tensor.place.is_cpu_place():
            paddle.device.synchronize()
        tensor = tensor.astype('float32')

    # TODO(zhouwei): will remove 0-D Tensor.numpy() hack
    np_tensor = tensor.numpy(False)

    # Order of checks matters: the env-var lookup and dtype comparison
    # only run on XPU builds (short-circuit kept from the original).
    if (
        paddle.is_compiled_with_xpu()
        and os.getenv("XPU_PADDLE_MASK_BF16_PRINT") is not None
        and (dtype == paddle.bfloat16 or dtype == core.VarDesc.VarType.BF16)
    ):
        np_tensor = mask_xpu_bf16_tensor(np_tensor)

    summary = (
        np.prod(tensor.shape, dtype="int64") > DEFAULT_PRINT_OPTIONS.threshold
    )

    max_width, signed = _get_max_width(_to_summary(np_tensor))

    return _format_tensor(
        np_tensor, summary, indent=indent, max_width=max_width, signed=signed
    )
|
|
|
|
|
|
|
|
|
|
|
2024-12-09 17:35:04 +08:00
|
|
|
def selected_rows_tensor_to_string(tensor, dtype, prefix='Tensor'):
    """Render a SelectedRows tensor, including its row index list.

    NOTE(review): implicitly returns None when ``tensor`` is not a
    SelectedRows — callers check ``is_selected_rows()`` first.
    """
    indent = len(prefix) + 1
    if tensor.is_selected_rows():
        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, rows={rows},\n{indent}{data})"
        formatted = _format_dense_tensor(tensor, indent)
        return _template.format(
            prefix=prefix,
            shape=list(tensor.shape),
            dtype=dtype,
            place=tensor._place_str,
            stop_gradient=tensor.stop_gradient,
            rows=tensor.rows(),
            indent=' ' * indent,
            data=formatted,
        )
|
|
|
|
|
|
|
|
|
|
|
2022-03-19 18:05:19 +08:00
|
|
|
def sparse_tensor_to_string(tensor, prefix='Tensor'):
    """Render a sparse tensor (COO or CSR) as a string.

    COO tensors print their indices and values; CSR tensors print
    crows, cols and values.
    """
    indent = len(prefix) + 1

    def _component(label, component_tensor):
        # Each component prints as "label=<data>"; the extra indent makes
        # continuation lines align just after the label text.
        return label + _format_dense_tensor(
            component_tensor, indent + len(label)
        )

    if tensor.is_sparse_coo():
        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, \n{indent}{indices}, \n{indent}{values})"
        return _template.format(
            prefix=prefix,
            shape=list(tensor.shape),
            dtype=tensor.dtype,
            place=tensor._place_str,
            stop_gradient=tensor.stop_gradient,
            indent=' ' * indent,
            indices=_component('indices=', tensor.indices()),
            values=_component('values=', tensor.values()),
        )
    else:
        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, \n{indent}{crows}, \n{indent}{cols}, \n{indent}{values})"
        return _template.format(
            prefix=prefix,
            shape=list(tensor.shape),
            dtype=tensor.dtype,
            place=tensor._place_str,
            stop_gradient=tensor.stop_gradient,
            indent=' ' * indent,
            crows=_component('crows=', tensor.crows()),
            cols=_component('cols=', tensor.cols()),
            values=_component('values=', tensor.values()),
        )
|
2022-03-19 18:05:19 +08:00
|
|
|
|
|
|
|
|
|
2023-06-13 21:42:34 +08:00
|
|
|
def dist_tensor_to_string(tensor, prefix='Tensor'):
    """Render a distributed tensor as a string.

    The tensor is reshard-ed to fully-Replicate placements so the
    global value can be printed; an uninitialized local dense tensor is
    reported without data. (Fix: the else-branch previously recomputed
    ``indent`` identically to the assignment at the top — the duplicate
    has been removed.)
    """
    # TODO(dev): Complete tensor will be printed after reshard
    # is ready.
    indent = len(prefix) + 1
    dtype = convert_dtype(tensor.dtype)
    if tensor.dtype == paddle.bfloat16:
        dtype = 'bfloat16'

    if not tensor._is_dense_tensor_hold_allocation():
        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, process_mesh={process_mesh}, placements={placements}, GlobalDenseTensor Not initialized)"
        return _template.format(
            prefix=prefix,
            shape=list(tensor.shape),
            dtype=dtype,
            place=tensor._place_str,
            stop_gradient=tensor.stop_gradient,
            process_mesh=tensor.process_mesh,
            placements=tensor._placements_str,
        )
    else:
        # If we print a dist_tensor with bf16 dtype and Partial placement, it is essential to ensure that the AllReduce communication
        # is performed in bf16. After completing the communication, convert it to fp32, and then convert it into a numpy array.
        from paddle.distributed import Replicate, reshard

        placements = [Replicate() for _ in range(tensor.process_mesh.ndim)]
        global_tensor = reshard(tensor, tensor.process_mesh, placements)

        data = _format_dense_tensor(global_tensor, indent)
        _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient}, process_mesh={process_mesh}, placements={placements}, GlobalDenseTensor=\n{indent}{data})"
        return _template.format(
            prefix=prefix,
            shape=list(tensor.shape),
            dtype=dtype,
            place=tensor._place_str,
            stop_gradient=tensor.stop_gradient,
            process_mesh=tensor.process_mesh,
            placements=tensor._placements_str,
            indent=' ' * indent,
            data=data,
        )
|
2023-06-13 21:42:34 +08:00
|
|
|
|
|
|
|
|
|
2022-03-19 18:05:19 +08:00
|
|
|
def tensor_to_string(tensor, prefix='Tensor'):
    """Top-level dispatcher that renders an eager Tensor as a string.

    Sparse, SelectedRows and distributed tensors are delegated to their
    dedicated formatters; dense tensors are formatted inline.
    """
    indent = len(prefix) + 1

    dtype = convert_dtype(tensor.dtype)
    if tensor.dtype == paddle.bfloat16:
        dtype = 'bfloat16'

    _template = "{prefix}(shape={shape}, dtype={dtype}, place={place}, stop_gradient={stop_gradient},\n{indent}{data})"

    # Dispatch to the specialized formatters first.
    if tensor.is_sparse():
        return sparse_tensor_to_string(tensor, prefix)

    if tensor.is_selected_rows():
        return selected_rows_tensor_to_string(tensor, dtype, prefix)

    if tensor.is_dist():
        return dist_tensor_to_string(tensor, prefix)

    if not tensor._is_dense_tensor_hold_allocation():
        return "Tensor(Not initialized)"

    body = _format_dense_tensor(tensor, indent)
    return _template.format(
        prefix=prefix,
        shape=list(tensor.shape),
        dtype=dtype,
        place=tensor._place_str,
        stop_gradient=tensor.stop_gradient,
        indent=' ' * indent,
        data=body,
    )
|