2020-12-21 18:09:22 +08:00
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define functions about array.
2024-06-13 09:31:48 +08:00
from __future__ import annotations
2024-08-10 12:05:32 +08:00
from typing import TYPE_CHECKING , Any , TypeVar , overload
2020-12-21 18:09:22 +08:00
2023-11-15 10:56:54 +08:00
import paddle
2024-06-13 09:31:48 +08:00
from paddle import _typing
2023-11-15 10:56:54 +08:00
2023-09-07 17:26:19 +08:00
from . . base . data_feeder import check_type , check_variable_and_dtype
2023-11-15 10:56:54 +08:00
from . . base . framework import in_pir_mode
remove fluid.initializer.UniformInitializer, ConstantInitializer, NormalInitializer, TruncatedNormalInitializer, XavierInitializer, BilinearInitializer, MSRAInitializer, NumpyArrayInitializer and calculate_gain.. (#49498)
* move UniformInitializer and ConstantInitializer
* more modify
* circular import resolved
* another circular import resolved?
* more circular import 2
* circular import 3
* change import paddle in metric.py
* BuildStrategy import from fluid
* modify the framework import path in common.py
* change rnn.py import, from static to original framework
* change import static in the nn folder
* default_main_program should import from common_ops_import
* add import paddle in param_attr.py
* use core not paddle module for using VarDesc
* another old uniform
* mistake that use Uniform instead of UniformInitializer
* modify UniformInitializer doc
* move fluid.NormalInitializer to nn.initializer.NormalInitializer
* remove import of Normal in fluid.layers.nn.py
* remove more import of old Normal
* remove more import of old Normal
* sample code modify and tests modify import
* is_listen_failed passing arg should be log file
* problem solved
* a mistake solved
* comments resoleved and remove paddle.fluid.initializer.TruncatedNormalInitializer
* remove paddle.fluid.initializer.XavierInitializer and paddle.fluid.initializer.MSRAInitializer
* remove paddle.fluid.initializer.BilinearInitializer NumpyArrayInitializer and set_global_initializer
* change fluid to static
* change static to fluid to avoid circular import in distributed_strategy.py
* fix example code and test_initializer
* ValueType
* sample code fix
* change set_global_initializer back to fluid
* put paddle.static.BuildStrategy.ReduceStrategy into the fuction to avoid circular import
* remove calculate_gain, delete BilinearInitializer and revert set_global_initializer
* change the time of using UniformInitializer, ConstantInitializer, NormalInitializer, TruncatedNormalInitializer, XavierInitializer, MSRAInitializer, NumpyArrayInitializer as few as possible
* fix argument incampatible
* fix more arg incompatible
* fix test_prelu_op_xpu.py Constant
* fix inaccurate doc
* more doc fix: default value
2023-02-01 21:38:27 +08:00
from . . common_ops_import import Variable
2023-05-22 20:56:38 +08:00
from . . framework import LayerHelper , core , in_dynamic_mode
2020-12-21 18:09:22 +08:00
2024-08-10 12:05:32 +08:00
if TYPE_CHECKING :
from collections . abc import Sequence
2021-04-29 19:31:40 +08:00
# This module exposes no public names directly; the functions below are
# re-exported through ``paddle.tensor``.
__all__ = []

# Element type variable used by the ``@overload`` signatures below, e.g. so
# that ``array_read`` on a ``list[T]`` is typed as returning ``T``.
T = TypeVar("T")
@overload
def array_length(array: list[Any]) -> int: ...
@overload
def array_length(array: paddle.Tensor) -> paddle.Tensor: ...


def array_length(array):
    """
    This OP is used to get the length of the input array.

    Args:
        array (list|Tensor): The input array that will be used to compute the
            length. In dynamic mode, ``array`` is a Python list. But in static
            graph mode, array is a Tensor whose VarType is DENSE_TENSOR_ARRAY.

    Returns:
        Tensor, 0-D Tensor with shape [], which is the length of array.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> arr = paddle.tensor.create_array(dtype='float32')
            >>> x = paddle.full(shape=[3, 3], fill_value=5, dtype="float32")
            >>> i = paddle.zeros(shape=[1], dtype="int32")

            >>> arr = paddle.tensor.array_write(x, i, array=arr)

            >>> arr_len = paddle.tensor.array_length(arr)
            >>> print(arr_len)
            1
    """
    if in_dynamic_mode():
        # In dygraph a tensor array is just a Python list, so its length is
        # simply ``len``.
        # BUG FIX: message previously said "array_write"; this is array_length.
        assert isinstance(array, list), (
            "The 'array' in array_length must be a list in dygraph mode"
        )
        return len(array)
    elif in_pir_mode():
        if (
            not isinstance(array, paddle.pir.Value)
            or not array.is_dense_tensor_array_type()
        ):
            raise TypeError(
                "array should be tensor array variable in array_length Op"
            )
        return paddle._pir_ops.array_length(array)
    else:
        # Old static-graph mode: validate the variable type, then append a
        # ``lod_array_length`` op producing an int64 scalar.
        if (
            not isinstance(array, Variable)
            or array.type != core.VarDesc.VarType.DENSE_TENSOR_ARRAY
        ):
            raise TypeError(
                "array should be tensor array variable in array_length Op"
            )

        helper = LayerHelper('array_length', **locals())
        tmp = helper.create_variable_for_type_inference(dtype='int64')
        # The length is metadata, not a differentiable quantity.
        tmp.stop_gradient = True
        helper.append_op(
            type='lod_array_length',
            inputs={'X': [array]},
            outputs={'Out': [tmp]},
        )
        return tmp
2020-12-21 18:09:22 +08:00
2024-06-13 09:31:48 +08:00
@overload
def array_read(array: list[T], i: paddle.Tensor) -> T: ...
@overload
def array_read(array: paddle.Tensor, i: paddle.Tensor) -> paddle.Tensor: ...


def array_read(array, i):
    """
    This OP is used to read data at the specified position from the input array.

    Case:

    .. code-block:: text

        Input:
            The shape of first three tensors are [1], and that of the last one is [1,2]:
                array = ([0.6], [0.1], [0.3], [0.4, 0.2])
            And:
                i = [3]

        Output:
            output = [0.4, 0.2]

    Args:
        array (list|Tensor): The input array. In dynamic mode, ``array`` is a
            Python list. But in static graph mode, array is a Tensor whose
            ``VarType`` is ``DENSE_TENSOR_ARRAY``.
        i (Tensor): 1-D Tensor, whose shape is [1] and dtype is int64. It
            represents the specified read position of ``array``.

    Returns:
        Tensor, A Tensor that is read at the specified position of ``array``.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> arr = paddle.tensor.create_array(dtype="float32")
            >>> x = paddle.full(shape=[1, 3], fill_value=5, dtype="float32")
            >>> i = paddle.zeros(shape=[1], dtype="int32")

            >>> arr = paddle.tensor.array_write(x, i, array=arr)

            >>> item = paddle.tensor.array_read(arr, i)
            >>> print(item.numpy())
            [[5. 5. 5.]]
    """
    if in_dynamic_mode():
        assert isinstance(array, list), (
            "The 'array' in array_read must be list in dygraph mode"
        )
        assert isinstance(i, Variable), (
            "The index 'i' in array_read must be Variable in dygraph mode"
        )
        assert i.shape == [1], (
            "The shape of index 'i' should be [1] in dygraph mode"
        )
        # Convert the 1-element index tensor to a Python int for list indexing.
        i = i.item(0)
        return array[i]
    elif in_pir_mode():
        if (
            not isinstance(array, paddle.pir.Value)
            or not array.is_dense_tensor_array_type()
        ):
            # BUG FIX: the message previously said "array_length Op" —
            # copy/paste error; this is the array_read op.
            raise TypeError(
                "array should be tensor array variable in array_read Op"
            )
        return paddle._pir_ops.array_read(array, i)
    else:
        # Old static-graph mode: validate index dtype and array type, then
        # append a ``read_from_array`` op.
        check_variable_and_dtype(i, 'i', ['int64'], 'array_read')
        helper = LayerHelper('array_read', **locals())
        if (
            not isinstance(array, Variable)
            or array.type != core.VarDesc.VarType.DENSE_TENSOR_ARRAY
        ):
            raise TypeError("array should be tensor array variable")
        out = helper.create_variable_for_type_inference(dtype=array.dtype)
        helper.append_op(
            type='read_from_array',
            inputs={'X': [array], 'I': [i]},
            outputs={'Out': [out]},
        )
        return out
2020-12-21 18:09:22 +08:00
2024-06-13 09:31:48 +08:00
@overload
def array_write(
    x: paddle.Tensor, i: paddle.Tensor, array: None = None
) -> list[Any] | paddle.Tensor: ...
@overload
def array_write(
    x: paddle.Tensor, i: paddle.Tensor, array: list[paddle.Tensor]
) -> list[paddle.Tensor]: ...
@overload
def array_write(
    x: paddle.Tensor, i: paddle.Tensor, array: paddle.Tensor
) -> paddle.Tensor: ...


def array_write(
    x,
    i,
    array=None,
):
    """
    This OP writes the input ``x`` into the i-th position of the ``array`` and
    returns the modified array. If ``array`` is None, a new array will be
    created and returned.

    Args:
        x (Tensor): The input data to be written into array. It's
            multi-dimensional Tensor. Data type: float32, float64, int32,
            int64 and bool.
        i (Tensor): 0-D Tensor with shape [], which represents the position
            into which ``x`` is written.
        array (list|Tensor, optional): The array into which ``x`` is written.
            The default value is None, when a new array will be created and
            returned as a result. In dynamic mode, ``array`` is a Python list.
            But in static graph mode, array is a Tensor whose ``VarType`` is
            ``DENSE_TENSOR_ARRAY``.

    Returns:
        list|Tensor, The input ``array`` after ``x`` is written into.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> arr = paddle.tensor.create_array(dtype="float32")
            >>> x = paddle.full(shape=[1, 3], fill_value=5, dtype="float32")
            >>> i = paddle.zeros(shape=[1], dtype="int32")

            >>> arr = paddle.tensor.array_write(x, i, array=arr)

            >>> item = paddle.tensor.array_read(arr, i)
            >>> print(item.numpy())
            [[5. 5. 5.]]
    """
    if in_dynamic_mode():
        # Dygraph: the array is a plain Python list; writing is list
        # assignment (or append when writing one-past-the-end).
        assert isinstance(x, Variable), (
            "The input data 'x' in array_write must be Variable in dygraph mode"
        )
        assert isinstance(i, Variable), (
            "The index 'i' in array_write must be Variable in dygraph mode"
        )
        assert i.shape == [1], (
            "The shape of index 'i' should be [1] in dygraph mode"
        )
        idx = i.item(0)
        if array is None:
            array = create_array(x.dtype)
        assert isinstance(array, list), (
            "The 'array' in array_write must be a list in dygraph mode"
        )
        assert idx <= len(array), (
            "The index 'i' should not be greater than the length of 'array' in dygraph mode"
        )
        if idx == len(array):
            array.append(x)
        else:
            array[idx] = x
        return array
    elif in_pir_mode():
        check_variable_and_dtype(i, 'i', ['int64'], 'array_write')
        if not isinstance(x, paddle.pir.Value):
            raise TypeError(f"x should be pir.Value, but received {type(x)}.")
        if array is None:
            # No array supplied: create one typed after ``x``.
            array = paddle._pir_ops.create_array(x.dtype)
        elif (
            not isinstance(array, paddle.pir.Value)
            or not array.is_dense_tensor_array_type()
        ):
            raise TypeError("array should be tensor array variable")
        # Cast ``x`` to the array dtype when the array has a concrete dtype.
        if array.dtype != paddle.base.libpaddle.DataType.UNDEFINED:
            x = paddle.cast(x, array.dtype)
        paddle._pir_ops.array_write_(array, x, i)
        return array
    else:
        # Old static-graph mode: validate inputs and append ``write_to_array``.
        check_variable_and_dtype(i, 'i', ['int64'], 'array_write')
        check_type(x, 'x', (Variable), 'array_write')
        helper = LayerHelper('array_write', **locals())
        if array is None:
            array = helper.create_variable(
                name=f"{helper.name}.out",
                type=core.VarDesc.VarType.DENSE_TENSOR_ARRAY,
                dtype=x.dtype,
            )
        elif (
            not isinstance(array, Variable)
            or array.type != core.VarDesc.VarType.DENSE_TENSOR_ARRAY
        ):
            raise TypeError(
                "array should be tensor array variable in array_write Op"
            )
        helper.append_op(
            type='write_to_array',
            inputs={'X': [x], 'I': [i]},
            outputs={'Out': [array]},
        )
        return array
2020-12-21 18:09:22 +08:00
2024-06-13 09:31:48 +08:00
def create_array(
    dtype: _typing.DTypeLike,
    initialized_list: Sequence[paddle.Tensor] | None = None,
) -> paddle.Tensor | list[paddle.Tensor]:
    """
    This OP creates an array. It is used as the input of
    :ref:`api_paddle_tensor_array_array_read` and
    :ref:`api_paddle_tensor_array_array_write`.

    Args:
        dtype (str): The data type of the elements in the array. Support data
            type: float32, float64, int32, int64 and bool.
        initialized_list(list): Used to initialize as default value for
            created array. All values in initialized list should be a Tensor.

    Returns:
        list|Tensor, An empty array. In dynamic mode, ``array`` is a Python
        list. But in static graph mode, array is a Tensor whose ``VarType``
        is ``DENSE_TENSOR_ARRAY``.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> arr = paddle.tensor.create_array(dtype="float32")
            >>> x = paddle.full(shape=[1, 3], fill_value=5, dtype="float32")
            >>> i = paddle.zeros(shape=[1], dtype="int32")

            >>> arr = paddle.tensor.array_write(x, i, array=arr)

            >>> item = paddle.tensor.array_read(arr, i)
            >>> print(item.numpy())
            [[5. 5. 5.]]
    """
    # Normalize the optional seed values into a fresh list (never alias the
    # caller's container).
    if initialized_list is None:
        array = []
    else:
        if not isinstance(initialized_list, (list, tuple)):
            raise TypeError(
                f"Require type(initialized_list) should be list/tuple, but received {type(initialized_list)}"
            )
        array = list(initialized_list)

    # NOTE: Only support plain list like [x, y,...], not support nested list in static graph mode.
    for val in array:
        if not isinstance(val, (Variable, paddle.pir.Value)):
            raise TypeError(
                f"All values in `initialized_list` should be Variable or pir.Value, but received {type(val)}."
            )

    if in_dynamic_mode():
        # Dygraph: a tensor array is simply a Python list.
        return array

    if in_pir_mode():
        # PIR: normalize the dtype, create the array op, then replay the seed
        # values into it.
        if not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
            dtype = paddle.base.framework.convert_np_dtype_to_dtype_(dtype)
        out = paddle._pir_ops.create_array(dtype)
        for val in array:
            if dtype != paddle.base.libpaddle.DataType.UNDEFINED:
                val = paddle.cast(val, dtype)
            paddle._pir_ops.array_write_(out, val, array_length(out))
        return out

    # Old static-graph mode: create a DENSE_TENSOR_ARRAY variable and append
    # each seed value via array_write.
    helper = LayerHelper("array", **locals())
    tensor_array: paddle.Tensor = helper.create_variable(
        name=f"{helper.name}.out",
        type=core.VarDesc.VarType.DENSE_TENSOR_ARRAY,
        dtype=dtype,
    )

    for val in array:
        array_write(x=val, i=array_length(tensor_array), array=tensor_array)

    return tensor_array