2020-03-23 22:01:54 +08:00
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
2022-06-05 10:58:58 +08:00
# TODO: define random functions
2020-04-07 19:34:02 +08:00
2024-06-19 19:06:53 +08:00
from __future__ import annotations
2026-02-10 15:20:14 +08:00
from typing import TYPE_CHECKING , overload
2024-06-19 19:06:53 +08:00
2022-11-28 11:52:40 +08:00
import paddle
2025-04-15 09:38:15 +08:00
from paddle import _C_ops
2026-03-02 20:16:05 +08:00
from paddle . _C_ops import poisson # noqa: F401
2023-09-27 14:55:57 +08:00
from paddle . base . framework import _current_expected_place
2023-11-30 14:08:50 +08:00
from paddle . base . libpaddle import DataType
remove fluid.initializer.UniformInitializer, ConstantInitializer, NormalInitializer, TruncatedNormalInitializer, XavierInitializer, BilinearInitializer, MSRAInitializer, NumpyArrayInitializer and calculate_gain.. (#49498)
* move UniformInitializer and ConstantInitializer
* more modify
* circular import resolved
* another circular import resolved?
* more circular import 2
* circular import 3
* change import paddle in metric.py
* BuildStrategy import from fluid
* modify the framework import path in common.py
* change rnn.py import, from static to original framework
* change import static in the nn folder
* default_main_program should import from common_ops_import
* add import paddle in param_attr.py
* use core not paddle module for using VarDesc
* another old uniform
* mistake that use Uniform instead of UniformInitializer
* modify UniformInitializer doc
* move fluid.NormalInitializer to nn.initializer.NormalInitializer
* remove import of Normal in fluid.layers.nn.py
* remove more import of old Normal
* remove more import of old Normal
* sample code modify and tests modify import
* is_listen_failed passing arg should be log file
* problem solved
* a mistake solved
* comments resoleved and remove paddle.fluid.initializer.TruncatedNormalInitializer
* remove paddle.fluid.initializer.XavierInitializer and paddle.fluid.initializer.MSRAInitializer
* remove paddle.fluid.initializer.BilinearInitializer NumpyArrayInitializer and set_global_initializer
* change fluid to static
* change static to fluid to avoid circular import in distributed_strategy.py
* fix example code and test_initializer
* ValueType
* sample code fix
* change set_global_initializer back to fluid
* put paddle.static.BuildStrategy.ReduceStrategy into the fuction to avoid circular import
* remove calculate_gain, delete BilinearInitializer and revert set_global_initializer
* change the time of using UniformInitializer, ConstantInitializer, NormalInitializer, TruncatedNormalInitializer, XavierInitializer, MSRAInitializer, NumpyArrayInitializer as few as possible
* fix argument incampatible
* fix more arg incompatible
* fix test_prelu_op_xpu.py Constant
* fix inaccurate doc
* more doc fix: default value
2023-02-01 21:38:27 +08:00
from paddle . common_ops_import import Variable
2023-09-22 11:30:19 +08:00
from paddle . framework import (
in_dynamic_mode ,
in_dynamic_or_pir_mode ,
in_pir_mode ,
2024-03-13 11:43:50 +08:00
use_pir_api ,
2023-09-22 11:30:19 +08:00
)
2025-08-25 14:20:48 +08:00
from paddle . utils . decorator_utils import (
param_one_alias ,
2025-11-26 18:14:10 +08:00
param_two_alias ,
2025-08-25 14:20:48 +08:00
size_args_decorator ,
)
2022-11-28 11:52:40 +08:00
2023-09-07 17:26:19 +08:00
from . . base . data_feeder import (
2022-10-23 20:01:27 +08:00
check_dtype ,
check_shape ,
2022-11-28 11:52:40 +08:00
check_type ,
check_variable_and_dtype ,
2022-10-23 20:01:27 +08:00
)
2022-11-28 11:52:40 +08:00
from . . framework import (
LayerHelper ,
2025-08-25 14:20:48 +08:00
_get_paddle_place ,
2022-11-28 11:52:40 +08:00
convert_np_dtype_to_dtype_ ,
core ,
dygraph_only ,
2022-10-23 20:01:27 +08:00
)
2020-04-07 19:34:02 +08:00
2024-06-19 19:06:53 +08:00
if TYPE_CHECKING :
from paddle import Tensor
2025-08-25 14:20:48 +08:00
from paddle . _typing import DTypeLike , PlaceLike , ShapeLike
2024-06-19 19:06:53 +08:00
2021-04-29 19:31:40 +08:00
__all__ = [ ]
2020-04-07 19:34:02 +08:00
2026-03-18 14:54:29 +08:00
@param_one_alias(['x', 'input'])
def bernoulli(
    x: Tensor,
    p: float | None = None,
    name: str | None = None,
    *,
    out: Tensor | None = None,
) -> Tensor:
    r"""
    For each element :math:`x_i` in input ``x``, take a sample from the Bernoulli
    distribution, also called two-point distribution, with success probability
    :math:`x_i`. The Bernoulli distribution with success probability :math:`x_i`
    is a discrete probability distribution with probability mass function

    .. math::
        p(y)=\begin{cases}
            x_i,&y=1\\
            1-x_i,&y=0
        \end{cases}.

    Args:
        x (Tensor): The input Tensor, it's data type should be float32, float64.
            Alias: ``input``.
        p (float|None, optional): If ``p`` is given, the success probability will
            always be ``p``. Default is None, which means to use the success
            probability specified by input ``x``.
        name (str|None, optional): For details, please refer to :ref:`api_guide_Name`.
            Generally, no setting is required. Default: None.

    Keyword args:
        out (Tensor, optional): The output tensor.

    Returns:
        Tensor, A Tensor filled samples from Bernoulli distribution, whose shape
        and dtype are same as ``x``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.set_device('cpu')  # on CPU device
            >>> paddle.seed(100)
            >>> x = paddle.rand([2, 3])
            >>> out = paddle.bernoulli(x)

            >>> out = paddle.bernoulli(x, p=0)
            >>> print(out)
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0., 0., 0.],
             [0., 0., 0.]])
    """
    if p is not None:
        # A scalar success probability overrides the per-element probabilities
        # carried by ``x``; broadcasting it keeps shape/dtype of the output.
        x = paddle.full_like(x, p)
    if in_dynamic_or_pir_mode():
        return _C_ops.bernoulli(x, out=out)
    else:
        check_variable_and_dtype(
            x, "x", ["float32", "float64", "float16", "uint16"], "bernoulli"
        )

        # Fixed: the helper was previously (incorrectly) named "randint",
        # which produced misleading op/variable names in static graphs.
        helper = LayerHelper("bernoulli", **locals())
        out = helper.create_variable_for_type_inference(dtype=x.dtype)
        helper.append_op(
            type='bernoulli', inputs={"X": x}, outputs={'Out': out}, attrs={}
        )
        out.stop_gradient = True
        return out
2020-08-21 23:44:33 +08:00
2024-05-21 17:17:19 +08:00
@dygraph_only
def bernoulli_(
    x: Tensor, p: float | Tensor = 0.5, name: str | None = None
) -> Tensor:
    """
    This is the inplace version of api ``bernoulli``, which returns a Tensor filled
    with random values sampled from a bernoulli distribution. The output Tensor will
    be inplaced with input ``x``. Please refer to :ref:`api_paddle_bernoulli`.

    Args:
        x (Tensor): The input tensor to be filled with random values.
        p (float|Tensor, optional): The success probability parameter of the output
            Tensor's bernoulli distribution. If ``p`` is float, all elements of the
            output Tensor shared the same success probability. If ``p`` is a Tensor,
            it has per-element success probabilities, and the shape should be
            broadcastable to ``x``. Default is 0.5
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random values sampled from the bernoulli
        distribution with success probability ``p``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.set_device('cpu')
            >>> paddle.seed(200)
            >>> x = paddle.randn([3, 4])
            >>> x.bernoulli_()

            >>> x = paddle.randn([3, 4])
            >>> p = paddle.randn([3, 1])
            >>> x.bernoulli_(p)
    """
    # Draw U ~ Uniform[0, 1) in place, then threshold at ``p``:
    # P(U < p) = p, so elements below ``p`` become 1 and the rest become 0.
    x.uniform_(0.0, 1.0)
    # Both masks are computed BEFORE any fill so the second mask is not
    # affected by the first in-place write.
    ones_mask = x < p
    # Fixed: use ``>=`` (not ``>``) so the two masks are exhaustive. With the
    # strict comparison, an element exactly equal to ``p`` kept its raw
    # uniform sample instead of being mapped to 0 or 1.
    zeros_mask = x >= p
    x.masked_fill_(ones_mask, 1.0)
    x.masked_fill_(zeros_mask, 0.0)
    return x
2024-06-19 19:06:53 +08:00
def binomial(count: Tensor, prob: Tensor, name: str | None = None) -> Tensor:
    r"""
    Returns a tensor filled with random number from the Binomial Distribution,
    which supports Tensor shape broadcasting. The returned Tensor's data type
    is int64.

    .. math::
        out_i \sim Binomial(n=count_i, p=prob_i)

    Args:
        count (Tensor): A tensor with each element specifying the size of a
            binomial distribution. The input data type should be int32 or int64.
        prob (Tensor): A tensor with each element specifying the probability of
            success in the binomial experiment. The input data type should be
            bfloat16, float16, float32, float64.
        name (str|None, optional): The default value is None. Normally there is
            no need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with binomial random values with the same shape
        as the broadcasted Tensors of ``count`` and ``prob``. The data type is
        int64.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.set_device('cpu')
            >>> paddle.seed(100)
            >>> n = paddle.to_tensor([10.0, 50.0])
            >>> p = paddle.to_tensor([0.2, 0.6])
            >>> out = paddle.binomial(n, p)
    """
    # Dynamic / PIR path: broadcast the two parameter tensors (casting the
    # counts to the probability dtype first) and dispatch to the C++ op.
    if in_dynamic_or_pir_mode():
        count, prob = paddle.broadcast_tensors(
            [paddle.cast(count, dtype=prob.dtype), prob]
        )
        return _C_ops.binomial(count, prob)

    # Static-graph path: validate dtypes, broadcast, then append the op.
    check_variable_and_dtype(count, "count", ["int32", "int64"], "binomial")
    check_variable_and_dtype(
        prob,
        "prob",
        ["bfloat16", "float16", "float32", "float64"],
        "binomial",
    )
    count, prob = paddle.broadcast_tensors(
        [paddle.cast(count, dtype=prob.dtype), prob]
    )
    helper = LayerHelper("binomial", **locals())
    out = helper.create_variable_for_type_inference(
        dtype=convert_np_dtype_to_dtype_('int64')
    )
    helper.append_op(
        type='binomial',
        inputs={"count": count, "prob": prob},
        outputs={'out': out},
        attrs={},
    )
    out.stop_gradient = True
    return out
2024-06-19 19:06:53 +08:00
def standard_gamma(x: Tensor, name: str | None = None) -> Tensor:
    r"""
    Returns a tensor filled with random number from a Standard Gamma Distribution.

    .. math::
        out_i \sim Gamma(alpha = x_i, beta = 1.0)

    Args:
        x (Tensor): A tensor with rate parameter of standard gamma Distribution.
            The data type should be bfloat16, float16, float32, float64.
        name (str|None, optional): The default value is None. Normally there is
            no need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random number with the same shape and dtype
        as ``x``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.set_device('cpu')
            >>> paddle.seed(100)
            >>> x = paddle.uniform([2, 3], min=1.0, max=5.0)
            >>> out = paddle.standard_gamma(x)
    """
    # Dynamic / PIR path: dispatch straight to the C++ kernel.
    if in_dynamic_or_pir_mode():
        return _C_ops.standard_gamma(x)

    # Static-graph path: validate the input dtype and append the op.
    # NOTE: unlike the other samplers in this file, ``stop_gradient`` is not
    # forced here (preserved from the original behavior).
    check_variable_and_dtype(x, "x", ["float32", "float64"], "standard_gamma")
    helper = LayerHelper("standard_gamma", **locals())
    out = helper.create_variable_for_type_inference(dtype=x.dtype)
    helper.append_op(
        type='standard_gamma',
        inputs={'x': x},
        outputs={'out': out},
        attrs={},
    )
    return out
2024-06-19 19:06:53 +08:00
def log_normal(
    mean: float | Tensor = 1.0,
    std: float | Tensor = 2.0,
    shape: ShapeLike | None = None,
    name: str | None = None,
) -> Tensor:
    r"""
    Returns a Tensor filled with random values sampled from a Log Normal
    Distribution, with ``mean``, ``std``.

    The Log Normal Distribution is defined as follows

    .. math::
        f(x) = \frac{1}{x\sigma\sqrt{2\pi}}e^{-\frac{(\ln{x}-\mu)^2}{2\sigma^2}}

    Args:
        mean (float|Tensor, optional): The mean of the output Tensor's underlying
            normal distribution. If ``mean`` is float, all elements of the output
            Tensor share the same mean. If ``mean`` is a Tensor (data type
            supports float32, float64), it has per-element means. Default is 1.0
        std (float|Tensor, optional): The standard deviation of the output
            Tensor's underlying normal distribution. If ``std`` is float, all
            elements of the output Tensor share the same standard deviation.
            If ``std`` is a Tensor (data type supports float32, float64), it has
            per-element standard deviations. Default is 2.0
        shape (tuple|list|Tensor|None, optional): Shape of the Tensor to be
            created. The data type is ``int32`` or ``int64``. If ``shape`` is a
            list or tuple, each element of it should be integer or 0-D Tensor
            with shape []. If ``shape`` is an Tensor, it should be an 1-D Tensor
            which represents a list. If ``mean`` or ``std`` is a Tensor, the
            shape of the output Tensor is the same as ``mean`` or ``std``, attr
            ``shape`` is ignored. Default is None
        name (str|None, optional): Name for the operation (optional, default is
            None). For more information, please refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random values sampled from a log normal
        distribution with the underlying normal distribution's ``mean`` and
        ``std``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.seed(200)
            >>> out1 = paddle.log_normal(shape=[2, 3])
            >>> mean_tensor = paddle.to_tensor([1.0, 2.0, 3.0])
            >>> out2 = paddle.log_normal(mean=mean_tensor)
            >>> std_tensor = paddle.to_tensor([1.0, 2.0, 3.0])
            >>> out3 = paddle.log_normal(mean=mean_tensor, std=std_tensor)
    """
    # exp(N(mean, std)) is log-normal by definition; delegate the sampling to
    # paddle.normal and exponentiate the result.
    return paddle.exp(paddle.normal(mean=mean, std=std, shape=shape, name=name))
@dygraph_only
def log_normal_(
    x: Tensor, mean: float = 1.0, std: float = 2.0, name: str | None = None
) -> Tensor:
    r"""
    This inplace version of api ``log_normal``, which returns a Tensor filled
    with random values sampled from a log normal distribution. The output Tensor
    will be inplaced with input ``x``. Please refer to
    :ref:`api_paddle_log_normal`.

    Args:
        x (Tensor): The input tensor to be filled with random values.
        mean (float|int, optional): Mean of the output tensor, default is 1.0.
        std (float|int, optional): Standard deviation of the output tensor,
            default is 2.0.
        name (str|None, optional): The default value is None. Normally there is
            no need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random values sampled from a log normal
        distribution with the underlying normal distribution's ``mean`` and
        ``std``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.seed(200)
            >>> x = paddle.randn([3, 4])
            >>> x.log_normal_()
    """
    # Fill ``x`` in place with normal samples, then exponentiate in place to
    # obtain log-normal samples.
    filled = normal_(x, mean=mean, std=std)
    return filled.exp_()
2025-08-14 17:16:28 +08:00
@param_one_alias(["x", "input"])
def multinomial(
    x: Tensor,
    num_samples: int = 1,
    replacement: bool = False,
    name: str | None = None,
    *,
    out: Tensor | None = None,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a Multinomial
    distribution. The input ``x`` is a tensor with probabilities for generating
    the random number. Each element in ``x`` should be larger or equal to 0, but
    not all 0. ``replacement`` indicates whether it is a replaceable sample. If
    ``replacement`` is True, a category can be sampled more than once.

    .. note::
        Alias Support: The parameter name ``input`` can be used as an alias for ``x``.
        For example, ``multinomial(input=tensor_x, ...)`` is equivalent to ``multinomial(x=tensor_x, ...)``.

    Args:
        x (Tensor): A tensor with probabilities for generating the random
            number. The data type should be float32, float64.
            alias: ``input``.
        num_samples (int, optional): Number of samples, default is 1.
        replacement (bool, optional): Whether it is a replaceable sample,
            default is False.
        name (str|None, optional): The default value is None. Normally there is
            no need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        out (Tensor|None, optional): The output Tensor. If set, the result will
            be stored in this Tensor. Default is None.

    Returns:
        Tensor, A Tensor filled with sampled category index after
        ``num_samples`` times samples.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.seed(100)  # on CPU device
            >>> x = paddle.rand([2, 4])
            >>> paddle.seed(200)  # on CPU device
            >>> out1 = paddle.multinomial(x, num_samples=5, replacement=True)
            >>> # out2 = paddle.multinomial(x, num_samples=5)
            >>> # InvalidArgumentError: When replacement is False, number of samples
            >>> # should be less than non-zero categories
            >>> paddle.seed(300)  # on CPU device
            >>> out3 = paddle.multinomial(x, num_samples=3)
    """
    # Dynamic / PIR path: dispatch directly to the C++ kernel.
    if in_dynamic_or_pir_mode():
        return _C_ops.multinomial(x, num_samples, replacement, out=out)

    # Static-graph path: validate the input dtype and append the op. Note the
    # ``out`` keyword is not used in this branch; the output variable is
    # created by the LayerHelper instead.
    check_variable_and_dtype(
        x, "x", ["uint16", "float16", "float32", "float64"], "multinomial"
    )
    helper = LayerHelper("multinomial", **locals())
    out = helper.create_variable_for_type_inference(
        dtype=convert_np_dtype_to_dtype_('int64')
    )
    helper.append_op(
        type='multinomial',
        inputs={"X": x},
        outputs={'Out': out},
        attrs={'num_samples': num_samples, 'replacement': replacement},
    )
    out.stop_gradient = True
    return out
2020-09-29 10:45:35 -05:00
2022-11-24 18:34:00 +08:00
def uniform_random_batch_size_like(
    input: Tensor,
    shape: ShapeLike,
    dtype: DTypeLike = 'float32',
    input_dim_idx: int = 0,
    output_dim_idx: int = 0,
    min: float = -1.0,
    max: float = 1.0,
    seed: int = 0,
) -> Tensor:
    """
    This OP initializes a variable with random values sampled from a uniform
    distribution in the range [min, max). The input_dim_idx is used to get the
    input dimension value which will be used to resize the output dimension.

    .. code-block:: text

        *Case 1:
            Given:
                input =[[0.946741  , 0.1357001 , 0.38086128]]    # input.shape=[1,3]
                shape=[2,4]
            result.shape[output_dim_idx] = input.shape[input_dim_idx],
            output_dim_idx = 0, input_dim_idx = 0,
            result.shape[0] = input.shape[0],
            then:
                result=[[ 0.3443427 , -0.23056602,  0.3477049 ,  0.06139076]]    # result.shape=[1,4]

        *Case 2:
            Given:
                input =[[0.946741  , 0.1357001 , 0.38086128]]    # input.shape=[1,3]
                shape=[2,4]
                input_dim_idx=1, output_dim_idx=1
            result.shape[output_dim_idx] = input.shape[input_dim_idx],
            result.shape[1] = input.shape[1],
            then:
                result=[[-0.23133647, -0.84195036,  0.21441269],
                        [-0.08774924,  0.25605237, -0.09403259]]    # result.shape=[2,3]

    Args:
        input (Tensor): A Tensor. Supported data types: float32, float64.
        shape (tuple|list): A python list or python tuple. The shape of the
            output Tensor, the data type is int.
        dtype (str|paddle.dtype|np.dtype, optional): The data type of output
            Tensor. Supported data types: float32, float64. Default float32.
        input_dim_idx (int, optional): An index used to get the input dimension
            value which will be used to resize the output dimension. Default 0.
        output_dim_idx (int, optional): An index used to indicate the specific
            dimension that will be replaced by corresponding input dimension
            value. Default 0.
        min (float, optional): The lower bound on the range of random values to
            generate, the min is included in the range. Default -1.0.
        max (float, optional): The upper bound on the range of random values to
            generate, the max is excluded in the range. Default 1.0.
        seed (int, optional): Random seed used for generating samples. 0 means
            use a seed generated by the system. Note that if seed is not 0, this
            operator will always generate the same random numbers every time.

    Returns:
        Tensor, A Tensor of the specified shape filled with uniform_random
        values. The shape of the Tensor is determined by the shape parameter and
        the specified dimension of the input Tensor.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> from paddle.tensor import random
            >>> paddle.enable_static()
            >>> # example 1:
            >>> input = paddle.static.data(name="input", shape=[1, 3], dtype='float32')
            >>> out_1 = random.uniform_random_batch_size_like(input, [2, 4])
            >>> print(out_1.shape)
            paddle.Size([1, 4])
            >>> # example 2:
            >>> out_2 = random.uniform_random_batch_size_like(input, [2, 4], input_dim_idx=1, output_dim_idx=1)
            >>> print(out_2.shape)
            paddle.Size([2, 3])
    """
    # Dynamic / PIR path: convert the dtype and call the C++ op directly.
    # The trailing (0, 0, 1.0) arguments correspond to the op's diag_num,
    # diag_step and diag_val attributes (defaults preserved from the original).
    if in_dynamic_or_pir_mode():
        dtype = convert_np_dtype_to_dtype_(dtype)
        return _C_ops.uniform_random_batch_size_like(
            input,
            shape,
            input_dim_idx,
            output_dim_idx,
            min,
            max,
            seed,
            0,
            0,
            1.0,
            dtype,
        )

    # Static-graph path: validate inputs, then build and append the op.
    check_variable_and_dtype(
        input,
        'Input',
        ("float32", 'float64', "uint16"),
        'uniform_random_batch_size_like',
    )
    check_type(shape, 'shape', (list, tuple), 'uniform_random_batch_size_like')
    check_dtype(
        dtype,
        'dtype',
        ('float32', 'float64', "uint16"),
        'uniform_random_batch_size_like',
    )
    helper = LayerHelper('uniform_random_batch_size_like', **locals())
    out = helper.create_variable_for_type_inference(dtype)
    c_dtype = convert_np_dtype_to_dtype_(dtype)
    helper.append_op(
        type='uniform_random_batch_size_like',
        inputs={'Input': input},
        outputs={'Out': out},
        attrs={
            'shape': shape,
            'input_dim_idx': input_dim_idx,
            'output_dim_idx': output_dim_idx,
            'min': min,
            'max': max,
            'seed': seed,
            'dtype': c_dtype,
        },
    )
    return out
2024-06-19 19:06:53 +08:00
def gaussian(
    shape: ShapeLike,
    mean: complex = 0.0,
    std: float = 1.0,
    seed: int = 0,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a Gaussian
    distribution, with ``shape`` and ``dtype``.

    Args:
        shape (tuple|list|Tensor): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list.
        mean (float|int|complex, optional): Mean of the output tensor, default is 0.0.
        std (float|int, optional): Standard deviation of the output tensor, default
            is 1.0.
        seed (int, optional): Random seed of generator.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the output Tensor.
            Supported data types: bfloat16, float16, float32, float64, complex64, complex128.
            Default is None, use global default dtype (see ``get_default_dtype``
            for details).
        name (str|None, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
        out (Tensor, optional): The output tensor.
        device (PlaceLike|None, optional): The desired device of returned tensor.
            if None, uses the current device for the default tensor type (see paddle.device.set_device()).
            device will be the CPU for CPU tensor types and the current CUDA device for CUDA tensor types. Default: None.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.

    Returns:
        Tensor, A Tensor filled with random values sampled from a Gaussian
        distribution, with ``shape`` and ``dtype``.
    """
    op_type_for_check = 'gaussian/standard_normal/randn/normal'
    supported_dtypes = [
        'float32',
        'float64',
        'float16',
        'uint16',
        'bfloat16',
        'complex64',
        'complex128',
    ]

    # When no dtype is given, fall back to the global default; only the
    # default is validated here — an explicitly passed dtype is checked later
    # (static-graph branch) or by the kernel itself.
    if dtype is None:
        dtype = paddle.framework.get_default_dtype()
        if dtype not in supported_dtypes:
            raise TypeError(
                f"{op_type_for_check} only supports {supported_dtypes}, but the default dtype is {dtype}"
            )
    # Normalize string/np dtypes to the framework's dtype enum before the
    # complex-mean check below, which compares against enum values.
    if not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
        dtype = convert_np_dtype_to_dtype_(dtype)

    # A complex ``mean`` is only valid for complex dtypes, and only when its
    # real and imaginary parts are equal; the kernel receives the (common)
    # real part as a float.
    if isinstance(mean, complex):
        if dtype not in [
            core.VarDesc.VarType.COMPLEX64,
            core.VarDesc.VarType.COMPLEX128,
            core.DataType.COMPLEX64,
            core.DataType.COMPLEX128,
        ]:
            raise TypeError(
                "if mean is a complex number, dtype should be complex64 or complex128, "
                f"but got dtype = {dtype}",
            )
        if mean.real != mean.imag:
            raise ValueError(
                "The mean of complex gaussian distribution should be a complex number with "
                f"real part equal imaginary part, but got {mean.real} != {mean.imag}",
            )
        mean = mean.real
    if in_dynamic_or_pir_mode():
        # Normalize ``shape``: dygraph wants a plain Python list; PIR only
        # needs conversion when the shape contains Tensors.
        if in_dynamic_mode():
            shape = paddle.utils.convert_shape_to_list(shape)
        elif in_pir_mode() and paddle.utils._contain_var(shape):
            shape = paddle.utils.get_int_tensor_list(shape)
        # Resolve the target place: explicit ``device`` wins, otherwise the
        # current expected place.
        place = (
            _current_expected_place()
            if device is None
            else _get_paddle_place(device)
        )
        tensor = _C_ops.gaussian(
            shape, float(mean), float(std), seed, dtype, place, out=out
        )
        if requires_grad is True:
            tensor.stop_gradient = False
        return tensor
    else:
        check_shape(shape, op_type_for_check)
        check_dtype(dtype, 'dtype', supported_dtypes, op_type_for_check)

        inputs = {}
        attrs = {
            'mean': mean,
            'std': std,
            'seed': seed,
            'dtype': dtype,
        }
        # Fills ``inputs``/``attrs`` in place depending on whether ``shape``
        # is a constant list or contains graph variables.
        paddle.utils.get_shape_tensor_inputs(
            inputs=inputs, attrs=attrs, shape=shape, op_type=op_type_for_check
        )

        helper = LayerHelper('gaussian', **locals())
        out = helper.create_variable_for_type_inference(dtype)
        helper.append_op(
            type='gaussian_random',
            inputs=inputs,
            outputs={'Out': out},
            attrs=attrs,
        )
        out.stop_gradient = True
        return out
2020-08-23 18:08:24 +08:00
2023-09-22 11:14:48 +08:00
@dygraph_only
def gaussian_(
    x: Tensor,
    mean: complex = 0.0,
    std: float = 1.0,
    seed: int = 0,
    name: str | None = None,
) -> Tensor:
    """
    Inplace version of ``gaussian``: fill the input Tensor ``x`` with random
    values drawn from a gaussian (normal) distribution and return ``x``
    itself. Please refer to :ref:`api_tensor_gaussian`.

    Args:
        x(Tensor): The input tensor to be filled with random values.
        mean (float|int|complex, optional): Mean of the output tensor, default is 0.0.
        std (float|int, optional): Standard deviation of the output tensor, default
            is 1.0.
        seed (int, optional): Random seed of generator.
        name(str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, The input tensor x filled with random values sampled from a gaussian
        distribution.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> x = paddle.randn([3, 4])
            >>> paddle.tensor.random.gaussian_(x)
            >>> print(x)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3, 4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 1.84037554, -1.04185271,  1.04286408,  0.48108253],
             [-0.84185606,  0.18335205,  0.07997673, -0.56327361],
             [-1.30208957, -0.20095424,  0.13392292,  0.68311596]])
            >>> # doctest: -SKIP
    """
    if isinstance(mean, complex):
        # A complex mean is only meaningful for complex tensors; both the
        # legacy VarType and the PIR DataType enums must be accepted.
        complex_dtypes = (
            core.VarDesc.VarType.COMPLEX64,
            core.VarDesc.VarType.COMPLEX128,
            core.DataType.COMPLEX64,
            core.DataType.COMPLEX128,
        )
        if x.dtype not in complex_dtypes:
            raise TypeError(
                "if mean is a complex number, x's dtype should be complex64 or complex128, "
                f"but dtype = {x.dtype}",
            )
        # The underlying kernel takes a single real mean, so the complex
        # mean must have equal real and imaginary parts.
        if mean.real != mean.imag:
            raise ValueError(
                "The mean of complex gaussian distribution should be a complex number with "
                f"real part equal imaginary part, but got {mean.real} != {mean.imag}",
            )
        mean = mean.real
    return _C_ops.gaussian_inplace_(x, float(mean), float(std), int(seed))
2024-06-19 19:06:53 +08:00
def standard_normal(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a standard
    normal distribution with mean 0 and standard deviation 1, with ``shape``
    and ``dtype``.

    Args:
        shape (tuple|list|Tensor): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the output Tensor.
            Supported data types: float16, bfloat16, float32, float64, complex64, complex128.
            Default is None, use global default dtype (see ``get_default_dtype``
            for details).
        name (str|None, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
        out(Tensor, optional): The output tensor.
        device(PlaceLike|None, optional): The desired device of returned tensor.
            if None, uses the current device for the default tensor type (see paddle.device.set_device()).
            device will be the CPU for CPU tensor types and the current CUDA device for CUDA tensor types. Default: None.
        requires_grad(bool, optional): If autograd should record operations on the returned tensor. Default: False.

    Returns:
        Tensor, A Tensor filled with random values sampled from a standard
        normal distribution with mean 0 and standard deviation 1, with
        ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # doctest: +SKIP("Random output")
            >>> # example 1: attr shape is a list which doesn't contain Tensor.
            >>> out1 = paddle.standard_normal(shape=[2, 3])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.33719197, -0.25688133, -0.42868865],
             [-0.27804616, -0.25058213, -0.28209466]])
            >>> # doctest: -SKIP
            >>> # example 2: attr shape is a list which contains Tensor.
            >>> dim1 = paddle.to_tensor(2, 'int64')
            >>> dim2 = paddle.to_tensor(3, 'int32')
            >>> out2 = paddle.standard_normal(shape=[dim1, dim2, 2])
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[[ 0.81888396, -0.64831746],
              [ 1.28911388, -1.88154876],
              [-0.03271919, -0.32410008]],
             [[-0.20224631,  0.46683890],
              [ 1.91947734,  0.71657443],
              [ 0.33410960, -0.64256823]]])
            >>> # doctest: -SKIP
            >>> # example 3: attr shape is a Tensor, the data type must be int64 or int32.
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out3 = paddle.standard_normal(shape_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.01182475, -0.44895259, -1.79227340],
             [ 1.52022707, -0.83830303,  0.05261501]])
            >>> # doctest: -SKIP

            >>> # example 4: attr dtype is complex64.
            >>> paddle.seed(200)
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out4 = paddle.standard_normal(shape_tensor, dtype='complex64')
            >>> print(out4)
            Tensor(shape=[2, 3], dtype=complex64, place=Place(cpu), stop_gradient=True,
            [[ (0.13755313+0.09320746j),  (0.79550129-0.41801897j),
              (-0.67300206-0.09163689j)],
             [ (0.17453042-0.90028328j),  (0.16270922-1.30863023j),
               (0.94287461+0.06869461j)]])
    """
    # Normalize string / numpy dtypes to paddle's internal enums so the
    # complex-dtype check below compares canonical values.
    if dtype is not None and not isinstance(
        dtype, (core.VarDesc.VarType, core.DataType)
    ):
        dtype = convert_np_dtype_to_dtype_(dtype)
    # For complex dtypes, pass a complex mean (0+0j) so that ``gaussian``
    # accepts the dtype/mean pairing; numerically this is equivalent to a
    # real mean of 0.0 because ``gaussian`` only forwards ``mean.real``.
    # NOTE: the previous check listed VarType.COMPLEX64 twice, missing
    # COMPLEX128 and the PIR ``core.DataType`` variants; the tuple below
    # covers all four, consistent with ``gaussian``/``gaussian_``.
    complex_dtypes = (
        core.VarDesc.VarType.COMPLEX64,
        core.VarDesc.VarType.COMPLEX128,
        core.DataType.COMPLEX64,
        core.DataType.COMPLEX128,
    )
    mean = (0.0 + 0.0j) if dtype in complex_dtypes else 0.0
    return gaussian(
        shape=shape,
        mean=mean,
        std=1.0,
        dtype=dtype,
        name=name,
        out=out,
        device=device,
        requires_grad=requires_grad,
    )
2020-08-23 18:08:24 +08:00
2026-02-10 15:20:14 +08:00
# Typing-only overloads for ``randn`` (bodies are never executed).
# First form: the shape is passed as a single sequence/Tensor argument,
# e.g. ``randn([2, 3])`` or ``randn(shape=[2, 3])``.
@overload
def randn(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor: ...


# Second form: dimensions are passed as variable-length int arguments,
# e.g. ``randn(2, 3)`` (handled at runtime by ``@size_args_decorator``).
@overload
def randn(
    *size: int,
    out: paddle.Tensor | None = None,
    dtype: DTypeLike | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor: ...
2025-08-25 14:20:48 +08:00
@size_args_decorator
def randn(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a standard
    normal distribution with mean 0 and standard deviation 1, with ``shape``
    and ``dtype``.

    Args:
        shape (tuple|list|Tensor|*shape): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list.
            If ``shape`` is *shape, directly pass integers as variable-length arguments (e.g., `randn(2, 3)`).
            alias: ``size``.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the output Tensor.
            Supported data types: float16, bfloat16, float32, float64, complex64, complex128.
            Default is None, use global default dtype (see ``get_default_dtype``
            for details).
        name (str|None, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
        out(Tensor, optional): The output tensor.
        device(PlaceLike|None, optional): The desired device of returned tensor.
        requires_grad(bool, optional): If autograd should record operations on the returned tensor. Default: False.
        pin_memory(bool, optional): If set, return tensor would be allocated in the pinned memory. Works only for CPU tensors. Default: False

    Returns:
        Tensor, A Tensor filled with random values sampled from a standard
        normal distribution with mean 0 and standard deviation 1, with
        ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1: attr shape is a list which doesn't contain Tensor.
            >>> out1 = paddle.randn(shape=[2, 3])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.29270014, -0.02925120, -1.07807338],
             [ 1.19966674, -0.46673676, -0.18050613]])
            >>> # doctest: -SKIP
            >>> # example 2: attr shape is a list which contains Tensor.
            >>> dim1 = paddle.to_tensor(2, 'int64')
            >>> dim2 = paddle.to_tensor(3, 'int32')
            >>> out2 = paddle.randn(shape=[dim1, dim2, 2])
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[[-0.26019713,  0.54994684],
              [ 0.46403214, -1.41178775],
              [-0.15682915, -0.26639181]],
             [[ 0.01364388, -2.81676364],
              [ 0.86996621,  0.07524570],
              [ 0.21443737,  0.90938759]]])
            >>> # doctest: -SKIP
            >>> # example 3: attr shape is a Tensor, the data type must be int64 or int32.
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out3 = paddle.randn(shape_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.57575506, -1.60349274, -0.27124876],
             [ 1.08381045,  0.81270242, -0.26763600]])
            >>> # doctest: -SKIP

            >>> # example 4: attr dtype is complex64.
            >>> paddle.seed(200)
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out4 = paddle.randn(shape_tensor, dtype='complex64')
            >>> print(out4)
            Tensor(shape=[2, 3], dtype=complex64, place=Place(cpu), stop_gradient=True,
            [[ (0.13755313+0.09320746j),  (0.79550129-0.41801897j),
              (-0.67300206-0.09163689j)],
             [ (0.17453042-0.90028328j),  (0.16270922-1.30863023j),
               (0.94287461+0.06869461j)]])

            >>> # example 5: attr shape is *shape (integers as variable-length arguments).
            >>> paddle.seed(200)
            >>> out5 = paddle.randn(2, 3)
            >>> print(out5)
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.19452949,  0.13181525,  1.12500870],
             [-0.59116811, -0.95176864, -0.12959413]])
    """
    # Resolve the target place up front: an explicit device wins, otherwise
    # fall back to the framework's current expected place.
    if device is None:
        device = _current_expected_place()
    else:
        device = _get_paddle_place(device)

    # When pinned memory is requested in dynamic mode, swap a GPU/XPU place
    # for its pinned counterpart (unless it is already pinned); any other
    # place cannot be pinned and is rejected.
    already_pinned = isinstance(
        device, (core.CUDAPinnedPlace, core.XPUPinnedPlace)
    )
    if (
        pin_memory
        and in_dynamic_mode()
        and device is not None
        and not already_pinned
    ):
        if isinstance(device, core.CUDAPlace) or (
            isinstance(device, core.Place) and device.is_gpu_place()
        ):
            device = core.CUDAPinnedPlace()
        elif isinstance(device, core.XPUPlace) or (
            isinstance(device, core.Place) and device.is_xpu_place()
        ):
            device = core.XPUPinnedPlace()
        else:
            raise RuntimeError(f"Pinning memory is not supported for {device}")

    result = standard_normal(
        shape,
        dtype,
        name,
        out=out,
        device=device,
        requires_grad=requires_grad,
    )
    if pin_memory and in_dynamic_mode():
        result = result.pin_memory()
    return result
2020-08-23 18:08:24 +08:00
2025-09-08 17:11:32 +08:00
@param_one_alias(["x", "input"])
def randn_like(
    x: Tensor,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
) -> Tensor:
    """
    Returns a tensor with the same size as input that is filled with random numbers from a normal distribution with mean 0 and variance 1.

    Args:
        x (Tensor): The input multi-dimensional tensor which specifies shape. The dtype of ``x``
            can be float16, bfloat16, float32, float64, complex64, complex128.
            alias: ``input``.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the
            output tensor. Supported data types: float16, bfloat16, float32, float64, complex64, complex128. If ``dtype`` is None, the data type is the
            same as x's data type. Default is None.
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        device (str|paddle.Place|None, optional): The device on which to place the created tensor.
            If None, the device is the same as input's device. Default is None.
        requires_grad (bool, optional): Whether to compute gradients for the created tensor.
            Default is False.

    Returns:
        Tensor, A Tensor with the same size as input that is filled with random numbers from a normal distribution with mean 0 and variance 1.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1:
            >>> # dtype is None and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out1 = paddle.randn_like(x)
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.51785558, -0.10632933]])
            >>> # doctest: -SKIP
            >>> print(out1.dtype)
            paddle.float32
            >>> # example 2:
            >>> # dtype is None and the dtype of x is float64
            >>> x = paddle.zeros((1, 2)).astype("float64")
            >>> out2 = paddle.randn_like(x)
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[ 0.64437317, -1.26898670]])
            >>> # doctest: -SKIP
            >>> print(out2.dtype)
            paddle.float64
            >>> # example 3:
            >>> # dtype is float64 and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out3 = paddle.randn_like(x, dtype="float64")
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[ 1.45264642, -1.33133914]])
            >>> # doctest: -SKIP
            >>> print(out3.dtype)
            paddle.float64

            >>> # example 4:
            >>> # device and requires_grad are provided
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out4 = paddle.randn_like(x, device=paddle.CPUPlace(), requires_grad=True)
            >>> print(out4)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=False,
            [[0.78040242, 0.29628819]])
    """
    # Inherit dtype/device from the reference tensor unless overridden.
    target_dtype = x.dtype if dtype is None else dtype
    target_device = x.place if device is None else device

    return randn(
        shape=paddle.shape(x),
        dtype=target_dtype,
        name=name,
        device=target_device,
        requires_grad=requires_grad,
    )
2025-04-29 14:11:33 +08:00
2025-08-29 22:29:21 +08:00
def rand_like(
    input: Tensor,
    name: str | None = None,
    *,
    dtype: DTypeLike | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
) -> Tensor:
    """
    Returns a tensor with the same size as input that is filled with random numbers from a uniform distribution on the interval [0, 1).

    Args:
        input (Tensor): The input multi-dimensional tensor which specifies shape. The dtype of ``input``
            can be float16, float64, float8_e4m3fn, float32, bfloat16.
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the
            output tensor. Supported data types: float16, float64, float8_e4m3fn, float32, bfloat16.
            If ``dtype`` is None, the data type is the same as input's data type. Default is None.
        device (str|paddle.Place|None, optional): The device on which to place the created tensor.
            If None, the device is the same as input's device. Default is None.
        requires_grad (bool, optional): Whether to compute gradients for the created tensor.
            Default is False.

    Returns:
        Tensor: A Tensor with the same size as input that is filled with random numbers from a uniform distribution on the interval [0, 1).

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1:
            >>> # dtype is None and the dtype of input is float32
            >>> x = paddle.zeros((2, 3)).astype("float32")
            >>> out1 = paddle.rand_like(x)
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.34962332, 0.82356787, 0.91275704],
             [0.12328923, 0.58439839, 0.32735515]])
            >>> # doctest: -SKIP
            >>> print(out1.dtype)
            paddle.float32
            >>> # example 2:
            >>> # dtype is None and the dtype of input is float64
            >>> x = paddle.zeros((2, 3)).astype("float64")
            >>> out2 = paddle.rand_like(x)
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[0.73964721, 0.28413662, 0.91918457],
             [0.62838351, 0.39185921, 0.51561823]])
            >>> # doctest: -SKIP
            >>> print(out2.dtype)
            paddle.float64
            >>> # example 3:
            >>> # dtype is float64 and the dtype of input is float32
            >>> x = paddle.zeros((2, 3)).astype("float32")
            >>> out3 = paddle.rand_like(x, dtype="float64")
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[0.84492219, 0.11572551, 0.73868765],
             [0.90269387, 0.45644298, 0.28739912]])
            >>> # doctest: -SKIP
            >>> print(out3.dtype)
            paddle.float64
            >>> # example 4:
            >>> # with requires_grad=True
            >>> x = paddle.zeros((2, 2)).astype("float32")
            >>> out4 = paddle.rand_like(x, requires_grad=True)
            >>> print(out4.stop_gradient)
            False
    """
    # Inherit dtype/device from the reference tensor unless overridden
    # (same convention as ``randn_like``).
    if dtype is None:
        dtype = input.dtype
    if device is None:
        device = input.place
    shape = paddle.shape(input)

    return rand(
        shape=shape,
        dtype=dtype,
        name=name,
        device=device,
        requires_grad=requires_grad,
    )
2025-11-18 16:01:28 +08:00
@param_one_alias(["shape", "size"])
def normal(
    mean: complex | Tensor = 0.0,
    std: float | Tensor = 1.0,
    shape: ShapeLike | None = None,
    name: str | None = None,
    *,
    out: Tensor | None = None,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a normal
    distribution with ``mean`` and ``std`` (standard deviation).

    If ``mean`` is a Tensor, the output Tensor has the same shape and data type as ``mean``.
    If ``mean`` is not a Tensor and ``std`` is a Tensor, the output Tensor has the same shape and data type as ``std``.
    If ``mean`` and ``std`` are not a Tensor, the output Tensor has the same shape as ``shape``, with data type float32.
    If ``mean`` and ``std`` are Tensor, the num of elements of ``mean`` and ``std`` should be the same.
    If ``mean`` is a complex number, the output Tensor follows complex normal distribution, with data type complex64.
    If ``mean`` is a Tensor with complex data type, the output Tensor has same data type with ``mean``.

    .. note::
        Alias Support: The parameter name ``size`` can be used as an alias for ``shape``.
        For example, ``normal(size=[2, 3], ...)`` is equivalent to ``normal(shape=[2, 3], ...)``.

    Args:
        mean (float|complex|Tensor, optional): The mean of the output Tensor's normal distribution.
            If ``mean`` is float, all elements of the output Tensor shared the same mean.
            If ``mean`` is a Tensor(data type supports float32, float64, complex64, complex128), it has per-element means.
            Default is 0.0
        std (float|Tensor, optional): The standard deviation of the output Tensor's normal distribution.
            If ``std`` is float, all elements of the output Tensor shared the same standard deviation.
            If ``std`` is a Tensor(data type supports float32, float64), it has per-element standard deviations.
            Default is 1.0
        shape (tuple|list|Tensor|None, optional): Shape of the Tensor to be created. The data type is ``int32`` or ``int64``.
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list. If ``mean`` or ``std``
            is a Tensor, the shape of the output Tensor is the same as ``mean`` or ``std``, attr ``shape`` is ignored.
            Default is None
        name (str|None, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
        out (Tensor|None, optional): Optional output tensor. If provided, the result will be stored in this tensor.
            The ``out`` tensor must have the same shape and dtype as the expected output. Default is None.

    Returns:
        Tensor: A Tensor filled with random values sampled from a normal distribution with ``mean`` and ``std``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> out1 = paddle.normal(shape=[2, 3])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.85107994, -0.85490644, -1.35941815],
             [-0.55500370,  0.20964541,  2.24193954]])
            >>> # doctest: -SKIP
            >>> mean_tensor = paddle.to_tensor([1.0, 2.0, 3.0])
            >>> out2 = paddle.normal(mean=mean_tensor)
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [1.05411839, 3.71514320, 3.42665267])
            >>> # doctest: -SKIP
            >>> std_tensor = paddle.to_tensor([1.0, 2.0, 3.0])
            >>> out3 = paddle.normal(mean=mean_tensor, std=std_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [0.48646951, 0.00815189, 3.74022293])
            >>> # doctest: -SKIP

            >>> paddle.seed(200)
            >>> out4 = paddle.normal(mean=1 + 1j, shape=[2, 3])
            >>> print(out4)
            Tensor(shape=[2, 3], dtype=complex64, place=Place(cpu), stop_gradient=True,
            [[(1.13755310+1.09320748j), (1.79550123+0.58198106j),
              (0.32699794+0.90836310j)],
             [(1.17453039+0.09971672j), (1.16270924-0.30863023j),
              (1.94287467+1.06869459j)]])
            >>> mean_tensor = paddle.to_tensor([1 + 1j, 2 + 2j, 3 + 3j])
            >>> out5 = paddle.normal(mean=mean_tensor)
            >>> print(out5)
            Tensor(shape=[3], dtype=complex64, place=Place(cpu), stop_gradient=True,
            [(1.13600969-0.11074114j), (2.52933168+2.19687510j),
             (2.29101014+1.81147802j)])
    """
    # Static/PIR graph mode: validate argument types and dtypes eagerly.
    if not in_dynamic_mode():
        check_type(
            mean,
            'mean',
            (int, float, complex, Variable, paddle.pir.Value),
            'normal',
        )
        check_type(
            std, 'std', (int, float, Variable, paddle.pir.Value), 'normal'
        )
        if isinstance(mean, (Variable, paddle.pir.Value)):
            check_dtype(
                mean.dtype,
                'mean',
                ['float32', 'float64', 'complex64', 'complex128'],
                'normal',
                "If mean is Tensor, it's data type only support float32, float64, complex64, complex128.",
            )
        if isinstance(std, (Variable, paddle.pir.Value)):
            check_dtype(
                std.dtype,
                'std',
                ['float32', 'float64'],
                'normal',
                "If std is Tensor, it's data type only support float32, float64.",
            )
        if shape is not None:
            check_shape(shape, 'normal')

    if isinstance(mean, complex):
        # BUGFIX: ``std`` may legally be an int (check_type above allows
        # it); the previous ``isinstance(std, float)`` sent an int std down
        # the Tensor path, crashing on ``paddle.shape(std)``.
        if isinstance(std, (int, float)):
            return gaussian(
                shape=shape,
                mean=mean,
                std=std,
                dtype='complex64',
                name=name,
                out=out,
            )
        else:
            # std is a Tensor: sample a unit complex normal of its shape,
            # then scale/shift below.
            out_tensor = gaussian(
                shape=paddle.shape(std),
                mean=(0.0 + 0.0j),
                std=1.0,
                dtype='complex64',
                name=name,
            )
    elif isinstance(mean, (Variable, paddle.pir.Value)):
        if mean.dtype in [
            core.VarDesc.VarType.COMPLEX64,
            core.VarDesc.VarType.COMPLEX128,
            core.DataType.COMPLEX64,
            core.DataType.COMPLEX128,
        ]:
            # Complex per-element mean: align std to mean's shape, sample a
            # unit complex normal, then scale/shift below.
            if isinstance(std, (Variable, paddle.pir.Value)):
                mean_shape = paddle.shape(mean)
                std = paddle.reshape(std, mean_shape)
            else:
                std = float(std)
            out_tensor = gaussian(
                shape=paddle.shape(mean),
                mean=(0.0 + 0.0j),
                std=1.0,
                dtype=mean.dtype,
                name=name,
            )
        else:
            # Real per-element mean: match std's dtype/shape to mean, sample
            # a unit normal, then scale/shift below.
            if isinstance(std, (Variable, paddle.pir.Value)):
                if std.dtype != mean.dtype:
                    std = paddle.cast(std, mean.dtype)
                mean_shape = paddle.shape(mean)
                std = paddle.reshape(std, mean_shape)
            else:
                std = float(std)
            out_tensor = standard_normal(paddle.shape(mean), mean.dtype, name)
    elif isinstance(std, (Variable, paddle.pir.Value)):
        # Scalar mean with per-element std.
        mean = float(mean)
        out_tensor = standard_normal(paddle.shape(std), std.dtype, name)
    else:
        # Both mean and std are scalars: delegate entirely to gaussian.
        return gaussian(shape=shape, mean=mean, std=std, name=name, out=out)

    # Affine transform of the unit normal sample: N(0, 1) * std + mean.
    out_tensor = out_tensor * std + mean
    if not in_dynamic_or_pir_mode():
        out_tensor.stop_gradient = True
    if out is not None:
        paddle.assign(out_tensor, out)
        out_tensor = out
    return out_tensor
2020-08-23 18:08:24 +08:00
2023-09-22 11:14:48 +08:00
@dygraph_only
def normal_(
    x: Tensor, mean: complex = 0.0, std: float = 1.0, name: str | None = None
) -> Tensor:
    """
    Inplace version of api ``normal``: overwrites ``x`` with random values
    sampled from a normal distribution and returns ``x``.
    Please refer to :ref:`api_paddle_normal`.

    Args:
        x(Tensor): The input tensor to be filled with random values.
        mean (float|int|complex, optional): Mean of the output tensor, default is 0.0.
        std (float|int, optional): Standard deviation of the output tensor, default
            is 1.0.
        name(str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random values sampled from a normal distribution with ``mean`` and ``std`` .

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> x = paddle.randn([3, 4])
            >>> x.normal_()
            >>> # doctest: +SKIP('random check')
            >>> print(x)
            Tensor(shape=[3, 4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.06132207,  1.11349595,  0.41906244, -0.24858207],
             [-1.85169315, -1.50370061,  1.73954511,  0.13331604],
             [ 1.66359663, -0.55764782, -0.59911072, -0.57773495]])
    """
    # Delegate to the in-place gaussian fill; validation of complex means
    # happens inside ``gaussian_``.
    return gaussian_(x, mean=mean, std=std)
2024-06-19 19:06:53 +08:00
def uniform(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    min: float = 0,
    max: float = 1.0,
    seed: int = 0,
    name: str | None = None,
    *,
    out: Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a uniform
    distribution in the range [``min``, ``max``), with ``shape`` and ``dtype``.

    Examples:

        .. code-block:: text

            Input:
              shape = [1, 2]
            Output:
              result=[[0.8505902, 0.8397286]]

    Args:
        shape (tuple|list|Tensor): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list.
        dtype(str|paddle.dtype|np.dtype, optional): The data type of the output Tensor.
            Supported data types: float32, float64, complex64, complex128.
            Default is None, use global default dtype (see ``get_default_dtype``
            for details).
        min(float|int, optional): The lower bound on the range of random values
            to generate, ``min`` is included in the range. Default is 0.
        max(float|int, optional): The upper bound on the range of random values
            to generate, ``max`` is excluded in the range. Default is 1.0.
        seed(int, optional): Random seed used for generating samples. If seed is 0,
            it will use the seed of the global default generator (which can be set by paddle.seed).
            Note that if seed is not 0, this operator will always generate the same random numbers every
            time. Default is 0.
        name(str|None, optional): Name for the operation (optional, default is None).
            For more information, please refer to :ref:`api_guide_Name`.
        out(Tensor|None, optional): Optional output tensor to store the result.
            Default is None.
        device(PlaceLike|None, optional): The desired place of the returned
            tensor. If None, the current expected place is used. Default is None.
        requires_grad(bool, optional): Whether autograd should record operations
            on the returned tensor. Default is False.

    Returns:
        Tensor, A Tensor filled with random values sampled from a uniform
        distribution in the range [``min``, ``max``), with ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon
            :name: code-example1

            >>> import paddle
            >>> # example 1:
            >>> # attr shape is a list which doesn't contain Tensor.
            >>> out1 = paddle.uniform(shape=[3, 4])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3, 4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[ 0.38170254, -0.47945309,  0.39794648, -0.94233936],
             [-0.85296679, -0.76094693,  0.10565400,  0.59155810],
             [ 0.11681318, -0.42144555, -0.81596589,  0.62113667]])
            >>> # doctest: -SKIP
            >>> # example 2:
            >>> # attr shape is a list which contains Tensor.
            >>> dim1 = paddle.to_tensor(2, 'int64')
            >>> dim2 = paddle.to_tensor(3, 'int32')
            >>> out2 = paddle.uniform(shape=[dim1, dim2])
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.00294012, -0.07210171, -0.44236207],
             [ 0.70089281,  0.21500075, -0.22084606]])
            >>> # doctest: -SKIP
            >>> # example 3:
            >>> # attr shape is a Tensor, the data type must be int64 or int32.
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out3 = paddle.uniform(shape_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.60801756,  0.32448411,  0.90269291],
             [-0.66421294, -0.95218551, -0.51022208]])
            >>> # doctest: -SKIP

    """
    # Dtypes the kernel accepts; this is a superset of the docstring's list
    # (float16 and uint16/bfloat16-storage are accepted as well).
    supported_dtypes = [
        'float32',
        'float64',
        'float16',
        'uint16',
        'complex64',
        'complex128',
    ]
    if dtype is None:
        # Fall back to the process-wide default dtype. It is validated here,
        # in string form, before conversion below; an explicitly passed dtype
        # is validated per execution-mode branch instead.
        dtype = paddle.framework.get_default_dtype()
        if dtype not in supported_dtypes:
            raise TypeError(
                f"uniform/rand only supports {supported_dtypes}, but the default dtype is {dtype}"
            )

    if not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
        dtype = convert_np_dtype_to_dtype_(dtype)
    if in_dynamic_mode():
        # Eager mode: flatten ``shape`` to a plain int list and call the C++
        # op directly. Bounds are coerced to float for the kernel.
        shape = paddle.utils.convert_shape_to_list(shape)
        place = (
            _current_expected_place()
            if device is None
            else _get_paddle_place(device)
        )
        tensor = _C_ops.uniform(
            shape,
            dtype,
            float(min),
            float(max),
            seed,
            place,
            out=out,
        )
        if requires_grad is True:
            tensor.stop_gradient = False
        return tensor
    elif in_pir_mode():
        # PIR (new static IR): ``shape``/``min``/``max`` may also be Values
        # produced by other ops.
        check_type(
            shape, 'shape', (list, tuple, paddle.pir.Value), 'uniform/rand'
        )
        check_dtype(dtype, 'dtype', supported_dtypes, 'uniform/rand')
        check_type(min, 'min', (float, int, paddle.pir.Value), 'uniform/rand')
        check_type(max, 'max', (float, int, paddle.pir.Value), 'uniform/rand')
        if paddle.utils._contain_var(shape):
            # Mixed int/Value shape list -> list of int tensors.
            shape = paddle.utils.get_int_tensor_list(shape)
        if isinstance(min, int):
            min = float(min)
        if isinstance(max, int):
            max = float(max)
        place = (
            _current_expected_place()
            if device is None
            else _get_paddle_place(device)
        )
        tensor = _C_ops.uniform(
            shape,
            dtype,
            min,
            max,
            seed,
            place,
            out=out,
        )
        if requires_grad is True:
            tensor.stop_gradient = False
        return tensor
    else:
        # Legacy static graph: assemble a ``uniform_random`` op by hand.
        # NOTE(review): the keyword-only ``out``/``device``/``requires_grad``
        # arguments are ignored in this branch (``out`` is rebound below).
        check_type(shape, 'shape', (list, tuple, Variable), 'uniform/rand')
        check_dtype(dtype, 'dtype', supported_dtypes, 'uniform/rand')
        check_type(min, 'min', (float, int, Variable), 'uniform/rand')
        check_type(max, 'max', (float, int, Variable), 'uniform/rand')

        inputs = {}
        attrs = {'seed': seed, 'min': min, 'max': max, 'dtype': dtype}
        # Fills ``inputs``/``attrs`` with the shape, whether it is static
        # ints or graph tensors.
        paddle.utils.get_shape_tensor_inputs(
            inputs=inputs, attrs=attrs, shape=shape, op_type='uniform/rand'
        )

        helper = LayerHelper("uniform", **locals())
        out = helper.create_variable_for_type_inference(dtype)
        helper.append_op(
            type="uniform_random",
            inputs=inputs,
            attrs=attrs,
            outputs={"Out": out},
        )
        out.stop_gradient = True
        return out
2020-08-21 00:13:01 -05:00
2025-11-26 18:14:10 +08:00
@param_two_alias(["min", "from"], ["max", "to"])
@dygraph_only
def uniform_(
    x: Tensor,
    min: float = 0,
    max: float = 1.0,
    seed: int = 0,
    name: str | None = None,
) -> Tensor:
    """
    Inplace version of OP ``uniform``: refills ``x`` with values drawn
    uniformly from [``min``, ``max``) and returns it. Please refer to
    :ref:`api_paddle_uniform`.

    Args:
        x(Tensor): The input tensor to be filled with random values.
        min(float|int, optional): The lower bound on the range of random values
            to generate, ``min`` is included in the range. Default is 0.
            Alias: ``from``.
        max(float|int, optional): The upper bound on the range of random values
            to generate, ``max`` is excluded in the range. Default is 1.0.
            Alias: ``to``.
        seed(int, optional): Random seed used for generating samples. If seed is 0,
            it will use the seed of the global default generator (which can be set by paddle.seed).
            Note that if seed is not 0, this operator will always generate the same random numbers every
            time. Default is 0.
        name(str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, The input tensor x filled with random values sampled from a uniform
        distribution in the range [``min``, ``max``).

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example:
            >>> x = paddle.ones(shape=[3, 4])
            >>> x.uniform_()
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3, 4], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[-0.50484276,  0.49580324,  0.33357990, -0.93924278],
             [ 0.39779735,  0.87677515, -0.24377221,  0.06212139],
             [-0.92499518, -0.96244860,  0.79210341, -0.78228098]])
            >>> # doctest: -SKIP
    """
    # NOTE(review): the trailing (0, 0, 1.0) presumably disables the op's
    # diagonal-overwrite options -- confirm against the uniform_inplace_
    # kernel signature.
    result = _C_ops.uniform_inplace_(x, min, max, seed, 0, 0, 1.0)
    return result
2021-08-27 20:08:37 +08:00
2026-01-19 00:50:17 -08:00
@param_one_alias(["shape", "size"])
def randint(
    low: int = 0,
    high: int | None = None,
    shape: ShapeLike = [1],
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: Tensor | None = None,
) -> Tensor:
    """
    Returns a Tensor filled with random integers from a discrete uniform
    distribution in the range [``low``, ``high``), with ``shape`` and ``dtype``.
    If ``high`` is None (the default), the range is [0, ``low``).

    Args:
        low (int, optional): The lower bound on the range of random values to generate.
            The ``low`` is included in the range. If ``high`` is None, the
            range is [0, ``low``). Default is 0.
        high (int, optional): The upper bound on the range of random values to
            generate, the ``high`` is excluded in the range. Default is None
            (see above for behavior if high = None). Default is None.
        shape (tuple|list|Tensor): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list. Default is [1].
            Alias: ``size``.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the
            output tensor. Supported data types: int32, int64. If ``dtype``
            is None, the data type is int64. Default is None.
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        out (Tensor|None, optional): Optional output tensor. If provided, the result will be stored in this tensor.

    Returns:
        Tensor, A Tensor filled with random integers from a discrete uniform
        distribution in the range [``low``, ``high``), with ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1:
            >>> # attr shape is a list which doesn't contain Tensor.
            >>> out1 = paddle.randint(low=-5, high=5, shape=[2, 3])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[-1,  4,  4],
             [-2, -5, -2]])
            >>> # doctest: -SKIP
            >>> # example 2:
            >>> # attr shape is a list which contains Tensor.
            >>> dim1 = paddle.to_tensor(2, 'int64')
            >>> dim2 = paddle.to_tensor(3, 'int32')
            >>> out2 = paddle.randint(low=-5, high=5, shape=[dim1, dim2])
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[-4, -4,  2],
             [-3, -1, -5]])
            >>> # doctest: -SKIP
            >>> # example 3:
            >>> # attr shape is a Tensor
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out3 = paddle.randint(low=-5, high=5, shape=shape_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[-1,  4, -3],
             [ 1,  2, -1]])
            >>> # doctest: -SKIP
            >>> # example 4:
            >>> # data type is int32
            >>> out4 = paddle.randint(low=-5, high=5, shape=[3], dtype='int32')
            >>> print(out4)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[3], dtype=int32, place=Place(cpu), stop_gradient=True,
            [4, 4, 0])
            >>> # doctest: -SKIP
            >>> # example 5:
            >>> # Input only one parameter
            >>> # low=0, high=10, shape=[1], dtype='int64'
            >>> out5 = paddle.randint(10)
            >>> print(out5)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1], dtype=int64, place=Place(cpu), stop_gradient=True,
            [7])
            >>> # doctest: -SKIP

    """
    if high is None:
        # Single-bound form: randint(n) samples from [0, n).
        if low <= 0:
            raise ValueError(
                f"If high is None, low must be greater than 0, but received low = {low}."
            )
        high = low
        low = 0
    if dtype is None:
        # Default dtype is int64, expressed in whichever IR is active.
        dtype = core.VarDesc.VarType.INT64
        if use_pir_api():
            dtype = DataType.INT64
    elif not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
        dtype = convert_np_dtype_to_dtype_(dtype)

    if in_dynamic_mode():
        # Eager mode: flatten shape and call the C++ op directly.
        # NOTE(review): unlike the legacy static branch, no Python-side
        # ``low >= high`` check happens here -- presumably the kernel
        # validates the bounds; confirm.
        shape = paddle.utils.convert_shape_to_list(shape)
        return _C_ops.randint(
            low, high, shape, dtype, _current_expected_place(), out=out
        )
    elif in_pir_mode():
        # PIR (new static IR): shape elements may be Values.
        check_shape(shape, 'randint')
        check_dtype(dtype, 'dtype', ['int32', 'int64'], 'randint')
        if paddle.utils._contain_var(shape):
            shape = paddle.utils.get_int_tensor_list(shape)
        return _C_ops.randint(
            low, high, shape, dtype, _current_expected_place(), out=out
        )
    else:
        # Legacy static graph: validate eagerly and build the op by hand.
        check_shape(shape, 'randint')
        check_dtype(dtype, 'dtype', ['int32', 'int64'], 'randint')
        if low >= high:
            raise ValueError(
                f"randint's low must less then high, but received low = {low}, "
                f"high = {high}"
            )

        inputs = {}
        attrs = {'low': low, 'high': high, 'seed': 0, 'dtype': dtype}
        paddle.utils.get_shape_tensor_inputs(
            inputs=inputs, attrs=attrs, shape=shape, op_type='randint'
        )

        helper = LayerHelper("randint", **locals())
        # Honor a caller-provided ``out`` variable; otherwise infer one.
        if out is None:
            out = helper.create_variable_for_type_inference(dtype=dtype)
        helper.append_op(
            type='randint', inputs=inputs, outputs={'Out': out}, attrs=attrs
        )
        out.stop_gradient = True
        return out
2020-04-06 11:18:39 +08:00
2025-09-18 14:40:57 +08:00
def random_(
    x: Tensor,
    from_: int = 0,
    to: int | None = None,
    *,
    generator: None = None,
) -> Tensor:
    """
    Fill ``x`` in place with integers sampled from the discrete uniform
    distribution over [``from``, ``to`` - 1].

    When ``to`` is omitted, the upper bound is derived from the dtype of
    ``x``: for floating point tensors the range is [0, 2^mantissa] so every
    sampled value is exactly representable; for integer tensors it is the
    dtype's maximum. Calling with a single bound, ``x.random_(n)``, samples
    from [0, n).

    Args:
        from (int, optional): The lower bound on the range of random values to generate. Default is 0.
        to (int|None, optional): The upper bound on the range of random values to generate. Default is None.
        generator (None): Placeholder for random number generator (currently not implemented, reserved for future use).

    Returns:
        Tensor, A Tensor filled with random integers from a discrete uniform
        distribution in the range [``from``, ``to``).

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> x = paddle.zeros([3], dtype=paddle.int32)
            >>> x.random_(0, 10)
    """
    dtype = x.dtype
    if to is None:
        if from_ != 0:
            # Single-bound call: random_(x, n) means sample from [0, n).
            from_, to = 0, from_
        elif paddle.is_floating_point(x):
            # Mantissa bits per float dtype; 2**mantissa is the widest range
            # whose integers are all exactly representable. The fallback of 8
            # presumably targets bfloat16 -- TODO confirm.
            mantissa = {
                paddle.float32: 24,
                paddle.float64: 53,
                paddle.float16: 11,
            }.get(dtype, 8)
            to = 2**mantissa
        else:
            to = paddle.iinfo(dtype).max
    if from_ >= to:
        raise ValueError(
            f"random_ expects 'from' to be less than 'to', but got from={from_} >= to={to}"
        )
    return _C_ops.random_(x, from_, to)
2024-06-19 19:06:53 +08:00
def randint_like(
    x: Tensor,
    low: int = 0,
    high: int | None = None,
    dtype: DTypeLike | None = None,
    name: str | None = None,
) -> Tensor:
    """
    Returns a Tensor filled with random integers from a discrete uniform
    distribution in the range [``low``, ``high``), with the same shape as ``x``.
    (use ``dtype`` if ``dtype`` is not None)
    If ``high`` is None (the default), the range is [0, ``low``).

    Args:
        x (Tensor): The input multi-dimensional tensor which specifies shape. The dtype of ``x``
            can be bool, int32, int64, float16, float32, float64.
        low (int, optional): The lower bound on the range of random values to generate.
            The ``low`` is included in the range. If ``high`` is None, the
            range is [0, ``low``). Default is 0.
        high (int|None, optional): The upper bound on the range of random values to
            generate, the ``high`` is excluded in the range. Default is None.
            If ``high`` is None, the range is [0, ``low``).
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the
            output tensor. Supported data types: bool, int32, int64, float16,
            float32, float64. If ``dtype`` is None, the data type is the
            same as x's data type. Default is None.
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, A Tensor filled with random integers from a discrete uniform
        distribution in the range [``low``, ``high``), with ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1:
            >>> # dtype is None and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out1 = paddle.randint_like(x, low=-5, high=5)
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0., 0.]])
            >>> # doctest: -SKIP
            >>> print(out1.dtype)
            paddle.float32
            >>> # example 2:
            >>> # dtype is None and the dtype of x is float64
            >>> x = paddle.zeros((1, 2)).astype("float64")
            >>> out2 = paddle.randint_like(x, low=-5, high=5)
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[ 4., -5.]])
            >>> # doctest: -SKIP
            >>> print(out2.dtype)
            paddle.float64
            >>> # example 3:
            >>> # dtype is None and the dtype of x is int32
            >>> x = paddle.zeros((1, 2)).astype("int32")
            >>> out3 = paddle.randint_like(x, low=-5, high=5)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=int32, place=Place(cpu), stop_gradient=True,
            [[ 0, -4]])
            >>> # doctest: -SKIP
            >>> print(out3.dtype)
            paddle.int32
            >>> # example 4:
            >>> # dtype is None and the dtype of x is int64
            >>> x = paddle.zeros((1, 2)).astype("int64")
            >>> out4 = paddle.randint_like(x, low=-5, high=5)
            >>> print(out4)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[ 4, -3]])
            >>> # doctest: -SKIP
            >>> print(out4.dtype)
            paddle.int64
            >>> # example 5:
            >>> # dtype is float64 and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out5 = paddle.randint_like(x, low=-5, high=5, dtype="float64")
            >>> print(out5)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=float64, place=Place(cpu), stop_gradient=True,
            [[3., 1.]])
            >>> # doctest: -SKIP
            >>> print(out5.dtype)
            paddle.float64
            >>> # example 6:
            >>> # dtype is bool and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out6 = paddle.randint_like(x, low=-5, high=5, dtype="bool")
            >>> print(out6)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=bool, place=Place(cpu), stop_gradient=True,
            [[False, True ]])
            >>> # doctest: -SKIP
            >>> print(out6.dtype)
            paddle.bool
            >>> # example 7:
            >>> # dtype is int32 and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out7 = paddle.randint_like(x, low=-5, high=5, dtype="int32")
            >>> print(out7)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=int32, place=Place(cpu), stop_gradient=True,
            [[-2, -2]])
            >>> # doctest: -SKIP
            >>> print(out7.dtype)
            paddle.int32
            >>> # example 8:
            >>> # dtype is int64 and the dtype of x is float32
            >>> x = paddle.zeros((1, 2)).astype("float32")
            >>> out8 = paddle.randint_like(x, low=-5, high=5, dtype="int64")
            >>> print(out8)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[-5,  4]])
            >>> # doctest: -SKIP
            >>> print(out8.dtype)
            paddle.int64
            >>> # example 9:
            >>> # dtype is int64 and the dtype of x is bool
            >>> x = paddle.zeros((1, 2)).astype("bool")
            >>> out9 = paddle.randint_like(x, low=-5, high=5, dtype="int64")
            >>> print(out9)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[1, 2], dtype=int64, place=Place(cpu), stop_gradient=True,
            [[ 1, -2]])
            >>> # doctest: -SKIP
            >>> print(out9.dtype)
            paddle.int64

    """
    if high is None:
        # Single-bound form: randint_like(x, n) samples from [0, n).
        if low <= 0:
            raise ValueError(
                f"If high is None, low must be greater than 0, but received low = {low}."
            )
        high = low
        low = 0
    if dtype is None:
        # Inherit the output dtype from the reference tensor.
        dtype = x.dtype
    else:
        if not isinstance(dtype, (core.VarDesc.VarType, core.DataType)):
            dtype = convert_np_dtype_to_dtype_(dtype)
    # Output shape mirrors the input's (as a shape tensor).
    shape = paddle.shape(x)
    if low >= high:
        raise ValueError(
            f"randint_like's low must less then high, but received low = {low}, "
            f"high = {high}"
        )

    if in_dynamic_or_pir_mode():
        if in_dynamic_mode():
            # Eager: flatten the shape tensor to a plain int list.
            shape = paddle.utils.convert_shape_to_list(shape)
            # Sample in INT64 first, then cast to the requested dtype below
            # (the randint kernel does not emit bool/float dtypes directly).
            out = _C_ops.randint(
                low, high, shape, DataType.INT64, _current_expected_place()
            )
        else:
            # PIR (new static IR): validate before building the op.
            check_type(
                shape,
                'shape',
                (list, tuple, paddle.pir.Value),
                'randint_like',
            )
            check_dtype(
                dtype,
                'dtype',
                ['bool', 'float16', 'float32', 'float64', 'int32', 'int64'],
                'randint_like',
            )
            if paddle.utils._contain_var(shape):
                shape = paddle.utils.get_int_tensor_list(shape)
            out = _C_ops.randint(
                low, high, shape, DataType.INT64, _current_expected_place()
            )
        out = paddle.cast(out, dtype)
        return out
    else:
        # Legacy static graph: build the int64 randint op, then cast.
        check_shape(shape, 'randint_like')
        check_dtype(
            dtype,
            'dtype',
            ['bool', 'float16', 'float32', 'float64', 'int32', 'int64'],
            'randint_like',
        )

        inputs = {"ShapeTensor": shape}
        attrs = {
            'low': low,
            'high': high,
            'seed': 0,
            'dtype': core.VarDesc.VarType.INT64,
        }
        helper = LayerHelper("randint", **locals())
        out = helper.create_variable_for_type_inference(
            dtype=core.VarDesc.VarType.INT64
        )
        helper.append_op(
            type='randint', inputs=inputs, outputs={'Out': out}, attrs=attrs
        )
        out.stop_gradient = True
        out = paddle.cast(out, dtype)
        return out
2021-11-02 15:15:55 +08:00
2024-06-19 19:06:53 +08:00
def randperm(
    n: int,
    dtype: DTypeLike = "int64",
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor:
    """
    Returns a 1-D Tensor filled with random permutation values from 0
    to n-1, with ``dtype``.

    Args:
        n (int): The upper bound (exclusive), and it should be greater than 0.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of
            the output Tensor. Supported data types: int32, int64, float32,
            float64. Default is int64.
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        out(Tensor, optional): The output tensor.
        device(PlaceLike|None, optional): The desired device of returned tensor.
        requires_grad(bool, optional): If autograd should record operations on the returned tensor. Default: False.
        pin_memory(bool, optional): If set, return tensor would be allocated in the pinned memory. Works only for CPU tensors. Default: False

    Returns:
        Tensor, A 1-D Tensor filled with random permutation values from 0
        to n-1, with ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle

            >>> out1 = paddle.randperm(5)
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[5], dtype=int64, place=Place(cpu), stop_gradient=True,
            [3, 0, 1, 4, 2])
            >>> # doctest: -SKIP

            >>> out2 = paddle.randperm(7, 'int32')
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[7], dtype=int32, place=Place(cpu), stop_gradient=True,
            [3, 2, 0, 6, 5, 4, 1])
            >>> # doctest: -SKIP

    """
    # Resolve the target place up front; after this, ``device`` is never None.
    device = (
        _get_paddle_place(device)
        if device is not None
        else _current_expected_place()
    )
    if (
        pin_memory
        and in_dynamic_mode()
        # NOTE(review): always true here -- ``device`` was just assigned a
        # non-None value above; this condition is redundant.
        and device is not None
        and not isinstance(device, (core.CUDAPinnedPlace, core.XPUPinnedPlace))
    ):
        # Map a GPU/XPU place to its pinned-memory counterpart so the result
        # can be allocated in page-locked host memory.
        if isinstance(device, core.CUDAPlace) or (
            isinstance(device, core.Place) and device.is_gpu_place()
        ):
            device = core.CUDAPinnedPlace()
        elif isinstance(device, core.XPUPlace) or (
            isinstance(device, core.Place) and device.is_xpu_place()
        ):
            device = core.XPUPinnedPlace()
        else:
            raise RuntimeError(f"Pinning memory is not supported for {device}")

    if not isinstance(dtype, (core.VarDesc.VarType, paddle.pir.core.DataType)):
        dtype = convert_np_dtype_to_dtype_(dtype)
    if in_dynamic_or_pir_mode():
        tensor = _C_ops.randperm(n, dtype, device, out=out)
        if requires_grad is True:
            tensor.stop_gradient = False
        if pin_memory and in_dynamic_mode():
            # Move the result into pinned host memory (eager mode only).
            tensor = tensor.pin_memory()
        return tensor
    else:
        # Legacy static graph: ``out``/``device``/``requires_grad``/
        # ``pin_memory`` are not applied in this branch.
        if n < 1:
            raise ValueError(
                "The input n should be greater than 0 in randperm op."
            )
        check_dtype(
            dtype, 'dtype', ['int64', 'int32', 'float32', 'float64'], 'randperm'
        )

        helper = LayerHelper("randperm", **locals())
        out = helper.create_variable_for_type_inference(dtype)
        attrs = {'n': n, 'dtype': dtype, 'seed': 0}
        helper.append_op(
            type='randperm', inputs={}, outputs={'Out': out}, attrs=attrs
        )
        out.stop_gradient = True
        return out
upload code for tensor.rand (#23507)
* upload code for tensor.rand
* fix import
* update example, change paddle.tensor.rand to paddle.rand
* change 'variable' to 'Variable', test=develop
change 'variable' to 'Variable' in description, test=develop
* add pre-commit check
* add pre-commit check
* pre-commit check, test=develop
* add more unittest code
* trigger ci, test=develop
* pre-commit check, test=develop
* update api and test comment, test=develop
* update api and test comment, test=develop
* add more type check, test=develop
* add detail error info for device, test=develop
* add unnittest, test=develop
* resolve conflict and pre-commit check, test=develop
2020-04-22 21:05:20 +08:00
2026-02-10 15:20:14 +08:00
# Typing overload 1: ``shape`` passed as a single list/tuple/Tensor argument,
# e.g. ``paddle.rand([2, 3])``. Positional ``dtype``/``name`` are allowed here.
@overload
def rand(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor: ...
# Typing overload 2: ``shape`` passed as variable-length integer arguments,
# e.g. ``paddle.rand(2, 3)`` (torch-style); all other options are keyword-only.
@overload
def rand(
    *size: int,
    out: paddle.Tensor | None = None,
    dtype: DTypeLike | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor: ...
2025-09-08 17:11:32 +08:00
@size_args_decorator
def rand(
    shape: ShapeLike,
    dtype: DTypeLike | None = None,
    name: str | None = None,
    *,
    out: paddle.Tensor | None = None,
    device: PlaceLike | None = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
) -> Tensor:
    """
    Returns a Tensor filled with random values sampled from a uniform
    distribution in the range [0, 1), with ``shape`` and ``dtype``.

    Args:
        shape (tuple|list|Tensor): Shape of the Tensor to be created. The data type is ``int32`` or ``int64`` .
            If ``shape`` is a list or tuple, each element of it should be integer or 0-D Tensor with shape [].
            If ``shape`` is an Tensor, it should be an 1-D Tensor which represents a list.
            If ``shape`` is *shape, directly pass integers as variable-length arguments (e.g., `rand(2, 3)`).
            alias: ``size``.
        dtype (str|np.dtype|paddle.dtype|None, optional): The data type of the output Tensor.
            Supported data types: float32, float64.
            Default is None, use global default dtype (see :ref:`get_default_dtype`
            for details).
        name (str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.
        out (Tensor|None, optional): The output tensor. Default is None.
        device (PlaceLike|None, optional): The desired device of the returned tensor.
            Default is None, which means the current global place is used.
        requires_grad (bool, optional): Whether autograd should record operations on
            the returned tensor. Default: False.
        pin_memory (bool, optional): If set, the returned tensor is allocated in
            pinned (page-locked host) memory. Only takes effect in dynamic mode.
            Default: False.

    Returns:
        Tensor, A Tensor filled with random values sampled from a uniform
        distribution in the range [0, 1), with ``shape`` and ``dtype``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> # example 1: attr shape is a list which doesn't contain Tensor.
            >>> out1 = paddle.rand(shape=[2, 3])
            >>> print(out1)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.68532258, 0.69431782, 0.44835982],
             [0.13204314, 0.48128194, 0.36574543]])
            >>> # doctest: -SKIP
            >>> # example 2: attr shape is a list which contains Tensor.
            >>> dim1 = paddle.to_tensor(2, 'int64')
            >>> dim2 = paddle.to_tensor(3, 'int32')
            >>> out2 = paddle.rand(shape=[dim1, dim2, 2])
            >>> print(out2)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3, 2], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[[0.62102991, 0.45255184],
              [0.81386960, 0.22463219],
              [0.87946558, 0.28097662]],
             [[0.36565998, 0.63203937],
              [0.58640617, 0.92696166],
              [0.85060406, 0.38138932]]])
            >>> # doctest: -SKIP
            >>> # example 3: attr shape is a Tensor, the data type must be int64 or int32.
            >>> shape_tensor = paddle.to_tensor([2, 3])
            >>> out3 = paddle.rand(shape_tensor)
            >>> print(out3)
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.77650446, 0.12870903, 0.05153799],
             [0.27029657, 0.03963696, 0.42487794]])
            >>> # doctest: -SKIP
            >>> # example 4: attr shape is *shape (integers as variable-length arguments).
            >>> paddle.seed(200)
            >>> out4 = paddle.rand(2, 3)
            >>> print(out4)
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.77663314, 0.90824795, 0.15685187],
             [0.04279523, 0.34468332, 0.79557180]])
    """
    # Resolve the target place: an explicit ``device`` wins, otherwise fall
    # back to the current global expected place. After this line ``device``
    # is always a concrete place object (never None).
    device = (
        _get_paddle_place(device)
        if device is not None
        else _current_expected_place()
    )
    # When pinned memory is requested in dynamic mode and the resolved place
    # is not already a pinned place, coerce it to its pinned counterpart so
    # the allocation lands in page-locked host memory. (The previous
    # ``device is not None`` term here was dead code: ``device`` is assigned
    # unconditionally above.)
    if (
        pin_memory
        and in_dynamic_mode()
        and not isinstance(device, (core.CUDAPinnedPlace, core.XPUPinnedPlace))
    ):
        if isinstance(device, core.CUDAPlace) or (
            isinstance(device, core.Place) and device.is_gpu_place()
        ):
            device = core.CUDAPinnedPlace()
        elif isinstance(device, core.XPUPlace) or (
            isinstance(device, core.Place) and device.is_xpu_place()
        ):
            device = core.XPUPinnedPlace()
        else:
            raise RuntimeError(f"Pinning memory is not supported for {device}")
    # Delegate the actual sampling to ``uniform`` over [0, 1).
    tensor = uniform(
        shape=shape,
        dtype=dtype,
        min=0.0,
        max=1.0,
        name=name,
        out=out,
        device=device,
        requires_grad=requires_grad,
    )
    # ``pin_memory()`` is a dynamic-mode-only tensor method, hence the guard.
    if pin_memory and in_dynamic_mode():
        tensor = tensor.pin_memory()
    return tensor
2021-12-24 16:18:38 +08:00
2025-08-14 17:16:28 +08:00
@param_one_alias(["lam", "lambd"])
def exponential_(
    x: Tensor, lam: float = 1.0, name: str | None = None
) -> Tensor:
    r"""
    Fill the input Tensor ``x`` in place with samples drawn from an
    Exponential Distribution with rate parameter ``lam`` (:math:`\lambda`).

    .. math::

        f(x) = \lambda e^{-\lambda x}

    .. note::
        Alias Support: The parameter name ``lambd`` can be used as an alias for ``lam``.
        For example, ``exponential_(tensor_x, lambd=1.0, ...)`` is equivalent to ``exponential_(tensor_x, lam=1.0, ...)``.

    Args:
        x(Tensor): Input tensor. The data type should be float32, float64.
        lam(float, optional): :math:`\lambda` parameter of Exponential Distribution. Default, 1.0.
            alias: ``lambd``.
        name(str|None, optional): The default value is None. Normally there is no
            need for user to set this property. For more information, please
            refer to :ref:`api_guide_Name`.

    Returns:
        Tensor, Input Tensor ``x``.

    Examples:
        .. code-block:: pycon

            >>> import paddle
            >>> paddle.set_device('cpu')
            >>> paddle.seed(100)

            >>> x = paddle.empty([2, 3])
            >>> x.exponential_()
            >>> # doctest: +SKIP("Random output")
            Tensor(shape=[2, 3], dtype=float32, place=Place(cpu), stop_gradient=True,
            [[0.80643415, 0.23211166, 0.01169797],
             [0.72520679, 0.45208144, 0.30234432]])
            >>> # doctest: -SKIP
    """
    # Dynamic / PIR mode: dispatch straight to the in-place C++ kernel.
    if in_dynamic_or_pir_mode():
        return _C_ops.exponential_(x, lam)

    # Static-graph mode: validate the input dtype first, then append the op
    # with ``x`` serving as both input and output (in-place semantics).
    check_variable_and_dtype(
        x, "x", ["float16", "float32", "float64", "uint16"], "exponential"
    )
    helper = LayerHelper("exponential", **locals())
    helper.append_op(
        type='exponential',
        inputs={"X": x},
        outputs={'Out': x},
        attrs={"lambda": lam},
    )
    return x