# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Operators for random."""
from ..._checkparam import Validator, Rel
from ...common import dtype as mstype
from ..primitive import PrimitiveWithInfer, prim_attr_register
from .._utils import get_broadcast_shape
class StandardNormal(PrimitiveWithInfer):
    r"""
    Generates random numbers according to the standard Normal (or Gaussian) random number distribution.

    Returns the tensor with the given shape, the random numbers in it drawn from normal distributions
    whose mean is 0 and standard deviation is 1.

    Args:
        seed (int): Random seed, must be non-negative. Default: 0.
        seed2 (int): Random seed2, must be non-negative. Default: 0.

    Inputs:
        - **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.

    Outputs:
        Tensor. The shape is the same as the input `shape`. The dtype is float32.

    Raises:
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If `shape` is not a tuple.
        ValueError: If `shape` is not a constant value.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> shape = (3, 4)
        >>> stdnormal = ops.StandardNormal(seed=2)
        >>> output = stdnormal(shape)
        >>> print(output)
        [[-1.3031056   0.64198005 -0.65207404 -1.767485  ]
         [-0.91792876  0.6508565  -0.9098478  -0.14092612]
         [ 0.7806437   1.1585592   1.9676613  -0.00440959]]
    """

    @prim_attr_register
    def __init__(self, seed=0, seed2=0):
        """Initialize StandardNormal"""
        self.init_prim_io_names(inputs=['shape'], outputs=['output'])
        # Random ops are stateful (they advance an RNG); mark the side effect so
        # the graph optimizer does not fold or reorder them.
        self.add_prim_attr('side_effect_mem', True)
        Validator.check_non_negative_int(seed, "seed", self.name)
        Validator.check_non_negative_int(seed2, "seed2", self.name)

    def __infer__(self, shape):
        # The output shape is taken verbatim from the constant `shape` input.
        shape_v = shape["value"]
        if shape_v is None:
            raise ValueError(f"For {self.name}, shape must be const.")
        Validator.check_value_type("shape", shape_v, [tuple], self.name)
        for i, shape_i in enumerate(shape_v):
            Validator.check_positive_int(shape_i, f'shape[{i}]', self.name)
        out = {
            'shape': shape_v,
            'dtype': mstype.float32,
            'value': None}
        return out
class StandardLaplace(PrimitiveWithInfer):
    r"""
    Generates random numbers according to the Laplace random number distribution (mean=0, lambda=1).

    It is defined as:

    .. math::
        \text{f}(x;0,1) = \frac{1}{2}\exp(-|x|),

    Args:
        seed (int): Random seed. Default: 0.
        seed2 (int): Random seed2. Default: 0.

    Inputs:
        - **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.

    Outputs:
        Tensor. The shape that the input 'shape' denotes. The dtype is float32.

    Raises:
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If `shape` is not a tuple.
        ValueError: If `shape` is not a constant value.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> shape = (4, 16)
        >>> stdlaplace = ops.StandardLaplace(seed=2)
        >>> output = stdlaplace(shape)
        >>> result = output.shape
        >>> print(result)
        (4, 16)
    """

    @prim_attr_register
    def __init__(self, seed=0, seed2=0):
        """Initialize StandardLaplace"""
        self.init_prim_io_names(inputs=['shape'], outputs=['output'])
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)
        # NOTE: unlike StandardNormal, seeds here are only type-checked, not
        # required to be non-negative — the docstring imposes no range.
        Validator.check_value_type('seed', seed, [int], self.name)
        Validator.check_value_type('seed2', seed2, [int], self.name)

    def __infer__(self, shape):
        # Output shape mirrors the constant `shape` input exactly.
        shape_v = shape["value"]
        if shape_v is None:
            raise ValueError(f"For {self.name}, shape must be const.")
        Validator.check_value_type("shape", shape_v, [tuple], self.name)
        for i, shape_i in enumerate(shape_v):
            Validator.check_positive_int(shape_i, f'shape[{i}]', self.name)
        out = {
            'shape': shape_v,
            'dtype': mstype.float32,
            'value': None}
        return out
class Gamma(PrimitiveWithInfer):
    r"""
    Produces random positive floating-point values x, distributed according to probability density function:

    .. math::
        \text{P}(x|α,β) = \frac{\exp(-x/β)}{{β^α}\cdot{\Gamma(α)}}\cdot{x^{α-1}},

    Args:
        seed (int): Random seed, must be non-negative. Default: 0.
        seed2 (int): Random seed2, must be non-negative. Default: 0.

    Inputs:
        - **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.
        - **alpha** (Tensor) - The α distribution parameter. It must be greater than 0.
          It is also known as the shape parameter with float32 data type.
        - **beta** (Tensor) - The β distribution parameter. It must be greater than 0.
          It is also known as the scale parameter with float32 data type.

    Outputs:
        Tensor. The shape must be the broadcasted shape of Input "shape" and shapes of alpha and beta.
        The dtype is float32.

    Raises:
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If neither `alpha` nor `beta` is a Tensor.
        ValueError: If `shape` is not a constant value.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> shape = (3, 1, 2)
        >>> alpha = Tensor(np.array([[3, 4], [5, 6]]), mstype.float32)
        >>> beta = Tensor(np.array([1.0]), mstype.float32)
        >>> gamma = ops.Gamma(seed=3)
        >>> output = gamma(shape, alpha, beta)
        >>> result = output.shape
        >>> print(result)
        (3, 2, 2)
    """

    @prim_attr_register
    def __init__(self, seed=0, seed2=0):
        """Initialize Gamma"""
        self.init_prim_io_names(inputs=['shape', 'alpha', 'beta'], outputs=['output'])
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)
        Validator.check_non_negative_int(seed, "seed", self.name)
        Validator.check_non_negative_int(seed2, "seed2", self.name)

    def __infer__(self, shape, alpha, beta):
        shape_v = shape["value"]
        if shape_v is None:
            raise ValueError(f"For {self.name}, shape must be const.")
        Validator.check_value_type("shape", shape_v, [tuple], self.name)
        for i, shape_i in enumerate(shape_v):
            Validator.check_positive_int(shape_i, f'shape[{i}]', self.name)
        Validator.check_tensor_dtype_valid("alpha", alpha["dtype"], [mstype.float32], self.name)
        Validator.check_tensor_dtype_valid("beta", beta["dtype"], [mstype.float32], self.name)
        # Output shape = broadcast(alpha, beta) broadcast with the requested shape.
        broadcast_shape = get_broadcast_shape(alpha['shape'], beta['shape'], self.name)
        broadcast_shape = get_broadcast_shape(broadcast_shape, shape_v, self.name)
        out = {
            'shape': broadcast_shape,
            'dtype': mstype.float32,
            'value': None}
        return out
class Poisson(PrimitiveWithInfer):
    r"""
    Produces random non-negative integer values i, distributed according to discrete probability function:

    .. math::
        \text{P}(i|μ) = \frac{\exp(-μ)μ^{i}}{i!},

    Args:
        seed (int): Random seed, must be non-negative. Default: 0.
        seed2 (int): Random seed2, must be non-negative. Default: 0.

    Inputs:
        - **shape** (tuple) - The shape of random tensor to be generated. Only constant value is allowed.
        - **mean** (Tensor) - μ parameter the distribution was constructed with. The parameter defines mean number
          of occurrences of the event. It must be greater than 0. With float32 data type.

    Outputs:
        Tensor. Its shape must be the broadcasted shape of `shape` and the shape of `mean`.
        The dtype is int32.

    Raises:
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If `shape` is not a tuple.
        TypeError: If `mean` is not a Tensor whose dtype is not float32.

    Supported Platforms:
        ``Ascend``

    Examples:
        >>> shape = (4, 1)
        >>> mean = Tensor(np.array([5.0, 10.0]), mstype.float32)
        >>> poisson = ops.Poisson(seed=5)
        >>> output = poisson(shape, mean)
        >>> result = output.shape
        >>> print(result)
        (4, 2)
    """

    @prim_attr_register
    def __init__(self, seed=0, seed2=0):
        """Initialize Poisson"""
        self.init_prim_io_names(inputs=['shape', 'mean'], outputs=['output'])
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)
        Validator.check_non_negative_int(seed, "seed", self.name)
        Validator.check_non_negative_int(seed2, "seed2", self.name)

    def __infer__(self, shape, mean):
        shape_v = shape["value"]
        if shape_v is None:
            raise ValueError(f"For {self.name}, shape must be const.")
        Validator.check_value_type("shape", shape_v, [tuple], self.name)
        for i, shape_i in enumerate(shape_v):
            Validator.check_positive_int(shape_i, f'shape[{i}]', self.name)
        Validator.check_tensor_dtype_valid("mean", mean["dtype"], [mstype.float32], self.name)
        # Output shape = shape of `mean` broadcast with the requested shape.
        broadcast_shape = get_broadcast_shape(mean['shape'], shape_v, self.name)
        out = {
            'shape': broadcast_shape,
            'dtype': mstype.int32,
            'value': None}
        return out
class RandomChoiceWithMask(PrimitiveWithInfer):
    """
    Generates a random sample as index tensor with a mask tensor from a given tensor.

    The input must be a tensor of rank not less than 1. If its rank is greater than or equal to 2,
    the first dimension specifies the number of samples.
    The index tensor and the mask tensor have the fixed shapes. The index tensor denotes the index of the nonzero
    sample, while the mask tensor denotes which elements in the index tensor are valid.

    Args:
        count (int): Number of items expected to get and the number must be greater than 0. Default: 256.
        seed (int): Random seed. Default: 0.
        seed2 (int): Random seed2. Default: 0.

    Inputs:
        - **input_x** (Tensor[bool]) - The input tensor.
          The input tensor rank must be greater than or equal to 1 and less than or equal to 5.

    Outputs:
        Two tensors, the first one is the index tensor and the other one is the mask tensor.

        - **index** (Tensor) - The output shape is 2-D.
        - **mask** (Tensor) - The output shape is 1-D.

    Raises:
        TypeError: If `count` is not an int.
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If `input_x` is not a Tensor.

    Supported Platforms:
        ``Ascend`` ``GPU``

    Examples:
        >>> rnd_choice_mask = ops.RandomChoiceWithMask()
        >>> input_x = Tensor(np.ones(shape=[240000, 4]).astype(np.bool_))
        >>> output_y, output_mask = rnd_choice_mask(input_x)
        >>> result = output_y.shape
        >>> print(result)
        (256, 2)
        >>> result = output_mask.shape
        >>> print(result)
        (256,)
    """

    @prim_attr_register
    def __init__(self, count=256, seed=0, seed2=0):
        """Initialize RandomChoiceWithMask"""
        Validator.check_value_type("count", count, [int], self.name)
        Validator.check_positive_int(count, "count", self.name)
        Validator.check_value_type('seed', seed, [int], self.name)
        Validator.check_value_type('seed2', seed2, [int], self.name)
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)

    def infer_shape(self, x_shape):
        Validator.check_int(len(x_shape), 1, Rel.GE, "input_x rank", self.name)
        Validator.check_int(len(x_shape), 5, Rel.LE, "input_x rank", self.name)
        # index: (count, rank of input); mask: (count,).
        return [self.count, len(x_shape)], [self.count]

    def infer_dtype(self, x_dtype):
        Validator.check_tensor_dtype_valid('x', x_dtype, [mstype.bool_], self.name)
        return mstype.int32, mstype.bool_
class RandomCategorical(PrimitiveWithInfer):
    """
    Generates random samples from a given categorical distribution tensor.

    Args:
        dtype (mindspore.dtype): The type of output. Its value must be one of mindspore.int16,
            mindspore.int32 and mindspore.int64. Default: mindspore.int64.

    Inputs:
        - **logits** (Tensor) - The input tensor. 2-D Tensor with shape [batch_size, num_classes].
        - **num_sample** (int) - Number of sample to be drawn. Only constant values is allowed.
        - **seed** (int) - Random seed. Default: 0. Only constant values is allowed.

    Outputs:
        - **output** (Tensor) - The output Tensor with shape [batch_size, num_samples].

    Raises:
        TypeError: If `dtype` is not one of the following: mindspore.int16, mindspore.int32, mindspore.int64.
        TypeError: If `logits` is not a Tensor.
        TypeError: If neither `num_sample` nor `seed` is an int.

    Supported Platforms:
        ``Ascend`` ``GPU``

    Examples:
        >>> class Net(nn.Cell):
        ...     def __init__(self, num_sample):
        ...         super(Net, self).__init__()
        ...         self.random_categorical = ops.RandomCategorical(mindspore.int64)
        ...         self.num_sample = num_sample
        ...     def construct(self, logits, seed=0):
        ...         return self.random_categorical(logits, self.num_sample, seed)
        ...
        >>> x = np.random.random((10, 5)).astype(np.float32)
        >>> net = Net(8)
        >>> output = net(Tensor(x))
        >>> result = output.shape
        >>> print(result)
        (10, 8)
    """

    @prim_attr_register
    def __init__(self, dtype=mstype.int64):
        """Initialize RandomCategorical"""
        self.dtype = dtype
        valid_values = (mstype.int32, mstype.int16, mstype.int64)
        Validator.check_type_name("dtype", dtype, valid_values, self.name)
        self.init_prim_io_names(inputs=['logits', 'num_samples', 'seed'],
                                outputs=['output'])
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)

    def __infer__(self, logits, num_samples, seed):
        logits_dtype = logits['dtype']
        valid_dtypes = (mstype.float32, mstype.float16, mstype.float64)
        Validator.check_tensor_dtype_valid('logits', logits_dtype, valid_dtypes, self.name)
        num_samples_v = num_samples['value']
        seed_v = seed['value']
        Validator.check_value_type('num_samples', num_samples_v, (int,), self.name)
        Validator.check_value_type('seed', seed_v, (int,), self.name)
        Validator.check_positive_int(num_samples_v, "num_samples", self.name)
        x_shape = list(logits['shape'])
        if len(x_shape) != 2:
            raise ValueError("RandomCategorical shape should be 2-dimension.")
        # Output keeps the batch dimension; the class dimension is replaced
        # by the number of drawn samples.
        ndim = len(x_shape) - 1
        x_shape[ndim] = num_samples_v
        self.add_prim_attr('num_samples', num_samples_v)
        self.add_prim_attr('seed', seed_v)
        return {'shape': (x_shape),
                'dtype': (self.dtype),
                'value': None}
class Multinomial(PrimitiveWithInfer):
    r"""
    Returns a tensor sampled from the multinomial probability distribution located in the corresponding
    row of tensor input.

    Note:
        The rows of input do not need to sum to one (in which case we use the values as weights),
        but must be non-negative, finite and have a non-zero sum.

    Args:
        seed (int): Random seed, must be non-negative. Default: 0.
        seed2 (int): Random seed2, must be non-negative. Default: 0.

    Inputs:
        - **x** (Tensor[float32]) - the input tensor containing the cumsum of probabilities, must be 1 or 2
          dimensions.
        - **num_samples** (int32) - number of samples to draw.

    Outputs:
        Tensor with the same rows as `x`, each row has num_samples sampled indices.

    Raises:
        TypeError: If neither `seed` nor `seed2` is an int.
        TypeError: If `input` is not a Tensor whose dtype is float32.
        TypeError: If dtype of `num_samples` is not int32.

    Supported Platforms:
        ``GPU``

    Examples:
        >>> x = Tensor([0., 9., 4., 0.], mstype.float32)
        >>> multinomial = ops.Multinomial(seed=10)
        >>> output = multinomial(x, 2)
        >>> print(output)
        [2 1]
    """

    @prim_attr_register
    def __init__(self, seed=0, seed2=0):
        """Initialize Multinomial."""
        Validator.check_non_negative_int(seed, "seed", self.name)
        Validator.check_non_negative_int(seed2, "seed2", self.name)
        self.init_prim_io_names(inputs=['input', 'num_sample'], outputs=['output'])
        # Stateful RNG op; prevent graph-level folding/reordering.
        self.add_prim_attr('side_effect_mem', True)

    def __infer__(self, inputs, num_samples):
        input_shape = inputs["shape"]
        if len(input_shape) != 1 and len(input_shape) != 2:
            raise ValueError("input dim must be 1 or 2")
        Validator.check_tensor_dtype_valid('inputs', inputs['dtype'], [mstype.float32], self.name)
        num_samples_value = num_samples["value"]
        if num_samples_value is None:
            # Fixed: previous message read "shape nust be const" — typo, and it
            # is num_samples (not shape) that must be constant here.
            raise ValueError(f"For {self.name}, num_samples must be const.")
        Validator.check_value_type("num_samples", num_samples_value, (int,), self.name)
        # Pass self.name so the validator error identifies this primitive,
        # consistent with every other check in this file.
        Validator.check_positive_int(num_samples_value, "num_samples", self.name)
        # 1-D input -> (num_samples,); 2-D input -> (rows, num_samples).
        y_shape = (num_samples_value,)
        if len(input_shape) == 2:
            y_shape = (input_shape[0], num_samples_value)
        out = {
            "shape": y_shape,
            "dtype": mstype.int32,
            "value": None}
        return out