# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""SparseTensor implementation."""
from __future__ import absolute_import, annotations
__all__ = ['RowTensorInner', 'RowTensor', 'SparseTensor', 'COOTensor', 'CSRTensor']
from typing import Tuple, Union
from mindspore import log as logger
from mindspore.common import dtype as mstype
from mindspore.common._register_for_tensor import tensor_operator_registry
from mindspore.common.tensor import Tensor
from mindspore._c_expression import COOTensor as COOTensor_
from mindspore._c_expression import CSRTensor as CSRTensor_
from mindspore._c_expression import RowTensor as RowTensor_
from mindspore._c_expression import Tensor as Tensor_
from mindspore import _checkparam as validator
from mindspore._checkparam import is_stub_tensor
class RowTensorInner(RowTensor_):
"""
Implementation for RowTensor, for MindSpore developers only.
"""
def __init__(self, indices=None, values=None, shape=None, row_tensor=None):
"""Init RowTensor"""
self.init_finished = False
# Directly init a RowTensor from another RowTensor
if row_tensor is not None:
if not isinstance(row_tensor, (RowTensor, RowTensor_)):
raise TypeError(f"Expect input `row_tensor` to be a RowTensor, but got {type(row_tensor)}")
if not (indices is None and values is None and shape is None):
raise TypeError("If input `row_tensor` is provided, `indices`, `values` and `shape` should all be `None`")
RowTensor_.__init__(self, row_tensor)
# Init a RowTensor from indices, values and shape
else:
if is_stub_tensor(values):
values = values.stub_sync()
RowTensor_.__init__(self, indices, values, shape)
self.init_finished = True
def __repr__(self):
"""Avoid PyTest Segfault when RowTensor is not initialized."""
if self.init_finished:
return RowTensor_.__repr__(self)
return ''
@property
def indices(self):
"""Return RowTensor's indices."""
return Tensor(self._indices)
@property
def values(self):
"""Return RowTensor's non-zero values."""
return Tensor(self._values)
@property
def dense_shape(self):
"""Return RowTensor's shape."""
return self._shape
class RowTensor(RowTensorInner):
"""
A sparse representation of a set of tensor slices at given indices.
If the `values` of a RowTensor have a shape of :math:`(d_0, d_1, ..., d_n)`, then the RowTensor represents a
subset of a larger dense tensor of shape :math:`(l_0, d_1, ..., d_n)`, where :math:`d_i` is the size of the
i-th axis of the RowTensor, :math:`l_0` is the size of the 0-th axis of the dense tensor, and :math:`l_0 > d_0`.
The parameter `indices` specifies the locations at which the `RowTensor` is sliced along the first dimension of
the dense tensor, so the parameters `indices` and `values` satisfy the relationship
:math:`dense[indices[i], :, :, :, ...] = values[i, :, :, :, ...]`.
For example, if indices is [0], values is [[1, 2]], shape is
:math:`(3, 2)` , then the dense representation of the row tensor will be:
.. code-block::
[[1, 2],
[0, 0],
[0, 0]]
.. warning::
This is an experimental API that is subject to change or deletion.
Args:
indices (Tensor): A 1-D integer Tensor of shape :math:`(d_0)` . Default: ``None``.
values (Tensor): A Tensor of any dtype of shape :math:`(d_0, d_1, ..., d_n)` . Default: ``None``.
shape (tuple(int)): An integer tuple which contains the shape
of the corresponding dense tensor. Default: ``None``.
row_tensor (RowTensor): A RowTensor object. Default: ``None``.
Returns:
RowTensor, composed of `indices`, `values`, and `shape`.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, RowTensor
>>> indices = Tensor([0])
>>> values = Tensor([[1, 2]], dtype=ms.float32)
>>> shape = (3, 2)
>>> x = RowTensor(indices, values, shape)
>>> print(x.values)
[[1. 2.]]
>>> print(x.indices)
[0]
>>> print(x.dense_shape)
(3, 2)
"""
def __init__(self, indices=None, values=None, shape=None, row_tensor=None):
"""Init RowTensor"""
logger.warning("'RowTensor' is deprecated from version 1.7 and will be removed in a future version.")
super().__init__(indices, values, shape, row_tensor)
class SparseTensor(COOTensor_):
"""
A sparse representation of a set of nonzero elements from a tensor at given indices.
SparseTensor can only be used in the `Cell`'s construct method.
For a dense tensor `dense`, its SparseTensor(indices, values, dense_shape) satisfies
`dense[indices[i]] = values[i]`.
For example, if indices is [[0, 1], [1, 2]], values is [1, 2], dense_shape is
(3, 4), then the dense representation of the sparse tensor will be:
.. code-block::
[[0, 1, 0, 0],
[0, 0, 2, 0],
[0, 0, 0, 0]]
Note:
The interface is deprecated from version 1.7 and will be removed in a future version.
Please use :class:`mindspore.COOTensor` instead.
Args:
indices (Tensor): A 2-D integer Tensor of shape :math:`(N, ndims)`,
where N and ndims are the number of `values` and number of dimensions in
the SparseTensor, respectively.
values (Tensor): A 1-D tensor of any type and shape :math:`(N)`, which
supplies the values for each element in `indices`.
shape (tuple(int)): An integer tuple of size :math:`(ndims)`,
which specifies the shape of the sparse tensor.
Returns:
SparseTensor, composed of `indices`, `values`, and `shape`.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, SparseTensor
>>> indices = Tensor([[0, 1], [1, 2]])
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (3, 4)
>>> x = SparseTensor(indices, values, shape)
>>> print(x.values)
[1. 2.]
>>> print(x.indices)
[[0 1]
[1 2]]
>>> print(x.shape)
(3, 4)
"""
def __init__(self, indices, values, shape):
"""Init COOTensor."""
logger.warning("'SparseTensor' is deprecated from version 1.7 and will be removed in a future version. " +
"Please use 'COOTensor' instead.")
if not (isinstance(indices, Tensor) and isinstance(values, Tensor) and isinstance(shape, tuple)):
raise TypeError("Inputs must follow: COOTensor(indices, values, shape).")
if is_stub_tensor(indices):
indices = indices.stub_sync()
if is_stub_tensor(values):
values = values.stub_sync()
COOTensor_.__init__(self, indices, values, shape)
@property
def indices(self):
"""Return SparseTensor's indices."""
return Tensor(self._indices)
@property
def values(self):
"""Return SparseTensor's non-zero values."""
return Tensor(self._values)
@property
def shape(self):
"""Return SparseTensor's shape."""
return self._shape
class COOTensor(COOTensor_):
"""
A sparse representation of a set of nonzero elements from a tensor at given indices.
For a dense tensor `dense`, its COOTensor(indices, values, shape) satisfies
`dense[indices[i]] = values[i]`.
For example, if indices is [[0, 1], [1, 2]], values is [1, 2], shape is
(3, 4), then the dense representation of the sparse tensor will be:
.. code-block::
[[0, 1, 0, 0],
[0, 0, 2, 0],
[0, 0, 0, 0]]
Common arithmetic operations include: addition (+), subtraction (-), multiplication (*),
and division (/). For details about operations supported by `COOTensor`, see
`operators <https://www.mindspore.cn/docs/en/master/note/static_graph_syntax_support.html#operators>`_.
.. warning::
- This is an experimental API that is subject to change or deletion.
- Currently, duplicate coordinates in the indices will not be coalesced.
If the indices contain out-of-bound values, the result will be undefined.
Args:
indices (Tensor): A 2-D integer Tensor of shape :math:`(N, ndims)`,
where N and ndims are the number of `values` and number of dimensions in
the COOTensor, respectively. Currently, `ndims` must be 2. Default: ``None`` .
Please make sure that the indices are in range of the given shape.
values (Tensor): A 1-D tensor of any type and shape :math:`(N)`, which
supplies the values for each element in `indices`. Default: ``None`` .
shape (tuple(int)): An integer tuple of shape :math:`(ndims)`,
which specifies the dense_shape of the sparse tensor. Default: ``None`` .
coo_tensor (COOTensor): A COOTensor object. Default: ``None`` .
Returns:
COOTensor, composed of `indices`, `values`, and `shape`.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (3, 4)
>>> x = COOTensor(indices, values, shape)
>>> print(x.values)
[1. 2.]
>>> print(x.indices)
[[0 1]
[1 2]]
>>> print(x.shape)
(3, 4)
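>>> # Illustrative use of the arithmetic described above (a sketch, not part of the original
>>> # example): `*` with a same-shape dense Tensor gathers the dense values at the stored
>>> # `indices` and multiplies them into `values` (see __mul__ below).
>>> dense = Tensor([[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]], dtype=ms.float32)
>>> print((x * dense).values)
[1. 2.]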
"""
def __init__(self, indices=None, values=None, shape=None, coo_tensor=None):
"""Init COOTensor"""
self.init_finished = False
# Directly init a COOTensor from another COOTensor
if coo_tensor is not None:
if not isinstance(coo_tensor, (COOTensor, COOTensor_)):
raise TypeError(f"Expect input `coo_tensor` to be a COOTensor, but got {type(coo_tensor)}")
if not (indices is None and values is None and shape is None):
raise TypeError("If input `coo_tensor` is provided, `indices`, `values` and `shape` should all be `None`")
COOTensor_.__init__(self, coo_tensor)
# Init a COOTensor from indices, values and shape
else:
validator.check_coo_tensor_input(indices, values, shape)
validator.check_coo_tensor_shape(indices.shape, values.shape, shape)
validator.check_coo_tensor_dtype(indices.dtype)
indices = tensor_operator_registry.get('stop_gradient')(indices)
if is_stub_tensor(indices):
indices = indices.stub_sync()
if is_stub_tensor(values):
values = values.stub_sync()
COOTensor_.__init__(self, indices, values, shape)
self.init_finished = True
def __repr__(self):
"""Avoid PyTest Segfault when COOTensor is not initialized."""
if self.init_finished:
return COOTensor_.__repr__(self)
return ''
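# Arithmetic operators below require an operand with the same shape. With a dense Tensor
# operand, `+` and `-` scatter the stored values onto the dense data and return a dense
# Tensor, while `*` and `/` gather the dense values at `indices` and return a new COOTensor.
# Two COOTensors are combined through the registered sparse kernel ('coo_add').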
def __neg__(self):
return COOTensor(self.indices, -self.values, self.shape)
def __add__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, Tensor):
return tensor_operator_registry.get("tensor_scatter_add")(other, self.indices, self.values)
if isinstance(other, COOTensor):
return tensor_operator_registry.get('coo_add')(self, other, Tensor(0, self.values.dtype))
raise TypeError("COOTensor add with %s is not supported." % type(other))
def __sub__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, Tensor):
return tensor_operator_registry.get("tensor_scatter_add")(-other, self.indices, self.values)
if isinstance(other, COOTensor):
return tensor_operator_registry.get('coo_add')(
self, -other, Tensor(0, self.values.dtype))
raise TypeError("COOTensor subtract with %s is not supported." % type(other))
def __mul__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, Tensor):
other_values = tensor_operator_registry.get("gather_nd")(other, self.indices)
return COOTensor(self.indices, self.values * other_values, self.shape)
raise TypeError("COOTensor multiply with %s is not supported." % type(other))
def __div__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, Tensor):
logger.warning("For sparse divide, zero values in the dense tensor are ignored.")
other_values = tensor_operator_registry.get("gather_nd")(other, self.indices)
return COOTensor(self.indices, self.values / other_values, self.shape)
raise TypeError("COOTensor divide with %s is not supported." % type(other))
def __truediv__(self, other):
return self.__div__(other)
@property
def indices(self) -> Tensor:
"""Return COOTensor's indices."""
return Tensor(self._indices)
@property
def values(self) -> Tensor:
"""Return COOTensor's non-zero values."""
return Tensor(self._values)
@property
def shape(self) -> Tuple[int, ...]:
"""Return COOTensor's shape."""
return self._shape
@property
def dtype(self) -> mstype:
"""
Return the dtype of the values of COOTensor (:class:`mindspore.dtype`).
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (3, 4)
>>> coo_tensor = COOTensor(indices, values, shape)
>>> print(coo_tensor.dtype)
Float32
"""
return self._dtype
@property
def size(self) -> int:
"""
Return the number of non-zero values.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1, 2], [1, 0, 2]], dtype=ms.int32)
>>> values = Tensor([1, 5, 4], dtype=ms.float32)
>>> shape = (3, 3)
>>> coo_tensor = COOTensor(indices.transpose(), values, shape)
>>> print(coo_tensor.size)
3
"""
return self.values.size
@property
def itemsize(self) -> int:
"""
Return the length of one tensor element in bytes.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float64)
>>> shape = (3, 4)
>>> coo_tensor = COOTensor(indices, values, shape)
>>> print(coo_tensor.itemsize)
8
"""
return self.values.itemsize
@property
def ndim(self) -> int:
"""
Return the number of tensor dimensions.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> coo_tensor = COOTensor(indices, values, (3, 4))
>>> print(coo_tensor.ndim)
2
"""
return len(self.shape)
def coalesce(self) -> COOTensor:
"""
Returns a coalesced copy of an uncoalesced sparse tensor.
Returns:
A COOTensor.
Supported Platforms:
``GPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> x_indices = Tensor([[0, 0, 1], [1, 1, 2]], dtype=ms.int64)
>>> x_values = Tensor([1, 5, 4], dtype=ms.float32)
>>> x_shape = (3, 3)
>>> coo_tensor = COOTensor(x_indices.transpose(), x_values, x_shape)
>>> res = coo_tensor.coalesce()
>>> print(res)
COOTensor(shape=[3, 3], dtype=Float32, indices=Tensor(shape=[2, 2], dtype=Int64,
value=[[0 1] [1 2]]), values=Tensor(shape=[2], dtype=Float32, value=[6.00000000e+00 4.00000000e+00]))
"""
shape = Tensor(self.shape)
res_indices, res_values, _ = tensor_operator_registry.get("coalesce")(self.indices.transpose(),
self.values, shape)
return COOTensor(res_indices.transpose(), res_values, self.shape)
def to_csr(self) -> CSRTensor:
"""
Converts COOTensor to CSRTensor.
Note:
Currently only supports CPU backend with LLVM 12.0.1 installed.
Returns:
CSRTensor.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.int32)
>>> shape = (3, 4)
>>> coo_tensor = COOTensor(indices, values, shape)
>>> print(coo_tensor.to_csr())
CSRTensor(shape=[3, 4], dtype=Int32, indptr=Tensor(shape=[4], dtype=Int32, value=[0 1 2 2]),
indices=Tensor(shape=[2], dtype=Int32, value=[1 2]), values=Tensor(shape=[2], dtype=Int32, value=[1 2]))
"""
row_indices = self.indices[:, 0]
col_indices = self.indices[:, 1]
idx_dtype = self.indices.dtype
row_indices, sort_idx = tensor_operator_registry.get("sort")(
row_indices.astype(mstype.float32))
row_indices = row_indices.astype(idx_dtype)
col_indices = col_indices[sort_idx]
values = self.values[sort_idx]
indptr = tensor_operator_registry.get("coo2csr")(row_indices, self.shape[0])
return CSRTensor(indptr, col_indices, values, self.shape)
def to_dense(self) -> Tensor:
"""
Converts COOTensor to Dense Tensor.
Returns:
Tensor.
Supported Platforms:
``GPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1, 2], [1, 0, 2]], dtype=ms.int32)
>>> values = Tensor([1, 5, 4], dtype=ms.float32)
>>> shape = (3, 3)
>>> coo_tensor = COOTensor(indices.transpose(), values, shape)
>>> print(coo_tensor.to_dense())
[[0. 1. 0.]
[5. 0. 0.]
[0. 0. 4.]]
"""
zeros_tensor = tensor_operator_registry.get("zeros")(self.shape, self.values.dtype)
return tensor_operator_registry.get("tensor_scatter_add")(
zeros_tensor, self.indices, self.values)
def astype(self, dtype: mstype) -> COOTensor:
"""
Return a copy of the COOTensor, with its values cast to the specified type.
Args:
dtype (Union[:class:`mindspore.dtype`, numpy.dtype, str]): Designated tensor dtype.
Returns:
COOTensor.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (3, 4)
>>> coo_tensor = COOTensor(indices, values, shape)
>>> print(coo_tensor.astype(ms.float64).dtype)
Float64
"""
data = self.values.astype(dtype)
return COOTensor(self.indices, data, self.shape)
def to_tuple(self) -> Tuple[Tensor, Tensor, Tuple[int, ...]]:
"""
Return indices, values and shape as a tuple.
Returns:
Tuple.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1], [1, 2]], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (3, 4)
>>> coo_tensor = COOTensor(indices, values, shape)
>>> print(coo_tensor.to_tuple())
(Tensor(shape=[2, 2], dtype=Int32, value=
[[0, 1],
[1, 2]]), Tensor(shape=[2], dtype=Float32, value= [ 1.00000000e+00, 2.00000000e+00]), (3, 4))
"""
return self.indices, self.values, self.shape
def abs(self) -> COOTensor:
"""
Return the absolute value element-wise.
Returns:
COOTensor.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, COOTensor
>>> indices = Tensor([[0, 1, 2], [1, 0, 2]], dtype=ms.int32)
>>> values = Tensor([1, -5, -4], dtype=ms.float32)
>>> shape = (3, 3)
>>> coo_tensor = COOTensor(indices.transpose(), values, shape)
>>> res = coo_tensor.abs()
>>> print(res.values)
[1. 5. 4.]
"""
data = self.values.abs()
return COOTensor(self.indices, data, self.shape)
def add(self, other: COOTensor, thresh: Tensor) -> COOTensor:
"""
Return the sum with another COOTensor.
Args:
other (COOTensor): the second COOTensor to sum.
thresh (Tensor): A 0-D Tensor representing the magnitude threshold that determines
whether an output value/index pair takes space. Its dtype
should match that of the values if they are real. If an output
value is less than `thresh`, it will vanish.
Returns:
COOTensor, representing the sum.
Raises:
ValueError: If any input(self/other)'s indices's dim is not equal to 2.
ValueError: If any input(self/other)'s values's dim is not equal to 1.
ValueError: If any input(self/other)'s shape's dim is not equal to 1.
ValueError: If thresh's dim is not equal to 0.
TypeError: If any input(self/other)'s indices's type is not equal to int64.
TypeError: If any input(self/other)'s shape's type is not equal to int64.
ValueError: If any input(self/other)'s indices's length is not equal to
its values's length.
TypeError: If any input(self/other)'s values's type is not equal to any of
(int8/int16/int32/int64/float32/float64/complex64/complex128).
TypeError: If thresh's type is not equal to any of
(int8/int16/int32/int64/float32/float64).
TypeError: If self's indices's type is not equal to other's indices's type.
TypeError: If self's values's type is not equal to other's values's type.
TypeError: If self's shape's type is not equal to other's shape's type.
TypeError: If (self/other)'s value's type does not match thresh's type.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> from mindspore import Tensor, COOTensor
>>> from mindspore import dtype as mstype
>>> indics0 = Tensor([[0, 1], [1, 2]], dtype=mstype.int64)
>>> values0 = Tensor([1, 2], dtype=mstype.int32)
>>> shape0 = (3, 4)
>>> input0 = COOTensor(indics0, values0, shape0)
>>> indics1 = Tensor([[0, 0], [1, 1]], dtype=mstype.int64)
>>> values1 = Tensor([3, 4], dtype=mstype.int32)
>>> shape1 = (3, 4)
>>> input1 = COOTensor(indics1, values1, shape1)
>>> thres = Tensor(0, dtype=mstype.int32)
>>> out = input0.add(input1, thres)
>>> print(out)
COOTensor(shape=[3, 4], dtype=Int32, indices=Tensor(shape=[4, 2], dtype=Int64, value=
[[0 0]
[0 1]
[1 1]
[1 2]]), values=Tensor(shape=[4], dtype=Int32, value=[3 1 4 2]))
"""
return tensor_operator_registry.get('coo_add')(self, other, thresh)
class CSRTensor(CSRTensor_):
r"""
Constructs a sparse tensor in CSR (Compressed Sparse Row) format, with specified
values indicated by `values` and row and column positions indicated by `indptr`
and `indices`.
For example, if indptr is [0, 1, 2, 2], indices is [1, 2], values is [1., 2.], shape is
(3, 4), then the dense representation of the sparse tensor will be:
.. code-block::
[[0., 1., 0., 0.],
[0., 0., 2., 0.],
[0., 0., 0., 0.]]
Common arithmetic operations include: addition (+), subtraction (-), multiplication (*),
and division (/). For details about operations supported by `CSRTensor`, see
`operators <https://www.mindspore.cn/docs/en/master/note/static_graph_syntax_support.html#operators>`_.
.. warning::
- This is an experimental API that is subject to change.
- If the values given by `indptr` or `indices` are invalid, the results may be undefined. Invalid values include
when the length of `values` or `indices` exceeds the range indicated by `indptr`, and when the columns
indicated by `indices` are repeated on the same row.
Args:
indptr (Tensor): 1-D Tensor of shape :math:`(M)`, where `M` equals `shape[0] + 1`. It indicates the
start and end positions of `values` in each row. Default: ``None``. If provided,
must be int16, int32 or int64.
indices (Tensor): 1-D Tensor of shape :math:`(N)`, which has the same length as `values`. `indices`
indicates the column in which each value should be placed. Default: ``None``. If provided,
must be int16, int32 or int64.
values (Tensor): Tensor, which has the same length as `indices` (values.shape[0] == indices.shape[0]).
`values` stores the data for CSRTensor. Default: ``None``.
shape (tuple(int)): An integer tuple of size :math:`(ndims)`; `shape[0]` must equal `M - 1`,
the number of rows of the CSRTensor. Default: ``None``.
csr_tensor (CSRTensor): A CSRTensor object. Values' feature dimension should match with
CSRTensor's feature dimension :math:`(values.shape[1:] == csr\_tensor.shape[2:])` . Default: ``None``.
Outputs:
CSRTensor, with shape defined by `shape`, and dtype inferred from `values`.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> # initialize a csr_tensor with indptr, indices, values and shape
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> # access a data member of CSRTensor
>>> print(indptr == csr_tensor.indptr)
[ True True True]
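>>> # Illustrative use of the arithmetic described above (a sketch, not part of the original
>>> # example): `-` between two CSRTensors of the same shape is supported; with an identical
>>> # sparsity pattern the stored values combine element-wise.
>>> other = CSRTensor(indptr, indices, Tensor([3, 4], dtype=ms.float32), shape)
>>> diff = csr_tensor - other   # stored values become [1. - 3., 2. - 4.] == [-2., -2.]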
"""
def __init__(self, indptr=None, indices=None, values=None, shape=None, csr_tensor=None):
"Init CSRTensor"
self.init_finished = False
# Directly init a CSRTensor from another CSRTensor
if csr_tensor is not None:
if not isinstance(csr_tensor, (CSRTensor, CSRTensor_)):
raise TypeError(f"Expect input `csr_tensor` to be a CSRTensor, but got {type(csr_tensor)}")
if not (indptr is None and indices is None and values is None and shape is None):
raise TypeError(
"If input `csr_tensor` is provided, `indptr`, `indices`, `values` and `shape` should all be `None`")
CSRTensor_.__init__(self, csr_tensor)
# Init a CSRTensor from indptr, indices, values and shape
else:
validator.check_csr_tensor_input(indptr, indices, values, shape)
validator.check_csr_tensor_shape(indptr.shape, indices.shape, values.shape, shape)
validator.check_csr_tensor_dtype(indptr.dtype, indices.dtype)
indptr = tensor_operator_registry.get('stop_gradient')(indptr)
indices = tensor_operator_registry.get('stop_gradient')(indices)
if is_stub_tensor(indptr):
indptr = indptr.stub_sync()
if is_stub_tensor(values):
values = values.stub_sync()
if is_stub_tensor(indices):
indices = indices.stub_sync()
CSRTensor_.__init__(self, indptr, indices, values, shape)
self.init_finished = True
def __repr__(self):
"""Avoid PyTest Segfault when CSRTensor is not initialized."""
if self.init_finished:
return CSRTensor_.__repr__(self)
return ''
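# Arithmetic operators: `*` and `/` combine this CSRTensor with a dense operand through the
# registered 'csr_mul' / 'csr_div' kernels, and `+` / `-` between two CSRTensors of the same
# shape are expressed as 'csr_add' with coefficients (1, 1) and (1, -1) respectively.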
def __mul__(self, other):
return tensor_operator_registry.get('csr_mul')(self, other)
def __div__(self, other):
logger.warning("For CSR divide, zero values in the dense tensor are ignored.")
return tensor_operator_registry.get('csr_div')(self, other)
def __truediv__(self, other):
return self.__div__(other)
def __neg__(self):
return CSRTensor(self.indptr, self.indices, -self.values, self.shape)
def __add__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, CSRTensor):
return tensor_operator_registry.get('csr_add')(
self, other, Tensor(1, self.values.dtype), Tensor(1, self.values.dtype))
raise TypeError("CSRTensor add with %s is not supported." % type(other))
def __sub__(self, other):
if not self.shape == other.shape:
raise ValueError("Input tensors should have the same shape.")
if isinstance(other, CSRTensor):
return tensor_operator_registry.get('csr_add')(
self, other, Tensor(1, self.values.dtype), Tensor(-1, self.values.dtype))
raise TypeError("CSRTensor subtract with %s is not supported." % type(other))
@property
def indptr(self) -> Tensor:
"""Return CSRTensor's row indices pointers."""
return Tensor(self._indptr)
@property
def indices(self) -> Tensor:
"""
Return CSRTensor's column indices.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.indices)
[0 1]
"""
return Tensor(self._indices)
@property
def values(self) -> Tensor:
"""
Return CSRTensor's non-zero values.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.values)
[1. 2.]
"""
return Tensor(self._values)
@property
def shape(self) -> Tuple[int, ...]:
"""
Return CSRTensor's shape.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.shape)
(2, 4)
"""
return self._shape
@property
def dtype(self) -> mstype:
"""
Return the dtype of the values of CSRTensor (:class:`mindspore.dtype`).
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.dtype)
Float32
"""
return self._dtype
@property
def size(self) -> int:
"""
Return the number of non-zero values.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.size)
2
"""
return self.values.size
@property
def itemsize(self) -> int:
"""
Return the length of one tensor element in bytes.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float64)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.itemsize)
8
"""
return self.values.itemsize
@property
def ndim(self) -> int:
"""
Return the number of tensor dimensions.
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.ndim)
2
"""
return len(self.shape)
def to_tuple(self) -> Tuple[Tensor, Tensor, Tensor, Tuple[int, ...]]:
"""
Return indptr, indices, values and shape as a tuple.
Returns:
Tuple.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.to_tuple())
(Tensor(shape=[3], dtype=Int32, value= [0, 1, 2]), Tensor(shape=[2], dtype=Int32, value= [0, 1]),
Tensor(shape=[2], dtype=Float32, value= [ 1.00000000e+00, 2.00000000e+00]), (2, 4))
"""
return self.indptr, self.indices, self.values, self.shape
def to_coo(self) -> COOTensor:
"""
Converts CSRTensor to COOTensor.
Note:
Currently only supports CPU backend with LLVM 12.0.1 installed.
Returns:
COOTensor.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.int32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.to_coo())
COOTensor(shape=[2, 4], dtype=Int32, indices=Tensor(shape=[2, 2], dtype=Int32, value=
[[0 0]
[1 1]]), values=Tensor(shape=[2], dtype=Int32, value=[1 2]))
"""
if self.ndim != 2:
raise ValueError("Currently only support 2-D CSRTensor when converting to COOTensor.")
row_indices = tensor_operator_registry.get("csr2coo")(self.indptr, self.values.shape[0])
coo_indices = tensor_operator_registry.get("stack")((row_indices, self.indices), 1)
return COOTensor(coo_indices, self.values, self.shape)
def to_dense(self) -> Tensor:
"""
Converts CSRTensor to Dense Tensor.
Returns:
Tensor.
Supported Platforms:
``GPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.to_dense())
[[1. 0. 0. 0.]
[0. 2. 0. 0.]]
"""
return tensor_operator_registry.get("csr_to_dense")(self)
def astype(self, dtype: mstype) -> CSRTensor:
"""
Return a copy of the CSRTensor, with its values cast to the specified type.
Args:
dtype (Union[:class:`mindspore.dtype`, numpy.dtype, str]): Designated tensor dtype.
Returns:
CSRTensor.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([1, 2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.astype(ms.float64).dtype)
Float64
"""
data = self.values.astype(dtype)
return CSRTensor(self.indptr, self.indices, data, self.shape)
def mv(self, dense_vector: Tensor) -> Tensor:
"""
Return the result of multiplying this CSRTensor by a dense vector on the right.
A CSRTensor of shape `[M, N]` multiplied by a dense vector of shape `[N, 1]`
produces a dense result of shape `[M, 1]`.
Note:
Currently only supports CPU backend with LLVM 12.0.1 installed.
Args:
dense_vector (Tensor): A dense Tensor, its shape must be (csr_tensor.shape[1], 1)
Returns:
Tensor.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> from mindspore import Tensor, CSRTensor
>>> from mindspore import dtype as mstype
>>> indptr = Tensor([0, 1, 2], dtype=mstype.int32)
>>> indices = Tensor([0, 1], dtype=mstype.int32)
>>> values = Tensor([2, 1], dtype=mstype.float32)
>>> dense_shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, dense_shape)
>>> dense = Tensor([[1], [1], [1], [1]], dtype=mstype.float32)
>>> print(csr_tensor.mv(dense))
[[2.]
[1.]]
"""
validator.check_value_type('dense_vector', dense_vector, (Tensor, Tensor_,), 'CSRTensor.mv')
return tensor_operator_registry.get("csr_mv")(self, dense_vector)
def mm(self, matrix: Union[Tensor, CSRTensor]) -> Union[Tensor, CSRTensor]:
"""
Return the result of multiplying this CSRTensor by a matrix (dense Tensor or CSRTensor) on the right.
A CSRTensor of shape `[M, N]` multiplied by a right matrix of shape `[N, K]`
produces a dense Tensor or CSRTensor of shape `[M, K]`.
Note:
If right matrix is CSRTensor, currently only supports GPU backend.
If right matrix is Tensor, currently supports CPU backend with LLVM no lower than 12.0.1, and GPU backend.
Args:
matrix (Tensor or CSRTensor): A dense Tensor or CSRTensor,
its shape[0] should be equal to csr_tensor.shape[1]
Returns:
Tensor or CSRTensor.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> from mindspore import Tensor, CSRTensor
>>> from mindspore import dtype as mstype
>>> indptr = Tensor([0, 1, 2], dtype=mstype.int32)
>>> indices = Tensor([0, 1], dtype=mstype.int32)
>>> values = Tensor([2, 1], dtype=mstype.float32)
>>> dense_shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, dense_shape)
>>> dense_matrix = Tensor([[1., 2.], [1, 2.], [1, 2.], [1., 2.]], dtype=mstype.float32)
>>> print(csr_tensor.mm(dense_matrix))
[[2. 4.]
[1. 2.]]
"""
if isinstance(matrix, CSRTensor):
return tensor_operator_registry.get("csr_mm")(self, matrix)
validator.check_value_type('matrix', matrix, (Tensor, Tensor_,), 'CSRTensor.mm')
return tensor_operator_registry.get("csr_mm_akg")()(self.indptr, self.indices, self.values,
self.shape, matrix)
def sum(self, axis: int) -> Tensor:
"""
Reduces a dimension of a CSRTensor by summing all elements in the dimension.
Note:
Currently only supports CPU backend with LLVM 12.0.1 installed.
Args:
axis (int): The dimensions to reduce.
Returns:
Tensor, the dtype is the same as `CSRTensor.values`.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> from mindspore import Tensor, CSRTensor
>>> from mindspore import dtype as mstype
>>> indptr = Tensor([0, 1, 2], dtype=mstype.int32)
>>> indices = Tensor([0, 1], dtype=mstype.int32)
>>> values = Tensor([2, 1], dtype=mstype.float32)
>>> dense_shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, dense_shape)
>>> print(csr_tensor.sum(1))
[[2.]
[1.]]
"""
return tensor_operator_registry.get("csr_reduce_sum")(self, axis)
def abs(self) -> CSRTensor:
"""
Return the absolute value element-wise.
Returns:
CSRTensor, with all values being non-negative.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
Examples:
>>> import mindspore as ms
>>> from mindspore import Tensor, CSRTensor
>>> indptr = Tensor([0, 1, 2], dtype=ms.int32)
>>> indices = Tensor([0, 1], dtype=ms.int32)
>>> values = Tensor([-1, -2], dtype=ms.float32)
>>> shape = (2, 4)
>>> csr_tensor = CSRTensor(indptr, indices, values, shape)
>>> print(csr_tensor.abs().values)
[1. 2.]
"""
data = self.values.abs()
return CSRTensor(self.indptr, self.indices, data, self.shape)
def add(self, b: CSRTensor, alpha: Tensor, beta: Tensor) -> CSRTensor:
"""
Addition of two CSR Tensors: C = alpha * A + beta * B.
Args:
b (CSRTensor): Sparse CSR Tensor.
alpha(Tensor): Dense Tensor, its shape must be able to broadcast to self.
beta(Tensor): Dense Tensor, its shape must be able to broadcast to b.
Returns:
CSRTensor.
Supported Platforms:
``GPU`` ``CPU``
Examples:
>>> from mindspore import Tensor, CSRTensor
>>> import mindspore.common.dtype as mstype
>>> indptr = Tensor([0, 1, 2], dtype=mstype.int32)
>>> indices = Tensor([0, 1], dtype=mstype.int32)
>>> values_a = Tensor([2, 1], dtype=mstype.float32)
>>> values_b = Tensor([1, 2], dtype=mstype.float32)
>>> dense_shape = (2, 4)
>>> alpha = Tensor(1, mstype.float32)
>>> beta = Tensor(1, mstype.float32)
>>> a = CSRTensor(indptr, indices, values_a, dense_shape)
>>> b = CSRTensor(indptr, indices, values_b, dense_shape)
>>> print(a.add(b, alpha, beta))
CSRTensor(shape=[2, 4], dtype=Float32,
indptr=Tensor(shape=[3], dtype=Int32, value=[0 1 2]),
indices=Tensor(shape=[2], dtype=Int32, value=[0 1]),
values=Tensor(shape=[2], dtype=Float32, value=[ 3.00000000e+00 3.00000000e+00]))
"""
return tensor_operator_registry.get('csr_add')(self, b, alpha, beta)