   AT_ASSERT(!indices.is_variable() && !values.is_variable()); // They should be plain tensors!
+
+  AT_CHECK(!indices.is_sparse(), "expected indices to be a dense tensor, but got indices of layout ", indices.layout());
+  AT_CHECK(!values.is_sparse(), "expected values to be a dense tensor, but got values of layout ", values.layout());
+
   AT_CHECK(values.type().toSparse() == type(), "values type must match sparse tensor type");
   AT_CHECK(indices.type().scalarType() == kLong, "indices must be an int64 tensor");
   AT_CHECK(indices.type().backend() == values.type().backend(), "backend of indices (", indices.type().backend(), ") must match backend of values (", values.type().backend(), ")");
   AT_CHECK(!indices.is_cuda() || indices.get_device() == values.get_device(), "device of indices (", indices.get_device(), ") must match device of values (", values.get_device(), ")");

-  AT_CHECK(indices.dim() == 2, "indices must be nDim x nnz, but got: ", indices.sizes());
+  AT_CHECK(indices.dim() == 2, "indices must be sparse_dim x nnz, but got: ", indices.sizes());
   AT_CHECK(indices.size(1) == values.size(0), "indices and values must have same nnz, but got nnz from indices: ", indices.size(1), ", nnz from values: ", values.size(0));
-  AT_CHECK(indices.size(0) == sparseDims_, "indices has incorrect first dimension, expected ", sparseDims_, ", got ", indices.size(0));
-  AT_CHECK(values.dim() == denseDims_ + 1, "values has incorrect number of dimensions, expected ", denseDims_ + 1, ", got ", values.dim());
+  AT_CHECK(indices.size(0) == sparse_dim_, "indices has incorrect first dimension, expected ", sparse_dim_, ", got ", indices.size(0));
+  AT_CHECK(values.dim() == dense_dim_ + 1, "values has incorrect number of dimensions, expected ", dense_dim_ + 1, ", got ", values.dim());

-  auto dense_size_original = sizes().slice(sparseDims_);
+  auto dense_size_original = sizes().slice(sparse_dim_);
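For reference, the invariants these checks enforce are visible from the public Python constructor: indices must be a 2-D int64 tensor of shape (sparse_dim, nnz), values must have dense_dim + 1 dimensions, and values.size(0) must equal nnz. A minimal sketch, not part of this diff, using the documented torch.sparse_coo_tensor, Tensor.sparse_dim() and Tensor.dense_dim() APIs:

import torch

# indices: int64, shape (sparse_dim, nnz); values: shape (nnz, *dense_sizes)
i = torch.tensor([[0, 1, 1],
                  [2, 0, 2]])              # sparse_dim = 2, nnz = 3
v = torch.tensor([[3.0], [4.0], [5.0]])    # dense_dim = 1, so values has 1 + 1 = 2 dims
s = torch.sparse_coo_tensor(i, v, (2, 3, 1))
print(s.sparse_dim(), s.dense_dim())       # 2 1

The second hunk applies the same sparse_dim/dense_dim rename inside the resize checks below.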
   AT_CHECK(sparse_dim + dense_dim == size.size(), "number of dimensions must be sparse_dim (", sparse_dim, ") + dense_dim (", dense_dim, "), but got ", size.size());
   if (nnz() > 0) {
     auto alt_options_msg = "You could try the following options:\n\
-1. If you need an empty sparse tensor of this size, call `x=torch.sparse_coo_tensor(size)`.\n\
+1. If you need an empty sparse tensor of this size, call `x = torch.sparse_coo_tensor(size)`.\n\
 2. If you need to resize this tensor, you have the following options:\n\
     1. For both sparse and dense dimensions, keep the number of them constant and the size of them non-shrinking, and then try the same call again.\n\
     2. Or, create a new sparse tensor with the correct indices and values from this sparse tensor.";

-    AT_CHECK(sparseDims == sparseDims_,
-      "changing the number of sparse dimensions (from ", sparseDims_, " to ", sparseDims, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);
+    AT_CHECK(sparse_dim == sparse_dim_,
+      "changing the number of sparse dimensions (from ", sparse_dim_, " to ", sparse_dim, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);

-    AT_CHECK(denseDims == denseDims_,
-      "changing the number of dense dimensions (from ", denseDims_, " to ", denseDims, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);
+    AT_CHECK(dense_dim == dense_dim_,
+      "changing the number of dense dimensions (from ", dense_dim_, " to ", dense_dim, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);

     bool shrinking_sparse_dims = false;
-    bool shrinking_dense_dims = false;
-    auto sparse_size_original = sizes().slice(0, sparseDims);
-    auto sparse_size_new = size.slice(0, sparseDims);
-    for (int i = 0; i < sparseDims; i++) {
+    bool shrinking_dense_dim = false;
+    auto sparse_size_original = sizes().slice(0, sparse_dim);
+    auto sparse_size_new = size.slice(0, sparse_dim);
+    for (int i = 0; i < sparse_dim; i++) {
       if (sparse_size_new[i] < sparse_size_original[i]) {
         shrinking_sparse_dims = true;
         break;
       }
     }
-    auto dense_size_original = sizes().slice(sparseDims);
-    auto dense_size_new = size.slice(sparseDims);
-    for (int i = 0; i < denseDims; i++) {
+    auto dense_size_original = sizes().slice(sparse_dim);
+    auto dense_size_new = size.slice(sparse_dim);
+    for (int i = 0; i < dense_dim; i++) {
       if (dense_size_new[i] < dense_size_original[i]) {
-        shrinking_dense_dims = true;
+        shrinking_dense_dim = true;
         break;
       }
     }

     AT_CHECK(!shrinking_sparse_dims,
       "shrinking the size of sparse dimensions (from ", sparse_size_original, " to ", sparse_size_new, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);

-    AT_CHECK(!shrinking_dense_dims,
+    AT_CHECK(!shrinking_dense_dim,
       "shrinking the size of dense dimensions (from ", dense_size_original, " to ", dense_size_new, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg);