Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add comments and error msg #181

Merged
merged 1 commit into from
Aug 21, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 26 additions & 12 deletions include/mxnet/ndarray.h
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,8 @@ class NDArray {
*/
inline const TShape &storage_shape() const {
CHECK(ptr_ != nullptr);
CHECK_NE(storage_type(), kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "storage_shape() is not intended for kDefaultStorage.";
return ptr_->storage_shape;
}

Expand All @@ -216,19 +217,22 @@ class NDArray {
* \return the shape of aux data at given index
*/
inline const TShape& aux_shape(size_t index) const {
CHECK(storage_type() != kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "aux_shape() is not intended for kDefaultStorage.";
return ptr_->aux_shapes[index];
}

/* \return the shapes of all aux data */
const std::vector<TShape>& aux_shapes() const {
CHECK(storage_type() != kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "aux_shapes() is not intended for kDefaultStorage.";
return ptr_->aux_shapes;
}

/*! returns the dtypes of all aux data */
const std::vector<int>& aux_types() const {
CHECK(storage_type() != kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "aux_types() is not intended for kDefaultStorage.";
return ptr_->aux_types;
}

Expand Down Expand Up @@ -308,12 +312,17 @@ class NDArray {
inline bool storage_initialized() const {
if (is_none()) return false;
auto stype = storage_type();
CHECK_NE(stype, kDefaultStorage);
CHECK_NE(stype, kDefaultStorage)
<< "storage_initialized() is not intended for kDefaultStorage.";
if (stype == kRowSparseStorage) {
CHECK_EQ(aux_shape(rowsparse::kIdx)[0], storage_shape()[0]);
CHECK_EQ(aux_shape(rowsparse::kIdx)[0], storage_shape()[0])
<< "inconsistent storage shape " << storage_shape()
<< " vs. aux shape " << aux_shape(rowsparse::kIdx);
return aux_shape(0).Size() != 0;
} else if (stype == kCSRStorage) {
CHECK_EQ(aux_shape(csr::kIdx)[0], storage_shape()[0]);
CHECK_EQ(aux_shape(csr::kIdx)[0], storage_shape()[0])
<< "inconsistent storage shape " << storage_shape()
<< " vs. aux shape " << aux_shape(csr::kIdx);
return aux_shape(0).Size() != 0;
} else {
LOG(FATAL) << "Unknown storage type";
Expand Down Expand Up @@ -498,7 +507,8 @@ class NDArray {
* \return NDArray in new shape and type.
*/
inline NDArray AsArray(const TShape &shape, int dtype) const {
CHECK_EQ(storage_type(), kDefaultStorage) << "Not implemented yet";
CHECK_EQ(storage_type(), kDefaultStorage)
<< "AsArray is intended only for kDefaultStorage.";
CHECK_GE(shape_.Size() * mshadow::mshadow_sizeof(dtype_),
shape.Size() * mshadow::mshadow_sizeof(dtype))
<< "NDArray.AsArray: target memory size is bigger";
Expand Down Expand Up @@ -565,15 +575,18 @@ class NDArray {
* aux_shape is only known at run time
*/
inline void CheckAndAlloc(const std::vector<TShape> &aux_shapes) const {
CHECK_NE(storage_type(), kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "CheckAndAlloc(aux_shapes) is not intended for kDefaultStorage";
ptr_->CheckAndAlloc(shape_, aux_shapes, dtype_);
}
inline void CheckAndAllocData(const TShape &storage_shape) const {
CHECK_NE(storage_type(), kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "CheckAndAllocData is not intended for kDefaultStorage";
ptr_->CheckAndAllocData(storage_shape, dtype_);
}
inline void CheckAndAllocAuxData(size_t i, const TShape &aux_shape) const {
CHECK_NE(storage_type(), kDefaultStorage);
CHECK_NE(storage_type(), kDefaultStorage)
<< "CheckAndAllocAuxData is not intended for kDefaultStorage";
ptr_->CheckAndAllocAuxData(i, aux_shape);
}
/*!
Expand Down Expand Up @@ -741,7 +754,8 @@ class NDArray {
/*! \brief Check and alloc memory for a dense ndarray */
// size is the number of bytes
void CheckAndAlloc(uint64_t dbytes) {
CHECK_EQ(kDefaultStorage, storage_type);
CHECK_EQ(kDefaultStorage, storage_type)
<< "CheckAndAlloc(dbytes) is not intended for kDefaultStorage";
if (delay_alloc) {
shandle = Storage::Get()->Alloc(dbytes, shandle.ctx);
delay_alloc = false;
Expand Down
10 changes: 2 additions & 8 deletions python/mxnet/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -870,14 +870,8 @@ def check_symbolic_backward(sym, location, out_grads, expected, rtol=1e-5, atol=
if isinstance(expected, (list, tuple)):
expected = {k:v for k, v in zip(sym.list_arguments(), expected)}
args_grad_npy = {k:_rng.normal(size=v.shape) for k, v in expected.items()}
args_grad_data = {}
for k, v in args_grad_npy.items():
nd = mx.nd.array(v, ctx=ctx)
if grad_stypes is not None and k in grad_stypes:
out = nd.tostype(grad_stypes[k])
args_grad_data[k] = out
else:
args_grad_data[k] = nd
args_grad_data = {k: mx.nd.array(v, ctx=ctx) if grad_stypes is None or k not in grad_stypes \
else mx.nd.array(v, ctx=ctx).tostype(grad_stypes[k])}

if isinstance(grad_req, str):
grad_req = {k:grad_req for k in sym.list_arguments()}
Expand Down
2 changes: 1 addition & 1 deletion tests/python/unittest/test_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -533,7 +533,7 @@ def fm(factor_size, feature_dim, init):
mod.backward() # compute gradients
mod.update() # update parameters
# print('Epoch %d, Training %s' % (epoch, metric.get()))
assert(metric.get()[1] < 0.02)
assert(metric.get()[1] < 0.05), metric.get()[1]


def test_module_initializer():
Expand Down