
Remove deprecated api #428

Merged 6 commits on Mar 14, 2020
2 changes: 1 addition & 1 deletion docs/api.rst
@@ -31,7 +31,7 @@ Datasets
.. autoclass:: torchani.data.CachedDataset
    :members:
.. autofunction:: torchani.data.load_ani_dataset
-.. autoclass:: torchani.data.BatchedANIDataset
+.. autoclass:: torchani.data.PaddedBatchChunkDataset



30 changes: 8 additions & 22 deletions torchani/data/__init__.py
@@ -7,7 +7,6 @@
from ._pyanitools import anidataloader
import torch
from .. import utils
-import warnings
from .new import CachedDataset, ShuffledDataset, find_threshold

default_device = 'cuda' if torch.cuda.is_available() else 'cpu'
@@ -159,6 +158,13 @@ def split_whole_into_batches_and_chunks(atomic_properties, properties, batch_siz


class PaddedBatchChunkDataset(Dataset):
r""" Dataset that contains batches in 'chunks', with padded structures

This dataset acts as a container of batches to be used when training. Each
of the batches is broken up into 'chunks', each of which is a tensor has
molecules with a smiliar number of atoms, but which have been padded with
dummy atoms in order for them to have the same tensor dimensions.
"""

    def __init__(self, atomic_properties, properties, batch_size,
                 dtype=torch.get_default_dtype(), device=default_device):
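
The padding described in the new docstring can be illustrated with a short stand-alone sketch. The helper below is hypothetical, not part of torchani: it stacks per-molecule species tensors of unequal length into one chunk tensor, filling the tail of shorter molecules with a dummy-atom index of -1 (the value torchani uses for padded species).

import torch

def pad_species_chunk(species_list, padding=-1):
    # Hypothetical helper: stack per-molecule species tensors of unequal
    # length into one (molecules, max_atoms) chunk, padding with dummy atoms.
    max_atoms = max(s.shape[0] for s in species_list)
    chunk = torch.full((len(species_list), max_atoms), padding, dtype=torch.long)
    for i, s in enumerate(species_list):
        chunk[i, :s.shape[0]] = s
    return chunk

# A 3-atom and a 5-atom molecule become one (2, 5) tensor:
print(pad_species_chunk([torch.tensor([1, 0, 0]), torch.tensor([2, 0, 0, 1, 1])]))
# tensor([[ 1,  0,  0, -1, -1],
#         [ 2,  0,  0,  1,  1]])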
@@ -193,26 +199,6 @@ def __len__(self):
        return len(self.batches)


-class BatchedANIDataset(PaddedBatchChunkDataset):
-    """Same as :func:`torchani.data.load_ani_dataset`. This API has been deprecated."""
-
-    def __init__(self, path, species_tensor_converter, batch_size,
-                 shuffle=True, properties=('energies',), atomic_properties=(), transform=(),
-                 dtype=torch.get_default_dtype(), device=default_device):
-        self.properties = properties
-        self.atomic_properties = atomic_properties
-        warnings.warn("BatchedANIDataset is deprecated; use load_ani_dataset()", DeprecationWarning)
-
-        atomic_properties, properties = load_and_pad_whole_dataset(
-            path, species_tensor_converter, shuffle, properties, atomic_properties)
-
-        # do transformations on data
-        for t in transform:
-            atomic_properties, properties = t(atomic_properties, properties)
-
-        super().__init__(atomic_properties, properties, batch_size, dtype, device)


def load_ani_dataset(path, species_tensor_converter, batch_size, shuffle=True,
                     rm_outlier=False, properties=('energies',), atomic_properties=(),
                     transform=(), dtype=torch.get_default_dtype(), device=default_device,
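
With the deprecated class gone, a direct call to load_ani_dataset takes the same leading arguments the removed constructor did. A minimal migration sketch; the dataset path is a placeholder, and ChemicalSymbolsToInts is assumed to be the species converter in use:

import torchani

species_converter = torchani.utils.ChemicalSymbolsToInts(['H', 'C', 'N', 'O'])

# Before this PR:
#   dataset = torchani.data.BatchedANIDataset('data.h5', species_converter, 256)
# After: the same arguments go straight to load_ani_dataset, which builds
# PaddedBatchChunkDataset objects internally ('data.h5' is a placeholder path;
# depending on the split arguments the result may be a tuple of datasets).
dataset = torchani.data.load_ani_dataset('data.h5', species_converter, 256)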
@@ -361,4 +347,4 @@ def load_ani_dataset(path, species_tensor_converter, batch_size, shuffle=True,
    return tuple(ret)


-__all__ = ['load_ani_dataset', 'BatchedANIDataset', 'CachedDataset', 'ShuffledDataset', 'find_threshold']
+__all__ = ['load_ani_dataset', 'PaddedBatchChunkDataset', 'CachedDataset', 'ShuffledDataset', 'find_threshold']
4 changes: 1 addition & 3 deletions torchani/utils.py
@@ -172,9 +172,7 @@ def sae(self, species):
        return self_energies.sum(dim=1) + intercept

    def subtract_from_dataset(self, atomic_properties, properties):
-        """Transformer for :class:`torchani.data.BatchedANIDataset` that
-        subtract self energies.
-        """
+        """Transformer that subtracts self energies from a dataset"""
        if self.self_energies is None:
            self_energies = self.sae_from_dataset(atomic_properties, properties)
            self.self_energies = torch.tensor(self_energies, dtype=torch.double)
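
This transformer keeps the (atomic_properties, properties) -> (atomic_properties, properties) shape that load_ani_dataset's transform argument expects, so self-energy subtraction can be wired in at load time. A sketch under those assumptions, with a placeholder path and an EnergyShifter left to fit self energies from the data (self_energies=None takes the sae_from_dataset branch shown above):

import torchani

species_converter = torchani.utils.ChemicalSymbolsToInts(['H', 'C', 'N', 'O'])
shifter = torchani.utils.EnergyShifter(None)  # fit self energies from the dataset

# subtract_from_dataset is passed as one of the transforms applied while
# the dataset is loaded and batched ('data.h5' is a placeholder path).
dataset = torchani.data.load_ani_dataset(
    'data.h5', species_converter, 256,
    transform=(shifter.subtract_from_dataset,))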