Numerics #168

Merged · 10 commits · Sep 10, 2024
14 changes: 9 additions & 5 deletions .github/workflows/build.yml
@@ -21,24 +21,28 @@ jobs:

    # Skip CI if 'skip ci' is contained in latest commit message
    if: "!contains(github.event.head_commit.message, 'skip ci')"

    steps:
    - uses: actions/checkout@v4
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install .[dev]

    - name: Run pre-test scripts
      run: |
        python tests/gen_test_data.py

    - name: Test with pytest
      run: |
        pytest

    - name: Upload coverage to Codecov
      uses: codecov/codecov-action@v3
      with:
10 changes: 8 additions & 2 deletions careless/io/manager.py
@@ -453,6 +453,8 @@ def build_model(self, parser=None, surrogate_posterior=None, prior=None, likelih
         else:
             raise ValueError(f"Unsupported scale bijector type, {parser.scale_bijector}")
 
+        istd = BaseModel.get_intensities(self.inputs).std()
+
         if parser.image_layers > 0:
             from careless.models.scaling.image import NeuralImageScaler
             n_images = np.max(BaseModel.get_image_id(self.inputs)) + 1
@@ -462,10 +464,14 @@ def build_model(self, parser=None, surrogate_posterior=None, prior=None, likelih
                 parser.mlp_layers,
                 mlp_width,
                 epsilon=parser.epsilon,
-                scale_bijector=scale_bijector
+                scale_bijector=scale_bijector,
+                scale_multiplier=istd,
             )
         else:
-            mlp_scaler = MLPScaler(parser.mlp_layers, mlp_width, epsilon=parser.epsilon, scale_bijector=scale_bijector)
+            mlp_scaler = MLPScaler(
+                parser.mlp_layers, mlp_width,
+                epsilon=parser.epsilon, scale_bijector=scale_bijector, scale_multiplier=istd,
+            )
         if parser.use_image_scales:
             n_images = np.max(BaseModel.get_image_id(self.inputs)) + 1
             image_scaler = ImageScaler(n_images)
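A note on the `istd` plumbing above: `istd` is the empirical standard deviation of the observed intensities, and passing it as `scale_multiplier` lets the neural scaler work with outputs of order one that are only multiplied back up to the magnitude of the data at the very end. A minimal sketch of the idea in plain NumPy, with made-up numbers rather than the careless API:

```python
import numpy as np

# Raw intensities span a large dynamic range; their standard deviation
# captures the overall magnitude of the data.
intensities = np.random.gamma(shape=2.0, scale=1e4, size=1000)
istd = intensities.std()

# The network can then predict scales of order one...
unit_scale = 1.3
# ...which are multiplied by istd to reach the physical magnitude,
# keeping the network's own parameters in a numerically friendly range.
physical_scale = unit_scale * istd
```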
4 changes: 2 additions & 2 deletions careless/models/scaling/image.py
@@ -96,7 +96,7 @@ def call(self, metadata_and_image_id, *args, **kwargs):
         return result
 
 class NeuralImageScaler(Scaler):
-    def __init__(self, image_layers, max_images, mlp_layers, mlp_width, leakiness=0.01, epsilon=1e-7, scale_bijector=None):
+    def __init__(self, image_layers, max_images, mlp_layers, mlp_width, leakiness=0.01, epsilon=1e-7, scale_bijector=None, scale_multiplier=None):
         super().__init__()
         layers = []
         if leakiness is None:
@@ -111,7 +111,7 @@ def __init__(self, image_layers, max_images, mlp_layers, mlp_width, leakiness=0.

         self.image_layers = layers
         from careless.models.scaling.nn import MetadataScaler
-        self.metadata_scaler = MetadataScaler(mlp_layers, mlp_width, leakiness, epsilon=epsilon, scale_bijector=scale_bijector)
+        self.metadata_scaler = MetadataScaler(mlp_layers, mlp_width, leakiness, epsilon=epsilon, scale_bijector=scale_bijector, scale_multiplier=scale_multiplier)
 
     def call(self, inputs):
         result = self.get_metadata(inputs)
13 changes: 12 additions & 1 deletion careless/models/scaling/nn.py
@@ -29,7 +29,7 @@ class MetadataScaler(Scaler):
     Neural network based scaler with simple dense layers.
     This neural network outputs a normal distribution.
     """
-    def __init__(self, n_layers, width, leakiness=0.01, epsilon=1e-7, scale_bijector=None):
+    def __init__(self, n_layers, width, leakiness=0.01, epsilon=1e-7, scale_bijector=None, scale_multiplier=None):
         """
         Parameters
         ----------
@@ -40,6 +40,13 @@ def __init__(self, n_layers, width, leakiness=0.01, epsilon=1e-7, scale_bijector
         leakiness : float or None
             If float, use LeakyReLU activation with provided parameter. Otherwise
             use a simple ReLU
+        epsilon : float
+            A small constant for numerical stability. This is passed to the distribution layer.
+        scale_bijector : tfp.bijectors.Bijector
+            Optional scale bijector for the output distribution
+        scale_multiplier : float
+            Optional constant to multiply the output location and scale by. This can increase
+            numerical stability.
         """
         super().__init__()
 
@@ -74,6 +81,10 @@ def __init__(self, n_layers, width, leakiness=0.01, epsilon=1e-7, scale_bijector
         #The final layer converts the output to a Normal distribution
         #tfp_layers.append(tfp.layers.IndependentNormal())
         tfp_layers.append(NormalLayer(epsilon=epsilon, scale_bijector=scale_bijector))
+        if scale_multiplier is not None:
+            tfp_layers.append(
+                tfk.layers.Lambda(lambda x: tfb.Scale(scale_multiplier)(x))
+            )
 
         self.network = tfk.Sequential(mlp_layers)
         self.distribution = tfk.Sequential(tfp_layers)
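Mechanically, the appended `Lambda` layer wraps the Normal produced by `NormalLayer` in a `TransformedDistribution` whose location and scale are both multiplied by `scale_multiplier`; for a Normal, scaling by a constant c maps N(mu, sigma) to N(c*mu, c*sigma). A standalone sketch of that behavior using only TensorFlow Probability, independent of the careless layers:

```python
import tensorflow_probability as tfp

tfd = tfp.distributions
tfb = tfp.bijectors

# The network's output distribution, with loc and scale of order one.
base = tfd.Normal(loc=1.0, scale=0.1)

# Applying a Scale bijector yields a TransformedDistribution whose
# samples are simply 100x those of the base distribution.
scaled = tfb.Scale(100.0)(base)

x = scaled.sample(10_000)
# In TF2 eager mode: x.numpy().mean() is ~100 and x.numpy().std() is ~10.
```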
1 change: 1 addition & 0 deletions pyproject.toml
@@ -55,3 +55,4 @@ testpaths = [

[tool.setuptools.dynamic]
version = {file = "careless/VERSION"}

37 changes: 16 additions & 21 deletions tests/conftest.py
@@ -8,28 +8,23 @@
 import gemmi
 
 def pytest_sessionstart(session):
-    rundir = "data/"
-    rundir = abspath(join(dirname(__file__), rundir))
-
-    command = """
-careless poly
-    --disable-progress-bar
-    --iterations=10
-    --merge-half-datasets
-    --half-dataset-repeats=3
-    --test-fraction=0.1
-    --disable-gpu
-    --anomalous
-    --wavelength-key=Wavelength
-    dHKL,Hobs,Kobs,Lobs,Wavelength
-    pyp_off.mtz
-    pyp_2ms.mtz
-    output/pyp
+    out_dir = "data/output"
+    out_dir = abspath(join(dirname(__file__), out_dir))
+    if exists(out_dir):
+        return
+    msg = f"""
+No test data in {out_dir}. Please run gen_test_data.py and retry.
+This script must be run before calling pytest. Here is
+an example of how to run the careless tests.
+
+```
+cd careless
+pip install -e .[dev]
+python tests/gen_test_data.py
+pytest
+```
 """
-    if not exists(f"{rundir}/output"):
-        mkdir(f"{rundir}/output")
-    from subprocess import call
-    call(command.split(), cwd=rundir)
+    raise FileNotFoundError(msg)

@pytest.fixture
def cell_and_spacegroups():
50 changes: 50 additions & 0 deletions tests/gen_test_data.py
@@ -0,0 +1,50 @@
"""
Generate the test data required for running the careless test suite.
It is required to run this script prior to calling pytest. Here is
an example of how to run the careless tests.

```
cd careless
pip install -e .[dev]
python tests/gen_test_data.py
pytest
```

"""

from os import listdir, mkdir
from os.path import dirname, abspath, join, exists
import numpy as np
import pandas as pd
import re
import reciprocalspaceship as rs
import gemmi



def main():
    rundir = "data/"
    rundir = abspath(join(dirname(__file__), rundir))

    command = """
careless poly
    --disable-progress-bar
    --iterations=10
    --merge-half-datasets
    --half-dataset-repeats=3
    --test-fraction=0.1
    --disable-gpu
    --anomalous
    --wavelength-key=Wavelength
    dHKL,Hobs,Kobs,Lobs,Wavelength
    pyp_off.mtz
    pyp_2ms.mtz
    output/pyp
"""
    if not exists(f"{rundir}/output"):
        mkdir(f"{rundir}/output")
    from subprocess import call
    call(command.split(), cwd=rundir)

if __name__ == "__main__":
    main()
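One caveat worth noting: `subprocess.call` ignores a non-zero exit status, and since `output/` is created before careless runs, a failed run still leaves the directory behind for conftest.py to accept. A hardened variant, shown as a sketch rather than as part of this PR (`run_careless` is a hypothetical helper), would use `check_call`:

```python
from subprocess import check_call

def run_careless(command, rundir):
    # check_call raises CalledProcessError on a non-zero exit status,
    # so a failed careless run cannot silently pass for generated data.
    check_call(command.split(), cwd=rundir)
```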