Skip to content

Commit

Permalink
Merge pull request #50 from paulsaxe/main
Browse files Browse the repository at this point in the history
Switched to new installation scheme
  • Loading branch information
seamm authored Mar 21, 2024
2 parents 8c4652f + 28f2d7b commit b1c4700
Show file tree
Hide file tree
Showing 10 changed files with 320 additions and 518 deletions.
5 changes: 5 additions & 0 deletions HISTORY.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
=======
History
=======
2024.3.20 -- Switched to new installation scheme
* Fully support ~/SEAMM/lammps.ini
* Updated to new installer
* Support for Conda and Docker installation.

2024.1.18 -- Restructured to support running in containers.

2023.11.7 -- Bugfix: properties that are constant
Expand Down
3 changes: 2 additions & 1 deletion devtools/conda-envs/test_env.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ name: test

channels:
- conda-forge
- defaults

dependencies:
# Base depends
Expand All @@ -10,7 +11,6 @@ dependencies:

# SEAMM
- kim-query
- pymbar>=4.0
- seamm
- seamm-ff-util
- statsmodels
Expand All @@ -29,6 +29,7 @@ dependencies:

# Pip-only installs
- pip:
- pymbar>=4.0
- seamm-exec
# Documentation
- sphinx-copybutton
8 changes: 8 additions & 0 deletions devtools/docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# syntax=docker/dockerfile:1
# Runtime image for the SEAMM LAMMPS plug-in: installs LAMMPS and its MPI
# stack (see environment.yml) into the base conda environment of the MolSSI
# mamba image.
# NOTE(review): the tag is implicitly :latest — pin a specific tag or digest
# for reproducible builds once one is published (hadolint DL3006).
FROM molssi/mamba141

# Environment spec listing openmpi, lammps, openkim-models and mpi4py.
COPY ./environment.yml /root/environment.yml

# Install into the base environment and clean package caches in the SAME
# layer, so downloaded tarballs/index caches do not persist in the image.
RUN mamba env update -f /root/environment.yml \
    && mamba clean --all --yes

WORKDIR /home
# Exec form: lmp_serial runs as PID 1 and receives signals from `docker stop`.
ENTRYPOINT ["lmp_serial"]
10 changes: 10 additions & 0 deletions devtools/docker/environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Conda environment spec consumed by devtools/docker/Dockerfile; applied to
# the image's base environment with `mamba env update -f`.
name: base
channels:
- conda-forge
dependencies:
- python
# Executables, etc.
- openmpi
- lammps
- openkim-models
- mpi4py
35 changes: 0 additions & 35 deletions lammps_step/data/configuration.txt
Original file line number Diff line number Diff line change
Expand Up @@ -22,41 +22,6 @@

# html = False

# Information about where/how the executables are installed
# installation may be 'path', 'conda' or 'modules'. If a module is
# specified it will be loaded and those executables used. In this
# case, any path specified using lammps-path will be ignored.

installation = not installed
conda-environment =
modules =
gpu-modules =

# The path to the executables. Can be empty or not present, in which
# case the default PATH is used. If a path is given, lmp_serial and
# lmp_mpi from this location will be used. If mpiexec is also present
# it will be used; otherwise mpiexec from the normal PATH will be
# used. If mpiexec or lmp_mpi is not found, only the serial version of
# LAMMPS will be used. Conversely, if lmp_serial is not present,
# lmp_mpi will always be used, though possible on just one core for
# smaller calculations.
#
# Ignored if a module is used. The default is to use the PATH
# environment variable.

lammps-path =

# The various LAMMPS executables. You should leave these as their default
# values unless you have a special need. These values are combined with
# lammps-path to get the location of the executables.
#
# If you are running from a queueing system such as SLURM, if MPI was compiled
# to know about the batch system, you should be able to use something like
# 'srun' or 'mpirun' with no further arguments to run parallel tasks. If
# you are not lucky, and need the parameters, you can add them to the command
# line like this:
#
# mpiexec = mpirun -n {NTASKS} -H {NODELIST} -npernode {NTASKS_PER_NODE}
#
# SEAMM picks up the environment variables such as SLURM_NTASKS, strips the
# prefix from them and replaces any instances in the command line that are
Expand Down
87 changes: 87 additions & 0 deletions lammps_step/data/lammps.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
# Configuration options for how to run LAMMPS

[docker]
# The code to use. This may be more than just the name of the code, and variables in
# braces {} will be expanded. For example:
# code = mpirun -np {NTASKS} lmp
# would expand {NTASKS} to the number of tasks and run the command
#
# If you are running from a queueing system such as SLURM, if MPI was compiled
# to know about the batch system, you should be able to use something like
# 'srun' or 'mpirun' with no further arguments to run parallel tasks. If
# you are not lucky, and need the parameters, you can add them to the command
# line like this:
#
# code = mpirun -n {NTASKS} -H {NODELIST} -npernode {NTASKS_PER_NODE}

code = mpirun -n {NTASKS} lmp

# The name and location of the Docker container to use, optionally with the version

container = ghcr.io/molssi-seamm/seamm-lammps:{version}

# In addition, you can specify the platform to use. This is useful on e.g. Macs with
# Apple silicon (M1, M3...) where the default platform is linux/arm64 but some containers
# are only available for linux/amd64.

platform = linux/amd64

[local]
# The type of local installation to use. Options are:
# conda: Use a conda environment
# modules: Use the modules system
# local: Use a local installation
# docker: Use a Docker container
# By default SEAMM installs LAMMPS using conda.

installation = conda

# The command line to use, which should start with the executable followed by any options.
# Variables in braces {} will be expanded. For example:
#
# code = mpirun -np {NTASKS} lmp
#
# would expand {NTASKS} to the number of tasks and run the command.
# For a 'local' installation, the command line should include the full path to the
# executable or it should be in the path.
#
# If you are running from a queueing system such as SLURM, if MPI was compiled
# to know about the batch system, you should be able to use something like
# 'srun' or 'mpirun' with no further arguments to run parallel tasks. If
# you are not lucky, and need the parameters, you can add them to the command
# line like this:
#
# code = mpirun -n {NTASKS} -H {NODELIST} -npernode {NTASKS_PER_NODE}

code = mpirun -n {NTASKS} lmp

######################### conda section ############################
# The full path to the conda executable:

# conda =

# The Conda environment to use. This is either the name or full path.

# conda-environment = seamm-lammps

######################### modules section ############################
# The modules to load to run LAMMPS, as a list of strings.
# For example, to load the modules lammps and openmpi, you would use:
# modules = lammps openmpi

# modules =

######################### local section ############################
# The full path to the LAMMPS executable should be in the 'code' option.

######################### docker section ############################
# The name and location of the Docker container to use, optionally with the version.
# {version} will be expanded to the version of the plug-in.

# container = ghcr.io/molssi-seamm/seamm-lammps:{version}

# In addition, you can specify the platform to use. This is useful on e.g. Macs with
# Apple silicon (M1, M3...) where the default platform is linux/arm64 but some containers
# are only available for linux/amd64.

platform = linux/amd64
Loading

0 comments on commit b1c4700

Please sign in to comment.