diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c712e5df52..3b82af1433 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,6 +28,9 @@ jobs: python-version: ['3.8', '3.10', '3.11'] os: ['ubuntu-20.04', 'ubuntu-22.04'] fail-fast: false + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -42,15 +45,19 @@ jobs: run: | sudo apt update sudo apt install libboost-dev gfortran libopenmpi-dev libpython3-dev \ - libblas-dev liblapack-dev + libblas-dev liblapack-dev libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies + # h5py is optional; some versions don't have binaries (yet) run: | - python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest pytest-github-actions-annotate-failures - python3 -m pip install h5py || true + python3 -m pip install ruamel.yaml scons==3.1.2 numpy cython pandas pytest \ + pytest-github-actions-annotate-failures + python3 -m pip install h5py - name: Build Cantera - run: python3 `which scons` build env_vars=all -j2 debug=n --debug=time + run: | + python3 `which scons` build env_vars=all -j2 debug=n --debug=time \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Upload shared library uses: actions/upload-artifact@v3 if: matrix.python-version == '3.10' && matrix.os == 'ubuntu-22.04' @@ -66,6 +73,9 @@ jobs: name: LLVM/Clang with Python 3.8 runs-on: ubuntu-22.04 timeout-minutes: 60 + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -79,16 +89,16 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran libomp-dev libomp5 libopenblas-dev + sudo apt install libboost-dev gfortran libomp-dev libomp5 libopenblas-dev 
libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas pytest - pytest-github-actions-annotate-failures + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas h5py pytest pytest-github-actions-annotate-failures - name: Build Cantera run: python3 `which scons` build env_vars=all CXX=clang++-12 CC=clang-12 f90_interface=n extra_lib_dirs=/usr/lib/llvm/lib - -j2 debug=n --debug=time + -j2 debug=n --debug=time hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time @@ -122,7 +132,7 @@ jobs: python-version: 3.11 if: matrix.python-version == '3.11' - name: Install Brew dependencies - run: brew install boost libomp + run: brew install boost libomp hdf5 - name: Setup Homebrew Python # This path should work for future Python versions as well if: matrix.python-version != '3.11' @@ -162,6 +172,9 @@ jobs: name: Coverage runs-on: ubuntu-latest timeout-minutes: 90 + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -175,11 +188,12 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran liblapack-dev libblas-dev libsundials-dev + sudo apt install libboost-dev gfortran liblapack-dev libblas-dev libsundials-dev libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas scipy pytest + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas scipy pytest h5py \ pytest-github-actions-annotate-failures pytest-cov gcovr - name: Setup .NET Core SDK uses: actions/setup-dotnet@v2 @@ -189,7 +203,8 @@ jobs: run: | python3 `which 
scons` build blas_lapack_libs=lapack,blas coverage=y \ optimize=n skip_slow_tests=y no_optimize_flags='-DNDEBUG -O0' \ - FORTRANFLAGS='-O0' env_vars=all -j2 --debug=time + FORTRANFLAGS='-O0' env_vars=all -j2 --debug=time \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR - name: Test Cantera run: python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time @@ -327,6 +342,9 @@ jobs: matrix: python-version: ['3.8', '3.10', '3.11'] fail-fast: false + env: + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - uses: actions/checkout@v2 name: Checkout the repository @@ -340,17 +358,27 @@ jobs: - name: Install Apt dependencies run: | sudo apt update - sudo apt install libboost-dev gfortran graphviz liblapack-dev libblas-dev gcc-9 g++-9 + sudo apt install libboost-dev gfortran graphviz liblapack-dev libblas-dev \ + gcc-9 g++-9 libhdf5-dev - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: Install Python dependencies run: | - python3 -m pip install ruamel.yaml scons numpy cython pandas matplotlib scipy - python3 -m pip install h5py || true + python3 -m pip install ruamel.yaml scons numpy cython pandas matplotlib scipy h5py - name: Build Cantera # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default # (GCC 7.5.0 is both default and oldest supported version) + # compile without native HDF5 support run: python3 `which scons` build -j2 debug=n CC=gcc-9 CXX=g++-9 + if: matrix.python-version != '3.10' + - name: Build Cantera (Python 3.10 with HDF) + # compile with GCC 9.4.0 on ubuntu-20.04 as an alternative to the default + # (GCC 7.5.0 is both default and oldest supported version) + # compile with native HDF5 support + run: | + python3 `which scons` build -j2 debug=n CC=gcc-9 CXX=g++-9 \ + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR + if: matrix.python-version == '3.10' - name: Run the examples # See https://unix.stackexchange.com/a/392973 for an 
explanation of the -exec part run: | @@ -401,11 +429,12 @@ jobs: # use boost-cpp rather than boost from conda-forge run: | conda install -q sundials=${{ matrix.sundials-ver }} scons numpy ruamel.yaml \ - cython boost-cpp fmt eigen yaml-cpp h5py pandas libgomp openblas pytest + cython boost-cpp fmt eigen yaml-cpp h5py pandas libgomp openblas pytest \ + highfive - name: Build Cantera run: | scons build system_fmt=y system_eigen=y system_yamlcpp=y system_sundials=y \ - blas_lapack_libs='lapack,blas' -j2 logging=debug debug=n \ + system_highfive=y blas_lapack_libs='lapack,blas' -j2 logging=debug debug=n \ optimize_flags='-O3 -ffast-math -fno-finite-math-only' - name: Test Cantera run: scons test show_long_tests=yes verbose_tests=yes @@ -463,7 +492,7 @@ jobs: # use boost-cpp rather than boost from conda-forge # Install SCons >=4.4.0 to make sure that MSVC_TOOLSET_VERSION variable is present run: | - mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest + mamba install -q '"scons>=4.4.0"' numpy cython ruamel.yaml boost-cpp eigen yaml-cpp h5py pandas pytest highfive shell: pwsh - name: Build Cantera run: scons build system_eigen=y system_yamlcpp=y logging=debug @@ -560,6 +589,8 @@ jobs: env: INTEL_REPO: https://apt.repos.intel.com INTEL_KEY: GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB + HDF5_LIBDIR: /usr/lib/x86_64-linux-gnu/hdf5/serial + HDF5_INCLUDEDIR: /usr/include/hdf5/serial steps: - name: Intel Apt repository timeout-minutes: 1 @@ -573,7 +604,7 @@ jobs: timeout-minutes: 5 run: | sudo apt-get install intel-oneapi-compiler-fortran intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic \ - intel-oneapi-mpi intel-oneapi-mpi-devel intel-oneapi-mkl ninja-build libboost-dev + intel-oneapi-mpi intel-oneapi-mpi-devel intel-oneapi-mkl ninja-build libboost-dev libhdf5-dev - uses: actions/checkout@v2 name: Checkout the repository with: @@ -586,7 +617,8 @@ jobs: - name: Upgrade pip run: python3 -m pip install -U pip setuptools wheel - name: 
Install Python dependencies - run: python3 -m pip install ruamel.yaml scons numpy cython h5py pandas pytest + run: | + python3 -m pip install ruamel.yaml scons numpy cython pandas h5py pytest \ pytest-github-actions-annotate-failures - name: Setup Intel oneAPI environment run: | @@ -594,9 +626,10 @@ jobs: printenv >> $GITHUB_ENV - name: Build Cantera run: python3 `which scons` build env_vars=all CC=icx CXX=icpx -j2 debug=n + hdf_libdir=$HDF5_LIBDIR hdf_include=$HDF5_INCLUDEDIR --debug=time f90_interface=n # FORTRAN=ifx - name: Test Cantera - run: + run: | python3 `which scons` test show_long_tests=yes verbose_tests=yes --debug=time linux-intel-oneapi-classic: @@ -718,9 +751,15 @@ jobs: - name: Install Python dependencies run: python3 -m pip install ruamel.yaml - name: Install library dependencies with Conda (Windows) - run: mamba install -q yaml-cpp mkl + run: mamba install -q yaml-cpp mkl highfive shell: pwsh if: matrix.os == 'windows-2022' + - name: Install Brew dependencies (macOS) + run: brew install hdf5 + if: matrix.os == 'macos-11' + - name: Install Apt dependencies (Ubuntu) + run: sudo apt install libhdf5-dev + if: matrix.os == 'ubuntu-22.04' - name: Setup .NET Core SDK uses: actions/setup-dotnet@v2 with: diff --git a/.gitmodules b/.gitmodules index 61b5a9e6d2..1ae6210777 100644 --- a/.gitmodules +++ b/.gitmodules @@ -13,3 +13,6 @@ [submodule "ext/yaml-cpp"] path = ext/yaml-cpp url = https://github.com/jbeder/yaml-cpp.git +[submodule "ext/HighFive"] + path = ext/HighFive + url = https://github.com/BlueBrain/HighFive.git diff --git a/SConstruct b/SConstruct index d408863ed0..70a8e73895 100644 --- a/SConstruct +++ b/SConstruct @@ -356,6 +356,34 @@ config_options = [ must include the shared version of the library, for example, 'libfmt.so'.""", "default", ("default", "y", "n")), + EnumOption( + "hdf_support", + """Select whether to support HDF5 container files natively ('y'), disable HDF5 + support ('n'), or to decide automatically based on the system 
configuration + ('default'). Native HDF5 support uses the HDF5 library as well as the + header-only HighFive C++ wrapper (see option 'system_highfive'). Specifying + 'hdf_include' or 'hdf_libdir' changes the default to 'y'.""", + "default", ("default", "y", "n")), + PathOption( + "hdf_include", + """The directory where the HDF5 header files are installed. This should be the + directory that contains files 'H5Version.h' and 'H5Public.h', amongst others. + Not needed if the headers are installed in a standard location, for example, + '/usr/include'.""", + "", PathVariable.PathAccept), + PathOption( + "hdf_libdir", + """The directory where the HDF5 libraries are installed. Not needed if the + libraries are installed in a standard location, for example, '/usr/lib'.""", + "", PathVariable.PathAccept), + EnumOption( + "system_highfive", + """Select whether to use HighFive from a system installation ('y'), from a + Git submodule ('n'), or to decide automatically ('default'). If HighFive + is not installed directly into a system include directory, for example, it + is installed in '/opt/include/HighFive', then you will need to add + '/opt/include/HighFive' to 'extra_inc_dirs'.""", + "default", ("default", "y", "n")), EnumOption( "system_yamlcpp", """Select whether to use the yaml-cpp library from a system installation @@ -1497,6 +1525,76 @@ else: # env['system_sundials'] == 'n' env['sundials_version'] = '5.3' env['has_sundials_lapack'] = int(env['use_lapack']) +if env["hdf_include"]: + env["hdf_include"] = Path(env["hdf_include"]).as_posix() + env.Append(CPPPATH=[env["hdf_include"]]) + env["hdf_support"] = "y" +if env["hdf_libdir"]: + env["hdf_libdir"] = Path(env["hdf_libdir"]).as_posix() + env.Append(LIBPATH=[env["hdf_libdir"]]) + env["hdf_support"] = "y" + if env["use_rpath_linkage"]: + env.Append(RPATH=env["hdf_libdir"]) + +if env["hdf_support"] == "n": + env["use_hdf5"] = False +else: + env["use_hdf5"] = conf.CheckLib("hdf5", autoadd=False) + if not env["use_hdf5"] and 
env["hdf_support"] == "y": + config_error("HDF5 support has been specified but libraries were not found.") + +if env["use_hdf5"] and env["system_highfive"] in ("n", "default"): + env["system_highfive"] = False + if not os.path.exists("ext/HighFive/include"): + if not os.path.exists(".git"): + config_error("HighFive is missing. Install HighFive in ext/HighFive.") + + try: + code = subprocess.call(["git", "submodule", "update", "--init", + "--recursive", "ext/HighFive"]) + except Exception: + code = -1 + if code: + config_error("HighFive not found and submodule checkout failed.\n" + "Try manually checking out the submodule with:\n\n" + " git submodule update --init --recursive ext/HighFive\n") + + env["use_hdf5"] = conf.CheckLibWithHeader( + "hdf5", "../ext/HighFive/include/highfive/H5File.hpp", + language="C++", autoadd=False) + + if env["use_hdf5"]: + logger.info("Using private installation of HighFive.") + elif env["hdf_support"] == "y": + config_error("HDF5 support has been specified but HighFive configuration failed.") + else: + logger.warning("HighFive is not configured correctly; skipping.") + env["use_hdf5"] = False + +elif env["use_hdf5"]: + env["system_highfive"] = True + env["use_hdf5"] = conf.CheckLibWithHeader( + "hdf5", "highfive/H5File.hpp", language="C++", autoadd=False) + if env["use_hdf5"]: + logger.info("Using system installation of HighFive.") + else: + config_error("Unable to locate system HighFive installation.") + +if env["use_hdf5"]: + hdf_version = textwrap.dedent("""\ + #include <iostream> + #include "H5public.h" + int main(int argc, char** argv) { + std::cout << H5_VERS_MAJOR << "." << H5_VERS_MINOR << "." 
<< H5_VERS_RELEASE; + return 0; + } + """) + retcode, hdf_version = conf.TryRun(hdf_version, ".cpp") + if retcode: + logger.info(f"Compiling against HDF5 version {hdf_version}") + else: + logger.warning("Failed to determine HDF5 version.") + def set_fortran(pattern, value): # Set compiler / flags for all Fortran versions to be the same for version in ("FORTRAN", "F77", "F90", "F95", "F03", "F08"): @@ -2023,6 +2121,8 @@ cdefine('LAPACK_FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('FTN_TRAILING_UNDERSCORE', 'lapack_ftn_trailing_underscore') cdefine('LAPACK_NAMES_LOWERCASE', 'lapack_names', 'lower') cdefine('CT_USE_LAPACK', 'use_lapack') +cdefine("CT_USE_HDF5", "use_hdf5") +cdefine("CT_USE_SYSTEM_HIGHFIVE", "system_highfive") cdefine("CT_USE_SYSTEM_EIGEN", "system_eigen") cdefine("CT_USE_SYSTEM_EIGEN_PREFIXED", "system_eigen_prefixed") cdefine('CT_USE_SYSTEM_FMT', 'system_fmt') @@ -2112,6 +2212,9 @@ else: env["external_libs"] = [] env["external_libs"].extend(env["sundials_libs"]) +if env["use_hdf5"]: + env["external_libs"].append("hdf5") + if env["system_fmt"]: env["external_libs"].append("fmt") diff --git a/ext/HighFive b/ext/HighFive new file mode 160000 index 0000000000..5513f28dcc --- /dev/null +++ b/ext/HighFive @@ -0,0 +1 @@ +Subproject commit 5513f28dcced33872a3e40a63e28d49272da20fc diff --git a/ext/SConscript b/ext/SConscript index 664b036acb..eb1073e998 100644 --- a/ext/SConscript +++ b/ext/SConscript @@ -125,6 +125,14 @@ if not env['system_eigen']: copyenv.Depends(copyenv['config_h_target'], h) ext_copies.extend(h) +if not env["system_highfive"]: + localenv = prep_default(env) + license_files["HighFive"] = File("#ext/HighFive/LICENSE") + h = build(copyenv.Command('#include/cantera/ext/HighFive', '#ext/HighFive/include/highfive', + Copy('$TARGET', '$SOURCE'))) + copyenv.Depends(copyenv['config_h_target'], h) + ext_copies.extend(h) + # Google Test: Used internally for Cantera unit tests. 
if env['googletest'] == 'submodule': localenv = prep_gtest(env) diff --git a/include/cantera/base/AnyMap.h b/include/cantera/base/AnyMap.h index b20045fd89..7bb8cb7a38 100644 --- a/include/cantera/base/AnyMap.h +++ b/include/cantera/base/AnyMap.h @@ -12,6 +12,7 @@ #include #include +#include namespace boost { @@ -447,6 +448,10 @@ class AnyMap : public AnyBase //! messages, for example std::string keys_str() const; + //! Return an unordered set of keys + //! @since New in Cantera 3.0. + std::set keys() const; + //! Set a metadata value that applies to this AnyMap and its children. //! Mainly for internal use in reading or writing from files. void setMetadata(const std::string& key, const AnyValue& value); diff --git a/include/cantera/base/SolutionArray.h b/include/cantera/base/SolutionArray.h new file mode 100644 index 0000000000..020c25819c --- /dev/null +++ b/include/cantera/base/SolutionArray.h @@ -0,0 +1,209 @@ +//! @file SolutionArray.h + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. + +#ifndef CT_SOLUTIONARRAY_H +#define CT_SOLUTIONARRAY_H + +#include "cantera/base/global.h" +#include "cantera/base/AnyMap.h" + +namespace Cantera +{ + +class Solution; +class ThermoPhase; + +/*! + * A container class providing a convenient interface for representing many + * thermodynamic states using the same Solution object. C++ SolutionArray objects are + * one-dimensional by design; extensions to multi-dimensional arrays need to be + * implemented in high-level API's. + * + * @since New in Cantera 3.0. + * @warning This function is an experimental part of the %Cantera API and may be + * changed or removed without notice. + */ +class SolutionArray +{ +private: + SolutionArray(const shared_ptr& sol, + size_t size, + const AnyMap& meta); + +public: + virtual ~SolutionArray() {} + + /*! 
+ * Instantiate a new SolutionArray reference + * + * @param sol Solution object defining phase definitions + * @param size Number of SolutionArray entries + * @param meta AnyMap holding SolutionArray meta data + */ + static shared_ptr create(const shared_ptr& sol, + size_t size=0, + const AnyMap& meta={}) + { + return shared_ptr(new SolutionArray(sol, size, meta)); + } + + //! Size of SolutionArray (number of entries) + int size() const { + return m_size; + } + + //! SolutionArray meta data. + AnyMap& meta() { + return m_meta; + } + + //! Retrieve associated ThermoPhase object + shared_ptr thermo(); + + /*! + * Check whether SolutionArray contains a component (property defining state or + * auxiliary variable) + */ + bool hasComponent(const std::string& name) const; + + //! Retrieve a component of the SolutionArray by name + vector_fp getComponent(const std::string& name) const; + + /*! + * Set a component of the SolutionArray by name. + * + * @param name Name of component (property defining state or auxiliary variable) + * @param data Component data + * @param force If true, add new component to SolutionArray + */ + void setComponent(const std::string& name, const vector_fp& data, bool force=false); + + /*! + * Update the buffered index used to access entries. + */ + void setIndex(size_t index, bool restore=true); + + //! Retrieve the state vector for a single entry. + vector_fp getState(size_t index); + + //! Set the state vector for a single entry + void setState(size_t index, const vector_fp& data); + + //! Retrieve auxiliary data for a single entry. + std::map getAuxiliary(size_t index); + + /*! + * Write header data to container file. + * + * @param fname Name of HDF container file + * @param id Identifier of SolutionArray within the container file + * @param desc Description + */ + static void writeHeader(const std::string& fname, const std::string& id, + const std::string& desc); + + /*! + * Write header data to AnyMap. 
+ * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + * @param desc Description + */ + static void writeHeader(AnyMap& root, const std::string& id, + const std::string& desc); + + /*! + * Write SolutionArray data to container file. + * + * @param fname Name of HDF container file + * @param id Identifier of SolutionArray within the container file + * @param compression Compression level; optional (default=0; HDF only) + */ + void writeEntry(const std::string& fname, const std::string& id, + int compression=0); + + /*! + * Write SolutionArray data to AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ + void writeEntry(AnyMap& root, const std::string& id); + + /*! + * Save current SolutionArray and header to a container file. + * + * @param fname Name of output container file (YAML or HDF) + * @param id Identifier of SolutionArray within the container file + * @param desc Description + * @param compression Compression level; optional (default=0; HDF only) + */ + void save(const std::string& fname, const std::string& id, + const std::string& desc, int compression=0); + + /*! + * Read header data from container file. + * + * @param fname Name of HDF container file + * @param id Identifier of SolutionArray within the file structure + */ + static AnyMap readHeader(const std::string& fname, const std::string& id); + + /*! + * Read header data from AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ + static AnyMap readHeader(const AnyMap& root, const std::string& id); + + /*! + * Restore SolutionArray entry from a container file. + * + * @param fname Name of HDF container file + * @param id Identifier of SolutionArray within the file structure + */ + void readEntry(const std::string& fname, const std::string& id); + + /*! 
+ * Restore SolutionArray entry from AnyMap. + * + * @param root Root node of AnyMap structure + * @param id Identifier of SolutionArray node within AnyMap structure + */ + void readEntry(const AnyMap& root, const std::string& id); + + /*! + * Restore SolutionArray entry and header from a container file. + * + * @param fname Name of container file (YAML or HDF) + * @param id Identifier of SolutionArray within the container file + */ + AnyMap restore(const std::string& fname, const std::string& id); + +protected: + /*! + * Identify storage mode of state data (combination of properties defining state); + * valid modes include Phase::nativeState ("native") or other property combinations + * defined by Phase::fullStates (three-letter acronyms, for example "TDY", "TPX"). + */ + std::string detectMode(const std::set& names, bool native=true); + + //! Retrieve set containing list of properties defining state + std::set stateProperties(const std::string& mode, bool alias=false); + + shared_ptr m_sol; //!< Solution object associated with state data + size_t m_size; //!< Number of entries in SolutionArray + size_t m_stride; //!< Stride between SolutionArray entries + AnyMap m_meta; //!< Metadata + size_t m_index = npos; //!< Buffered index + + vector_fp m_data; //!< Work vector holding states + std::map m_extra; //!< Auxiliary data +}; + +} + +#endif diff --git a/include/cantera/base/Storage.h b/include/cantera/base/Storage.h new file mode 100644 index 0000000000..8ea9501960 --- /dev/null +++ b/include/cantera/base/Storage.h @@ -0,0 +1,119 @@ +//! @file Storage.h + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. 
+ +#ifndef CT_STORAGE_H +#define CT_STORAGE_H + +#include "cantera/base/ct_defs.h" +#include "cantera/base/stringUtils.h" +#include + +#if CT_USE_HDF5 + +#ifdef _WIN32 + // see https://github.com/microsoft/vcpkg/issues/24293 + #define H5_BUILT_AS_DYNAMIC_LIB +#else + #define H5_BUILT_AS_STATIC_LIB +#endif + +namespace HighFive { + class File; +} + +#endif + +namespace Cantera +{ + +/*! + * A wrapper class handling storage to HDF; acts as a thin wrapper for HighFive + * + * @since New in Cantera 3.0. + * @warning This class is an experimental part of the %Cantera API and may be + * changed or removed without notice. + */ +class Storage +{ +public: + Storage(std::string fname, bool write); + + ~Storage(); + + //! Set compression level (0..9) + /*! + * Compression is only applied to species data; note that compression may increase + * file size for small data sets (compression requires setting of chunk sizes, + * which involves considerable overhead for metadata). + */ + void setCompressionLevel(int level); + + //! Check whether path location exists + //! If the file has write access, create location if necessary + //! @param id storage location within file + bool checkGroup(const std::string& id); + + //! Retrieve contents of file from a specified location + //! @param id storage location within file + //! @returns pair containing size and list of entry names of stored data set + std::pair> contents(const std::string& id) const; + + //! Read attributes from a specified location + //! @param id storage location within file + //! @param recursive boolean indicating whether subgroups should be included + //! @returns AnyMap containing attributes + AnyMap readAttributes(const std::string& id, bool recursive) const; + + //! Write attributes to a specified location + //! @param id storage location within file + //! @param meta AnyMap containing attributes + void writeAttributes(const std::string& id, const AnyMap& meta); + + //! 
Read data vector from a specified location + //! @param id storage location within file + //! @param name name of data vector entry + //! @param size size of data vector entry + //! @returns data vector + vector_fp readVector(const std::string& id, + const std::string& name, size_t size) const; + + //! Write data vector to a specified location + //! @param id storage location within file + //! @param name name of data vector entry + //! @param data data vector + void writeVector(const std::string& id, + const std::string& name, const vector_fp& data); + + //! Read matrix from a specified location + //! @param id storage location within file + //! @param name name of matrix entry + //! @param rows number of matrix rows + //! @param cols number of matrix columns + //! @returns matrix containing data (vector of vectors) + std::vector readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const; + + //! Write matrix to a specified location + //! @param id storage location within file + //! @param name name of matrix entry + //! 
@param data matrix containing data (vector of vectors) + void writeMatrix(const std::string& id, + const std::string& name, const std::vector<vector_fp>& data); + +private: +#if CT_USE_HDF5 + bool checkGroupRead(const std::string& id) const; + bool checkGroupWrite(const std::string& id); + + std::unique_ptr<HighFive::File> m_file; + bool m_write; + int m_compressionLevel=0; +#endif +}; + +} + +#endif diff --git a/include/cantera/base/config.h.in b/include/cantera/base/config.h.in index 00471f5025..b7e80a3ca4 100644 --- a/include/cantera/base/config.h.in +++ b/include/cantera/base/config.h.in @@ -62,8 +62,12 @@ typedef int ftnlen; // Fortran hidden string length type //-------------- Optional Cantera Capabilities ---------------------- -// Enable Sundials to use an external BLAS/LAPACK library if it was -// built to use this option +// Enable Sundials to use an external BLAS/LAPACK library if it was +// built to use this option {CT_SUNDIALS_USE_LAPACK!s} +// Enable export/import of HDF data via C++ HighFive +{CT_USE_HDF5!s} +{CT_USE_SYSTEM_HIGHFIVE!s} + #endif diff --git a/include/cantera/base/global.h b/include/cantera/base/global.h index 914f49a5a9..4021d8513b 100644 --- a/include/cantera/base/global.h +++ b/include/cantera/base/global.h @@ -105,6 +105,10 @@ std::string gitCommit(); //! preprocessor macro is defined. bool debugModeEnabled(); +//! Returns true if Cantera was compiled with C++ HDF5 support. +//! @since New in Cantera 3.0. +bool usesHDF5(); + /*! * @defgroup logs Diagnostic Output * diff --git a/include/cantera/base/stringUtils.h b/include/cantera/base/stringUtils.h index 726ae654d1..eb343eeeb1 100644 --- a/include/cantera/base/stringUtils.h +++ b/include/cantera/base/stringUtils.h @@ -102,6 +102,19 @@ doublereal fpValueCheck(const std::string& val); void tokenizeString(const std::string& oval, std::vector<std::string>& v); +//! This function separates a string up into tokens according to the location of +//! path separators. +/*! + * The separate tokens are returned in a string vector, v. 
+ * + * @param oval String to be broken up + * @param v Output vector of tokens. + * + * @since New in Cantera 3.0. + */ +void tokenizePath(const std::string& oval, + std::vector& v); + //! Copy the contents of a std::string into a char array of a given length /*! * If *length* is less than the size of *source*, the string will be truncated diff --git a/include/cantera/oneD/Boundary1D.h b/include/cantera/oneD/Boundary1D.h index 61cebf850a..33c3a8e2ec 100644 --- a/include/cantera/oneD/Boundary1D.h +++ b/include/cantera/oneD/Boundary1D.h @@ -106,8 +106,9 @@ class Inlet1D : public Boundary1D public: Inlet1D(); - Inlet1D(shared_ptr solution) : Inlet1D() { + Inlet1D(shared_ptr solution, const std::string& id="") : Inlet1D() { m_solution = solution; + m_id = id; } //! set spreading rate @@ -135,8 +136,8 @@ class Inlet1D : public Boundary1D virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel); protected: int m_ilr; @@ -158,8 +159,9 @@ class Empty1D : public Boundary1D m_type = cEmptyType; } - Empty1D(shared_ptr solution) : Empty1D() { + Empty1D(shared_ptr solution, const std::string& id="") : Empty1D() { m_solution = solution; + m_id = id; } virtual void showSolution(const double* x) {} @@ -169,7 +171,8 @@ class Empty1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; /** @@ -184,8 +187,9 @@ class Symm1D : public Boundary1D m_type = cSymmType; } - Symm1D(shared_ptr solution) : Symm1D() { + Symm1D(shared_ptr solution, const std::string& 
id="") : Symm1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -193,7 +197,8 @@ class Symm1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -208,8 +213,9 @@ class Outlet1D : public Boundary1D m_type = cOutletType; } - Outlet1D(shared_ptr solution) : Outlet1D() { + Outlet1D(shared_ptr solution, const std::string& id="") : Outlet1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -217,7 +223,8 @@ class Outlet1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel) {} }; @@ -230,8 +237,11 @@ class OutletRes1D : public Boundary1D public: OutletRes1D(); - OutletRes1D(shared_ptr solution) : OutletRes1D() { + OutletRes1D(shared_ptr solution, const std::string& id="") + : OutletRes1D() + { m_solution = solution; + m_id = id; } virtual void showSolution(const double* x) {} @@ -248,8 +258,8 @@ class OutletRes1D : public Boundary1D virtual void init(); virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel); protected: size_t m_nsp; @@ -271,8 +281,9 @@ class Surf1D : public Boundary1D m_type = cSurfType; } - Surf1D(shared_ptr solution) : Surf1D() { + Surf1D(shared_ptr solution, const std::string& id="") : Surf1D() { m_solution = solution; + m_id = id; } virtual void init(); @@ -280,8 +291,8 @@ class 
Surf1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void showSolution_s(std::ostream& s, const double* x); @@ -296,7 +307,7 @@ class ReactingSurf1D : public Boundary1D { public: ReactingSurf1D(); - ReactingSurf1D(shared_ptr solution); + ReactingSurf1D(shared_ptr solution, const std::string& id=""); void setKineticsMgr(InterfaceKinetics* kin); @@ -316,8 +327,8 @@ class ReactingSurf1D : public Boundary1D virtual void eval(size_t jg, double* xg, double* rg, integer* diagg, double rdt); - virtual AnyMap serialize(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel); virtual void _getInitialSoln(double* x) { m_sphase->getCoverages(x); diff --git a/include/cantera/oneD/Domain1D.h b/include/cantera/oneD/Domain1D.h index 12a189b23c..67cceafdaa 100644 --- a/include/cantera/oneD/Domain1D.h +++ b/include/cantera/oneD/Domain1D.h @@ -29,6 +29,7 @@ class OneDim; class Refiner; class AnyMap; class Solution; +class SolutionArray; /** * Base class for one-dimensional domains. @@ -312,8 +313,20 @@ class Domain1D //! Save the state of this domain as an AnyMap /*! * @param soln local solution vector for this domain + * + * @deprecated To be removed after Cantera 3.0; superseded by asArray. */ - virtual AnyMap serialize(const double* soln) const; + AnyMap serialize(const double* soln) const; + + //! Save the state of this domain as a SolutionArray + /*! + * @param soln local solution vector for this domain + * + * @since New in Cantera 3.0. 
+ */ + virtual shared_ptr asArray(const double* soln) const { + throw NotImplementedError("Domain1D::asArray", "Needs to be overloaded."); + } //! Restore the solution for this domain from an AnyMap /*! @@ -321,8 +334,23 @@ class Domain1D * @param[out] soln Value of the solution vector, local to this domain * @param[in] loglevel 0 to suppress all output; 1 to show warnings; 2 for * verbose output + * + * @deprecated To be removed after Cantera 3.0; restore from SolutionArray instead. + */ + void restore(const AnyMap& state, double* soln, int loglevel); + + //! Restore the solution for this domain from a SolutionArray + /*! + * @param[in] arr SolutionArray defining the state of this domain + * @param[out] soln Value of the solution vector, local to this domain + * @param[in] loglevel 0 to suppress all output; 1 to show warnings; 2 for + * verbose output + * + * @since New in Cantera 3.0. */ - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual void restore(SolutionArray& arr, double* soln, int loglevel) { + throw NotImplementedError("Domain1D::restore", "Needs to be overloaded."); + } //! Return thermo/kinetics/transport manager used in the domain //! @since New in Cantera 3.0. @@ -475,6 +503,12 @@ class Domain1D } protected: + //! Retrieve meta data + virtual AnyMap getMeta() const; + + //! Retrieve meta data + virtual void setMeta(const AnyMap& meta, int loglevel); + doublereal m_rdt; size_t m_nv; size_t m_points; @@ -507,7 +541,7 @@ class Domain1D bool m_force_full_update; //! 
Composite thermo/kinetics/transport handler - std::shared_ptr m_solution; + shared_ptr m_solution; }; } diff --git a/include/cantera/oneD/IonFlow.h b/include/cantera/oneD/IonFlow.h index 0ae63b66e3..57ef4f9d9e 100644 --- a/include/cantera/oneD/IonFlow.h +++ b/include/cantera/oneD/IonFlow.h @@ -33,7 +33,12 @@ class IonFlow : public StFlow public: IonFlow(ThermoPhase* ph = 0, size_t nsp = 1, size_t points = 1); - IonFlow(shared_ptr sol, size_t nsp = 1, size_t points = 1); + //! Create a new flow domain. + //! @param sol Solution object used to evaluate all thermodynamic, kinetic, and + //! transport properties + //! @param id name of flow domain + //! @param points initial number of grid points + IonFlow(shared_ptr sol, const std::string& id="", size_t points = 1); //! set the solving stage virtual void setSolvingStage(const size_t phase); diff --git a/include/cantera/oneD/Sim1D.h b/include/cantera/oneD/Sim1D.h index 06a52135c0..973becb501 100644 --- a/include/cantera/oneD/Sim1D.h +++ b/include/cantera/oneD/Sim1D.h @@ -117,9 +117,10 @@ class Sim1D : public OneDim * @param id Identifier of solution within the container file * @param desc Description of the solution * @param loglevel Level of diagnostic output + * @param compression Compression level (optional; HDF only) */ void save(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel=1); + const std::string& desc, int loglevel=1, int compression=0); /** * Save the residual of the current solution to a container file. @@ -136,8 +137,9 @@ class Sim1D : public OneDim * @param fname Name of container file * @param id Identifier of solution within the container file * @param loglevel Level of diagnostic output + * @return AnyMap containing header information */ - void restore(const std::string& fname, const std::string& id, int loglevel=2); + AnyMap restore(const std::string& fname, const std::string& id, int loglevel=2); //! 
@} diff --git a/include/cantera/oneD/StFlow.h b/include/cantera/oneD/StFlow.h index 23826cb8df..dfd152d2e4 100644 --- a/include/cantera/oneD/StFlow.h +++ b/include/cantera/oneD/StFlow.h @@ -56,7 +56,12 @@ class StFlow : public Domain1D StFlow(th.get(), nsp, points) { } - StFlow(shared_ptr sol, size_t nsp = 1, size_t points = 1); + //! Create a new flow domain. + //! @param sol Solution object used to evaluate all thermodynamic, kinetic, and + //! transport properties + //! @param id name of flow domain + //! @param points initial number of grid points + StFlow(shared_ptr sol, const std::string& id="", size_t points=1); //! @name Problem Specification //! @{ @@ -157,8 +162,8 @@ class StFlow : public Domain1D //! Print the solution. virtual void showSolution(const doublereal* x); - virtual AnyMap serialize(const double* soln) const; - virtual void restore(const AnyMap& state, double* soln, int loglevel); + virtual shared_ptr asArray(const double* soln) const; + virtual void restore(SolutionArray& arr, double* soln, int loglevel); //! Set flow configuration for freely-propagating flames, using an internal //! point with a fixed temperature as the condition to determine the inlet @@ -282,6 +287,9 @@ class StFlow : public Domain1D } protected: + virtual AnyMap getMeta() const; + virtual void setMeta(const AnyMap& state, int loglevel); + doublereal wdot(size_t k, size_t j) const { return m_wdot(k,j); } @@ -444,7 +452,7 @@ class StFlow : public Domain1D // Smart pointer preventing garbage collection when the transport model of an // associated Solution object changes: the transport model of the StFlow object // will remain unaffected by an external change. 
- std::shared_ptr m_trans_shared; + shared_ptr m_trans_shared; // boundary emissivities for the radiation calculations doublereal m_epsilon_left; diff --git a/interfaces/cython/cantera/_onedim.pxd b/interfaces/cython/cantera/_onedim.pxd index 7e8ed4d8ea..73946912a8 100644 --- a/interfaces/cython/cantera/_onedim.pxd +++ b/interfaces/cython/cantera/_onedim.pxd @@ -48,25 +48,25 @@ cdef extern from "cantera/oneD/Boundary1D.h": double massFraction(size_t) cdef cppclass CxxInlet1D "Cantera::Inlet1D": - CxxInlet1D(shared_ptr[CxxSolution]) + CxxInlet1D(shared_ptr[CxxSolution], const string&) double spreadRate() void setSpreadRate(double) cdef cppclass CxxOutlet1D "Cantera::Outlet1D": - CxxOutlet1D(shared_ptr[CxxSolution]) + CxxOutlet1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxOutletRes1D "Cantera::OutletRes1D": - CxxOutletRes1D(shared_ptr[CxxSolution]) + CxxOutletRes1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxSymm1D "Cantera::Symm1D": - CxxSymm1D(shared_ptr[CxxSolution]) + CxxSymm1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxSurf1D "Cantera::Surf1D": - CxxSurf1D(shared_ptr[CxxSolution]) + CxxSurf1D(shared_ptr[CxxSolution], const string&) cdef cppclass CxxReactingSurf1D "Cantera::ReactingSurf1D": CxxReactingSurf1D() # deprecated in Python API (Cantera 3.0) - CxxReactingSurf1D(shared_ptr[CxxSolution]) except +translate_exception + CxxReactingSurf1D(shared_ptr[CxxSolution], const string&) except +translate_exception void setKineticsMgr(CxxInterfaceKinetics*) except +translate_exception void enableCoverageEquations(cbool) except +translate_exception cbool coverageEnabled() @@ -74,7 +74,7 @@ cdef extern from "cantera/oneD/Boundary1D.h": cdef extern from "cantera/oneD/StFlow.h": cdef cppclass CxxStFlow "Cantera::StFlow": - CxxStFlow(shared_ptr[CxxSolution], int, int) except +translate_exception + CxxStFlow(shared_ptr[CxxSolution], const string&, int) except +translate_exception void setTransportModel(const string&) except 
+translate_exception void setTransport(CxxTransport&) except +translate_exception string transportModel() @@ -99,7 +99,7 @@ cdef extern from "cantera/oneD/StFlow.h": cdef extern from "cantera/oneD/IonFlow.h": cdef cppclass CxxIonFlow "Cantera::IonFlow": - CxxIonFlow(shared_ptr[CxxSolution], int, int) except +translate_exception + CxxIonFlow(shared_ptr[CxxSolution], const string&, int) except +translate_exception void setSolvingStage(int) void solveElectricField() void fixElectricField() @@ -123,8 +123,8 @@ cdef extern from "cantera/oneD/Sim1D.h": void refine(int) except +translate_exception void setRefineCriteria(size_t, double, double, double, double) except +translate_exception vector[double] getRefineCriteria(int) except +translate_exception - void save(string, string, string, int) except +translate_exception - void restore(string, string, int) except +translate_exception + void save(string, string, string, int, int) except +translate_exception + CxxAnyMap restore(string, string, int) except +translate_exception void writeStats(int) except +translate_exception void clearStats() void resize() except +translate_exception diff --git a/interfaces/cython/cantera/_onedim.pyx b/interfaces/cython/cantera/_onedim.pyx index 9389b8887a..0a7ae145b1 100644 --- a/interfaces/cython/cantera/_onedim.pyx +++ b/interfaces/cython/cantera/_onedim.pyx @@ -6,7 +6,7 @@ import warnings from collections import OrderedDict import numpy as np -from ._utils cimport stringify, pystr +from ._utils cimport stringify, pystr, anymap_to_dict from ._utils import CanteraError from cython.operator import dereference as deref @@ -19,14 +19,11 @@ cdef class Domain1D: def __cinit__(self, _SolutionBase phase not None, *args, **kwargs): self.domain = NULL - def __init__(self, phase, *args, name=None, **kwargs): + def __init__(self, phase, *args, **kwargs): self._weakref_proxy = _WeakrefProxy() if self.domain is NULL: raise TypeError("Can't instantiate abstract class Domain1D.") - if name is not None: - 
self.name = name - self.gas = phase self.gas._references[self._weakref_proxy] = True self.set_default_tolerances() @@ -343,8 +340,8 @@ cdef class Inlet1D(Boundary1D): domain - it must be either the leftmost or rightmost domain in a stack. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.inlet = new CxxInlet1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.inlet = new CxxInlet1D(phase._base, stringify(name)) self.boundary = (self.inlet) def __dealloc__(self): @@ -365,8 +362,8 @@ cdef class Outlet1D(Boundary1D): A one-dimensional outlet. An outlet imposes a zero-gradient boundary condition on the flow. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.outlet = new CxxOutlet1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.outlet = new CxxOutlet1D(phase._base, stringify(name)) self.boundary = (self.outlet) def __dealloc__(self): @@ -377,8 +374,8 @@ cdef class OutletReservoir1D(Boundary1D): """ A one-dimensional outlet into a reservoir. 
""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.outlet = new CxxOutletRes1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.outlet = new CxxOutletRes1D(phase._base, stringify(name)) self.boundary = (self.outlet) def __dealloc__(self): @@ -387,8 +384,8 @@ cdef class OutletReservoir1D(Boundary1D): cdef class SymmetryPlane1D(Boundary1D): """A symmetry plane.""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.symm = new CxxSymm1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.symm = new CxxSymm1D(phase._base, stringify(name)) self.boundary = (self.symm) def __dealloc__(self): @@ -397,8 +394,8 @@ cdef class SymmetryPlane1D(Boundary1D): cdef class Surface1D(Boundary1D): """A solid surface.""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.surf = new CxxSurf1D(phase._base) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.surf = new CxxSurf1D(phase._base, stringify(name)) self.boundary = (self.surf) def __dealloc__(self): @@ -416,9 +413,9 @@ cdef class ReactingSurface1D(Boundary1D): Starting in Cantera 3.0, parameter `phase` should reference surface instead of gas phase. 
""" - def __cinit__(self, _SolutionBase phase, *args, **kwargs): + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): if phase.phase_of_matter != "gas": - self.surf = new CxxReactingSurf1D(phase._base) + self.surf = new CxxReactingSurf1D(phase._base, stringify(name)) else: # legacy pathway - deprecation is handled in __init__ self.surf = new CxxReactingSurf1D() @@ -429,7 +426,9 @@ cdef class ReactingSurface1D(Boundary1D): if phase.phase_of_matter == "gas": warnings.warn("Starting in Cantera 3.0, parameter 'phase' should " "reference surface instead of gas phase.", DeprecationWarning) - super().__init__(phase, name=name) + super().__init__(phase) + if name is not None: + self.name = name else: sol = phase gas = None @@ -707,8 +706,8 @@ cdef class IdealGasFlow(_FlowBase): equations assume an ideal gas mixture. Arbitrary chemistry is allowed, as well as arbitrary variation of the transport properties. """ - def __cinit__(self, _SolutionBase phase, *args, **kwargs): - self.flow = new CxxStFlow(phase._base, phase.n_species, 2) + def __cinit__(self, _SolutionBase phase, *args, name="", **kwargs): + self.flow = new CxxStFlow(phase._base, stringify(name), 2) cdef class IonFlow(_FlowBase): @@ -717,8 +716,9 @@ cdef class IonFlow(_FlowBase): In an ion flow domain, the electric drift is added to the diffusion flux """ - def __cinit__(self, _SolutionBase thermo, *args, **kwargs): - self.flow = (new CxxIonFlow(thermo._base, thermo.n_species, 2)) + def __cinit__(self, _SolutionBase thermo, *args, name="", **kwargs): + self.flow = ( + new CxxIonFlow(thermo._base, stringify(name), 2)) def set_solving_stage(self, stage): """ @@ -1464,9 +1464,9 @@ cdef class Sim1D: return self.sim.fixedTemperatureLocation() def save(self, filename='soln.yaml', name='solution', description='none', - loglevel=1): + loglevel=1, compression=0): """ - Save the solution in YAML format. + Save the solution in YAML or HDF format. 
:param filename: solution file @@ -1474,13 +1474,15 @@ cdef class Sim1D: solution name within the file :param description: custom description text + :param compression: + compression level 0..9; optional (HDF only) >>> s.save(filename='save.yaml', name='energy_off', ... description='solution with energy eqn. disabled') """ self.sim.save(stringify(str(filename)), stringify(name), - stringify(description), loglevel) + stringify(description), loglevel, compression) def restore(self, filename='soln.yaml', name='solution', loglevel=2): """Set the solution vector to a previously-saved solution. @@ -1492,11 +1494,18 @@ cdef class Sim1D: :param loglevel: Amount of logging information to display while restoring, from 0 (disabled) to 2 (most verbose). + :return: + dictionary containing meta data >>> s.restore(filename='save.yaml', name='energy_off') + + .. versionchanged:: 3.0 + Implemented return value for meta data """ - self.sim.restore(stringify(str(filename)), stringify(name), loglevel) + cdef CxxAnyMap header + header = self.sim.restore(stringify(str(filename)), stringify(name), loglevel) self._initialized = True + return anymap_to_dict(header) def restore_time_stepping_solution(self): """ diff --git a/interfaces/cython/cantera/_utils.pxd b/interfaces/cython/cantera/_utils.pxd index bb3abb8193..453d1c4ca9 100644 --- a/interfaces/cython/cantera/_utils.pxd +++ b/interfaces/cython/cantera/_utils.pxd @@ -72,6 +72,7 @@ cdef extern from "cantera/base/global.h" namespace "Cantera": cdef void Cxx_suppress_thermo_warnings "Cantera::suppress_thermo_warnings" (cbool) cdef void Cxx_use_legacy_rate_constants "Cantera::use_legacy_rate_constants" (cbool) cdef string CxxGitCommit "Cantera::gitCommit" () + cdef cbool CxxUsesHDF5 "Cantera::usesHDF5" () cdef cbool CxxDebugModeEnabled "Cantera::debugModeEnabled" () diff --git a/interfaces/cython/cantera/_utils.pyx b/interfaces/cython/cantera/_utils.pyx index 4a402d0159..2083a33036 100644 --- a/interfaces/cython/cantera/_utils.pyx +++ 
b/interfaces/cython/cantera/_utils.pyx @@ -93,6 +93,25 @@ def use_legacy_rate_constants(pybool legacy): """ Cxx_use_legacy_rate_constants(legacy) +def hdf_support(): + """ + Returns list of libraries that include HDF support: + - 'h5py': HDF support by Python package 'h5py'. + - 'native': if Cantera was compiled with C++ HighFive HDF5 support. + + .. versionadded:: 3.0 + """ + out = [] + try: + pkg_resources.get_distribution("h5py") + except pkg_resources.DistributionNotFound: + pass + else: + out.append("h5py") + if CxxUsesHDF5(): + out.append("native") + return set(out) + cdef Composition comp_map(X) except *: if isinstance(X, (str, bytes)): return parseCompString(stringify(X)) diff --git a/interfaces/cython/cantera/composite.py b/interfaces/cython/cantera/composite.py index 25eae072fc..2b26c3c4bf 100644 --- a/interfaces/cython/cantera/composite.py +++ b/interfaces/cython/cantera/composite.py @@ -7,17 +7,20 @@ from collections import OrderedDict import csv as _csv -import pkg_resources - -# avoid explicit dependence of cantera on h5py -try: - pkg_resources.get_distribution('h5py') -except pkg_resources.DistributionNotFound: - _h5py = ImportError('Method requires a working h5py installation.') -else: - import h5py as _h5py +def _import_h5py(): + # avoid explicit dependence of cantera on h5py + import pkg_resources # local import to reduce overall import time + try: + pkg_resources.get_distribution('h5py') + except pkg_resources.DistributionNotFound: + raise ImportError('Method requires a working h5py installation.') + else: + import h5py + return h5py # avoid explicit dependence of cantera on pandas +import pkg_resources + try: pkg_resources.get_distribution('pandas') except pkg_resources.DistributionNotFound: @@ -953,12 +956,14 @@ def join(species): # determine suitable thermo properties for reconstruction basis = 'mass' if self.basis == 'mass' else 'mole' - prop = {'T': ('T'), 'P': ('P'), 'Q': ('Q'), - 'D': ('density', 'density_{}'.format(basis)), - 'U': ('u', 
'int_energy_{}'.format(basis)), - 'V': ('v', 'volume_{}'.format(basis)), - 'H': ('h', 'enthalpy_{}'.format(basis)), - 'S': ('s', 'entropy_{}'.format(basis))} + prop = {"T": ("T", "temperature"), + "P": ("P", "pressure"), + "Q": ("Q", "quality"), + "D": ("D", "density", f"density_{basis}"), + "U": ("u", f"int_energy_{basis}"), + "V": ("v", f"volume_{basis}"), + "H": ("h", f"enthalpy_{basis}"), + "S": ("s", f"entropy_{basis}")} for st in states: # identify property specifiers state = [{st[i]: p for p in prop[st[i]] if p in labels} @@ -1309,8 +1314,7 @@ def write_hdf(self, filename, *args, cols=None, group=None, subgroup=None, requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). """ - if isinstance(_h5py, ImportError): - raise _h5py + h5py = _import_h5py() # collect data data = self.collect_data(*args, cols=cols, **kwargs) @@ -1320,7 +1324,7 @@ def write_hdf(self, filename, *args, cols=None, group=None, subgroup=None, hdf_kwargs = {k: v for k, v in hdf_kwargs.items() if v is not None} # save to container file - with _h5py.File(filename, mode) as hdf: + with h5py.File(filename, mode) as hdf: # check existence of tagged item if not group: @@ -1393,10 +1397,9 @@ def read_hdf(self, filename, group=None, subgroup=None, force=False, normalize=T The method imports data using `restore_data` and requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). 
""" - if isinstance(_h5py, ImportError): - raise _h5py + h5py = _import_h5py() - with _h5py.File(filename, 'r') as hdf: + with h5py.File(filename, 'r') as hdf: groups = list(hdf.keys()) if not len(groups): @@ -1413,13 +1416,13 @@ def read_hdf(self, filename, group=None, subgroup=None, force=False, normalize=T root = hdf[group] # identify subgroup - sub_names = [key for key, value in root.items() - if isinstance(value, _h5py.Group)] - if not len(sub_names): - msg = "HDF group '{}' does not contain valid data" - raise IOError(msg.format(group)) - if subgroup is not None: + sub_names = [key for key, value in root.items() + if isinstance(value, h5py.Group)] + if not len(sub_names): + msg = "HDF group '{}' does not contain valid data" + raise IOError(msg.format(group)) + if subgroup not in sub_names: msg = ("HDF file does not contain data set '{}' within " "group '{}'; available data sets are: {}") @@ -1440,20 +1443,25 @@ def strip_ext(source): return out # ensure that mechanisms are matching - sol_source = strip_ext(dgroup['phase'].attrs['source']) - source = strip_ext(self.source) - if sol_source != source and not force: - msg = ("Sources of thermodynamic phases do not match: '{}' vs " - "'{}'; use option 'force' to override this error.") - raise IOError(msg.format(sol_source, source)) + if "phase" in dgroup: + sol_source = strip_ext(dgroup['phase'].attrs['source']).split("/")[-1] + source = strip_ext(self.source) + if sol_source != source and not force: + msg = ("Sources of thermodynamic phases do not match: '{}' vs " + "'{}'; use option 'force' to override this error.") + raise IOError(msg.format(sol_source, source)) # load metadata self._meta = dict(dgroup.attrs.items()) + for name, value in dgroup.items(): + # support one level of recursion + if isinstance(value, h5py.Group): + self._meta[name] = dict(value.attrs.items()) # load data data = OrderedDict() for name, value in dgroup.items(): - if name == 'phase': + if isinstance(value, h5py.Group): continue elif 
value.dtype.type == np.bytes_: data[name] = np.array(value).astype('U') diff --git a/interfaces/cython/cantera/onedim.py b/interfaces/cython/cantera/onedim.py index 5749a2cc7c..135ba2a3db 100644 --- a/interfaces/cython/cantera/onedim.py +++ b/interfaces/cython/cantera/onedim.py @@ -2,6 +2,7 @@ # at https://cantera.org/license.txt for license and copyright information. from math import erf +from pathlib import Path from email.utils import formatdate import warnings import numpy as np @@ -124,7 +125,8 @@ def set_initial_guess(self, *args, data=None, group=None, **kwargs): # already a solution array arr = data - elif isinstance(data, str): + elif isinstance(data, (str, Path)): + data = str(data) if data.endswith('.hdf5') or data.endswith('.h5'): # data source identifies a HDF file arr = SolutionArray(self.gas, extra=self.other_components()) @@ -561,7 +563,14 @@ def write_hdf(self, filename, *args, group=None, species='X', mode='a', `SolutionArray.collect_data`. The method exports data using `SolutionArray.write_hdf` via `to_solution_array` and requires a working installation of *h5py* (``h5py`` can be installed using pip or conda). + + .. deprecated:: 3.0 + + Method to be removed after Cantera 3.0; replaceable by 'Sim1D.save'. 
""" + warnings.warn( + "Method to be removed after Cantera 3.0; use 'Sim1D.save' instead.", + DeprecationWarning) cols = ('extra', 'T', 'P', species) meta = self.settings meta['date'] = formatdate(localtime=True) diff --git a/platform/posix/Cantera.mak.in b/platform/posix/Cantera.mak.in index 7665e3a21e..12ce59c599 100644 --- a/platform/posix/Cantera.mak.in +++ b/platform/posix/Cantera.mak.in @@ -64,12 +64,20 @@ CANTERA_SUNDIALS_LIBS=@mak_sundials_libdir@ @mak_sundials_libs@ CANTERA_BLAS_LAPACK_LIBS=@mak_blas_lapack_libs@ +############################################################################### +# HDF5 SUPPORT +############################################################################### + +CANTERA_HDF5_INCLUDES=@mak_hdf_include@ +CANTERA_HDF5_LIBS=@mak_hdf_libs@ + ############################################################################### # COMBINATIONS OF INCLUDES AND LIBS ############################################################################### CANTERA_INCLUDES=$(CANTERA_CORE_INCLUDES) $(CANTERA_SUNDIALS_INCLUDE) \ - $(CANTERA_BOOST_INCLUDES) $(CANTERA_EXTRA_INCLUDES) + $(CANTERA_BOOST_INCLUDES) $(CANTERA_HDF5_INCLUDES) \ + $(CANTERA_EXTRA_INCLUDES) CANTERA_TOTAL_INCLUDES = $(CANTERA_INCLUDES) @@ -78,16 +86,16 @@ CANTERA_DEFINES = -DCANTERA_VERSION=@cantera_version@ CANTERA_LIBS=$(CANTERA_CORE_LIBS) \ $(CANTERA_EXTRA_LIBDIRS) $(CANTERA_SUNDIALS_LIBS) \ - $(CANTERA_BLAS_LAPACK_LIBS) + $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_HDF5_LIBS) CANTERA_TOTAL_LIBS=$(CANTERA_LIBS) -CANTERA_TOTAL_LIBS_DEP= $(CANTERA_CORE_LIBS_DEP) \ - $(CANTERA_SUNDIALS_LIBS_DEP) +CANTERA_TOTAL_LIBS_DEP= $(CANTERA_CORE_LIBS_DEP) CANTERA_FORTRAN_LIBS=$(CANTERA_CORE_FTN) \ $(CANTERA_EXTRA_LIBDIRS) $(CANTERA_SUNDIALS_LIBS) \ - $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_FORTRAN_SYSLIBS) + $(CANTERA_BLAS_LAPACK_LIBS) $(CANTERA_FORTRAN_SYSLIBS) \ + $(CANTERA_HDF5_LIBS) ############################################################################### # END diff --git 
a/platform/posix/SConscript b/platform/posix/SConscript index 90cc845456..9f090768ad 100644 --- a/platform/posix/SConscript +++ b/platform/posix/SConscript @@ -58,6 +58,14 @@ if localenv["boost_inc_dir"] and not localenv["package_build"]: else: localenv['mak_boost_include'] = '' +if localenv["use_hdf5"] and not localenv["package_build"]: + localenv["mak_hdf_include"] = f"-I{localenv['hdf_include']}" + pc_incdirs.append(localenv["hdf_include"]) + localenv["mak_hdf_libs"] = f"-L{localenv['hdf_libdir']} -lhdf5" +else: + localenv["mak_hdf_include"] = "" + localenv["mak_hdf_libs"] = "-lhdf5" + # Handle BLAS/LAPACK linkage blas_lapack_libs = " ".join(f"-l{lib}" for lib in localenv["blas_lapack_libs"]) if localenv["blas_lapack_dir"] and not localenv["package_build"]: diff --git a/samples/cxx/SConscript b/samples/cxx/SConscript index d6c306e92b..74232957e0 100644 --- a/samples/cxx/SConscript +++ b/samples/cxx/SConscript @@ -65,9 +65,11 @@ set(CMAKE_EXE_LINKER_FLAGS ${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}) excludes) else: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/cxx/flamespeed/flamespeed.cpp b/samples/cxx/flamespeed/flamespeed.cpp index 6c2cd90b3e..3f8abe36b0 100644 --- a/samples/cxx/flamespeed/flamespeed.cpp +++ b/samples/cxx/flamespeed/flamespeed.cpp @@ -7,7 +7,8 @@ * * Usage: flamespeed [equivalence_ratio] [refine_grid] [loglevel] * - * Keywords: combustion, 1D flow, premixed flame, flame speed + * Requires: cantera >= 3.0 + * Keywords: combustion, 1D flow, premixed flame, flame speed, saving output */ // This file is part of Cantera. 
See License.txt in the top-level directory or @@ -52,7 +53,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //-------- step 1: create the flow ------------- - StFlow flow(sol); + StFlow flow(sol, "flow"); flow.setFreeFlow(); // create an initial grid @@ -68,7 +69,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //------- step 2: create the inlet ----------------------- - Inlet1D inlet; + Inlet1D inlet(sol, "inlet"); inlet.setMoleFractions(x.data()); double mdot=uin*rho_in; @@ -77,7 +78,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) //------- step 3: create the outlet --------------------- - Outlet1D outlet; + Outlet1D outlet(sol, "outlet"); //=================== create the container and insert the domains ===== @@ -113,6 +114,21 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flame.setRefineCriteria(flowdomain,ratio,slope,curve); + // Save initial guess to container file + + // Solution is saved in HDF5 or YAML file format + std::string fileName; + if (usesHDF5()) { + // Cantera is compiled with native HDF5 support + fileName = "flamespeed.h5"; + } else { + fileName = "flamespeed.yaml"; + } + if (std::ifstream(fileName).good()) { + std::remove(fileName.c_str()); + } + flame.save(fileName, "initial-guess", "Initial guess", 0); + // Solve freely propagating flame // Linearly interpolate to find location where this temperature would @@ -126,6 +142,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flow.componentIndex("velocity"),0); print("Flame speed with mixture-averaged transport: {} m/s\n", flameSpeed_mix); + flame.save(fileName, "mix", "Solution with mixture-averaged transport", 0); // now switch to multicomponent transport flow.setTransportModel("multicomponent"); @@ -134,6 +151,7 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flow.componentIndex("velocity"),0); print("Flame speed with multicomponent transport: {} m/s\n", flameSpeed_multi); + flame.save(fileName, "multi", 
"Solution with multicomponent transport", 0); // now enable Soret diffusion flow.enableSoret(true); @@ -142,6 +160,8 @@ int flamespeed(double phi, bool refine_grid, int loglevel) flow.componentIndex("velocity"),0); print("Flame speed with multicomponent transport + Soret: {} m/s\n", flameSpeed_full); + flame.save(fileName, "soret", + "Solution with multicomponent transport and Soret", 0); vector_fp zvec,Tvec,COvec,CO2vec,Uvec; diff --git a/samples/f77/SConscript b/samples/f77/SConscript index 1b9df6153c..5b0d91628c 100644 --- a/samples/f77/SConscript +++ b/samples/f77/SConscript @@ -20,7 +20,7 @@ for program_name, fortran_sources in samples: CPPPATH=['#build/src/fortran', '#include'], LIBS=env['cantera_libs']+['cantera_fortran']+env['cxx_stdlib'], LIBPATH=[env['sundials_libdir'], localenv['blas_lapack_dir'], - env['extra_lib_dirs'], '#build/lib'], + env['extra_lib_dirs'], env["hdf_libdir"], '#build/lib'], LINK='$FORTRAN_LINK') # Generate SConstruct file to be installed @@ -37,9 +37,11 @@ if localenv["package_build"]: excludes) else: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/f90/SConscript b/samples/f90/SConscript index 73cb74ee4b..690a35351d 100644 --- a/samples/f90/SConscript +++ b/samples/f90/SConscript @@ -14,7 +14,7 @@ for programName, sources in samples: F90PATH='#build/src/fortran', LIBS=['cantera_fortran']+env['cantera_libs']+env['cxx_stdlib'], LIBPATH=[env['sundials_libdir'], env['blas_lapack_dir'], - env['extra_lib_dirs'], '#build/lib'], + env['extra_lib_dirs'], env["hdf_libdir"], '#build/lib'], LINK='$FORTRAN_LINK') # Generate SConstruct files to be installed @@ -22,9 +22,11 @@ for programName, sources in 
samples: libdirs = [localenv["ct_libdir"]] if not localenv["package_build"]: incdirs.extend([localenv["sundials_include"], localenv["boost_inc_dir"]]) + incdirs.append(localenv["hdf_include"]) incdirs.extend(localenv["extra_inc_dirs"]) incdirs = list(set(incdirs)) libdirs.extend([localenv["sundials_libdir"], localenv["blas_lapack_dir"]]) + libdirs.append(localenv["hdf_libdir"]) libdirs.extend(localenv["extra_lib_dirs"]) libdirs = list(set(libdirs)) diff --git a/samples/python/onedim/adiabatic_flame.py b/samples/python/onedim/adiabatic_flame.py index 7e7d176e21..0297d573d1 100644 --- a/samples/python/onedim/adiabatic_flame.py +++ b/samples/python/onedim/adiabatic_flame.py @@ -2,13 +2,15 @@ A freely-propagating, premixed hydrogen flat flame with multicomponent transport properties. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, multicomponent transport, saving output """ +from pathlib import Path import cantera as ct + # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -30,29 +32,24 @@ f.transport_model = 'Mix' f.solve(loglevel=loglevel, auto=True) +if "native" in ct.hdf_support(): + output = Path() / "adiabatic_flame.h5" +else: + output = Path() / "adiabatic_flame.yaml" +output.unlink(missing_ok=True) + # Solve with the energy equation enabled -try: - # save to HDF container file if h5py is installed - f.write_hdf('adiabatic_flame.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('adiabatic_flame.yaml', 'mix', - 'solution with mixture-averaged transport') +f.save(output, name="mix", description="solution with mixture-averaged transport") f.show_solution() -print('mixture-averaged flamespeed = {0:7f} m/s'.format(f.velocity[0])) +print(f"mixture-averaged flamespeed = {f.velocity[0]:7f} m/s") # Solve with multi-component transport properties f.transport_model = 'Multi' f.solve(loglevel) # don't use 
'auto' on subsequent solves f.show_solution() -print('multicomponent flamespeed = {0:7f} m/s'.format(f.velocity[0])) -try: - f.write_hdf('adiabatic_flame.h5', group='multi', - description='solution with multicomponent transport') -except ImportError: - f.save('adiabatic_flame.yaml', 'multi', - 'solution with multicomponent transport') +print(f"multicomponent flamespeed = {f.velocity[0]:7f} m/s") +f.save(output, name="multi", description="solution with multicomponent transport") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('adiabatic_flame.csv', quiet=False) diff --git a/samples/python/onedim/burner_flame.py b/samples/python/onedim/burner_flame.py index 6035bf4926..4001476ef8 100644 --- a/samples/python/onedim/burner_flame.py +++ b/samples/python/onedim/burner_flame.py @@ -1,11 +1,12 @@ """ A burner-stabilized lean premixed hydrogen-oxygen flame at low pressure. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, saving output, multicomponent transport """ +from pathlib import Path import cantera as ct p = 0.05 * ct.one_atm @@ -25,20 +26,18 @@ f.transport_model = 'Mix' f.solve(loglevel, auto=True) -try: - # save to HDF container file if h5py is installed - f.write_hdf('burner_flame.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('burner_flame.yaml', 'mix', 'solution with mixture-averaged transport') + +if "native" in ct.hdf_support(): + output = Path() / "burner_flame.h5" +else: + output = Path() / "burner_flame.yaml" +output.unlink(missing_ok=True) + +f.save(output, name="mix", description="solution with mixture-averaged transport") f.transport_model = 'Multi' f.solve(loglevel) # don't use 'auto' on subsequent solves f.show_solution() -try: - f.write_hdf('burner_flame.h5', group='multi', - description='solution with multicomponent transport') -except ImportError: - f.save('burner_flame.yaml', 'multi', 
'solution with multicomponent transport') +f.save(output, name="multi", description="solution with multicomponent transport") f.write_csv('burner_flame.csv', quiet=False) diff --git a/samples/python/onedim/diffusion_flame.py b/samples/python/onedim/diffusion_flame.py index b707300e1d..50b5d1e4c0 100644 --- a/samples/python/onedim/diffusion_flame.py +++ b/samples/python/onedim/diffusion_flame.py @@ -1,14 +1,16 @@ """ An opposed-flow ethane/air diffusion flame -Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, diffusion flame, strained flame, plotting, saving output """ +from pathlib import Path import cantera as ct import matplotlib.pyplot as plt + # Input parameters p = ct.one_atm # pressure tin_f = 300.0 # fuel inlet temperature @@ -52,11 +54,14 @@ # Solve the problem f.solve(loglevel, auto=True) f.show_solution() -try: - # save to HDF container file if h5py is installed - f.write_hdf('diffusion_flame.h5', mode='w') -except ImportError: - f.save('diffusion_flame.yaml') + +if "native" in ct.hdf_support(): + output = Path() / "diffusion_flame.h5" +else: + output = Path() / "diffusion_flame.yaml" +output.unlink(missing_ok=True) + +f.save(output) # write the velocity, temperature, and mole fractions to a CSV file f.write_csv('diffusion_flame.csv', quiet=False) diff --git a/samples/python/onedim/diffusion_flame_batch.py b/samples/python/onedim/diffusion_flame_batch.py index 6937ba700f..2459573aed 100644 --- a/samples/python/onedim/diffusion_flame_batch.py +++ b/samples/python/onedim/diffusion_flame_batch.py @@ -13,13 +13,12 @@ This example can, for example, be used to iterate to a counterflow diffusion flame to an awkward pressure and strain rate, or to create the basis for a flamelet table. 
-Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, extinction, diffusion flame, strained flame, saving output, plotting """ -import os -import importlib +from pathlib import Path import numpy as np import matplotlib.pyplot as plt @@ -30,18 +29,6 @@ class FlameExtinguished(Exception): pass -hdf_output = importlib.util.find_spec('h5py') is not None - -if not hdf_output: - # Create directory for output data files - data_directory = 'diffusion_flame_batch_data' - if not os.path.exists(data_directory): - os.makedirs(data_directory) - fig_name = os.path.join(data_directory, 'figure_{0}.png') -else: - fig_name = 'diffusion_flame_batch_{0}.png' - - # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain @@ -82,18 +69,28 @@ def interrupt_extinction(t): print('Creating the initial solution') f.solve(loglevel=0, auto=True) -# Save to data directory +# Define output locations +output_path = Path() / "diffusion_flame_batch_data" +output_path.mkdir(parents=True, exist_ok=True) + +hdf_output = "native" in ct.hdf_support() if hdf_output: - # save to HDF container file if h5py is installed - file_name = 'diffusion_flame_batch.h5' - f.write_hdf(file_name, group='initial_solution', mode='w', quiet=False, - description=('Initial hydrogen-oxygen counterflow flame ' - 'at 1 bar and low strain rate')) -else: - file_name = 'initial_solution.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', - description='Initial hydrogen-oxygen counterflow flame ' - 'at 1 bar and low strain rate') + file_name = output_path / "flame_data.h5" + file_name.unlink(missing_ok=True) + +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + return file_name, "solution" + 
+# Save to data directory +file_name, entry = names("initial-solution") +desc = "Initial hydrogen-oxygen counterflow flame at 1 bar and low strain rate" +f.save(file_name, name=entry, description=desc) # PART 2: BATCH PRESSURE LOOP @@ -139,23 +136,13 @@ def interrupt_extinction(t): try: # Try solving the flame f.solve(loglevel=0) - if hdf_output: - group = 'pressure_loop/{:05.1f}'.format(p) - f.write_hdf(file_name, group=group, quiet=False, - description='pressure = {0} bar'.format(p)) - else: - file_name = 'pressure_loop_' + format(p, '05.1f') + '.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=1, - description='pressure = {0} bar'.format(p)) + file_name, entry = names(f"pressure-loop/{p:05.1f}") + f.save(file_name, name=entry, loglevel=1, description=f"pressure = {p} bar") p_previous = p except ct.CanteraError as e: print('Error occurred while solving:', e, 'Try next pressure level') # If solution failed: Restore the last successful solution and continue - if hdf_output: - f.read_hdf(file_name, group=group) - else: - f.restore(filename=os.path.join(data_directory, file_name), name='solution', - loglevel=0) + f.restore(file_name, name=entry, loglevel=0) # PART 3: STRAIN RATE LOOP @@ -174,11 +161,8 @@ def interrupt_extinction(t): exp_mdot_a = 1. / 2. 
# Restore initial solution -if hdf_output: - f.read_hdf(file_name, group='initial_solution') -else: - file_name = 'initial_solution.yaml' - f.restore(filename=os.path.join(data_directory, file_name), name='solution', loglevel=0) +file_name, entry = names("initial-solution") +f.restore(file_name, name=entry, loglevel=0) # Counter to identify the loop n = 0 @@ -203,14 +187,9 @@ def interrupt_extinction(t): try: # Try solving the flame f.solve(loglevel=0) - if hdf_output: - group = 'strain_loop/{:02d}'.format(n) - f.write_hdf(file_name, group=group, quiet=False, - description='strain rate iteration {}'.format(n)) - else: - file_name = 'strain_loop_' + format(n, '02d') + '.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=1, - description='strain rate iteration {}'.format(n)) + file_name, entry = names(f"strain-loop/{n:02d}") + f.save(file_name, name=entry, loglevel=1, + description=f"strain rate iteration {n}") except FlameExtinguished: print('Flame extinguished') break @@ -228,30 +207,25 @@ def interrupt_extinction(t): p_selected = p_range[::7] for p in p_selected: - if hdf_output: - group = 'pressure_loop/{0:05.1f}'.format(p) - f.read_hdf(file_name, group=group) - else: - file_name = 'pressure_loop_{0:05.1f}.yaml'.format(p) - f.restore(filename=os.path.join(data_directory, file_name), name='solution', loglevel=0) + file_name, entry = names(f"pressure-loop/{p:05.1f}") + f.restore(file_name, name=entry) # Plot the temperature profiles for selected pressures - ax1.plot(f.grid / f.grid[-1], f.T, label='{0:05.1f} bar'.format(p)) + ax1.plot(f.grid / f.grid[-1], f.T, label=f"{p:05.1f} bar") # Plot the axial velocity profiles (normalized by the fuel inlet velocity) # for selected pressures - ax2.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], - label='{0:05.1f} bar'.format(p)) + ax2.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], label=f"{p:05.1f} bar") ax1.legend(loc=0) ax1.set_xlabel(r'$z/z_{max}$') ax1.set_ylabel(r'$T$ [K]') 
-fig1.savefig(fig_name.format('T_p')) +fig1.savefig(output_path / "figure_T_p.png") ax2.legend(loc=0) ax2.set_xlabel(r'$z/z_{max}$') ax2.set_ylabel(r'$u/u_f$') -fig2.savefig(fig_name.format('u_p')) +fig2.savefig(output_path / "figure_u_p.png") fig3 = plt.figure() fig4 = plt.figure() @@ -259,29 +233,23 @@ def interrupt_extinction(t): ax4 = fig4.add_subplot(1, 1, 1) n_selected = range(1, n, 5) for n in n_selected: - if hdf_output: - group = 'strain_loop/{0:02d}'.format(n) - f.read_hdf(file_name, group=group) - else: - file_name = 'strain_loop_{0:02d}.yaml'.format(n) - f.restore(filename=os.path.join(data_directory, file_name), - name='solution', loglevel=0) + file_name, entry = names(f"strain-loop/{n:02d}") + f.restore(file_name, name=entry, loglevel=0) a_max = f.strain_rate('max') # the maximum axial strain rate # Plot the temperature profiles for the strain rate loop (selected) - ax3.plot(f.grid / f.grid[-1], f.T, label='{0:.2e} 1/s'.format(a_max)) + ax3.plot(f.grid / f.grid[-1], f.T, label=f"{a_max:.2e} 1/s") # Plot the axial velocity profiles (normalized by the fuel inlet velocity) # for the strain rate loop (selected) - ax4.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], - label=format(a_max, '.2e') + ' 1/s') + ax4.plot(f.grid / f.grid[-1], f.velocity / f.velocity[0], label=f"{a_max:.2e} 1/s") ax3.legend(loc=0) ax3.set_xlabel(r'$z/z_{max}$') ax3.set_ylabel(r'$T$ [K]') -fig3.savefig(fig_name.format('T_a')) +fig3.savefig(output_path / "figure_T_a.png") ax4.legend(loc=0) ax4.set_xlabel(r'$z/z_{max}$') ax4.set_ylabel(r'$u/u_f$') -fig4.savefig(fig_name.format('u_a')) +fig4.savefig(output_path / "figure_u_a.png") diff --git a/samples/python/onedim/diffusion_flame_extinction.py b/samples/python/onedim/diffusion_flame_extinction.py index b6fbcb5bac..081e5b24c8 100644 --- a/samples/python/onedim/diffusion_flame_extinction.py +++ b/samples/python/onedim/diffusion_flame_extinction.py @@ -9,28 +9,18 @@ (doi:10.1155/2014/484372). 
Please refer to this publication for a detailed explanation. Also, please don't forget to cite it if you make use of it. -Requires: cantera >= 2.5.0, matplotlib >= 2.0 +Requires: cantera >= 3.0, matplotlib >= 2.0 Keywords: combustion, 1D flow, diffusion flame, strained flame, extinction, saving output, plotting """ -import os -import importlib +from pathlib import Path import numpy as np import matplotlib.pyplot as plt import cantera as ct -hdf_output = importlib.util.find_spec('h5py') is not None - -if not hdf_output: - # Create directory for output data files - data_directory = 'diffusion_flame_extinction_data' - if not os.path.exists(data_directory): - os.makedirs(data_directory) - - # PART 1: INITIALIZATION # Set up an initial hydrogen-oxygen counterflow flame at 1 bar and low strain @@ -61,15 +51,26 @@ print('Creating the initial solution') f.solve(loglevel=0, auto=True) +# Define output locations +output_path = Path() / "diffusion_flame_extinction_data" +output_path.mkdir(parents=True, exist_ok=True) + +hdf_output = "native" in ct.hdf_support() if hdf_output: - file_name = 'diffusion_flame_extinction.h5' - f.write_hdf(file_name, group='initial_solution', mode='w', quiet=False, - description=('Initial solution')) -else: - # Save to data directory - file_name = 'initial_solution.yaml' - f.save(os.path.join(data_directory, file_name), name='solution', - description="Initial solution") + file_name = output_path / "flame_data.h5" + file_name.unlink(missing_ok=True) + +def names(test): + if hdf_output: + # use internal container structure for HDF + file_name = output_path / "flame_data.h5" + return file_name, test + # use separate files for YAML + file_name = output_path / f"{test}.yaml".replace("-", "_").replace("/", "_") + return file_name, "solution" + +file_name, entry = names("initial-solution") +f.save(file_name, name=entry, description="Initial solution") # PART 2: COMPUTE EXTINCTION STRAIN @@ -132,33 +133,24 @@ f.solve(loglevel=0) except ct.CanteraError as 
e: print('Error: Did not converge at n =', n, e) + + T_max.append(np.max(f.T)) + a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) if not np.isclose(np.max(f.T), temperature_limit_extinction): # Flame is still burning, so proceed to next strain rate n_last_burning = n - if hdf_output: - group = 'extinction/{0:04d}'.format(n) - f.write_hdf(file_name, group=group, quiet=True) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n) - f.save(os.path.join(data_directory, file_name), - name='solution', loglevel=0, - description=f"Solution at alpha = {alpha[-1]}") - T_max.append(np.max(f.T)) - a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) + file_name, entry = names(f"extinction/{n:04d}") + f.save(file_name, name=entry, description=f"Solution at alpha = {alpha[-1]}") + print('Flame burning at alpha = {:8.4F}. Proceeding to the next iteration, ' 'with delta_alpha = {}'.format(alpha[-1], delta_alpha)) elif ((T_max[-2] - T_max[-1] < delta_T_min) and (delta_alpha < delta_alpha_min)): # If the temperature difference is too small and the minimum relative # strain rate increase is reached, save the last, non-burning, solution # to the output file and break the loop - T_max.append(np.max(f.T)) - a_max.append(np.max(np.abs(np.gradient(f.velocity) / np.gradient(f.grid)))) - if hdf_output: - group = 'extinction/{0:04d}'.format(n) - f.write_hdf(file_name, group=group, quiet=True) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n) - f.save(os.path.join(data_directory, file_name), name='solution', loglevel=0) + file_name, entry = names(f"extinction/{n:04d}") + f.save(file_name, name=entry, description=f"Flame extinguished at alpha={alpha[-1]}") + print('Flame extinguished at alpha = {0:8.4F}.'.format(alpha[-1]), 'Abortion criterion satisfied.') break @@ -172,24 +164,15 @@ alpha[-1], alpha[n_last_burning], delta_alpha)) # Restore last burning solution - if hdf_output: - group = 'extinction/{0:04d}'.format(n_last_burning) - 
f.read_hdf(file_name, group=group) - else: - file_name = 'extinction_{0:04d}.yaml'.format(n_last_burning) - f.restore(os.path.join(data_directory, file_name), - name='solution', loglevel=0) + file_name, entry = names(f"extinction/{n_last_burning:04d}") + f.restore(file_name, entry, loglevel=0) # Print some parameters at the extinction point, after restoring the last burning # solution -if hdf_output: - group = 'extinction/{0:04d}'.format(n_last_burning) - f.read_hdf(file_name, group=group) -else: - file_name = 'extinction_{0:04d}.yaml'.format(n_last_burning) - f.restore(os.path.join(data_directory, file_name), - name='solution', loglevel=0) +file_name, entry = names(f"extinction/{n_last_burning:04d}") +f.restore(file_name, entry, loglevel=0) + print('----------------------------------------------------------------------') print('Parameters at the extinction point:') print('Pressure p={0} bar'.format(f.P / 1e5)) @@ -208,7 +191,4 @@ plt.semilogx(a_max, T_max) plt.xlabel(r'$a_{max}$ [1/s]') plt.ylabel(r'$T_{max}$ [K]') -if hdf_output: - plt.savefig('diffusion_flame_extinction_T_max_a_max.png') -else: - plt.savefig(os.path.join(data_directory, 'figure_T_max_a_max.png')) +plt.savefig(output_path / "figure_T_max_a_max.png") diff --git a/samples/python/onedim/flame_fixed_T.py b/samples/python/onedim/flame_fixed_T.py index a9abda6701..5a9b5055e0 100644 --- a/samples/python/onedim/flame_fixed_T.py +++ b/samples/python/onedim/flame_fixed_T.py @@ -2,14 +2,15 @@ A burner-stabilized, premixed methane/air flat flame with multicomponent transport properties and a specified temperature profile. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, premixed flame, plotting, saving output """ -import cantera as ct -import numpy as np from pathlib import Path +import numpy as np +import cantera as ct + ################################################################ # parameter values @@ -60,25 +61,21 @@ f.set_refine_criteria(ratio=3.0, slope=0.3, curve=1) f.solve(loglevel, refine_grid) -try: - # save to HDF container file if h5py is installed - f.write_hdf('flame_fixed_T.h5', group='mix', mode='w', - description='solution with mixture-averaged transport') -except ImportError: - f.save('flame_fixed_T.yaml','mixav', - 'solution with mixture-averaged transport') + +if "native" in ct.hdf_support(): + output = Path() / "flame_fixed_T.h5" +else: + output = Path() / "flame_fixed_T.yaml" +output.unlink(missing_ok=True) + +f.save(output, name="mix", description="solution with mixture-averaged transport") print('\n\n switching to multicomponent transport...\n\n') f.transport_model = 'Multi' f.set_refine_criteria(ratio=3.0, slope=0.1, curve=0.2) f.solve(loglevel, refine_grid) -try: - f.write_hdf('flame_fixed_T.h5', group='multi', - description='solution with multicomponent transport') -except ImportError: - f.save('flame_fixed_T.yaml','multi', - 'solution with multicomponent transport') +f.save(output, name="multi", description="solution with multicomponent transport") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('flame_fixed_T.csv', quiet=False) diff --git a/samples/python/onedim/flame_initial_guess.py b/samples/python/onedim/flame_initial_guess.py index 704da5d9a4..2cd07d67a0 100644 --- a/samples/python/onedim/flame_initial_guess.py +++ b/samples/python/onedim/flame_initial_guess.py @@ -6,16 +6,14 @@ Requires: cantera >= 3.0 Keywords: combustion, 1D flow, flame speed, premixed flame, saving output """ -import os import sys +from pathlib import Path import cantera as 
ct try: import pandas as pd except ImportError: pd = None -data_directory = "flame_initial_guess_data" -os.makedirs(data_directory, exist_ok=True) # Simulation parameters p = ct.one_atm # pressure [Pa] @@ -52,26 +50,24 @@ def describe(flame): # Save the flame in a few different formats +output_path = Path() / "flame_initial_guess_data" +output_path.mkdir(parents=True, exist_ok=True) + print("Save YAML") -yaml_filepath = os.path.join(data_directory, "flame.yaml") +yaml_filepath = output_path / "flame.yaml" f.save(yaml_filepath, name="solution", description="Initial methane flame") print("Save CSV") -csv_filepath = os.path.join(data_directory, "flame.csv") +csv_filepath = output_path / "flame.csv" f.write_csv(csv_filepath) try: # HDF is not a required dependency - hdf_filepath = os.path.join(data_directory, "flame.h5") - f.write_hdf( - hdf_filepath, - group="freeflame", - mode="w", - quiet=False, - description=("Initial methane flame"), - ) + hdf_filepath = output_path / "flame.h5" + hdf_filepath.unlink(missing_ok=True) + f.save(hdf_filepath, name="freeflame", description="Initial methane flame") print("Save HDF\n") -except ImportError as err: +except ct.CanteraError as err: print(f"Skipping HDF: {err}\n") hdf_filepath = None @@ -87,7 +83,7 @@ def describe(flame): print("Restore solution from HDF") gas.TPX = Tin, p, reactants f2 = ct.FreeFlame(gas, width=width) - f2.read_hdf(hdf_filepath, group="freeflame") + f2.restore(hdf_filepath, name="freeflame", loglevel=0) describe(f2) # Restore the flame via initial guess diff --git a/samples/python/onedim/flamespeed_sensitivity.py b/samples/python/onedim/flamespeed_sensitivity.py index d8e4de4079..be76729a1c 100644 --- a/samples/python/onedim/flamespeed_sensitivity.py +++ b/samples/python/onedim/flamespeed_sensitivity.py @@ -25,7 +25,7 @@ f.set_refine_criteria(ratio=3, slope=0.07, curve=0.14) f.solve(loglevel=1, auto=True) -print('\nmixture-averaged flamespeed = {:7f} m/s\n'.format(f.velocity[0])) +print(f"\nmixture-averaged 
flamespeed = {f.velocity[0]:7f} m/s\n") # Use the adjoint method to calculate sensitivities sens = f.get_flame_speed_reaction_sensitivities() @@ -34,5 +34,4 @@ print('Rxn # k/S*dS/dk Reaction Equation') print('----- ---------- ----------------------------------') for m in range(gas.n_reactions): - print('{: 5d} {: 10.3e} {}'.format( - m, sens[m], gas.reaction(m).equation)) + print(f"{m: 5d} {sens[m]: 10.3e} {gas.reaction(m).equation}") diff --git a/samples/python/onedim/ion_burner_flame.py b/samples/python/onedim/ion_burner_flame.py index 2713445f03..6b4a8c35a4 100644 --- a/samples/python/onedim/ion_burner_flame.py +++ b/samples/python/onedim/ion_burner_flame.py @@ -1,12 +1,14 @@ """ A burner-stabilized premixed methane-air flame with charged species. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, plasma, premixed flame """ +from pathlib import Path import cantera as ct + p = ct.one_atm tburner = 600.0 reactants = 'CH4:1.0, O2:2.0, N2:7.52' # premixed gas composition @@ -25,11 +27,13 @@ f.transport_model = 'Ion' f.solve(loglevel, auto=True) f.solve(loglevel=loglevel, stage=2, enable_energy=True) -try: - # save to HDF container file if h5py is installed - f.write_hdf('ion_burner_flame.h5', group='ion', mode='w', - description='solution with ionized gas transport') -except ImportError: - f.save('ion_burner_flame.yaml', 'mix', 'solution with mixture-averaged transport') + +if "native" in ct.hdf_support(): + output = Path() / "ion_burner_flame.h5" +else: + output = Path() / "ion_burner_flame.yaml" +output.unlink(missing_ok=True) + +f.save(output, name="mix", description="solution with mixture-averaged transport") f.write_csv('ion_burner_flame.csv', quiet=False) diff --git a/samples/python/onedim/ion_free_flame.py b/samples/python/onedim/ion_free_flame.py index df53cadfe8..f7b27225ef 100644 --- a/samples/python/onedim/ion_free_flame.py +++ b/samples/python/onedim/ion_free_flame.py @@ -1,12 +1,14 @@ """ A 
freely-propagating, premixed methane-air flat flame with charged species. -Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, burner-stabilized flame, plasma, premixed flame """ +from pathlib import Path import cantera as ct + # Simulation parameters p = ct.one_atm # pressure [Pa] Tin = 300.0 # unburned gas temperature [K] @@ -30,15 +32,16 @@ # stage two f.solve(loglevel=loglevel, stage=2, enable_energy=True) -try: - # save to HDF container file if h5py is installed - f.write_hdf('ion_free_flame.h5', group='ion', mode='w', - description='solution with ionized gas transport') -except ImportError: - f.save('ion_free_flame.yaml', 'ion', 'solution with ionized gas transport') +if "native" in ct.hdf_support(): + output = Path() / "ion_free_flame.h5" +else: + output = Path() / "ion_free_flame.yaml" +output.unlink(missing_ok=True) + +f.save(output, name="ion", description="solution with ionized gas transport") f.show_solution() -print('mixture-averaged flamespeed = {0:7f} m/s'.format(f.velocity[0])) +print(f"mixture-averaged flamespeed = {f.velocity[0]:7f} m/s") # write the velocity, temperature, density, and mole fractions to a CSV file f.write_csv('ion_free_flame.csv', quiet=False) diff --git a/samples/python/onedim/premixed_counterflow_flame.py b/samples/python/onedim/premixed_counterflow_flame.py index 0e4e48dbc7..b50c43a5c2 100644 --- a/samples/python/onedim/premixed_counterflow_flame.py +++ b/samples/python/onedim/premixed_counterflow_flame.py @@ -4,12 +4,14 @@ This script simulates a lean hydrogen-oxygen flame stabilized in a strained flowfield, with an opposed flow consisting of equilibrium products. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame """ +from pathlib import Path import cantera as ct + # parameter values p = 0.05 * ct.one_atm # pressure T_in = 373.0 # inlet temperature @@ -42,7 +44,15 @@ sim.solve(loglevel, auto=True) +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_flame.h5" +else: + output = Path() / "premixed_counterflow_flame.yaml" +output.unlink(missing_ok=True) + +sim.save(output, name="mix", description="solution with mixture-averaged transport") + # write the velocity, temperature, and mole fractions to a CSV file -sim.write_csv('premixed_counterflow.csv', quiet=False) +sim.write_csv("premixed_counterflow_flame.csv", quiet=False) sim.show_stats() sim.show_solution() diff --git a/samples/python/onedim/premixed_counterflow_twin_flame.py b/samples/python/onedim/premixed_counterflow_twin_flame.py index e04386b005..5289b28609 100644 --- a/samples/python/onedim/premixed_counterflow_twin_flame.py +++ b/samples/python/onedim/premixed_counterflow_twin_flame.py @@ -1,17 +1,16 @@ -# coding: utf-8 - """ Simulate two counter-flow jets of reactants shooting into each other. This simulation differs from the similar premixed_counterflow_flame.py example as the latter simulates a jet of reactants shooting into products. 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame, plotting """ -import cantera as ct -import numpy as np import sys +from pathlib import Path +import numpy as np +import cantera as ct # Differentiation function for data that has variable grid spacing Used here to @@ -117,10 +116,18 @@ def solveOpposedFlame(oppFlame, massFlux=0.12, loglevel=1, Sc = computeConsumptionSpeed(oppFlame) -print("Peak temperature: {0:.1f} K".format(T)) -print("Strain Rate: {0:.1f} 1/s".format(K)) -print("Consumption Speed: {0:.2f} cm/s".format(Sc*100)) -oppFlame.write_csv("premixed_twin_flame.csv", quiet=False) +if "native" in ct.hdf_support(): + output = Path() / "premixed_counterflow_twin_flame.h5" +else: + output = Path() / "premixed_counterflow_twin_flame.yaml" +output.unlink(missing_ok=True) + +oppFlame.save(output, name="mix") + +print(f"Peak temperature: {T:.1f} K") +print(f"Strain Rate: {K:.1f} 1/s") +print(f"Consumption Speed: {Sc * 100:.2f} cm/s") +oppFlame.write_csv("premixed_counterflow_twin_flame.csv", quiet=False) # Generate plots to see results, if user desires if '--plot' in sys.argv: diff --git a/samples/python/onedim/stagnation_flame.py b/samples/python/onedim/stagnation_flame.py index 028fd9b7f3..a61fc5c127 100644 --- a/samples/python/onedim/stagnation_flame.py +++ b/samples/python/onedim/stagnation_flame.py @@ -14,18 +14,14 @@ points would be concentrated upsteam of the flame, where the flamefront had been previously. (To see this, try setting prune to zero.) 
-Requires: cantera >= 2.5.0 +Requires: cantera >= 3.0 Keywords: combustion, 1D flow, premixed flame, strained flame """ -import os -import importlib - +from pathlib import Path import cantera as ct -hdf_output = importlib.util.find_spec('h5py') is not None - # parameter values p = 0.05 * ct.one_atm # pressure tburner = 373.0 # burner temperature @@ -74,24 +70,21 @@ sim.solve(loglevel, auto=True) -if hdf_output: - outfile = 'stagnation_flame.h5' +output_path = Path() / "stagnation_flame_data" +output_path.mkdir(parents=True, exist_ok=True) + +if "native" in ct.hdf_support(): + output = output_path / "stagnation_flame.h5" else: - outfile = 'stagnation_flame.yaml' -if os.path.exists(outfile): - os.remove(outfile) + output = output_path / "stagnation_flame.yaml" +output.unlink(missing_ok=True) for m, md in enumerate(mdot): sim.inlet.mdot = md sim.solve(loglevel) - if hdf_output: - sim.write_hdf(outfile, group='mdot{0}'.format(m), - description='mdot = {0} kg/m2/s'.format(md)) - else: - sim.save(outfile, 'mdot{0}'.format(m), - 'mdot = {0} kg/m2/s'.format(md)) + sim.save(output, name=f"mdot-{m}", description=f"mdot = {md} kg/m2/s") # write the velocity, temperature, and mole fractions to a CSV file - sim.write_csv('stagnation_flame_{0}.csv'.format(m), quiet=False) + sim.write_csv(output_path / f"stagnation_flame_{m}.csv", quiet=False) sim.show_stats() diff --git a/src/base/AnyMap.cpp b/src/base/AnyMap.cpp index 0da576385b..47d557f754 100644 --- a/src/base/AnyMap.cpp +++ b/src/base/AnyMap.cpp @@ -1442,6 +1442,17 @@ std::string AnyMap::keys_str() const return to_string(b); } +std::set AnyMap::keys() const +{ + std::set out; + auto iter = this->begin(); + while (iter != this->end()) { + out.insert(iter->first); + ++iter; + } + return out; +} + void AnyMap::propagateMetadata(shared_ptr& metadata) { m_metadata = metadata; diff --git a/src/base/SolutionArray.cpp b/src/base/SolutionArray.cpp new file mode 100644 index 0000000000..33ab606532 --- /dev/null +++ 
b/src/base/SolutionArray.cpp @@ -0,0 +1,686 @@ +/** + * @file SolutionArray.cpp + * Definition file for class SolutionArray. + */ + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. + +#include "cantera/base/SolutionArray.h" +#include "cantera/base/Solution.h" +#include "cantera/base/Storage.h" +#include "cantera/base/stringUtils.h" +#include "cantera/thermo/ThermoPhase.h" +#include "cantera/thermo/SurfPhase.h" +#include +#include +#include + + +namespace ba = boost::algorithm; + +const std::map aliasMap = { + {"T", "temperature"}, + {"P", "pressure"}, + {"D", "density"}, + {"Y", "mass-fractions"}, + {"X", "mole-fractions"}, + {"C", "coverages"}, + {"U", "specific-internal-energy"}, + {"V", "specific-volume"}, + {"H", "specific-enthalpy"}, + {"S", "specific-entropy"}, + {"Q", "vapor-fraction"}, +}; + +namespace Cantera +{ + +SolutionArray::SolutionArray( + const shared_ptr& sol, + size_t size, + const AnyMap& meta) + : m_sol(sol) + , m_size(size) + , m_meta(meta) +{ + if (!m_sol) { + throw CanteraError("SolutionArray::SolutionArray", + "Unable to create SolutionArray from invalid Solution object."); + } + m_stride = m_sol->thermo()->stateSize(); + m_data.resize(m_size * m_stride, 0.); +} + +shared_ptr SolutionArray::thermo() +{ + return m_sol->thermo(); +} + +bool SolutionArray::hasComponent(const std::string& name) const +{ + if (m_extra.count(name)) { + // auxiliary data + return true; + } + if (m_sol->thermo()->speciesIndex(name) != npos) { + // species + return true; + } + if (name == "X" || name == "Y") { + // reserved names + return false; + } + // native state + return (m_sol->thermo()->nativeState().count(name)); +} + +vector_fp SolutionArray::getComponent(const std::string& name) const +{ + if (!hasComponent(name)) { + throw CanteraError("SolutionArray::getComponent", "no component named " + name); + } + + vector_fp out(m_size); + if 
(m_extra.count(name)) { + // auxiliary data + out = m_extra.at(name); + return out; + } + + size_t ix = m_sol->thermo()->speciesIndex(name); + if (ix == npos) { + ix = m_sol->thermo()->nativeState()[name]; + } else { + ix += m_stride - m_sol->thermo()->nSpecies(); + } + for (size_t k = 0; k < m_size; ++k) { + out[k] = m_data[k * m_stride + ix]; + } + return out; +} + +void SolutionArray::setComponent( + const std::string& name, const vector_fp& data, bool force) +{ + if (!hasComponent(name)) { + if (force) { + m_extra.emplace(name, data); + return; + } + throw CanteraError("SolutionArray::setComponent", "no component named " + name); + } + if (data.size() != m_size) { + throw CanteraError("SolutionArray::setComponent", "incompatible sizes"); + } + + if (m_extra.count(name)) { + // auxiliary data + m_extra[name] = data; + } + + size_t ix = m_sol->thermo()->speciesIndex(name); + if (ix == npos) { + ix = m_sol->thermo()->nativeState()[name]; + } else { + ix += m_stride - m_sol->thermo()->nSpecies(); + } + for (size_t k = 0; k < m_size; ++k) { + m_data[k * m_stride + ix] = data[k]; + } +} + +void SolutionArray::setIndex(size_t index, bool restore) +{ + if (m_size == 0) { + throw CanteraError("SolutionArray::setIndex", + "Unable to set index in empty SolutionArray."); + } else if (index == npos) { + if (m_index == npos) { + throw CanteraError("SolutionArray::setIndex", + "Both current and buffered indices are invalid."); + } + return; + } else if (index == m_index) { + return; + } else if (index >= m_size) { + throw IndexError("SolutionArray::setIndex", "entries", index, m_size - 1); + } + m_index = index; + if (restore) { + size_t nState = m_sol->thermo()->stateSize(); + m_sol->thermo()->restoreState(nState, &m_data[m_index * m_stride]); + } +} + +vector_fp SolutionArray::getState(size_t index) +{ + setIndex(index); + size_t nState = m_sol->thermo()->stateSize(); + vector_fp out(nState); + m_sol->thermo()->saveState(out); // thermo contains current state + return out; 
+} + +void SolutionArray::setState(size_t index, const vector_fp& data) +{ + setIndex(index, false); + m_sol->thermo()->restoreState(data); + size_t nState = m_sol->thermo()->stateSize(); + m_sol->thermo()->saveState(nState, &m_data[m_index * m_stride]); +} + +std::map SolutionArray::getAuxiliary(size_t index) +{ + setIndex(index); + std::map out; + for (auto& item : m_extra) { + out[item.first] = item.second[m_index]; + } + return out; +} + +AnyMap preamble(const std::string& desc) +{ + AnyMap data; + data["description"] = desc; + data["generator"] = "Cantera SolutionArray"; + data["cantera-version"] = CANTERA_VERSION; + data["git-commit"] = gitCommit(); + + // Add a timestamp indicating the current time + time_t aclock; + ::time(&aclock); // Get time in seconds + struct tm* newtime = localtime(&aclock); // Convert time to struct tm form + data["date"] = stripnonprint(asctime(newtime)); + + // Force metadata fields to the top of the file + data["description"].setLoc(-6, 0); + data["generator"].setLoc(-5, 0); + data["cantera-version"].setLoc(-4, 0); + data["git-commit"].setLoc(-3, 0); + data["date"].setLoc(-2, 0); + + return data; +} + +void SolutionArray::writeHeader(const std::string& fname, const std::string& id, + const std::string& desc) +{ + Storage file(fname, true); + file.checkGroup(id); + file.writeAttributes(id, preamble(desc)); +} + +void SolutionArray::writeHeader(AnyMap& root, const std::string& id, + const std::string& desc) +{ + root[id] = preamble(desc); +} + +void SolutionArray::writeEntry(const std::string& fname, const std::string& id, + int compression) +{ + Storage file(fname, true); + if (compression) { + file.setCompressionLevel(compression); + } + file.checkGroup(id); + file.writeAttributes(id, m_meta); + if (!m_size) { + return; + } + + const auto& nativeState = m_sol->thermo()->nativeState(); + size_t nSpecies = m_sol->thermo()->nSpecies(); + for (auto& state : nativeState) { + std::string name = state.first; + if (name == "X" || name == 
"Y") { + size_t offset = state.second; + std::vector prop; + for (size_t i = 0; i < m_size; i++) { + size_t first = offset + i * m_stride; + prop.push_back(vector_fp(&m_data[first], &m_data[first + nSpecies])); + } + file.writeMatrix(id, name, prop); + } else { + auto data = getComponent(name); + file.writeVector(id, name, data); + } + } + + for (auto& extra : m_extra) { + file.writeVector(id, extra.first, extra.second); + } +} + +AnyMap& openField(AnyMap& root, const std::string& id) +{ + // locate field based on 'id' + std::vector tokens; + tokenizePath(id, tokens); + AnyMap* ptr = &root; // use raw pointer to avoid copying + std::string path = ""; + for (auto& field : tokens) { + path += "/" + field; + AnyMap& sub = *ptr; + if (sub.hasKey(field) && !sub[field].is()) { + throw CanteraError("openField", + "Encountered invalid existing field '{}'", path); + } else if (!sub.hasKey(field)) { + sub[field] = AnyMap(); + } + ptr = &sub[field].as(); + } + return *ptr; +} + +void SolutionArray::writeEntry(AnyMap& root, const std::string& id) +{ + AnyMap& data = openField(root, id); + bool preexisting = !data.empty(); + data["points"] = int(m_size); + data.update(m_meta); + + for (auto& extra : m_extra) { + data[extra.first] = extra.second; + } + + auto phase = m_sol->thermo(); + if (m_size == 1) { + setIndex(0); + data["temperature"] = phase->temperature(); + data["pressure"] = phase->pressure(); + auto surf = std::dynamic_pointer_cast(phase); + auto nSpecies = phase->nSpecies(); + vector_fp values(nSpecies); + if (surf) { + surf->getCoverages(&values[0]); + } else { + phase->getMassFractions(&values[0]); + } + AnyMap items; + for (size_t k = 0; k < nSpecies; k++) { + if (values[k] != 0.0) { + items[phase->speciesName(k)] = values[k]; + } + } + if (surf) { + data["coverages"] = std::move(items); + } else { + data["mass-fractions"] = std::move(items); + } + } else if (m_size > 1) { + std::vector components; + for (auto& extra : m_extra) { + 
components.push_back(extra.first); + } + + const auto& nativeState = phase->nativeState(); + for (auto& state : nativeState) { + std::string name = state.first; + if (name == "X" || name == "Y") { + for (auto& spc : phase->speciesNames()) { + data[spc] = getComponent(spc); + components.push_back(spc); + } + data["basis"] = name == "X" ? "mole" : "mass"; + } else { + data[name] = getComponent(name); + components.push_back(name); + } + } + data["components"] = components; + } + + // If this is not replacing an existing solution, put it at the end + if (!preexisting) { + data.setLoc(INT_MAX, 0); + } +} + +void SolutionArray::save(const std::string& fname, const std::string& id, + const std::string& desc, int compression) +{ + size_t dot = fname.find_last_of("."); + std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + writeHeader(fname, id, desc); + writeEntry(fname, id, compression); + return; + } + if (extension == "yaml" || extension == "yml") { + // Check for an existing file and load it if present + AnyMap data; + if (std::ifstream(fname).good()) { + data = AnyMap::fromYamlFile(fname); + } + writeEntry(data, id); + writeHeader(data, id, desc); + + // Write the output file and remove the now-outdated cached file + std::ofstream out(fname); + out << data.toYamlString(); + AnyMap::clearCachedFile(fname); + return; + } + throw CanteraError("SolutionArray::save", + "Unknown file extension '{}'", extension); +} + +AnyMap SolutionArray::readHeader(const std::string& fname, const std::string& id) +{ + Storage file(fname, false); + file.checkGroup(id); + return file.readAttributes(id, false); +} + +const AnyMap& locateField(const AnyMap& root, const std::string& id) +{ + // locate field based on 'id' + std::vector tokens; + tokenizePath(id, tokens); + const AnyMap* ptr = &root; // use raw pointer to avoid copying + std::string path = ""; + for (auto& field : tokens) { + 
path += "/" + field; + const AnyMap& sub = *ptr; + if (!sub.hasKey(field) || !sub[field].is()) { + throw CanteraError("SolutionArray::locateField", + "No field or solution with id '{}'", path); + } + ptr = &sub[field].as(); // AnyMap lacks 'operator=' for const AnyMap + } + return *ptr; +} + +AnyMap SolutionArray::readHeader(const AnyMap& root, const std::string& id) +{ + auto sub = locateField(root, id); + AnyMap header; + for (const auto& item : sub) { + if (!sub[item.first].is()) { + header[item.first] = item.second; + } + } + return header; +} + +AnyMap SolutionArray::restore(const std::string& fname, const std::string& id) +{ + size_t dot = fname.find_last_of("."); + std::string extension = (dot != npos) ? toLowerCopy(fname.substr(dot + 1)) : ""; + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + readEntry(fname, id); + return readHeader(fname, id); + } + if (extension == "yaml" || extension == "yml") { + const AnyMap& root = AnyMap::fromYamlFile(fname); + readEntry(root, id); + return readHeader(root, id); + } + throw CanteraError("SolutionArray::restore", + "Unknown file extension '{}'", extension); +} + +std::string SolutionArray::detectMode(const std::set& names, bool native) +{ + // check set of available names against state acronyms defined by Phase::fullStates + std::string mode = ""; + const auto& nativeState = m_sol->thermo()->nativeState(); + bool usesNativeState; + auto surf = std::dynamic_pointer_cast(m_sol->thermo()); + for (const auto& item : m_sol->thermo()->fullStates()) { + bool found = true; + std::string name; + usesNativeState = true; + for (size_t i = 0; i < item.size(); i++) { + // pick i-th letter from "full" state acronym + name = std::string(1, item[i]); + if (surf && (name == "X" || name == "Y")) { + // override native state to enable detection of surface phases + name = "C"; + usesNativeState = false; + break; + } + if (names.count(name)) { + // property is stored using letter acronym + usesNativeState &= 
nativeState.count(name) > 0; + } else if (aliasMap.count(name) && names.count(aliasMap.at(name))) { + // property is stored using property name + usesNativeState &= nativeState.count(name) > 0; + } else { + found = false; + break; + } + } + if (found) { + mode = (name == "C") ? item.substr(0, 2) + "C" : item; + break; + } + } + if (usesNativeState && native) { + return "native"; + } + return mode; +} + +std::set SolutionArray::stateProperties( + const std::string& mode, bool alias) +{ + std::set states; + if (mode == "native") { + for (const auto& item : m_sol->thermo()->nativeState()) { + states.insert(alias ? aliasMap.at(item.first) : item.first); + } + } else { + for (const auto& m : mode) { + const std::string name = std::string(1, m); + states.insert(alias ? aliasMap.at(name) : name); + } + } + + return states; +} + +void SolutionArray::readEntry(const std::string& fname, const std::string& id) +{ + Storage file(fname, false); + file.checkGroup(id); + m_meta = file.readAttributes(id, true); + + auto contents = file.contents(id); + m_size = contents.first; + m_data.resize(m_size * m_stride, 0.); + std::set names = contents.second; + + if (m_size == 0) { + return; + } + + // determine storage mode of state data + std::string mode = detectMode(names); + std::set states = stateProperties(mode); + if (states.count("C")) { + if (names.count("X")) { + states.erase("C"); + states.insert("X"); + mode = "TPX"; + } else if (names.count("Y")) { + states.erase("C"); + states.insert("Y"); + mode = "TPY"; + } + } + + // restore state data + size_t nSpecies = m_sol->thermo()->nSpecies(); + size_t nState = m_sol->thermo()->stateSize(); + const auto& nativeStates = m_sol->thermo()->nativeState(); + if (mode == "native") { + // native state can be written directly into data storage + for (const auto& item : nativeStates) { + std::string name = item.first; + if (name == "X" || name == "Y") { + size_t offset = item.second; + auto prop = file.readMatrix(id, name, m_size, nSpecies); 
+ for (size_t i = 0; i < m_size; i++) { + std::copy(prop[i].begin(), prop[i].end(), + &m_data[offset + i * m_stride]); + } + } else { + setComponent(name, file.readVector(id, name, m_size)); + } + } + } else if (mode == "TPX") { + // data format used by Python h5py export (Cantera 2.5) + vector_fp T = file.readVector(id, "T", m_size); + vector_fp P = file.readVector(id, "P", m_size); + auto X = file.readMatrix(id, "X", m_size, nSpecies); + for (size_t i = 0; i < m_size; i++) { + m_sol->thermo()->setState_TPX(T[i], P[i], X[i].data()); + m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); + } + } else if (mode == "TPY") { + vector_fp T = file.readVector(id, "T", m_size); + vector_fp P = file.readVector(id, "P", m_size); + auto Y = file.readMatrix(id, "Y", m_size, nSpecies); + for (size_t i = 0; i < m_size; i++) { + m_sol->thermo()->setState_TPY(T[i], P[i], Y[i].data()); + m_sol->thermo()->saveState(nState, &m_data[i * m_stride]); + } + } else if (mode == "") { + throw CanteraError("SolutionArray::readEntry", + "Data are not consistent with full state modes."); + } else { + throw NotImplementedError("SolutionArray::readEntry", + "Import of '{}' data is not supported.", mode); + } + + // restore remaining data + for (const auto& name : names) { + if (!states.count(name)) { + vector_fp data = file.readVector(id, name, m_size); + m_extra.emplace(name, data); + } + } +} + +void SolutionArray::readEntry(const AnyMap& root, const std::string& id) +{ + auto sub = locateField(root, id); + + // set size and initialize + m_size = sub.getInt("points", 0); + if (!sub.hasKey("T") && !sub.hasKey("temperature")) { + // overwrite size - Sim1D erroneously assigns '1' (Cantera 2.6) + m_size = 0; + } + m_data.resize(m_size * m_stride, 0.); + + // restore data + std::set exclude = {"points", "X", "Y"}; + std::set names = sub.keys(); + size_t nState = m_sol->thermo()->stateSize(); + if (m_size == 0) { + // no data points + } else if (m_size == 1) { + // single data point + 
std::string mode = detectMode(names, false); + if (mode == "") { + // missing property - Sim1D (Cantera 2.6) + names.insert("pressure"); + mode = detectMode(names, false); + } + if (mode == "TPY") { + // single data point uses long names + double T = sub["temperature"].asDouble(); + double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) + auto Y = sub["mass-fractions"].asMap(); + m_sol->thermo()->setState_TPY(T, P, Y); + } else if (mode == "TPC") { + auto surf = std::dynamic_pointer_cast(m_sol->thermo()); + if (!surf) { + throw CanteraError("SolutionArray::readEntry", + "Restoring of coverages requires surface phase"); + } + double T = sub["temperature"].asDouble(); + double P = sub.getDouble("pressure", OneAtm); // missing - Sim1D (Cantera 2.6) + m_sol->thermo()->setState_TP(T, P); + auto cov = sub["coverages"].asMap(); + surf->setCoveragesByName(cov); + } else if (mode == "") { + throw CanteraError("SolutionArray::readEntry", + "Data are not consistent with full state modes."); + } else { + throw NotImplementedError("SolutionArray::readEntry", + "Import of '{}' data is not supported.", mode); + } + m_sol->thermo()->saveState(nState, m_data.data()); + auto props = stateProperties(mode, true); + exclude.insert(props.begin(), props.end()); + } else { + // multiple data points + if (sub.hasKey("components")) { + const auto& components = sub["components"].as>(); + for (const auto& name : components) { + auto data = sub[name].asVector(m_size); + setComponent(name, data, true); + exclude.insert(name); + } + } else { + // legacy YAML format does not provide for list of components + for (const auto& item : sub) { + const std::string& name = item.first; + const AnyValue& value = item.second; + if (value.is>()) { + const vector_fp& data = value.as>(); + if (data.size() == m_size) { + setComponent(name, data, true); + exclude.insert(item.first); + } + } + } + } + + // check that state data are complete + const auto& nativeState = 
m_sol->thermo()->nativeState(); + std::set props = {}; + std::set missingProps = {}; + for (const auto& item : nativeState) { + if (exclude.count(item.first)) { + props.insert(item.first); + } else { + missingProps.insert(item.first); + } + } + + std::set TY = {"T", "Y"}; + if (props == TY && missingProps.count("D") && sub.hasKey("pressure")) { + // missing "D" - Sim1D (Cantera 2.6) + double P = sub["pressure"].asDouble(); + const size_t offset_T = nativeState.find("T")->second; + const size_t offset_D = nativeState.find("D")->second; + const size_t offset_Y = nativeState.find("Y")->second; + for (size_t i = 0; i < m_size; i++) { + double T = m_data[offset_T + i * m_stride]; + m_sol->thermo()->setState_TPY(T, P, &m_data[offset_Y + i * m_stride]); + m_data[offset_D + i * m_stride] = m_sol->thermo()->density(); + } + } else if (missingProps.size()) { + throw CanteraError("SolutionArray::readEntry", + "Incomplete state information: missing '{}'", + ba::join(missingProps, "', '")); + } + } + + // add meta data + for (const auto& item : sub) { + if (!exclude.count(item.first)) { + m_meta[item.first] = item.second; + } + } + m_meta.erase("points"); +} + +} diff --git a/src/base/Storage.cpp b/src/base/Storage.cpp new file mode 100644 index 0000000000..ba19f5cf40 --- /dev/null +++ b/src/base/Storage.cpp @@ -0,0 +1,466 @@ +/** + * @file Storage.cpp + * Definition file for class Storage. + */ + +// This file is part of Cantera. See License.txt in the top-level directory or +// at https://cantera.org/license.txt for license and copyright information. 
+ +#include "cantera/base/AnyMap.h" +#include "cantera/base/Storage.h" + +#if CT_USE_HDF5 + +#if CT_USE_SYSTEM_HIGHFIVE + #include + #include + #include + #include + #include + #include +#else + #include "cantera/ext/HighFive/H5Attribute.hpp" + #include "cantera/ext/HighFive/H5DataSet.hpp" + #include "cantera/ext/HighFive/H5DataSpace.hpp" + #include "cantera/ext/HighFive/H5DataType.hpp" + #include "cantera/ext/HighFive/H5File.hpp" + #include "cantera/ext/HighFive/H5Group.hpp" +#endif + +namespace h5 = HighFive; + +enum class H5Boolean { + FALSE = 0, + TRUE = 1, +}; + +h5::EnumType create_enum_boolean() { + return {{"FALSE", H5Boolean::FALSE}, + {"TRUE", H5Boolean::TRUE}}; +} + +HIGHFIVE_REGISTER_TYPE(H5Boolean, create_enum_boolean) + +#endif + +namespace Cantera +{ + +#if CT_USE_HDF5 + +Storage::Storage(std::string fname, bool write) : m_write(write) +{ + if (m_write) { + m_file.reset(new h5::File(fname, h5::File::OpenOrCreate)); + } else { + m_file.reset(new h5::File(fname, h5::File::ReadOnly)); + } +} + +Storage::~Storage() +{ + m_file->flush(); +} + +void Storage::setCompressionLevel(int level) +{ + if (level < 0 || level > 9) { + throw CanteraError("Storage::setCompressionLevel", + "Invalid compression level '{}' (needs to be 0..9).", level); + } + m_compressionLevel = level; +} + +bool Storage::checkGroupRead(const std::string& id) const +{ + std::vector tokens; + tokenizePath(id, tokens); + std::string grp = tokens[0]; + if (!m_file->exist(grp) || m_file->getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroupRead", + "No group with id '{}' found", grp); + } + + std::string path = grp; + h5::Group sub = m_file->getGroup(grp); + tokens.erase(tokens.begin()); + for (auto& grp : tokens) { + path += "/" + grp; + if (!sub.exist(grp) || sub.getObjectType(grp) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroupRead", + "No group with id '{}' found", path); + } + sub = sub.getGroup(grp); + } + return true; +} + +bool 
Storage::checkGroupWrite(const std::string& id) +{ + if (!m_file->exist(id)) { + m_file->createGroup(id); + return true; + } + if (m_file->getObjectType(id) != h5::ObjectType::Group) { + throw CanteraError("Storage::checkGroupWrite", + "Invalid object with id '{}' exists", id); + } + return true; +} + +bool Storage::checkGroup(const std::string& id) { + if (m_write) { + return checkGroupWrite(id); + } + return checkGroupRead(id); +} + +std::pair> Storage::contents(const std::string& id) const +{ + h5::Group sub = m_file->getGroup(id); + std::set names; + size_t nDims = npos; + size_t nElements = 0; + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Dataset) { + h5::DataSpace space = sub.getDataSet(name).getSpace(); + names.insert(name); + if (space.getNumberDimensions() < nDims) { + nDims = space.getNumberDimensions(); + nElements = space.getElementCount(); + } + } + } + if (nDims != 1 && nDims != npos) { + throw NotImplementedError("Storage::content", + "Encountered invalid data with {} dimensions.", nDims); + } + return std::make_pair(nElements, names); +} + +AnyMap readH5Attributes(const h5::Group& sub, bool recursive) +{ + // restore meta data from attributes + AnyMap out; + for (auto& name : sub.listAttributeNames()) { + h5::Attribute attr = sub.getAttribute(name); + h5::DataType dtype = attr.getDataType(); + h5::DataTypeClass dclass = dtype.getClass(); + if (dclass == h5::DataTypeClass::Float) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + double value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Integer) { + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + out[name] = values; + } else { + int value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::String) { + if (attr.getSpace().getElementCount() > 1) { + std::vector 
values; + attr.read(values); + out[name] = values; + } else { + std::string value; + attr.read(value); + out[name] = value; + } + } else if (dclass == h5::DataTypeClass::Enum) { + // only booleans are supported + if (attr.getSpace().getElementCount() > 1) { + std::vector values; + attr.read(values); + std::vector bValues; + for (auto v : values) { + bValues.push_back(bool(v)); + } + out[name] = bValues; + } else { + H5Boolean value; + attr.read(value); + out[name] = bool(value); + } + } else { + throw NotImplementedError("readH5Attributes", + "Unable to read attribute '{}' with type '{}'", name, dtype.string()); + } + } + + if (recursive) { + for (auto& name : sub.listObjectNames()) { + if (sub.getObjectType(name) == h5::ObjectType::Group) { + out[name] = readH5Attributes(sub.getGroup(name), recursive); + } + } + } + + return out; +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + h5::Group sub = m_file->getGroup(id); + try { + return readH5Attributes(sub, recursive); + } catch (const Cantera::NotImplementedError& err) { + throw NotImplementedError("Storage::readAttribute", + "{} in group '{}'.", err.getMessage(), id); + } +} + +void writeH5Attributes(h5::Group sub, const AnyMap& meta) +{ + for (auto& item : meta) { + if (sub.hasAttribute(item.first)) { + throw NotImplementedError("writeH5Attributes", + "Unable to overwrite existing Attribute '{}'", item.first); + } + if (item.second.is()) { + double value = item.second.asDouble(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is() || item.second.is()) { + int value = item.second.asInt(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is()) { + std::string value = item.second.asString(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if 
(item.second.is()) { + bool bValue = item.second.asBool(); + H5Boolean value = bValue ? H5Boolean::TRUE : H5Boolean::FALSE; + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(value)); + attr.write(value); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto values = item.second.as>(); + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is>()) { + auto bValue = item.second.as>(); + std::vector values; + for (auto b : bValue) { + values.push_back(b ? H5Boolean::TRUE : H5Boolean::FALSE); + } + h5::Attribute attr = sub.createAttribute( + item.first, h5::DataSpace::From(values)); + attr.write(values); + } else if (item.second.is()) { + // step into recursion + auto value = item.second.as(); + auto grp = sub.createGroup(item.first); + writeH5Attributes(grp, value); + } else { + throw NotImplementedError("writeH5Attributes", + "Unable to write attribute '{}' with type '{}'", + item.first, item.second.type_str()); + } + } +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + h5::Group sub = m_file->getGroup(id); + try { + writeH5Attributes(sub, meta); + } catch (const Cantera::NotImplementedError& err) { + throw NotImplementedError("Storage::writeAttribute", + "{} in group '{}'.", err.getMessage(), id); + } +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + h5::Group sub = m_file->getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readVector", + "DataSet '{}' not found in group '{}'.", name, id); + } + h5::DataSet 
dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readVector", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + if (dataset.getElementCount() != size) { + throw CanteraError("Storage::readVector", + "Size of DataSet '{}' is inconsistent; expected {} elements but " + "received {} elements.", name, size, dataset.getElementCount()); + } + vector_fp out; + dataset.read(out); + return out; +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + h5::Group sub = m_file->getGroup(id); + if (sub.exist(name)) { + throw NotImplementedError("Storage::writeVector", + "Unable to overwrite existing DataSet '{}' in group '{}'.", name, id); + } + std::vector dims{data.size()}; + h5::DataSet dataset = sub.createDataSet(name, h5::DataSpace(dims)); + dataset.write(data); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + h5::Group sub = m_file->getGroup(id); + if (!sub.exist(name)) { + throw CanteraError("Storage::readMatrix", + "DataSet '{}' not found in group '{}'.", name, id); + } + h5::DataSet dataset = sub.getDataSet(name); + if (dataset.getDataType().getClass() != h5::DataTypeClass::Float) { + throw CanteraError("Storage::readMatrix", + "Type of DataSet '{}' is inconsistent; expected HDF float.", name); + } + h5::DataSpace space = dataset.getSpace(); + if (space.getNumberDimensions() != 2) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected two dimensions.", name); + } + const auto& shape = space.getDimensions(); + if (shape[0] != rows) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} rows.", name, rows); + } + if (shape[1] != cols) { + throw CanteraError("Storage::readMatrix", + "Shape of DataSet '{}' is inconsistent; expected {} columns.", name, 
cols); + } + std::vector out; + dataset.read(out); + return out; +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + h5::Group sub = m_file->getGroup(id); + if (sub.exist(name)) { + throw NotImplementedError("Storage::writeMatrix", + "Unable to overwrite existing DataSet '{}' in group '{}'.", name, id); + } + std::vector dims{data.size()}; + dims.push_back(data.size() ? data[0].size() : 0); + if (m_compressionLevel) { + // Set chunk size to single chunk and apply compression level; for caveats, see + // https://stackoverflow.com/questions/32994766/compressed-files-bigger-in-h5py + h5::DataSpace space(dims, dims); //{h5::DataSpace::UNLIMITED, dims[1]}); + h5::DataSetCreateProps props; + props.add(h5::Chunking(std::vector{dims[0], dims[1]})); + props.add(h5::Deflate(m_compressionLevel)); + h5::DataSet dataset = sub.createDataSet(name, space, props); + dataset.write(data); + } else { + h5::DataSpace space(dims); + h5::DataSet dataset = sub.createDataSet(name, space); + dataset.write(data); + } +} + +#else + +Storage::Storage(std::string fname, bool write) +{ + throw CanteraError("Storage::Storage", + "Saving to HDF requires HighFive installation."); +} + +Storage::~Storage() +{ +} + +void Storage::setCompressionLevel(int level) +{ + throw CanteraError("Storage::setCompressionLevel", + "Saving to HDF requires HighFive installation."); +} + +bool Storage::checkGroup(const std::string& id) +{ + throw CanteraError("Storage::checkGroup", + "Saving to HDF requires HighFive installation."); +} + +std::pair> Storage::contents(const std::string& id) const +{ + throw CanteraError("Storage::contents", + "Saving to HDF requires HighFive installation."); +} + +AnyMap Storage::readAttributes(const std::string& id, bool recursive) const +{ + throw CanteraError("Storage::readAttributes", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeAttributes(const std::string& id, const AnyMap& meta) +{ + 
throw CanteraError("Storage::writeAttributes", + "Saving to HDF requires HighFive installation."); +} + +vector_fp Storage::readVector(const std::string& id, + const std::string& name, size_t size) const +{ + throw CanteraError("Storage::readVector", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeVector(const std::string& id, + const std::string& name, const vector_fp& data) +{ + throw CanteraError("Storage::writeVector", + "Saving to HDF requires HighFive installation."); +} + +std::vector Storage::readMatrix(const std::string& id, + const std::string& name, + size_t rows, size_t cols) const +{ + throw CanteraError("Storage::readMatrix", + "Saving to HDF requires HighFive installation."); +} + +void Storage::writeMatrix(const std::string& id, + const std::string& name, const std::vector& data) +{ + throw CanteraError("Storage::writeMatrix", + "Saving to HDF requires HighFive installation."); +} + +#endif + +} diff --git a/src/base/global.cpp b/src/base/global.cpp index 1ac3a2b2ec..563326cd77 100644 --- a/src/base/global.cpp +++ b/src/base/global.cpp @@ -170,6 +170,15 @@ bool debugModeEnabled() #endif } +bool usesHDF5() +{ +#if CT_USE_HDF5 + return true; +#else + return false; +#endif +} + std::vector FactoryBase::s_vFactoryRegistry; std::string demangle(const std::type_info& type) diff --git a/src/base/stringUtils.cpp b/src/base/stringUtils.cpp index dd15e33a92..25edff942a 100644 --- a/src/base/stringUtils.cpp +++ b/src/base/stringUtils.cpp @@ -195,6 +195,13 @@ void tokenizeString(const std::string& in_val, std::vector& v) ba::split(v, val, ba::is_space(), ba::token_compress_on); } +void tokenizePath(const std::string& in_val, std::vector& v) +{ + std::string val = ba::trim_copy(in_val); + v.clear(); + ba::split(v, val, ba::is_any_of("/\\"), ba::token_compress_on); +} + size_t copyString(const std::string& source, char* dest, size_t length) { const char* c_src = source.c_str(); diff --git a/src/oneD/Boundary1D.cpp 
b/src/oneD/Boundary1D.cpp index 9d0bcfa1d8..8baa518556 100644 --- a/src/oneD/Boundary1D.cpp +++ b/src/oneD/Boundary1D.cpp @@ -3,6 +3,7 @@ // This file is part of Cantera. See License.txt in the top-level directory or // at https://cantera.org/license.txt for license and copyright information. +#include "cantera/base/SolutionArray.h" #include "cantera/oneD/Boundary1D.h" #include "cantera/oneD/OneDim.h" #include "cantera/oneD/StFlow.h" @@ -218,43 +219,39 @@ void Inlet1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap Inlet1D::serialize(const double* soln) const +shared_ptr Inlet1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "inlet"; - state["temperature"] = m_temp; - state["mass-flux"] = m_mdot; - AnyMap Y; - for (size_t k = 0; k < m_nsp; k++) { - if (m_yin[k] != 0.0) { - Y[m_flow->phase().speciesName(k)] = m_yin[k]; - } - } - state["mass-fractions"] = std::move(Y); - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "inlet"; + meta["mass-flux"] = m_mdot; + auto arr = SolutionArray::create(m_solution, 1, meta); + + // set gas state (using pressure from adjacent domain) + double pressure = m_flow->phase().pressure(); + auto phase = m_solution->thermo(); + phase->setState_TPY(m_temp, pressure, m_yin.data()); + vector_fp data(phase->stateSize()); + phase->saveState(data); + + arr->setState(0, data); + return arr; } -void Inlet1D::restore(const AnyMap& state, double* soln, int loglevel) +void Inlet1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(state, soln, loglevel); - m_mdot = state["mass-flux"].asDouble(); - m_temp = state["temperature"].asDouble(); - const auto& Y = state["mass-fractions"].as(); - ThermoPhase& thermo = m_flow->phase(); - for (size_t k = 0; k < m_nsp; k++) { - m_yin[k] = Y.getDouble(thermo.speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : Y) { - if 
(thermo.speciesIndex(item.first) == npos) { - warn_user("Inlet1D::restore", "Phase '{}' does not contain a species " - "named '{}'\n which was specified as having a mass fraction of {}.", - thermo.name(), item.first, item.second.asDouble()); - } - } + Boundary1D::setMeta(arr.meta(), loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + auto meta = arr.meta(); + m_temp = phase->temperature(); + if (meta.hasKey("mass-flux")) { + m_mdot = meta.at("mass-flux").asDouble(); + } else { + // convert data format used by Python h5py export (Cantera < 3.0) + auto aux = arr.getAuxiliary(0); + m_mdot = phase->density() * aux["velocity"]; } + phase->getMassFractions(m_yin.data()); } // ------------- Empty1D ------------- @@ -269,11 +266,11 @@ void Empty1D::eval(size_t jg, double* xg, double* rg, { } -AnyMap Empty1D::serialize(const double* soln) const +shared_ptr Empty1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "empty"; - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "empty"; + return SolutionArray::create(m_solution, 0, meta); } // -------------- Symm1D -------------- @@ -322,11 +319,11 @@ void Symm1D::eval(size_t jg, double* xg, double* rg, integer* diagg, } } -AnyMap Symm1D::serialize(const double* soln) const +shared_ptr Symm1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "symmetry"; - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "symmetry"; + return SolutionArray::create(m_solution, 0, meta); } // -------- Outlet1D -------- @@ -400,11 +397,11 @@ void Outlet1D::eval(size_t jg, double* xg, double* rg, integer* diagg, } } -AnyMap Outlet1D::serialize(const double* soln) const +shared_ptr Outlet1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "outlet"; - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "outlet"; + return 
SolutionArray::create(m_solution, 0, meta); } // -------- OutletRes1D -------- @@ -505,42 +502,32 @@ void OutletRes1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap OutletRes1D::serialize(const double* soln) const +shared_ptr OutletRes1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "outlet-reservoir"; - state["temperature"] = m_temp; - AnyMap Y; - for (size_t k = 0; k < m_nsp; k++) { - if (m_yres[k] != 0.0) { - Y[m_flow->phase().speciesName(k)] = m_yres[k]; - } - } - state["mass-fractions"] = std::move(Y); - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "outlet-reservoir"; + meta["temperature"] = m_temp; + auto arr = SolutionArray::create(m_solution, 1, meta); + + // set gas state (using pressure from adjacent domain) + double pressure = m_flow->phase().pressure(); + auto phase = m_solution->thermo(); + phase->setState_TPY(m_temp, pressure, &m_yres[0]); + vector_fp data(phase->stateSize()); + phase->saveState(data); + + arr->setState(0, data); + return arr; } -void OutletRes1D::restore(const AnyMap& state, double* soln, int loglevel) +void OutletRes1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); - const auto& Y = state["mass-fractions"].as(); - ThermoPhase& thermo = m_flow->phase(); - for (size_t k = 0; k < m_nsp; k++) { - m_yres[k] = Y.getDouble(thermo.speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : Y) { - if (thermo.speciesIndex(item.first) == npos) { - warn_user("OutletRes1D::restore", "Phase '{}' does not contain a " - "species named '{}'\nwhich was specified as having a mass " - "fraction of {}.", - thermo.name(), item.first, item.second.asDouble()); - } - } - } + Boundary1D::setMeta(arr.meta(), loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + m_temp = phase->temperature(); + auto Y = 
phase->massFractions(); + std::copy(Y, Y + m_nsp, &m_yres[0]); } // -------- Surf1D -------- @@ -575,18 +562,19 @@ void Surf1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap Surf1D::serialize(const double* soln) const +shared_ptr Surf1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "surface"; - state["temperature"] = m_temp; - return state; + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "surface"; + meta["temperature"] = m_temp; + return SolutionArray::create(m_solution, 0, meta); } -void Surf1D::restore(const AnyMap& state, double* soln, int loglevel) +void Surf1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); + Boundary1D::setMeta(arr.meta(), loglevel); + arr.setIndex(0); + m_temp = arr.thermo()->temperature(); } void Surf1D::showSolution_s(std::ostream& s, const double* x) @@ -610,7 +598,7 @@ ReactingSurf1D::ReactingSurf1D() m_type = cSurfType; } -ReactingSurf1D::ReactingSurf1D(shared_ptr solution) +ReactingSurf1D::ReactingSurf1D(shared_ptr solution, const std::string& id) { auto phase = std::dynamic_pointer_cast(solution->thermo()); if (!phase) { @@ -624,6 +612,7 @@ ReactingSurf1D::ReactingSurf1D(shared_ptr solution) solution->kinetics()->kineticsType()); } m_solution = solution; + m_id = id; m_kin = kin.get(); m_sphase = phase.get(); @@ -753,43 +742,37 @@ void ReactingSurf1D::eval(size_t jg, double* xg, double* rg, } } -AnyMap ReactingSurf1D::serialize(const double* soln) const +shared_ptr ReactingSurf1D::asArray(const double* soln) const { - AnyMap state = Boundary1D::serialize(soln); - state["type"] = "reacting-surface"; - state["temperature"] = m_temp; - state["phase"]["name"] = m_sphase->name(); - AnyValue source =m_sphase->input().getMetadata("filename"); - state["phase"]["source"] = source.empty() ? 
"" : source.asString(); - AnyMap cov; - for (size_t k = 0; k < m_nsp; k++) { - cov[m_sphase->speciesName(k)] = soln[k]; - } - state["coverages"] = std::move(cov); + AnyMap meta = Boundary1D::getMeta(); + meta["type"] = "reacting-surface"; + meta["temperature"] = m_temp; + meta["phase"]["name"] = m_sphase->name(); + AnyValue source = m_sphase->input().getMetadata("filename"); + meta["phase"]["source"] = source.empty() ? "" : source.asString(); + + // set state of surface phase + m_sphase->setState_TP(m_temp, m_sphase->pressure()); + m_sphase->setCoverages(soln); + vector_fp data(m_sphase->stateSize()); + m_sphase->saveState(data.size(), &data[0]); - return state; + auto arr = SolutionArray::create(m_solution, 1, meta); + arr->setState(0, data); + return arr; } -void ReactingSurf1D::restore(const AnyMap& state, double* soln, int loglevel) +void ReactingSurf1D::restore(SolutionArray& arr, double* soln, int loglevel) { - Boundary1D::restore(state, soln, loglevel); - m_temp = state["temperature"].asDouble(); - const auto& cov = state["coverages"].as(); - for (size_t k = 0; k < m_nsp; k++) { - soln[k] = cov.getDouble(m_sphase->speciesName(k), 0.0); - } - - // Warn about species not in the current phase - if (loglevel) { - for (auto& item : cov) { - if (m_sphase->speciesIndex(item.first) == npos) { - warn_user("OutletRes1D::restore", "Phase '{}' does not contain a " - "species named '{}'\nwhich was specified as having a coverage " - "of {}.", - m_sphase->name(), item.first, item.second.asDouble()); - } - } + Boundary1D::setMeta(arr.meta(), loglevel); + arr.setIndex(0); + auto surf = std::dynamic_pointer_cast(arr.thermo()); + if (!surf) { + throw CanteraError("ReactingSurf1D::restore", + "Restoring of coverages requires surface phase"); } + m_temp = surf->temperature(); + surf->getCoverages(soln); } void ReactingSurf1D::showSolution(const double* x) diff --git a/src/oneD/Domain1D.cpp b/src/oneD/Domain1D.cpp index e1ddcc672e..af30fdb421 100644 --- a/src/oneD/Domain1D.cpp 
+++ b/src/oneD/Domain1D.cpp @@ -9,6 +9,7 @@ #include "cantera/oneD/MultiJac.h" #include "cantera/oneD/refine.h" #include "cantera/base/AnyMap.h" +#include "cantera/base/SolutionArray.h" #include @@ -114,7 +115,7 @@ void Domain1D::needJacUpdate() } } -AnyMap Domain1D::serialize(const double* soln) const +AnyMap Domain1D::getMeta() const { auto wrap_tols = [this](const vector_fp& tols) { // If all tolerances are the same, just store the scalar value. @@ -141,7 +142,17 @@ AnyMap Domain1D::serialize(const double* soln) const return state; } -void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) +AnyMap Domain1D::serialize(const double* soln) const +{ + warn_deprecated("Domain1D::serialize", + "To be removed after Cantera 3.0; superseded by asArray."); + AnyMap out; + auto arr = asArray(soln); + arr->writeEntry(out, ""); + return out; +} + +void Domain1D::setMeta(const AnyMap& meta, int loglevel) { auto set_tols = [&](const AnyValue& tols, const string& which, vector_fp& out) { @@ -157,15 +168,15 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) if (tol.hasKey(name)) { out[i] = tol[name].asDouble(); } else if (loglevel) { - warn_user("Domain1D::restore", "No {} found for component '{}'", + warn_user("Domain1D::setMeta", "No {} found for component '{}'", which, name); } } } }; - if (state.hasKey("tolerances")) { - const auto& tols = state["tolerances"]; + if (meta.hasKey("tolerances")) { + const auto& tols = meta["tolerances"]; set_tols(tols, "transient-abstol", m_atol_ts); set_tols(tols, "transient-reltol", m_rtol_ts); set_tols(tols, "steady-abstol", m_atol_ss); @@ -173,6 +184,15 @@ void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) } } +void Domain1D::restore(const AnyMap& state, double* soln, int loglevel) +{ + warn_deprecated("Domain1D::restore", + "To be removed after Cantera 3.0; restore from SolutionArray instead."); + auto arr = SolutionArray::create(solution()); + arr->readEntry(state, ""); + 
restore(*arr, soln, loglevel); +} + void Domain1D::locate() { if (m_left) { diff --git a/src/oneD/IonFlow.cpp b/src/oneD/IonFlow.cpp index edbe88bf07..b5d9eddacc 100644 --- a/src/oneD/IonFlow.cpp +++ b/src/oneD/IonFlow.cpp @@ -58,10 +58,11 @@ IonFlow::IonFlow(ThermoPhase* ph, size_t nsp, size_t points) : m_do_electric_field.resize(m_points,false); } -IonFlow::IonFlow(shared_ptr sol, size_t nsp, size_t points) : - IonFlow(sol->thermo().get(), nsp, points) +IonFlow::IonFlow(shared_ptr sol, const std::string& id, size_t points) + : IonFlow(sol->thermo().get(), sol->thermo()->nSpecies(), points) { m_solution = sol; + m_id = id; m_kin = m_solution->kinetics().get(); m_trans_shared = m_solution->transport(); m_trans = m_trans_shared.get(); diff --git a/src/oneD/Sim1D.cpp b/src/oneD/Sim1D.cpp index b0c6f47278..90f9773210 100644 --- a/src/oneD/Sim1D.cpp +++ b/src/oneD/Sim1D.cpp @@ -12,6 +12,7 @@ #include "cantera/oneD/refine.h" #include "cantera/numerics/funcs.h" #include "cantera/base/stringUtils.h" +#include "cantera/base/SolutionArray.h" #include "cantera/numerics/Func1.h" #include #include @@ -94,49 +95,45 @@ void Sim1D::setProfile(size_t dom, size_t comp, } void Sim1D::save(const std::string& fname, const std::string& id, - const std::string& desc, int loglevel) + const std::string& desc, int loglevel, int compression) { - // Check for an existing file and load it if present - AnyMap data; - if (ifstream(fname).good()) { - data = AnyMap::fromYamlFile(fname); + size_t dot = fname.find_last_of("."); + string extension = (dot != npos) ? 
toLowerCopy(fname.substr(dot+1)) : ""; + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + for (auto dom : m_dom) { + auto arr = dom->asArray(m_x.data() + dom->loc()); + arr->writeEntry(fname, id + "/" + dom->id(), compression); + } + SolutionArray::writeHeader(fname, id, desc); + if (loglevel > 0) { + writelog("Solution saved to file '{}' as group '{}'.\n", fname, id); + } + return; } - bool preexisting = data.hasKey(id); - - // Add this simulation to the YAML - data[id] = serialize(m_x.data()); - - // Add metadata - data[id]["description"] = desc; - data[id]["generator"] = "Cantera Sim1D"; - data[id]["cantera-version"] = CANTERA_VERSION; - data[id]["git-commit"] = gitCommit(); - - // Add a timestamp indicating the current time - time_t aclock; - ::time(&aclock); // Get time in seconds - struct tm* newtime = localtime(&aclock); // Convert time to struct tm form - data[id]["date"] = stripnonprint(asctime(newtime)); - - // Force metadata fields to the top of the file - data[id]["description"].setLoc(-6, 0); - data[id]["generator"].setLoc(-5, 0); - data[id]["cantera-version"].setLoc(-4, 0); - data[id]["git-commit"].setLoc(-3, 0); - data[id]["date"].setLoc(-2, 0); + if (extension == "yaml" || extension == "yml") { + // Check for an existing file and load it if present + AnyMap data; + if (std::ifstream(fname).good()) { + data = AnyMap::fromYamlFile(fname); + } + SolutionArray::writeHeader(data, id, desc); - // If this is not replacing an existing solution, put it at the end - if (!preexisting) { - data[id].setLoc(INT_MAX, 0); - } + for (auto dom : m_dom) { + auto arr = dom->asArray(m_x.data() + dom->loc()); + arr->writeEntry(data, id + "/" + dom->id()); + } - // Write the output file and remove the now-outdated cached file - std::ofstream out(fname); - out << data.toYamlString(); - AnyMap::clearCachedFile(fname); - if (loglevel > 0) { - writelog("Solution saved to file {} as solution '{}'.\n", fname, id); + // Write the output file and remove the 
now-outdated cached file + std::ofstream out(fname); + out << data.toYamlString(); + AnyMap::clearCachedFile(fname); + if (loglevel > 0) { + writelog("Solution saved to file '{}' as entry '{}'.\n", fname, id); + } + return; } + throw CanteraError("Sim1D::save", + "Unsupported file format '{}'", extension); } void Sim1D::saveResidual(const std::string& fname, const std::string& id, @@ -151,7 +148,87 @@ void Sim1D::saveResidual(const std::string& fname, const std::string& id, std::swap(res, m_x); } -void Sim1D::restore(const std::string& fname, const std::string& id, +//! convert data format used by Python h5py export (Cantera < 3.0) +AnyMap legacyH5(shared_ptr arr, const AnyMap& header={}) +{ + auto meta = arr->meta(); + AnyMap out; + + std::map meta_pairs = { + {"type", "Domain1D_type"}, + {"name", "name"}, + {"emissivity-left", "emissivity_left"}, + {"emissivity-right", "emissivity_right"}, + }; + for (const auto& item : meta_pairs) { + if (meta.hasKey(item.second)) { + out[item.first] = meta[item.second]; + } + } + + std::map tol_pairs = { + {"transient-abstol", "transient_abstol"}, + {"steady-abstol", "steady_abstol"}, + {"transient-reltol", "transient_reltol"}, + {"steady-reltol", "steady_reltol"}, + }; + for (const auto& item : tol_pairs) { + if (meta.hasKey(item.second)) { + out["tolerances"][item.first] = meta[item.second]; + } + } + + if (meta.hasKey("phase")) { + out["phase"]["name"] = meta["phase"]["name"]; + out["phase"]["source"] = meta["phase"]["source"]; + } + + if (arr->size() <= 1) { + return out; + } + + std::map header_pairs = { + {"transport-model", "transport_model"}, + {"radiation-enabled", "radiation_enabled"}, + {"energy-enabled", "energy_enabled"}, + {"Soret-enabled", "soret_enabled"}, + {"species-enabled", "species_enabled"}, + }; + for (const auto& item : header_pairs) { + if (header.hasKey(item.second)) { + out[item.first] = header[item.second]; + } + } + + std::map refiner_pairs = { + {"ratio", "ratio"}, + {"slope", "slope"}, + {"curve", 
"curve"}, + {"prune", "prune"}, + // {"grid-min", "???"}, // missing + {"max-points", "max_grid_points"}, + }; + for (const auto& item : refiner_pairs) { + if (header.hasKey(item.second)) { + out["refine-criteria"][item.first] = header[item.second]; + } + } + + if (header.hasKey("fixed_temperature")) { + double temp = header.getDouble("fixed_temperature", -1.); + auto profile = arr->getComponent("T"); + size_t ix = 0; + while (profile[ix] <= temp && ix < arr->size()) { + ix++; + } + out["fixed-point"]["location"] = arr->getComponent("grid")[ix - 1]; + out["fixed-point"]["temperature"] = temp; + } + + return out; +} + +AnyMap Sim1D::restore(const std::string& fname, const std::string& id, int loglevel) { size_t dot = fname.find_last_of("."); @@ -160,27 +237,49 @@ void Sim1D::restore(const std::string& fname, const std::string& id, throw CanteraError("Sim1D::restore", "Restoring from XML is no longer supported."); } - AnyMap root = AnyMap::fromYamlFile(fname); - if (!root.hasKey(id)) { - throw InputFileError("Sim1D::restore", root, - "No solution with id '{}'", id); - } - const auto& state = root[id]; - for (auto dom : m_dom) { - if (!state.hasKey(dom->id())) { - throw InputFileError("Sim1D::restore", state, - "Saved state '{}' does not contain a domain named '{}'.", - id, dom->id()); + AnyMap header; + if (extension == "h5" || extension == "hdf" || extension == "hdf5") { + std::map> arrs; + header = SolutionArray::readHeader(fname, id); + + for (auto dom : m_dom) { + auto arr = SolutionArray::create(dom->solution()); + arr->readEntry(fname, id + "/" + dom->id()); + dom->resize(dom->nComponents(), arr->size()); + if (!header.hasKey("generator")) { + arr->meta() = legacyH5(arr, header); + } + arrs[dom->id()] = arr; } - dom->resize(dom->nComponents(), state[dom->id()]["points"].asInt()); - } - resize(); - m_xlast_ts.clear(); - for (auto dom : m_dom) { - dom->restore(state[dom->id()].as(), m_x.data() + dom->loc(), - loglevel); + resize(); + m_xlast_ts.clear(); + for 
(auto dom : m_dom) { + dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); + } + finalize(); + } else if (extension == "yaml" || extension == "yml") { + AnyMap root = AnyMap::fromYamlFile(fname); + std::map> arrs; + header = SolutionArray::readHeader(root, id); + + for (auto dom : m_dom) { + auto arr = SolutionArray::create(dom->solution()); + arr->readEntry(root, id + "/" + dom->id()); + dom->resize(dom->nComponents(), arr->size()); + arrs[dom->id()] = arr; + } + resize(); + m_xlast_ts.clear(); + for (auto dom : m_dom) { + dom->restore(*arrs[dom->id()], m_x.data() + dom->loc(), loglevel); + } + finalize(); + } else { + throw CanteraError("Sim1D::restore", + "Unknown file extension '{}'; supported extensions include " + "'h5'/'hdf'/'hdf5' and 'yml'/'yaml'.", extension); } - finalize(); + return header; } void Sim1D::setFlatProfile(size_t dom, size_t comp, doublereal v) diff --git a/src/oneD/StFlow.cpp b/src/oneD/StFlow.cpp index 5dc144614e..70661e7a79 100644 --- a/src/oneD/StFlow.cpp +++ b/src/oneD/StFlow.cpp @@ -3,6 +3,7 @@ // This file is part of Cantera. See License.txt in the top-level directory or // at https://cantera.org/license.txt for license and copyright information. 
+#include "cantera/base/SolutionArray.h" #include "cantera/oneD/StFlow.h" #include "cantera/oneD/refine.h" #include "cantera/transport/Transport.h" @@ -106,10 +107,11 @@ StFlow::StFlow(ThermoPhase* ph, size_t nsp, size_t points) : m_kRadiating[1] = m_thermo->speciesIndex("H2O"); } -StFlow::StFlow(std::shared_ptr sol, size_t nsp, size_t points) : - StFlow(sol->thermo().get(), nsp, points) +StFlow::StFlow(shared_ptr sol, const std::string& id, size_t points) + : StFlow(sol->thermo().get(), sol->thermo()->nSpecies(), points) { m_solution = sol; + m_id = id; m_kin = m_solution->kinetics().get(); m_trans_shared = m_solution->transport(); m_trans = m_trans_shared.get(); @@ -679,11 +681,10 @@ bool StFlow::componentActive(size_t n) const } } -AnyMap StFlow::serialize(const double* soln) const +AnyMap StFlow::getMeta() const { - AnyMap state = Domain1D::serialize(soln); + AnyMap state = Domain1D::getMeta(); state["type"] = flowType(); - state["pressure"] = m_press; state["transport-model"] = m_trans->transportModel(); state["phase"]["name"] = m_thermo->name(); @@ -692,7 +693,6 @@ AnyMap StFlow::serialize(const double* soln) const state["radiation-enabled"] = m_do_radiation; if (m_do_radiation) { - state["radiative-heat-loss"] = m_qdotRadiation; state["emissivity-left"] = m_epsilon_left; state["emissivity-right"] = m_epsilon_right; } @@ -728,33 +728,48 @@ AnyMap StFlow::serialize(const double* soln) const state["fixed-point"]["temperature"] = m_tfixed; } - state["grid"] = m_z; + return state; +} + +shared_ptr StFlow::asArray(const double* soln) const +{ + auto arr = SolutionArray::create(m_solution, nPoints(), getMeta()); + arr->setComponent("grid", m_z, true); vector_fp data(nPoints()); for (size_t i = 0; i < nComponents(); i++) { if (componentActive(i)) { for (size_t j = 0; j < nPoints(); j++) { - data[j] = soln[index(i,j)]; + data[j] = soln[index(i, j)]; } - state[componentName(i)] = data; + arr->setComponent(componentName(i), data, true); } } + arr->setComponent("D", 
m_rho); // use density rather than pressure - return state; + if (m_do_radiation) { + arr->setComponent("radiative-heat-loss", m_qdotRadiation, true); + } + + return arr; } -void StFlow::restore(const AnyMap& state, double* soln, int loglevel) +void StFlow::restore(SolutionArray& arr, double* soln, int loglevel) { - Domain1D::restore(state, soln, loglevel); - m_press = state["pressure"].asDouble(); - setupGrid(nPoints(), state["grid"].asVector(nPoints()).data()); + Domain1D::setMeta(arr.meta(), loglevel); + arr.setIndex(0); + auto phase = arr.thermo(); + m_press = phase->pressure(); + + const auto grid = arr.getComponent("grid"); + setupGrid(nPoints(), &grid[0]); for (size_t i = 0; i < nComponents(); i++) { if (!componentActive(i)) { continue; } std::string name = componentName(i); - if (state.hasKey(name)) { - const vector_fp& data = state[name].asVector(nPoints()); + if (arr.hasComponent(name)) { + const vector_fp data = arr.getComponent(name); for (size_t j = 0; j < nPoints(); j++) { soln[index(i,j)] = data[j]; } @@ -764,6 +779,12 @@ void StFlow::restore(const AnyMap& state, double* soln, int loglevel) } } + updateProperties(npos, soln + loc(), 0, m_points - 1); + setMeta(arr.meta(), loglevel); +} + +void StFlow::setMeta(const AnyMap& state, int loglevel) +{ if (state.hasKey("energy-enabled")) { const AnyValue& ee = state["energy-enabled"]; if (ee.isScalar()) { diff --git a/test/data/adiabatic_flame_legacy.yaml b/test/data/adiabatic_flame_legacy.yaml new file mode 100644 index 0000000000..dc4b3359f6 --- /dev/null +++ b/test/data/adiabatic_flame_legacy.yaml @@ -0,0 +1,78 @@ +setup: + description: initial guess + generator: Cantera Sim1D + cantera-version: 2.6.0 + git-commit: unknown + date: Thu Dec 22 22:50:32 2022 + reactants: + points: 1 + type: inlet + temperature: 300.0 + mass-flux: 1.338612362800703 + mass-fractions: + H2: 9.478316470455491e-03 + O2: 0.1367636951757011 + AR: 0.8537579883538435 + flame: + points: 9 + tolerances: + transient-abstol: 1.0e-11 + 
steady-abstol: 1.0e-09 + transient-reltol: 1.0e-04 + steady-reltol: 1.0e-04 + type: Free Flame + pressure: 1.01325e+05 + phase: + name: ohmech + source: /opt/homebrew/Caskroom/miniforge/base/envs/cantera/lib/python3.10/site-packages/cantera/data/h2o2.yaml + radiation-enabled: false + energy-enabled: true + Soret-enabled: false + species-enabled: true + refine-criteria: + ratio: 3.0 + slope: 0.06 + curve: 0.12 + prune: 0.0 + grid-min: 1.0e-10 + max-points: 1000 + fixed-point: + location: 0.0105 + temperature: 698.1678485027373 + grid: [0.0, 6.0e-03, 9.0e-03, 0.0105, 0.012, 0.015, 0.018, 0.024, 0.03] + velocity: [1.0, 1.0, 1.0, 2.205581220696009, 3.411162441392019, 5.822324882784038, + 5.822324882784038, 5.822324882784038, 5.822324882784038] + T: [300.0, 300.0, 300.0, 698.1678485027371, 1096.335697005475, + 1892.671394010949, 1892.671394010949, 1892.671394010949, + 1892.671394010949] + H2: [9.478316470455491e-03, 9.478316470455491e-03, 9.478316470455491e-03, + 7.109842855845580e-03, 4.741369241235666e-03, 4.422012015843635e-06, + 4.422012015843405e-06, 4.422012015843405e-06, 4.422012015843405e-06] + H: [0.0, 0.0, 0.0, 4.691903645074276e-08, 9.383807290148556e-08, + 1.876761458029711e-07, 1.876761458029711e-07, 1.876761458029711e-07, + 1.876761458029711e-07] + O: [0.0, 0.0, 0.0, 8.204442509821813e-06, 1.640888501964364e-05, + 3.281777003928727e-05, 3.281777003928727e-05, 3.281777003928727e-05, + 3.281777003928727e-05] + O2: [0.1367636951757011, 0.1367636951757011, 0.1367636951757011, + 0.1178952114224941, 0.09902672766928713, 0.06128976016287324, + 0.06128976016287325, 0.06128976016287325, 0.06128976016287325] + OH: [0.0, 0.0, 0.0, 1.365789095524984e-04, 2.731578191049970e-04, + 5.463156382099939e-04, 5.463156382099939e-04, 5.463156382099939e-04, + 5.463156382099939e-04] + H2O: [0.0, 0.0, 0.0, 0.02109188182623040, 0.04218376365246081, + 0.08436752730492161, 0.08436752730492161, 0.08436752730492161, + 0.08436752730492161] + HO2: [0.0, 0.0, 0.0, 2.318933184184578e-07, 
4.637866368369159e-07, + 9.275732736738316e-07, 9.275732736738315e-07, 9.275732736738315e-07, + 9.275732736738315e-07] + H2O2: [0.0, 0.0, 0.0, 1.337716924587937e-08, 2.675433849175876e-08, + 5.350867698351750e-08, 5.350867698351750e-08, 5.350867698351750e-08, + 5.350867698351750e-08] + AR: [0.8537579883538435, 0.8537579883538435, 0.8537579883538435, + 0.8537579883538435, 0.8537579883538436, 0.8537579883538436, + 0.8537579883538436, 0.8537579883538436, 0.8537579883538436] + N2: [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + products: + points: 1 + type: outlet diff --git a/test/python/test_composite.py b/test/python/test_composite.py index eeaba69959..4c980d5018 100644 --- a/test/python/test_composite.py +++ b/test/python/test_composite.py @@ -5,7 +5,14 @@ import pickle import cantera as ct -from cantera.composite import _h5py, _pandas + +try: + h5py = ct.composite._import_h5py() + have_h5py = True +except ImportError: + have_h5py = False + +from cantera.composite import _pandas from . import utilities @@ -255,7 +262,7 @@ def test_append_no_norm_data(self): self.assertEqual(states[0].P, gas.P) self.assertArrayNear(states[0].Y, gas.Y) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_import_no_norm_data(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -336,7 +343,7 @@ def test_to_pandas(self): with self.assertRaisesRegex(NotImplementedError, 'not supported'): states.to_pandas() - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -365,11 +372,11 @@ def test_write_hdf(self): gas = ct.Solution('gri30.yaml', transport_model=None) 
ct.SolutionArray(gas, 10).write_hdf(outfile) - with _h5py.File(outfile, 'a') as hdf: + with h5py.File(outfile, 'a') as hdf: hdf.create_group('spam') c = ct.SolutionArray(self.gas) - with self.assertRaisesRegex(IOError, 'does not contain valid data'): + with self.assertRaisesRegex(ValueError, 'requires a non-empty data dictionary'): c.read_hdf(outfile, group='spam') with self.assertRaisesRegex(IOError, 'does not contain group'): c.read_hdf(outfile, group='eggs') @@ -382,7 +389,7 @@ def test_write_hdf(self): c.read_hdf(outfile, group='foo/bar/baz') self.assertArrayNear(states.T, c.T) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf_str_column(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -396,7 +403,7 @@ def test_write_hdf_str_column(self): b.read_hdf(outfile) self.assertEqual(list(states.spam), list(b.spam)) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_write_hdf_multidim_column(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument @@ -571,7 +578,7 @@ def check(a, b): b.restore_data(data) check(a, b) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") + @utilities.unittest.skipIf(not have_h5py, "h5py is not installed") def test_import_no_norm_water(self): outfile = self.test_work_path / "solutionarray.h5" # In Python >= 3.8, this can be replaced by the missing_ok argument diff --git a/test/python/test_onedim.py b/test/python/test_onedim.py index e65488761e..07e64ae6ae 100644 --- a/test/python/test_onedim.py +++ b/test/python/test_onedim.py @@ -4,8 +4,6 @@ from .utilities import allow_deprecated import pytest -from cantera.composite import 
_h5py - class TestOnedim(utilities.CanteraTest): def test_instantiate(self): @@ -517,7 +515,28 @@ def test_prune(self): # TODO: check that the solution is actually correct (that is, that the # residual satisfies the error tolerances) on the new grid. - def test_save_restore_yaml(self): + def test_restore_legacy_yaml(self): + reactants = 'H2:1.1, O2:1, AR:5' + p = 5 * ct.one_atm + Tin = 600 + self.create_sim(p, Tin, reactants) + meta = self.sim.restore("adiabatic_flame_legacy.yaml", "setup") + assert meta["generator"] == "Cantera Sim1D" + assert meta["cantera-version"] == "2.6.0" + assert self.sim.inlet.T == 300 + assert self.sim.P == pytest.approx(ct.one_atm) + assert len(self.sim.grid) == 9 + + def test_save_restore_yaml_array(self): + # save and restore using YAML format + self.run_save_restore("array") + + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") + def test_save_restore_hdf_array(self): + # save and restore using HDF format + self.run_save_restore("hdf") + + def run_save_restore(self, mode): reactants = "H2:1.1, O2:1, AR:5" p = 2 * ct.one_atm Tin = 400 @@ -528,17 +547,17 @@ def test_save_restore_yaml(self): self.sim.flame.set_steady_tolerances(T=(T_rtol, T_atol)) self.solve_fixed_T() - filename = self.test_work_path / "onedim-fixed-T.yaml" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + if mode == "hdf": + filename = self.test_work_path / f"onedim-fixed-T.h5" + else: + filename = self.test_work_path / f"onedim-fixed-T-{mode}.yaml" + filename.unlink(missing_ok=True) Y1 = self.sim.Y u1 = self.sim.velocity V1 = self.sim.spread_rate P1 = self.sim.P T1 = self.sim.T - self.sim.save(filename, "test", loglevel=0) # Save a second solution to the same file @@ -590,6 +609,8 @@ def test_save_restore_yaml(self): self.assertTrue(self.sim.radiation_enabled) self.assertEqual(self.sim.boundary_emissivities, (0.3, 0.8)) + self.sim.solve(loglevel=0) + def 
test_array_properties(self): self.create_sim(ct.one_atm, 300, 'H2:1.1, O2:1, AR:5') grid_shape = self.sim.grid.shape @@ -702,27 +723,48 @@ def test_write_csv(self): k = self.gas.species_index('H2') self.assertArrayNear(data.X[:, k], self.sim.X[k, :]) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") - def test_write_hdf(self): - filename = self.test_work_path / "onedim-write_hdf.h5" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + @pytest.mark.usefixtures("allow_deprecated") + @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") + def test_write_hdf_legacy(self): + # save and restore legacy h5py format + self.run_freeflame_write_hdf("legacy") + + @pytest.mark.usefixtures("allow_deprecated") + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") + def test_write_hdf_transition(self): + # save legacy h5py format / restore with HighFive + self.run_freeflame_write_hdf("transition") + + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") + def test_write_hdf_native(self): + # save and restore with updated format (HighFive only) + self.run_freeflame_write_hdf("native") + + def run_freeflame_write_hdf(self, mode): + filename = self.test_work_path / f"onedim-write_hdf_{mode}.h5" + filename.unlink(missing_ok=True) self.run_mix(phi=1.1, T=350, width=2.0, p=2.0, refine=False) desc = 'mixture-averaged simulation' - self.sim.write_hdf(filename, description=desc) + if mode == "native": + self.sim.save(filename, "test", description=desc, loglevel=0) + else: + self.sim.write_hdf(filename, group="test", description=desc) f = ct.FreeFlame(self.gas) - meta = f.read_hdf(filename, normalize=False) + if mode == "legacy": + meta = f.read_hdf(filename, group="test", normalize=False) + self.assertEqual(meta['description'], desc) + self.assertEqual(meta['cantera_version'], 
ct.__version__) + self.assertEqual(meta['git_commit'], ct.__git_commit__) + else: + f.restore(filename, "test", loglevel=0) + self.assertArrayNear(f.grid, self.sim.grid) self.assertArrayNear(f.T, self.sim.T) - self.assertEqual(meta['description'], desc) k = self.gas.species_index('H2') self.assertArrayNear(f.X[k, :], self.sim.X[k, :]) self.assertArrayNear(f.inlet.X, self.sim.inlet.X) - self.assertEqual(meta['cantera_version'], ct.__version__) - self.assertEqual(meta['git_commit'], ct.__git_commit__) settings = self.sim.settings for k, v in f.settings.items(): @@ -734,6 +776,8 @@ def test_write_hdf(self): self.assertIn(k, settings) self.assertEqual(settings[k], v) + f.solve(loglevel=0) + def test_refine_criteria_boundscheck(self): self.create_sim(ct.one_atm, 300.0, 'H2:1.1, O2:1, AR:5') good = [3.0, 0.1, 0.2, 0.05] @@ -1267,26 +1311,51 @@ def test_reacting_surface_case2(self): def test_reacting_surface_case3(self): self.run_reacting_surface(xch4=0.2, tsurf=800.0, mdot=0.1, width=0.2) - @utilities.unittest.skipIf(isinstance(_h5py, ImportError), "h5py is not installed") - def test_write_hdf(self): - filename = self.test_work_path / "impingingjet-write_hdf.h5" - # In Python >= 3.8, this can be replaced by the missing_ok argument - if filename.is_file(): - filename.unlink() + @pytest.mark.usefixtures("allow_deprecated") + @utilities.unittest.skipIf("h5py" not in ct.hdf_support(), "h5py not installed") + def test_write_hdf_legacy(self): + # save and restore legacy h5py format + self.run_impingingjet_write("legacy") + + @pytest.mark.usefixtures("allow_deprecated") + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") + def test_write_hdf_transition(self): + # save legacy h5py format and restore using HighFive + self.run_impingingjet_write("transition") + + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") + def test_write_hdf_native(self): + # save and restore updated HDF format + 
self.run_impingingjet_write("native") + + def test_write_yaml_native(self): + self.run_impingingjet_write("yaml") + + def run_impingingjet_write(self, mode): + if mode == "yaml": + filename = self.test_work_path / f"impingingjet-write_yaml.yaml" + else: + filename = self.test_work_path / f"impingingjet-write_hdf_{mode}.h5" + filename.unlink(missing_ok=True) self.run_reacting_surface(xch4=0.095, tsurf=900.0, mdot=0.06, width=0.1) - self.sim.write_hdf(filename) + if mode in {"native", "yaml"}: + self.sim.save(filename, "test", loglevel=0) + else: + self.sim.write_hdf(filename, group="test") tinlet = 300.0 # inlet temperature comp = {'CH4': .1, 'O2':0.21, 'N2':0.79} jet = self.create_reacting_surface(comp, 700.0, 500., width=0.2) - jet.read_hdf(filename) + if mode == "legacy": + jet.read_hdf(filename, group="test") + else: + jet.restore(filename, "test", loglevel=0) self.assertArrayNear(jet.grid, self.sim.grid) self.assertArrayNear(jet.T, self.sim.T) k = self.sim.gas.species_index('H2') self.assertArrayNear(jet.X[k, :], self.sim.X[k, :]) - self.assertArrayNear(jet.surface.surface.X, self.sim.surface.surface.X) settings = self.sim.settings for k, v in jet.settings.items(): @@ -1294,7 +1363,19 @@ def test_write_hdf(self): if k != 'fixed_temperature': self.assertEqual(settings[k], v) - def test_save_restore(self): + if mode == "legacy": + # legacy HDF restore does not set state + return + self.assertArrayNear(jet.surface.surface.X, self.sim.surface.surface.X) + for i in range(self.sim.surface.n_components): + self.assertNear( + self.sim.value("surface", i, 0), + jet.value("surface", i, 0) + ) + + jet.solve(loglevel=0) + + def test_save_restore_yaml_array(self): comp = {'CH4': 0.095, 'O2': 0.21, 'N2': 0.79} self.sim = self.create_reacting_surface(comp, tsurf=900, tinlet=300, width=0.1) @@ -1305,21 +1386,25 @@ def test_save_restore(self): self.sim.solve(loglevel=0, auto=False) - filename = self.test_work_path / "impingingjet1.yaml" - self.sim.save(filename) + filename = 
self.test_work_path / f"impingingjet.yaml" + filename.unlink(missing_ok=True) + self.sim.save(filename, "test", loglevel=0) self.surf_phase.TPX = 300, ct.one_atm, "PT(S):1" sim2 = ct.ImpingingJet(gas=self.gas, width=0.12, surface=self.surf_phase) - sim2.restore(filename) + sim2.restore(filename, "test", loglevel=0) self.assertArrayNear(self.sim.grid, sim2.grid) self.assertArrayNear(self.sim.Y, sim2.Y) + self.assertArrayNear(self.sim.surface.surface.X, sim2.surface.surface.X) for i in range(self.sim.surface.n_components): self.assertNear( self.sim.value("surface", i, 0), sim2.value("surface", i, 0) ) + sim2.solve(loglevel=0) + class TestTwinFlame(utilities.CanteraTest): def solve(self, phi, T, width, P): @@ -1349,6 +1434,45 @@ def test_restart(self): self.assertNear(mdot[0], sim.reactants.mdot, 1e-4) self.assertNear(sim.T[0], sim.reactants.T, 1e-4) + def test_save_restore_yaml(self): + # save and restore using YAML format + self.run_save_restore("yaml") + + @utilities.unittest.skipIf("native" not in ct.hdf_support(), "HighFive not installed") + def test_save_restore_hdf(self): + # save and restore using HDF format + self.run_save_restore("hdf") + + def run_save_restore(self, mode): + filename = self.test_work_path / f"twinflame.{mode}" + filename.unlink(missing_ok=True) + + sim = self.solve(phi=0.4, T=300, width=0.05, P=0.1) + sim.save(filename, loglevel=0, compression=7) + + gas = ct.Solution("h2o2.yaml") + sim2 = ct.CounterflowTwinPremixedFlame(gas=gas) + sim2.restore(filename) + + self.assertArrayNear(sim.grid, sim2.grid) + self.assertArrayNear(sim.Y, sim2.Y) + + sim2.solve(loglevel=0) + + @utilities.unittest.skipIf(ct.hdf_support() != {"h5py", "native"}, "h5py and/or HighFive not installed") + def test_backwards_compatibility(self): + filename = self.test_work_path / f"twinflame.h5" + filename.unlink(missing_ok=True) + + sim = self.solve(phi=0.4, T=300, width=0.05, P=0.1) + sim.save(filename, loglevel=0, compression=7) + + # load parts using h5py + for sub, 
points in {"flame": len(sim.grid), "reactants": 1}.items(): + arr = ct.SolutionArray(ct.Solution("h2o2.yaml")) + arr.read_hdf(filename, "solution", sub) + assert arr.size == points + class TestIonFreeFlame(utilities.CanteraTest): @utilities.slow_test diff --git a/test/python/test_reactor.py b/test/python/test_reactor.py index 25feeb899c..1df66fc61a 100644 --- a/test/python/test_reactor.py +++ b/test/python/test_reactor.py @@ -258,7 +258,7 @@ def integrate(atol, rtol): return nSteps n_baseline = integrate(1e-10, 1e-20) - n_rtol = integrate(5e-7, 1e-20) + n_rtol = integrate(1e-7, 1e-20) n_atol = integrate(1e-10, 1e-5) assert n_baseline > n_rtol assert n_baseline > n_atol diff --git a/test_problems/cxx_samples/cxx_flamespeed_blessed.txt b/test_problems/cxx_samples/cxx_flamespeed_blessed.txt index f65184bb7d..983b3af42b 100644 --- a/test_problems/cxx_samples/cxx_flamespeed_blessed.txt +++ b/test_problems/cxx_samples/cxx_flamespeed_blessed.txt @@ -1,154 +1,154 @@ phi = 0.9, Tad = 2133.7925650475245 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 0 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> inlet <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< - Mass Flux: 0.3381 kg/m^2/s - Temperature: 300 K - Mass Fractions: - O2 0.2213 - CH4 0.04993 - N2 0.7288 + Mass Flux: 0.3381 kg/m^2/s + Temperature: 300 K + Mass Fractions: + O2 0.2213 + CH4 0.04993 + N2 0.7288 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 1 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> flow <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< Pressure: 1.013e+05 Pa ------------------------------------------------------------------------------- - z velocity spread_rate T lambda eField + z velocity spread_rate T lambda eField ------------------------------------------------------------------------------- - 0 0.3 0 300 0 0 - 0.02 0.3 0 300 0 0 - 0.04 0.7598 0 758.4 0 0 - 0.06 1.679 0 1675 0 0 - 0.08 2.139 0 2134 0 0 - 0.1 2.139 0 2134 0 0 + 0 0.3 0 300 0 0 + 0.02 0.3 0 300 0 0 + 0.04 0.7598 0 758.4 0 0 + 0.06 
1.679 0 1675 0 0 + 0.08 2.139 0 2134 0 0 + 0.1 2.139 0 2134 0 0 ------------------------------------------------------------------------------- - z H2 H O O2 OH + z H2 H O O2 OH ------------------------------------------------------------------------------- - 0 0 0 0 0.2213 0 - 0.02 0 0 0 0.2213 0 - 0.04 1.696e-05 1.065e-06 3.44e-05 0.1713 0.0004119 - 0.06 5.087e-05 3.194e-06 0.0001032 0.07133 0.001236 - 0.08 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 - 0.1 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 + 0 0 0 0 0.2213 0 + 0.02 0 0 0 0.2213 0 + 0.04 1.696e-05 1.065e-06 3.44e-05 0.1713 0.0004119 + 0.06 5.087e-05 3.194e-06 0.0001032 0.07133 0.001236 + 0.08 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 + 0.1 6.782e-05 4.259e-06 0.0001376 0.02135 0.001648 ------------------------------------------------------------------------------- - z H2O HO2 H2O2 C CH + z H2O HO2 H2O2 C CH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.02765 2.96e-07 2.011e-08 4.454e-20 4.963e-21 - 0.06 0.08296 8.88e-07 6.033e-08 1.336e-19 1.489e-20 - 0.08 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 - 0.1 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.02765 2.96e-07 2.011e-08 4.454e-20 4.963e-21 + 0.06 0.08296 8.88e-07 6.033e-08 1.336e-19 1.489e-20 + 0.08 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 + 0.1 0.1106 1.184e-06 8.044e-08 1.782e-19 1.985e-20 ------------------------------------------------------------------------------- - z CH2 CH2(S) CH3 CH4 CO + z CH2 CH2(S) CH3 CH4 CO ------------------------------------------------------------------------------- - 0 0 0 0 0.04993 0 - 0.02 0 0 0 0.04993 0 - 0.04 1.299e-20 7.228e-22 7.945e-20 0.03744 0.000587 - 0.06 3.897e-20 2.168e-21 2.384e-19 0.01248 0.001761 - 0.08 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 - 0.1 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 + 0 0 0 0 0.04993 0 + 0.02 0 0 0 0.04993 0 + 0.04 1.299e-20 
7.228e-22 7.945e-20 0.03744 0.000587 + 0.06 3.897e-20 2.168e-21 2.384e-19 0.01248 0.001761 + 0.08 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 + 0.1 5.196e-20 2.891e-21 3.178e-19 1.401e-19 0.002348 ------------------------------------------------------------------------------- - z CO2 HCO CH2O CH2OH CH3O + z CO2 HCO CH2O CH2OH CH3O ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.03332 1.924e-11 2.395e-13 3.147e-19 4.683e-21 - 0.06 0.09995 5.772e-11 7.185e-13 9.441e-19 1.405e-20 - 0.08 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 - 0.1 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.03332 1.924e-11 2.395e-13 3.147e-19 4.683e-21 + 0.06 0.09995 5.772e-11 7.185e-13 9.441e-19 1.405e-20 + 0.08 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 + 0.1 0.1333 7.696e-11 9.58e-13 1.259e-18 1.873e-20 ------------------------------------------------------------------------------- - z CH3OH C2H C2H2 C2H3 C2H4 + z CH3OH C2H C2H2 C2H3 C2H4 ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 2.19e-20 1.096e-27 3.177e-25 1.094e-30 1.075e-30 - 0.06 6.569e-20 3.287e-27 9.531e-25 3.283e-30 3.224e-30 - 0.08 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 - 0.1 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 2.19e-20 1.096e-27 3.177e-25 1.094e-30 1.075e-30 + 0.06 6.569e-20 3.287e-27 9.531e-25 3.283e-30 3.224e-30 + 0.08 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 + 0.1 8.758e-20 4.383e-27 1.271e-24 4.377e-30 4.298e-30 ------------------------------------------------------------------------------- - z C2H5 C2H6 HCCO CH2CO HCCOH + z C2H5 C2H6 HCCO CH2CO HCCOH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 5.292e-36 3.24e-37 1.308e-22 1.521e-22 1.095e-25 - 0.06 1.588e-35 9.719e-37 3.923e-22 4.563e-22 
3.285e-25 - 0.08 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 - 0.1 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 5.292e-36 3.24e-37 1.308e-22 1.521e-22 1.095e-25 + 0.06 1.588e-35 9.719e-37 3.923e-22 4.563e-22 3.285e-25 + 0.08 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 + 0.1 2.117e-35 1.296e-36 5.23e-22 6.084e-22 4.381e-25 ------------------------------------------------------------------------------- - z N NH NH2 NH3 NNH + z N NH NH2 NH3 NNH ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 5.944e-10 7.145e-11 2.291e-11 6.197e-11 5.73e-11 - 0.06 1.783e-09 2.144e-10 6.873e-11 1.859e-10 1.719e-10 - 0.08 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 - 0.1 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 5.944e-10 7.145e-11 2.291e-11 6.197e-11 5.73e-11 + 0.06 1.783e-09 2.144e-10 6.873e-11 1.859e-10 1.719e-10 + 0.08 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 + 0.1 2.378e-09 2.858e-10 9.164e-11 2.479e-10 2.292e-10 ------------------------------------------------------------------------------- - z NO NO2 N2O HNO CN + z NO NO2 N2O HNO CN ------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 0.000833 5.323e-07 6.55e-08 7.525e-09 5.744e-16 - 0.06 0.002499 1.597e-06 1.965e-07 2.257e-08 1.723e-15 - 0.08 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 - 0.1 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 0.000833 5.323e-07 6.55e-08 7.525e-09 5.744e-16 + 0.06 0.002499 1.597e-06 1.965e-07 2.257e-08 1.723e-15 + 0.08 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 + 0.1 0.003332 2.129e-06 2.62e-07 3.01e-08 2.297e-15 ------------------------------------------------------------------------------- - z HCN H2CN HCNN HCNO HOCN + z HCN H2CN HCNN HCNO HOCN 
------------------------------------------------------------------------------- - 0 0 0 0 0 0 - 0.02 0 0 0 0 0 - 0.04 1.756e-13 2.02e-20 8.356e-24 2.661e-18 4.599e-14 - 0.06 5.268e-13 6.061e-20 2.507e-23 7.983e-18 1.38e-13 - 0.08 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 - 0.1 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 + 0 0 0 0 0 0 + 0.02 0 0 0 0 0 + 0.04 1.756e-13 2.02e-20 8.356e-24 2.661e-18 4.599e-14 + 0.06 5.268e-13 6.061e-20 2.507e-23 7.983e-18 1.38e-13 + 0.08 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 + 0.1 7.024e-13 8.082e-20 3.342e-23 1.064e-17 1.84e-13 ------------------------------------------------------------------------------- - z HNCO NCO N2 AR C3H7 + z HNCO NCO N2 AR C3H7 ------------------------------------------------------------------------------- - 0 0 0 0.7288 0 0 - 0.02 0 0 0.7288 0 0 - 0.04 1.988e-11 8.476e-13 0.7284 0 9.35e-53 - 0.06 5.963e-11 2.543e-12 0.7276 0 2.805e-52 - 0.08 7.95e-11 3.391e-12 0.7272 0 3.74e-52 - 0.1 7.95e-11 3.391e-12 0.7272 0 3.74e-52 + 0 0 0 0.7288 0 0 + 0.02 0 0 0.7288 0 0 + 0.04 1.988e-11 8.476e-13 0.7284 0 9.35e-53 + 0.06 5.963e-11 2.543e-12 0.7276 0 2.805e-52 + 0.08 7.95e-11 3.391e-12 0.7272 0 3.74e-52 + 0.1 7.95e-11 3.391e-12 0.7272 0 3.74e-52 ------------------------------------------------------------------------------- - z C3H8 CH2CHO CH3CHO + z C3H8 CH2CHO CH3CHO ------------------------------------------------------------------------------- - 0 0 0 0 - 0.02 0 0 0 - 0.04 5.365e-54 2.809e-28 4.525e-29 - 0.06 1.61e-53 8.426e-28 1.358e-28 - 0.08 2.146e-53 1.123e-27 1.81e-28 - 0.1 2.146e-53 1.123e-27 1.81e-28 + 0 0 0 0 + 0.02 0 0 0 + 0.04 5.365e-54 2.809e-28 4.525e-29 + 0.06 1.61e-53 8.426e-28 1.358e-28 + 0.08 2.146e-53 1.123e-27 1.81e-28 + 0.1 2.146e-53 1.123e-27 1.81e-28 ->>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> domain 2 <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> outlet <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< 
------------------------------------------------------------------------------- - z + z ------------------------------------------------------------------------------- - 0 + 0 Flame speed with mixture-averaged transport: 0.3158995218368368 m/s Flame speed with multicomponent transport: 0.318579363934684 m/s Flame speed with multicomponent transport + Soret: 0.3183709757821478 m/s -z (m) T (K) U (m/s) Y(CO) +z (m) T (K) U (m/s) Y(CO) 0.000000 300.000 0.318 0.00000 0.020000 300.070 0.318 0.00000 0.040000 319.350 0.339 0.00074