diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..fbf8ea1989 --- /dev/null +++ b/LICENSE @@ -0,0 +1,39 @@ +Copyright (c) 2013-2020, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047; +Land Ice: LA-CC-13-117) and the University Corporation for Atmospheric Research (UCAR). + +All rights reserved. + +LANS is the operator of the Los Alamos National Laboratory under Contract No. +DE-AC52-06NA25396 with the U.S. Department of Energy. UCAR manages the National +Center for Atmospheric Research under Cooperative Agreement ATM-0753581 with the +National Science Foundation. The U.S. Government has rights to use, reproduce, +and distribute this software. NO WARRANTY, EXPRESS OR IMPLIED IS OFFERED BY +LANS, UCAR OR THE GOVERNMENT AND NONE OF THEM ASSUME ANY LIABILITY FOR THE USE +OF THIS SOFTWARE. If software is modified to produce derivative works, such +modified software should be clearly marked, so as not to confuse it with the +version available from LANS and UCAR. + +Additionally, redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1) Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2) Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3) None of the names of LANS, UCAR or the names of its contributors, if any, may +be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/MPAS-Model/landice/develop b/MPAS-Model/landice/develop index 766804e5d9..454e0fc8bf 160000 --- a/MPAS-Model/landice/develop +++ b/MPAS-Model/landice/develop @@ -1 +1 @@ -Subproject commit 766804e5d90348ca5a830281e8e72a0b919ff452 +Subproject commit 454e0fc8bf384bee1c1560d6c3eaa5fae43bfdda diff --git a/MPAS-Model/ocean/develop b/MPAS-Model/ocean/develop index b3d870d755..29a819a00d 160000 --- a/MPAS-Model/ocean/develop +++ b/MPAS-Model/ocean/develop @@ -1 +1 @@ -Subproject commit b3d870d755f9c65d0ec2950d9b713fcdaa96e64b +Subproject commit 29a819a00d008fec9eb3e359c4a9d866266e5b87 diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 4c8cc3a8f6..734587b34b 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -14,13 +14,20 @@ pr: jobs: - job: - displayName: docs + displayName: linux pool: vmImage: 'ubuntu-16.04' strategy: matrix: - Python38: + nompi: + python.version: '3.8' + mpi: 'nompi' + openmpi: python.version: '3.8' + mpi: 'openmpi' + mpich: + python.version: '3.8' + mpi: 'mpich' steps: - bash: echo "##vso[task.prependpath]$CONDA/bin" @@ -31,18 +38,41 @@ jobs: eval "$(conda shell.bash hook)" conda config --add channels conda-forge conda config --set channel_priority strict - displayName: Configure conda + conda create --yes --quiet --name build python=$PYTHON_VERSION conda conda-build + displayName: Create Anaconda build environment + + - bash: | + eval "$(conda shell.bash hook)" + conda activate build + conda build -m 
ci/mpi_$(mpi).yaml recipe + displayName: Build COMPASS metapackage + + - bash: | + set -e + eval "$(conda shell.bash hook)" + conda activate build + conda create --yes --quiet --name compass -c ${CONDA_PREFIX}/conda-bld/ \ + python=$PYTHON_VERSION compass sphinx mock sphinx_rtd_theme m2r + displayName: Create compass conda environment - bash: | set -e eval "$(conda shell.bash hook)" - conda create -y -n docs python=$PYTHON_VERSION sphinx mock sphinx_rtd_theme m2r - displayName: Create docs environment + conda activate compass + + compass list + compass list --machines + compass list --suites + compass list --help + compass setup --help + compass suite --help + compass clean --help + displayName: Test compass - bash: | set -e eval "$(conda shell.bash hook)" - conda activate docs + conda activate compass echo "source branch: $(Build.SourceBranch)" echo "repository: $(Build.Repository.Name)" @@ -122,60 +152,9 @@ jobs: git push -fq origin $PUBLICATION_BRANCH fi popd || exit 1 + condition: eq(variables['mpi'], 'mpich') displayName: build and deploy docs -- job: - displayName: linux - pool: - vmImage: 'ubuntu-16.04' - strategy: - matrix: - nompi: - python.version: '3.8' - mpi: 'nompi' - openmpi: - python.version: '3.8' - mpi: 'openmpi' - mpich: - python.version: '3.8' - mpi: 'mpich' - - steps: - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda config --add channels conda-forge - conda config --set channel_priority strict - conda create --yes --quiet --name build python=$PYTHON_VERSION conda conda-build - displayName: Create Anaconda build environment - - - bash: | - eval "$(conda shell.bash hook)" - conda activate build - conda build -m ci/mpi_$(mpi).yaml recipe - displayName: Build COMPASS metapackage - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate build - conda create --yes --quiet --name compass -c ${CONDA_PREFIX}/conda-bld/ \ - 
python=$PYTHON_VERSION compass - displayName: Create compass conda environment - - - bash: | - set -e - eval "$(conda shell.bash hook)" - conda activate compass - - ./list_testcases.py -h - ./setup_testcase.py -h - ./clean_testcase.py -h - ./manage_regression_suite.py -h - displayName: Test compass - job: @@ -225,9 +204,12 @@ jobs: eval "$(conda shell.bash hook)" conda activate compass - ./list_testcases.py -h - ./setup_testcase.py -h - ./clean_testcase.py -h - ./manage_regression_suite.py -h + compass list + compass list --machines + compass list --suites + compass list --help + compass setup --help + compass suite --help + compass clean --help displayName: Test compass diff --git a/compass/__init__.py b/compass/__init__.py new file mode 100644 index 0000000000..91163e8e7b --- /dev/null +++ b/compass/__init__.py @@ -0,0 +1,7 @@ +from compass.mpas_core import MpasCore +from compass.testgroup import TestGroup +from compass.testcase import TestCase +from compass.step import Step + +__version_info__ = (1, 0, 0) +__version__ = '.'.join(str(vi) for vi in __version_info__) diff --git a/compass/__main__.py b/compass/__main__.py new file mode 100644 index 0000000000..f7dbad8d26 --- /dev/null +++ b/compass/__main__.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +import sys +import argparse + +import compass +from compass import list, setup, clean, suite + + +def main(): + """ + Entry point for the main script ``compass`` + """ + + parser = argparse.ArgumentParser( + description="Perform compass operations", + usage=''' +compass [] + +The available compass commands are: + list List the available test cases + setup Set up a test case + clean Clean up a test case + suite Manage a regression test suite + + To get help on an individual command, run: + + compass --help + ''') + + parser.add_argument('command', help='command to run') + parser.add_argument('-v', '--version', + action='version', + version='compass {}'.format(compass.__version__), + help="Show version number and exit") 
+ if len(sys.argv) == 1: + parser.print_help() + sys.exit(0) + + args = parser.parse_args(sys.argv[1:2]) + + commands = {'list': list.main, + 'setup': setup.main, + 'clean': clean.main, + 'suite': suite.main} + if args.command not in commands: + print('Unrecognized command {}'.format(args.command)) + parser.print_help() + exit(1) + + # call the function associated with the requested command + commands[args.command]() + + +if __name__ == "__main__": + main() diff --git a/compass/clean.py b/compass/clean.py new file mode 100644 index 0000000000..77f8ecdb87 --- /dev/null +++ b/compass/clean.py @@ -0,0 +1,90 @@ +import argparse +import sys +import os +import shutil + +from compass.mpas_cores import get_mpas_cores +from compass import provenance + + +def clean_cases(tests=None, numbers=None, work_dir=None): + """ + Set up one or more test cases + + Parameters + ---------- + tests : list of str, optional + Relative paths for a test cases to set up + + numbers : list of int, optional + Case numbers to setup, as listed from ``compass list`` + + work_dir : str, optional + A directory that will serve as the base for creating case directories + """ + + if tests is None and numbers is None: + raise ValueError('At least one of tests or numbers is needed.') + + if work_dir is None: + work_dir = os.getcwd() + + mpas_cores = get_mpas_cores() + all_test_cases = dict() + for mpas_core in mpas_cores: + for test_group in mpas_core.test_groups.values(): + for test_case in test_group.test_cases.values(): + all_test_cases[test_case.path] = test_case + + test_cases = dict() + if numbers is not None: + keys = list(all_test_cases) + for number in numbers: + if number >= len(keys): + raise ValueError('test number {} is out of range. 
There are ' + 'only {} tests.'.format(number, len(keys))) + path = keys[number] + test_cases[path] = all_test_cases[path] + + if tests is not None: + for path in tests: + if path not in all_test_cases: + raise ValueError('Test case with path {} is not in ' + 'the list of test cases'.format(path)) + test_cases[path] = all_test_cases[path] + + provenance.write(work_dir, test_cases) + + print('Cleaning test cases:') + for path in test_cases.keys(): + print(' {}'.format(path)) + + test_case_dir = os.path.join(work_dir, path) + try: + shutil.rmtree(test_case_dir) + except OSError: + pass + + +def main(): + parser = argparse.ArgumentParser( + description='Clean up one or more test cases', prog='compass clean') + + parser.add_argument("-t", "--test", dest="test", + help="Relative path for a test case to set up", + metavar="PATH") + parser.add_argument("-n", "--case_number", nargs='+', dest="case_num", + type=int, + help="Case number(s) to setup, as listed from " + "'compass list'. Can be a space-separated" + "list of case numbers.", metavar="NUM") + parser.add_argument("-w", "--work_dir", dest="work_dir", + help="If set, case directories are created in " + "work_dir rather than the current directory.", + metavar="PATH") + args = parser.parse_args(sys.argv[2:]) + if args.test is None: + tests = None + else: + tests = [args.test] + clean_cases(tests=tests, numbers=args.case_num, work_dir=args.work_dir) diff --git a/compass/config.py b/compass/config.py new file mode 100644 index 0000000000..29e05ab9e1 --- /dev/null +++ b/compass/config.py @@ -0,0 +1,100 @@ +import os +import configparser +from io import StringIO +from importlib import resources + + +def duplicate_config(config): + """ + Make a deep copy of config to changes can be made without affecting the + original + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options + + Returns + ------- + new_config : configparser.ConfigParser + Deep copy of configuration options + """ + + 
config_string = StringIO() + config.write(config_string) + # We must reset the buffer to make it ready for reading. + config_string.seek(0) + new_config = configparser.ConfigParser( + interpolation=configparser.ExtendedInterpolation()) + new_config.read_file(config_string) + return new_config + + +def add_config(config, package, config_file, exception=True): + """ + Add the contents of a config file within a package to the current config + parser + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options + + package : str or Package + The package where ``config_file`` is found + + config_file : str + The name of the config file to add + + exception : bool + Whether to raise an exception if the config file isn't found + """ + try: + with resources.path(package, config_file) as path: + config.read(path) + except (ModuleNotFoundError, FileNotFoundError): + if exception: + raise + + +def ensure_absolute_paths(config): + """ + make sure all paths in the paths, namelists and streams sections are + absolute paths + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options + """ + for section in ['paths', 'namelists', 'streams', 'executables']: + for option, value in config.items(section): + value = os.path.abspath(value) + config.set(section, option, value) + + +def get_source_file(source_path, source, config): + """ + Get an absolute path given a tag name for that path + + Parameters + ---------- + source_path : str + The keyword path for a path as defined in :ref:`dev_config`, + a config option from a relative or absolute directory for the source + + source : str + The basename or relative path of the source within the ``source_path`` + directory + + config : configparser.ConfigParser + Configuration options used to determine the the absolute paths for the + given ``source_path`` + """ + + if config.has_option('paths', source_path): + source_path = config.get('paths', source_path) + + source_file = 
'{}/{}'.format(source_path, source) + source_file = os.path.abspath(source_file) + return source_file diff --git a/compass/default.cfg b/compass/default.cfg new file mode 100644 index 0000000000..3ed26d155c --- /dev/null +++ b/compass/default.cfg @@ -0,0 +1,23 @@ +# Options related to downloading files +[download] + +# the base url for the server from which meshes, initial conditions, and other +# data sets can be downloaded +server_base_url = https://web.lcrc.anl.gov/public/e3sm/mpas_standalonedata + +# whether to download files during setup that have not been cached locally +download = True + +# whether to check the size of files that have been downloaded to make sure +# they are the right size +check_size = False + +# whether to verify SSL certificates for HTTPS requests +verify = True + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# the program to use for graph partitioning +partition_executable = gpmetis diff --git a/compass/io.py b/compass/io.py new file mode 100644 index 0000000000..60e24925fa --- /dev/null +++ b/compass/io.py @@ -0,0 +1,209 @@ +import os +import tempfile +import requests +import progressbar + +from compass.config import get_source_file + + +def download(file_name, url, config, dest_path=None, dest_option=None, + exceptions=True): + """ + Download a file from a URL to the given path or path name + + Parameters + ---------- + file_name : str + The relative path of the source file relative to ``url`` and the + destination path relative to ``dest_path`` (or the associated config + option) + + url : str + The URL where ``file_name`` can be found + + config : configparser.ConfigParser + Configuration options used to find custom paths if ``dest_path`` is + a config option + + dest_path : str, optional + The output path; either ``dest_path`` or ``dest_option`` should be + specified + + dest_option : str, optional + An option in the ``paths`` config section defining an output path; + either 
``dest_path`` or ``dest_option`` should be specified + + exceptions : bool, optional + Whether to raise exceptions when the download fails + + Returns + ------- + out_file_name : str + The resulting file name if the download was successful + """ + + if dest_option is not None: + out_file_name = get_source_file(dest_option, file_name, config) + elif dest_path is not None: + out_file_name = '{}/{}'.format(dest_path, file_name) + else: + raise ValueError('One of "dest_option" and "dest_path" must be ' + 'specified.') + + do_download = config.getboolean('download', 'download') + check_size = config.getboolean('download', 'check_size') + verify = config.getboolean('download', 'verify') + + if not do_download: + if not os.path.exists(out_file_name): + raise OSError('File not found and downloading is disabled: ' + '{}'.format(out_file_name)) + return out_file_name + + if not check_size and os.path.exists(out_file_name): + return out_file_name + + session = requests.Session() + if not verify: + session.verify = False + + # out_file_name contains full path, so we need to make the relevant + # subdirectories if they do not exist already + directory = os.path.dirname(out_file_name) + try: + os.makedirs(directory) + except OSError: + pass + + url = '{}/{}'.format(url, file_name) + try: + response = session.get(url, stream=True) + totalSize = response.headers.get('content-length') + except requests.exceptions.RequestException: + if exceptions: + raise + else: + print(' {} could not be reached!'.format(url)) + return None + + try: + response.raise_for_status() + except requests.exceptions.HTTPError as e: + if exceptions: + raise + else: + print('ERROR while downloading {}:'.format(file_name)) + print(e) + return None + + if totalSize is None: + # no content length header + if not os.path.exists(out_file_name): + with open(out_file_name, 'wb') as f: + print('Downloading {}...'.format(file_name)) + try: + f.write(response.content) + except requests.exceptions.RequestException: + 
if exceptions: + raise + else: + print(' {} failed!'.format(file_name)) + return None + else: + print(' {} done.'.format(file_name)) + else: + # we can do the download in chunks and use a progress bar, yay! + + totalSize = int(totalSize) + if os.path.exists(out_file_name) and \ + totalSize == os.path.getsize(out_file_name): + # we already have the file, so just return + return out_file_name + + print('Downloading {} ({})...'.format(file_name, + _sizeof_fmt(totalSize))) + widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), + ' ', progressbar.ETA()] + bar = progressbar.ProgressBar(widgets=widgets, + max_value=totalSize).start() + size = 0 + with open(out_file_name, 'wb') as f: + try: + for data in response.iter_content(chunk_size=4096): + size += len(data) + f.write(data) + bar.update(size) + bar.finish() + except requests.exceptions.RequestException: + if exceptions: + raise + else: + print(' {} failed!'.format(file_name)) + return None + else: + print(' {} done.'.format(file_name)) + return out_file_name + + +def symlink(target, link_name, overwrite=True): + """ + From https://stackoverflow.com/a/55742015/7728169 + Create a symbolic link named link_name pointing to target. + If link_name exists then FileExistsError is raised, unless overwrite=True. + When trying to overwrite a directory, IsADirectoryError is raised. 
+ + Parameters + ---------- + target : str + The file path to link to + + link_name : str + The name of the new link + + overwrite : bool, optional + Whether to replace an existing link if one already exists + """ + + if not overwrite: + os.symlink(target, link_name) + return + + # os.replace() may fail if files are on different filesystems + link_dir = os.path.dirname(link_name) + + # Create link to target with temporary file_name + while True: + temp_link_name = tempfile.mktemp(dir=link_dir) + + # os.* functions mimic as closely as possible system functions + # The POSIX symlink() returns EEXIST if link_name already exists + # https://pubs.opengroup.org/onlinepubs/9699919799/functions/symlink.html + try: + os.symlink(target, temp_link_name) + break + except FileExistsError: + pass + + # Replace link_name with temp_link_name + try: + # Preempt os.replace on a directory with a nicer message + if not os.path.islink(link_name) and os.path.isdir(link_name): + raise IsADirectoryError( + f"Cannot symlink over existing directory: '{link_name}'") + os.replace(temp_link_name, link_name) + except BaseException: + if os.path.islink(temp_link_name): + os.remove(temp_link_name) + raise + + +# From https://stackoverflow.com/a/1094933/7728169 +def _sizeof_fmt(num, suffix='B'): + """ + Covert a number of bytes to a human-readable file size + """ + for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: + if abs(num) < 1024.0: + return "%3.1f%s%s" % (num, unit, suffix) + num /= 1024.0 + return "%.1f%s%s" % (num, 'Yi', suffix) diff --git a/compass/landice/__init__.py b/compass/landice/__init__.py new file mode 100644 index 0000000000..bf4f6194c0 --- /dev/null +++ b/compass/landice/__init__.py @@ -0,0 +1,24 @@ +from compass.mpas_core import MpasCore +from compass.landice.tests.dome import Dome +from compass.landice.tests.eismint2 import Eismint2 +from compass.landice.tests.enthalpy_benchmark import EnthalpyBenchmark +from compass.landice.tests.greenland import Greenland +from 
compass.landice.tests.hydro_radial import HydroRadial + + +class Landice(MpasCore): + """ + The collection of all test case for the MALI core + """ + + def __init__(self): + """ + Construct the collection of MALI test cases + """ + super().__init__(name='landice') + + self.add_test_group(Dome(mpas_core=self)) + self.add_test_group(Eismint2(mpas_core=self)) + self.add_test_group(EnthalpyBenchmark(mpas_core=self)) + self.add_test_group(Greenland(mpas_core=self)) + self.add_test_group(HydroRadial(mpas_core=self)) diff --git a/compass/landice/landice.cfg b/compass/landice/landice.cfg new file mode 100644 index 0000000000..02c2a4b8ca --- /dev/null +++ b/compass/landice/landice.cfg @@ -0,0 +1,39 @@ +# This config file has default config options for the landice core + +# The paths section points compass to external paths +[paths] + +# the relative or absolute path to the root of a branch where MALI has been +# built +mpas_model = MPAS-Model/landice/develop + +# The namelists section defines paths to example_compact namelists that will be used +# to generate specific namelists. By default, these point to the forward and +# init namelists in the default_inputs directory after a successful build of +# the landice model. Change these in a custom config file if you need a different +# example_compact. +[namelists] +forward = ${paths:mpas_model}/default_inputs/namelist.landice + +# The streams section defines paths to example_compact streams files that will be used +# to generate specific streams files. By default, these point to the forward and +# init streams files in the default_inputs directory after a successful build of +# the landice model. Change these in a custom config file if you need a different +# example_compact. +[streams] +forward = ${paths:mpas_model}/default_inputs/streams.landice + + +# The executables section defines paths to required executables. These +# executables are provided for use by specific test cases. 
Most tools that +# compass needs should be in the conda environment, so this is only the path +# to the MALI executable by default. +[executables] +model = ${paths:mpas_model}/landice_model + + +# Options related to downloading files +[download] + +# the path on the server for MALI +core_path = mpas-albany-landice diff --git a/compass/landice/namelists/__init__.py b/compass/landice/namelists/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/landice/streams/__init__.py b/compass/landice/streams/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/landice/suites/__init__.py b/compass/landice/suites/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/landice/suites/sia_integration.txt b/compass/landice/suites/sia_integration.txt new file mode 100644 index 0000000000..7667d9d2a0 --- /dev/null +++ b/compass/landice/suites/sia_integration.txt @@ -0,0 +1,13 @@ +landice/dome/2000m/restart_test +landice/dome/2000m/decomposition_test +landice/dome/variable_resolution/restart_test +landice/dome/variable_resolution/decomposition_test +landice/enthalpy_benchmark/A +landice/eismint2/decomposition_test +landice/eismint2/enthalpy_decomposition_test +landice/eismint2/restart_test +landice/eismint2/enthalpy_restart_test +landice/greenland/restart_test +landice/greenland/decomposition_test +landice/hydro_radial/restart_test +landice/hydro_radial/decomposition_test diff --git a/compass/landice/tests/__init__.py b/compass/landice/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/landice/tests/dome/__init__.py b/compass/landice/tests/dome/__init__.py new file mode 100644 index 0000000000..e59f725f0b --- /dev/null +++ b/compass/landice/tests/dome/__init__.py @@ -0,0 +1,24 @@ +from compass.testgroup import TestGroup +from compass.landice.tests.dome.smoke_test import SmokeTest +from compass.landice.tests.dome.decomposition_test import DecompositionTest +from 
compass.landice.tests.dome.restart_test import RestartTest + + +class Dome(TestGroup): + """ + A test group for dome test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.landice.Landice + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='dome') + + for mesh_type in ['2000m', 'variable_resolution']: + self.add_test_case( + SmokeTest(test_group=self, mesh_type=mesh_type)) + self.add_test_case( + DecompositionTest(test_group=self, mesh_type=mesh_type)) + self.add_test_case( + RestartTest(test_group=self, mesh_type=mesh_type)) diff --git a/compass/landice/tests/dome/decomposition_test/__init__.py b/compass/landice/tests/dome/decomposition_test/__init__.py new file mode 100644 index 0000000000..75ccf5e959 --- /dev/null +++ b/compass/landice/tests/dome/decomposition_test/__init__.py @@ -0,0 +1,67 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.dome.setup_mesh import SetupMesh +from compass.landice.tests.dome.run_model import RunModel +from compass.landice.tests.dome.visualize import Visualize + + +class DecompositionTest(TestCase): + """ + A test case for performing two MALI runs of a dome setup, one with one core + and one with four. The test case verifies that the results of the two runs + are identical. 
+ + Attributes + ---------- + mesh_type : str + The resolution or tye of mesh of the test case + """ + + def __init__(self, test_group, mesh_type): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.dome.Dome + The test group that this test case belongs to + + mesh_type : str + The resolution or tye of mesh of the test case + """ + name = 'decomposition_test' + self.mesh_type = mesh_type + subdir = '{}/{}'.format(mesh_type, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + SetupMesh(test_case=self, mesh_type=mesh_type)) + + for procs in [1, 4]: + name = '{}proc_run'.format(procs) + self.add_step( + RunModel(test_case=self, name=name, subdir=name, cores=procs, + threads=1, mesh_type=mesh_type)) + + input_dir = name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, mesh_type=mesh_type, name=name, + subdir=name, input_dir=input_dir) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'normalVelocity'] + steps = self.steps_to_run + if '1proc_run' in steps and '4proc_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1proc_run/output.nc', + filename2='4proc_run/output.nc') diff --git a/compass/landice/tests/dome/dome.cfg b/compass/landice/tests/dome/dome.cfg new file mode 100644 index 0000000000..9a2ece1137 --- /dev/null +++ b/compass/landice/tests/dome/dome.cfg @@ -0,0 +1,37 @@ +# config options for dome test cases +[dome] + +# sizes (in cells) for the 2000m uniform mesh +nx = 30 +ny = 34 + +# resolution (in m) for the 2000m uniform mesh +dc = 2000.0 + +# number of levels in the mesh +levels = 10 + +# the dome type ('halfar' or 'cism') +dome_type = halfar + +# Whether to center the dome in the center of the cell that is closest to the +# center of the domain 
+put_origin_on_a_cell = True + +# whether to add a small shelf to the test +shelf = False + +# whether to add hydrology to the initial condition +hydro = False + +# config options related to visualization for dome test cases +[dome_viz] + +# which time index to visualize +time_slice = 0 + +# whether to save image files +save_images = True + +# whether to hide figures (typically when save_images = True) +hide_figs = True diff --git a/compass/landice/tests/dome/namelist.landice b/compass/landice/tests/dome/namelist.landice new file mode 100644 index 0000000000..7ecfb0e8bb --- /dev/null +++ b/compass/landice/tests/dome/namelist.landice @@ -0,0 +1,3 @@ +config_dt = '0001-00-00_00:00:00' +config_run_duration = '0200-00-00_00:00:00' +config_block_decomp_file_prefix = 'graph.info.part.' diff --git a/compass/landice/tests/dome/restart_test/__init__.py b/compass/landice/tests/dome/restart_test/__init__.py new file mode 100644 index 0000000000..b93a47b64a --- /dev/null +++ b/compass/landice/tests/dome/restart_test/__init__.py @@ -0,0 +1,100 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.dome.setup_mesh import SetupMesh +from compass.landice.tests.dome.run_model import RunModel +from compass.landice.tests.dome.visualize import Visualize + + +class RestartTest(TestCase): + """ + A test case for performing two MALI runs of a dome setup, one full run and + one run broken into two segments with a restart. The test case verifies + that the results of the two runs are identical. 
+ + Attributes + ---------- + mesh_type : str + The resolution or tye of mesh of the test case + """ + + def __init__(self, test_group, mesh_type): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.dome.Dome + The test group that this test case belongs to + + mesh_type : str + The resolution or tye of mesh of the test case + """ + name = 'restart_test' + self.mesh_type = mesh_type + subdir = '{}/{}'.format(mesh_type, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + SetupMesh(test_case=self, mesh_type=mesh_type)) + + name = 'full_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1, mesh_type=mesh_type) + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.dome.restart_test', + 'namelist.full', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.dome.restart_test', + 'streams.full', out_name='streams.landice') + self.add_step(step) + + input_dir = name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, mesh_type=mesh_type, name=name, + subdir=name, input_dir=input_dir) + self.add_step(step, run_by_default=False) + + name = 'restart_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1, mesh_type=mesh_type, + suffixes=['landice', 'landice.rst']) + + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.dome.restart_test', + 'namelist.restart', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.dome.restart_test', + 'streams.restart', out_name='streams.landice') + + step.add_namelist_file( + 'compass.landice.tests.dome.restart_test', + 'namelist.restart.rst', out_name='namelist.landice.rst') + step.add_streams_file( + 'compass.landice.tests.dome.restart_test', + 'streams.restart.rst', out_name='streams.landice.rst') + self.add_step(step) + + input_dir = 
name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, mesh_type=mesh_type, name=name, + subdir=name, input_dir=input_dir) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'normalVelocity'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/landice/tests/dome/restart_test/namelist.full b/compass/landice/tests/dome/restart_test/namelist.full new file mode 100644 index 0000000000..b2fea592f8 --- /dev/null +++ b/compass/landice/tests/dome/restart_test/namelist.full @@ -0,0 +1,5 @@ +config_start_time = '0000-01-01_00:00:00' +config_run_duration = '0002-00-00_00:00:00' +config_dt = '0001-00-00_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .false. diff --git a/compass/landice/tests/dome/restart_test/namelist.restart b/compass/landice/tests/dome/restart_test/namelist.restart new file mode 100644 index 0000000000..8bea56b0f8 --- /dev/null +++ b/compass/landice/tests/dome/restart_test/namelist.restart @@ -0,0 +1,5 @@ +config_start_time = '0000-01-01_00:00:00' +config_run_duration = '0001-00-00_00:00:00' +config_dt = '0001-00-00_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .false. diff --git a/compass/landice/tests/dome/restart_test/namelist.restart.rst b/compass/landice/tests/dome/restart_test/namelist.restart.rst new file mode 100644 index 0000000000..552690e80f --- /dev/null +++ b/compass/landice/tests/dome/restart_test/namelist.restart.rst @@ -0,0 +1,5 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '0001-00-00_00:00:00' +config_dt = '0001-00-00_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .true. 
diff --git a/compass/landice/tests/dome/restart_test/streams.full b/compass/landice/tests/dome/restart_test/streams.full new file mode 100644 index 0000000000..e8ae571f33 --- /dev/null +++ b/compass/landice/tests/dome/restart_test/streams.full @@ -0,0 +1,13 @@ + + + + + + + + diff --git a/compass/landice/tests/dome/restart_test/streams.restart b/compass/landice/tests/dome/restart_test/streams.restart new file mode 100644 index 0000000000..ecf3f18c9a --- /dev/null +++ b/compass/landice/tests/dome/restart_test/streams.restart @@ -0,0 +1,14 @@ + + + + + + + + diff --git a/compass/landice/tests/dome/restart_test/streams.restart.rst b/compass/landice/tests/dome/restart_test/streams.restart.rst new file mode 100644 index 0000000000..b4b146f529 --- /dev/null +++ b/compass/landice/tests/dome/restart_test/streams.restart.rst @@ -0,0 +1,14 @@ + + + + + + + + diff --git a/compass/landice/tests/dome/run_model.py b/compass/landice/tests/dome/run_model.py new file mode 100644 index 0000000000..c538243119 --- /dev/null +++ b/compass/landice/tests/dome/run_model.py @@ -0,0 +1,99 @@ +from compass.model import run_model +from compass.step import Step + + +class RunModel(Step): + """ + A step for performing forward MALI runs as part of dome test cases. + + Attributes + ---------- + mesh_type : str + The resolution or mesh type of the test case + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. 
Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + def __init__(self, test_case, mesh_type, name='run_model', subdir=None, + cores=1, min_cores=None, threads=1, suffixes=None): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_type : str + The resolution or mesh type of the test case + + name : str, optional + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. 
Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + self.mesh_type = mesh_type + if suffixes is None: + suffixes = ['landice'] + self.suffixes = suffixes + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + for suffix in suffixes: + self.add_namelist_file( + 'compass.landice.tests.dome', 'namelist.landice', + out_name='namelist.{}'.format(suffix)) + + self.add_streams_file( + 'compass.landice.tests.dome', 'streams.landice', + out_name='streams.{}'.format(suffix)) + + self.add_input_file(filename='landice_grid.nc', + target='../setup_mesh/landice_grid.nc') + self.add_input_file(filename='graph.info', + target='../setup_mesh/graph.info') + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + for suffix in self.suffixes: + run_model(step=self, namelist='namelist.{}'.format(suffix), + streams='streams.{}'.format(suffix)) diff --git a/compass/landice/tests/dome/setup_mesh.py b/compass/landice/tests/dome/setup_mesh.py new file mode 100644 index 0000000000..b2ac0a6922 --- /dev/null +++ b/compass/landice/tests/dome/setup_mesh.py @@ -0,0 +1,199 @@ +import numpy +from netCDF4 import Dataset as NetCDFFile + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull +from mpas_tools.logging import check_call + +from compass.model import make_graph_file +from compass.step import Step + + +class SetupMesh(Step): + """ + A step for creating a mesh and initial condition 
for dome test cases + + Attributes + ---------- + mesh_type : str + The resolution or mesh type of the test case + """ + def __init__(self, test_case, mesh_type): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_type : str + The resolution or mesh type of the test case + """ + super().__init__(test_case=test_case, name='setup_mesh') + self.mesh_type = mesh_type + + if mesh_type == 'variable_resolution': + # download and link the mesh + # the empty database is a trick for downloading to the root of + # the local MALI file cache + self.add_input_file(filename='mpas_grid.nc', + target='dome_varres_grid.nc', database='') + + self.add_output_file(filename='graph.info') + self.add_output_file(filename='landice_grid.nc') + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + mesh_type = self.mesh_type + logger = self.logger + config = self.config + section = config['dome'] + + if mesh_type == '2000m': + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, + nonperiodic_x=True, + nonperiodic_y=True) + + write_netcdf(dsMesh, 'grid.nc') + + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, logger=logger) + write_netcdf(dsMesh, 'mpas_grid.nc') + + levels = section.get('levels') + args = ['create_landice_grid_from_generic_MPAS_grid.py', + '-i', 'mpas_grid.nc', + '-o', 'landice_grid.nc', + '-l', levels] + + check_call(args, logger) + + make_graph_file(mesh_filename='landice_grid.nc', + graph_filename='graph.info') + + _setup_dome_initial_conditions(config, logger, + filename='landice_grid.nc') + + +def _setup_dome_initial_conditions(config, logger, filename): + """ + Add the initial condition to the given MPAS mesh file + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults 
+ for the machine, core and configuration + + logger : logging.Logger + A logger for output from the step + + filename : str + file to setup dome + """ + section = config['dome'] + dome_type = section.get('dome_type') + put_origin_on_a_cell = section.getboolean('put_origin_on_a_cell') + shelf = section.getboolean('shelf') + hydro = section.getboolean('hyrdo') + + # Open the file, get needed dimensions + gridfile = NetCDFFile(filename, 'r+') + nVertLevels = len(gridfile.dimensions['nVertLevels']) + # Get variables + xCell = gridfile.variables['xCell'] + yCell = gridfile.variables['yCell'] + xEdge = gridfile.variables['xEdge'] + yEdge = gridfile.variables['yEdge'] + xVertex = gridfile.variables['xVertex'] + yVertex = gridfile.variables['yVertex'] + thickness = gridfile.variables['thickness'] + bedTopography = gridfile.variables['bedTopography'] + layerThicknessFractions = gridfile.variables['layerThicknessFractions'] + SMB = gridfile.variables['sfcMassBal'] + + # Find center of domain + x0 = xCell[:].min() + 0.5 * (xCell[:].max() - xCell[:].min()) + y0 = yCell[:].min() + 0.5 * (yCell[:].max() - yCell[:].min()) + # Calculate distance of each cell center from dome center + r = ((xCell[:] - x0) ** 2 + (yCell[:] - y0) ** 2) ** 0.5 + + if put_origin_on_a_cell: + # Center the dome in the center of the cell that is closest to the + # center of the domain. 
+ centerCellIndex = numpy.abs(r[:]).argmin() + xShift = -1.0 * xCell[centerCellIndex] + yShift = -1.0 * yCell[centerCellIndex] + xCell[:] = xCell[:] + xShift + yCell[:] = yCell[:] + yShift + xEdge[:] = xEdge[:] + xShift + yEdge[:] = yEdge[:] + yShift + xVertex[:] = xVertex[:] + xShift + yVertex[:] = yVertex[:] + yShift + # Now update origin location and distance array + x0 = 0.0 + y0 = 0.0 + r = ((xCell[:] - x0) ** 2 + (yCell[:] - y0) ** 2) ** 0.5 + + # Assign variable values for dome + # Define dome dimensions - all in meters + r0 = 60000.0 * numpy.sqrt(0.125) + h0 = 2000.0 * numpy.sqrt(0.125) + # Set default value for non-dome cells + thickness[:] = 0.0 + # Calculate the dome thickness for cells within the desired radius + # (thickness will be NaN otherwise) + thickness_field = thickness[0, :] + if dome_type == 'cism': + thickness_field[r < r0] = h0 * (1.0 - (r[r < r0] / r0) ** 2) ** 0.5 + elif dome_type == 'halfar': + thickness_field[r < r0] = h0 * ( + 1.0 - (r[r < r0] / r0) ** (4.0 / 3.0)) ** (3.0 / 7.0) + else: + raise ValueError('Unexpected dome_type: {}'.format(dome_type)) + thickness[0, :] = thickness_field + + # zero velocity everywhere + # normalVelocity[:] = 0.0 + # flat bed at sea level + bedTopography[:] = 0.0 + if shelf: + # this line will make a small shelf: + bedTopography[0, xCell[:] < -10000.0] = -2000.0 + # Setup layerThicknessFractions + layerThicknessFractions[:] = 1.0 / nVertLevels + + # boundary conditions + # Sample values to use, or comment these out for them to be 0. + SMB[:] = 0.0 + # beta[:] = 50000. 
+ # units: m/yr, lapse rate of 1 m/yr with 0 at 500 m + # SMB[:] = 2.0/1000.0 * (thickness[:] + bedTopography[:]) - 1.0 + # Convert from units of m/yr to kg/m2/s using an assumed ice density + SMB[:] = SMB[:] * 910.0 / (3600.0 * 24.0 * 365.0) + + # lapse rate of 5 deg / km + # Tsfc[:, 0] = -5.0/1000.0 * (thickness[0,:] + bedTopography[0,:]) + # G = 0.01 + # BMB[:] = -20.0 # units: m/yr + + if hydro: + gridfile.variables['uReconstructX'][:] = 5.0 / (3600.0 * 24.0 * 365.0) + gridfile.variables['basalMeltInput'][:] = 0.06 / 335000.0 * 50.0 + gridfile.variables['externalWaterInput'][:] = 0.0 + gridfile.variables['waterThickness'][:] = 0.08 + + gridfile.close() + + logger.info('Successfully added dome initial conditions to: {}'.format( + filename)) diff --git a/compass/landice/tests/dome/smoke_test/__init__.py b/compass/landice/tests/dome/smoke_test/__init__.py new file mode 100644 index 0000000000..3755b8b402 --- /dev/null +++ b/compass/landice/tests/dome/smoke_test/__init__.py @@ -0,0 +1,47 @@ +from compass.testcase import TestCase +from compass.landice.tests.dome.setup_mesh import SetupMesh +from compass.landice.tests.dome.run_model import RunModel +from compass.landice.tests.dome.visualize import Visualize + + +class SmokeTest(TestCase): + """ + The default test case for the dome test group simply creates the mesh and + initial condition, then performs a short forward run on 4 cores. 
+ + Attributes + ---------- + mesh_type : str + The resolution or tye of mesh of the test case + """ + + def __init__(self, test_group, mesh_type): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.dome.Dome + The test group that this test case belongs to + + mesh_type : str + The resolution or tye of mesh of the test case + """ + name = 'smoke_test' + self.mesh_type = mesh_type + subdir = '{}/{}'.format(mesh_type, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + SetupMesh(test_case=self, mesh_type=mesh_type)) + self.add_step( + RunModel(test_case=self, cores=4, threads=1, mesh_type=mesh_type)) + step = Visualize(test_case=self, mesh_type=mesh_type) + self.add_step(step, run_by_default=False) + + # no configure() method is needed because we will use the default dome + # config options + + # no run() method is needed because we're doing the default: running all + # steps diff --git a/compass/landice/tests/dome/streams.landice b/compass/landice/tests/dome/streams.landice new file mode 100644 index 0000000000..42fae7c927 --- /dev/null +++ b/compass/landice/tests/dome/streams.landice @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/landice/tests/dome/visualize.py b/compass/landice/tests/dome/visualize.py new file mode 100644 index 0000000000..5d4d0231c5 --- /dev/null +++ b/compass/landice/tests/dome/visualize.py @@ -0,0 +1,206 @@ +import numpy +import netCDF4 +import matplotlib.pyplot as plt + +from compass.step import Step + + +class Visualize(Step): + """ + A step for visualizing the output from a dome test case + + Attributes + ---------- + mesh_type : str + The resolution or mesh type of the test case + """ + def __init__(self, test_case, mesh_type, name='visualize', subdir=None, + input_dir='run_model'): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_type : 
str + The resolution or mesh type of the test case + + name : str, optional + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + input_dir : str, optional + The input directory within the test case with a file ``output.nc`` + to visualize + """ + super().__init__(test_case=test_case, name=name, subdir=subdir) + self.mesh_type = mesh_type + + self.add_input_file(filename='output.nc', + target='../{}/output.nc'.format(input_dir)) + + # depending on settings, this may produce no outputs, so we won't add + # any + + # no setup method is needed + + def run(self): + """ + Run this step of the test case + """ + visualize_dome(self.config, self.logger, filename='output.nc') + + +def visualize_dome(config, logger, filename): + """ + Plot the output from a dome test case + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults + for the machine, core and configuration + + logger : logging.Logger + A logger for output from the step + + filename : str + file to visualize + """ + section = config['dome_viz'] + + time_slice = section.getint('time_slice') + save_images = section.getboolean('save_images') + hide_figs = section.getboolean('hide_figs') + + # Note: this may be slightly wrong for some calendar types! 
+ secInYr = 3600.0 * 24.0 * 365.0 + + f = netCDF4.Dataset(filename, 'r') + + times = f.variables['xtime'] + thickness = f.variables['thickness'] + # dcEdge = f.variables['dcEdge'] + # bedTopography = f.variables['bedTopography'] # not needed + xCell = f.variables['xCell'] + yCell = f.variables['yCell'] + xEdge = f.variables['xEdge'] + yEdge = f.variables['yEdge'] + angleEdge = f.variables['angleEdge'] + temperature = f.variables['temperature'] + lowerSurface = f.variables['lowerSurface'] + upperSurface = f.variables['upperSurface'] + normalVelocity = f.variables['normalVelocity'] + # uReconstructX = f.variables['uReconstructX'] + uReconstructX = f.variables['uReconstructX'] + uReconstructY = f.variables['uReconstructY'] + + vert_levs = len(f.dimensions['nVertLevels']) + + time_length = times.shape[0] + + logger.info("vert_levs = {}; time_length = {}".format(vert_levs, + time_length)) + + var_slice = thickness[time_slice, :] + + fig = plt.figure(1, facecolor='w') + fig.add_subplot(111, aspect='equal') + # C = plt.contourf(xCell, yCell, var_slice ) + plt.scatter(xCell[:], yCell[:], 80, var_slice, marker='h', + edgecolors='none') + plt.colorbar() + plt.title('thickness at time {}'.format(time_slice)) + plt.draw() + if save_images: + logger.info("Saving figures to files.") + plt.savefig('dome_thickness.png') + + fig = plt.figure(2) + fig.add_subplot(121, aspect='equal') + plt.scatter(xCell[:], yCell[:], 80, lowerSurface[time_slice, :], + marker='h', edgecolors='none') + plt.colorbar() + plt.title('lower surface at time {}'.format(time_slice)) + plt.draw() + fig.add_subplot(122, aspect='equal') + plt.scatter(xCell[:], yCell[:], 80, upperSurface[time_slice, :], + marker='h', edgecolors='none') + plt.colorbar() + plt.title('upper surface at time {}'.format(time_slice)) + plt.draw() + if save_images: + plt.savefig('dome_surfaces.png') + + fig = plt.figure(3) + for templevel in range(0, vert_levs): + fig.add_subplot(3, 4, templevel+1, aspect='equal') + var_slice = 
temperature[time_slice, :, templevel] + # C = plt.contourf(xCell, yCell, var_slice ) + plt.scatter(xCell[:], yCell[:], 40, var_slice, marker='h', + edgecolors='none') + plt.colorbar() + plt.title('temperature at level {} at time {}'.format(templevel, + time_slice)) + plt.draw() + if save_images: + plt.savefig('dome_temperature.png') + + fig = plt.figure(4) + fig.add_subplot(121, aspect='equal') + plt.scatter(xEdge[:], yEdge[:], 80, + normalVelocity[time_slice, :, vert_levs-1] * secInYr, + marker='h', edgecolors='none') + plt.colorbar() + normalVel = normalVelocity[time_slice, :, vert_levs-1] + plt.quiver(xEdge[:], yEdge[:], + numpy.cos(angleEdge[:]) * normalVel * secInYr, + numpy.sin(angleEdge[:]) * normalVel * secInYr) + plt.title('normalVelocity of bottom layer at time {}'.format(time_slice)) + plt.draw() + fig.add_subplot(122, aspect='equal') + plt.scatter(xEdge[:], yEdge[:], 80, + normalVelocity[time_slice, :, 0] * secInYr, marker='h', + edgecolors='none') + plt.colorbar() + normalVel = normalVelocity[time_slice, :, 0] + plt.quiver(xEdge[:], yEdge[:], + numpy.cos(angleEdge[:]) * normalVel * secInYr, + numpy.sin(angleEdge[:]) * normalVel * secInYr) + plt.title('normalVelocity of top layer at time {}'.format(time_slice)) + plt.draw() + if save_images: + plt.savefig('dome_normalVelocity.png') + + fig = plt.figure(5, facecolor='w') + fig.add_subplot(121, aspect='equal') + plt.scatter(xCell[:], yCell[:], 80, + uReconstructX[time_slice, :, 0] * secInYr, marker='h', + edgecolors='none') + plt.colorbar() + plt.quiver(xCell[:], yCell[:], uReconstructX[time_slice, :, 0] * secInYr, + uReconstructY[time_slice, :, 0] * secInYr) + plt.title('uReconstructX of top layer at time {}'.format(time_slice)) + plt.draw() + fig.add_subplot(122, aspect='equal') + plt.scatter(xCell[:], yCell[:], 80, + uReconstructY[time_slice, :, 0] * secInYr, marker='h', + edgecolors='none') + plt.colorbar() + plt.quiver(xCell[:], yCell[:], uReconstructX[time_slice, :, 0] * secInYr, + 
uReconstructY[time_slice, :, 0] * secInYr) + plt.title('uReconstructY of top layer at time {}'.format(time_slice)) + plt.draw() + if save_images: + plt.savefig('dome_uReconstruct.png') + + if hide_figs: + logger.info("Plot display disabled with hide_plot config option.") + else: + plt.show() + + f.close() diff --git a/compass/landice/tests/eismint2/__init__.py b/compass/landice/tests/eismint2/__init__.py new file mode 100644 index 0000000000..5054731c41 --- /dev/null +++ b/compass/landice/tests/eismint2/__init__.py @@ -0,0 +1,27 @@ +from compass.testgroup import TestGroup +from compass.landice.tests.eismint2.standard_experiments import \ + StandardExperiments +from compass.landice.tests.eismint2.decomposition_test import DecompositionTest +from compass.landice.tests.eismint2.restart_test import RestartTest + + +class Eismint2(TestGroup): + """ + A test group for eismint2 test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.landice.Landice + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='eismint2') + + self.add_test_case(StandardExperiments(test_group=self)) + + for thermal_solver in ['temperature', 'enthalpy']: + self.add_test_case( + DecompositionTest( + test_group=self, thermal_solver=thermal_solver)) + self.add_test_case( + RestartTest( + test_group=self, thermal_solver=thermal_solver)) diff --git a/compass/landice/tests/eismint2/decomposition_test/__init__.py b/compass/landice/tests/eismint2/decomposition_test/__init__.py new file mode 100644 index 0000000000..57d850a18f --- /dev/null +++ b/compass/landice/tests/eismint2/decomposition_test/__init__.py @@ -0,0 +1,68 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.eismint2.setup_mesh import SetupMesh +from compass.landice.tests.eismint2.run_experiment import RunExperiment + + +class DecompositionTest(TestCase): + """ + A test case for performing two MALI runs of a 
EISMINT2 setup, one with one + core and one with four. The test case verifies that the results of the two + runs are identical. + """ + def __init__(self, test_group, thermal_solver): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.eismint2.Eismint2 + The test group that this test case belongs to + + thermal_solver : {'temperature', 'enthalpy'} + The formulation of the thermodynamics to use + """ + if thermal_solver == 'enthalpy': + name = 'enthalpy_decomposition_test' + elif thermal_solver == 'temperature': + name = 'decomposition_test' + else: + raise ValueError( + 'Unknown thermal_solver {}'.format(thermal_solver)) + super().__init__(test_group=test_group, name=name) + + self.add_step( + SetupMesh(test_case=self)) + + options = {'config_run_duration': "'3000-00-00_00:00:00'", + 'config_thermal_solver': "'{}'".format(thermal_solver)} + + experiment = 'f' + for procs in [1, 4]: + name = '{}proc_run'.format(procs) + step = RunExperiment(test_case=self, name=name, subdir=name, + cores=procs, threads=1, experiment=experiment) + + step.add_namelist_options(options) + + step.add_streams_file( + 'compass.landice.tests.eismint2.decomposition_test', + 'streams.landice') + self.add_step(step) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'temperature', 'basalTemperature', + 'heatDissipation'] + steps = self.steps_to_run + if '1proc_run' in steps and '4proc_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1proc_run/output.nc', + filename2='4proc_run/output.nc') diff --git a/compass/landice/tests/eismint2/decomposition_test/streams.landice b/compass/landice/tests/eismint2/decomposition_test/streams.landice new file mode 100644 index 0000000000..6e9aa34176 --- /dev/null +++ b/compass/landice/tests/eismint2/decomposition_test/streams.landice @@ -0,0 +1,6 @@ + + + + + 
diff --git a/compass/landice/tests/eismint2/eismint2.cfg b/compass/landice/tests/eismint2/eismint2.cfg new file mode 100644 index 0000000000..1fd017b914 --- /dev/null +++ b/compass/landice/tests/eismint2/eismint2.cfg @@ -0,0 +1,29 @@ +# config options for EISMINT2 test cases +[eismint2] + +# sizes (in cells) for the 25000m uniform mesh +nx = 64 +ny = 74 + +# resolution (in m) for the 25000m uniform mesh +dc = 25000.0 + +# number of levels in the mesh +levels = 10 + +# radius (km) used to cull the mesh +radius = 750.0 + + +# config options related to visualization for EISMINT2 test cases +[eismint2_viz] + +# the name of the experiment or experiments to visualize ('a','b','c','d','f', +# or 'g'). For multiple experiments, give a comma-separated list +experiment = a + +# whether to save image files +save_images = True + +# whether to hide figures (typically when save_images = True) +hide_figs = True diff --git a/compass/landice/tests/eismint2/namelist.landice b/compass/landice/tests/eismint2/namelist.landice new file mode 100644 index 0000000000..8b73cf5ebb --- /dev/null +++ b/compass/landice/tests/eismint2/namelist.landice @@ -0,0 +1,25 @@ +config_dt = '0000-06-00_00:00:00' +config_start_time = '0000-01-01_00:00:00' +config_stop_time = '200000-01-01_00:00:00' +config_adaptive_timestep = .true. +# 100 years +config_max_adaptive_timestep = 3153600000 +config_adaptive_timestep_include_DCFL = .true. +config_adaptive_timestep_force_interval = '900000-00-00_00:00:00' +config_block_decomp_file_prefix = 'graph.info.part.' +config_velocity_solver = "sia" +config_flowParamA_calculation = "PB1982" +config_tracer_advection = 'fo' +config_thermal_solver = 'temperature' +config_thermal_calculate_bmb = .false. +config_surface_air_temperature_source = 'file' +config_basal_heat_flux_source = 'constant' +config_basal_heat_flux_value = 4.2e-2 +config_ice_density = 910.0 +config_dynamic_thickness = 10.0 +config_year_digits = 6 + +config_AM_globalStats_enable = .true. 
+config_AM_globalStats_compute_interval = 'output_interval' +config_AM_globalStats_compute_on_startup = .true. +config_AM_globalStats_write_on_startup = .true. diff --git a/compass/landice/tests/eismint2/restart_test/__init__.py b/compass/landice/tests/eismint2/restart_test/__init__.py new file mode 100644 index 0000000000..53cf17a261 --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/__init__.py @@ -0,0 +1,94 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.eismint2.setup_mesh import SetupMesh +from compass.landice.tests.eismint2.run_experiment import RunExperiment + + +class RestartTest(TestCase): + """ + A test case for performing two MALI runs of an EISMINT2 setup, one full run + and one run broken into two segments with a restart. The test case + verifies that the results of the two runs are identical. + """ + + def __init__(self, test_group, thermal_solver): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.eismint2.Eismint2 + The test group that this test case belongs to + + thermal_solver : {'temperature', 'enthalpy'} + The formulation of the thermodynamics to use + """ + if thermal_solver == 'enthalpy': + name = 'enthalpy_restart_test' + elif thermal_solver == 'temperature': + name = 'restart_test' + else: + raise ValueError( + 'Unknown thermal_solver {}'.format(thermal_solver)) + super().__init__(test_group=test_group, name=name) + + self.add_step( + SetupMesh(test_case=self)) + + experiment = 'f' + + name = 'full_run' + step = RunExperiment(test_case=self, name=name, subdir=name, cores=4, + threads=1, experiment=experiment) + + options = {'config_thermal_solver': "'{}'".format(thermal_solver)} + + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.eismint2.restart_test', + 'namelist.full', out_name='namelist.landice') + step.add_namelist_options(options, out_name='namelist.landice') + 
step.add_streams_file( + 'compass.landice.tests.eismint2.restart_test', + 'streams.full', out_name='streams.landice') + self.add_step(step) + + name = 'restart_run' + step = RunExperiment(test_case=self, name=name, subdir=name, cores=4, + threads=1, experiment=experiment, + suffixes=['landice', 'landice.rst']) + + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.eismint2.restart_test', + 'namelist.restart', out_name='namelist.landice') + step.add_namelist_options(options, out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.eismint2.restart_test', + 'streams.restart', out_name='streams.landice') + + step.add_namelist_file( + 'compass.landice.tests.eismint2.restart_test', + 'namelist.restart.rst', out_name='namelist.landice.rst') + step.add_namelist_options(options, out_name='namelist.landice.rst') + step.add_streams_file( + 'compass.landice.tests.eismint2.restart_test', + 'streams.restart.rst', out_name='streams.landice.rst') + self.add_step(step) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'temperature', 'basalTemperature', + 'heatDissipation'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/landice/tests/eismint2/restart_test/namelist.full b/compass/landice/tests/eismint2/restart_test/namelist.full new file mode 100644 index 0000000000..eb043c2c92 --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/namelist.full @@ -0,0 +1,5 @@ +config_run_duration = '3000-00-00_00:00:00' +config_dt = '0100-00-00_00:00:00' +config_adaptive_timestep = .false. +config_write_output_on_startup = .true. +config_do_restart = .false. 
diff --git a/compass/landice/tests/eismint2/restart_test/namelist.restart b/compass/landice/tests/eismint2/restart_test/namelist.restart new file mode 100644 index 0000000000..cbcf5acc86 --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/namelist.restart @@ -0,0 +1,5 @@ +config_run_duration = '2000-00-00_00:00:00' +config_dt = '0100-00-00_00:00:00' +config_adaptive_timestep = .false. +config_write_output_on_startup = .true. +config_do_restart = .false. diff --git a/compass/landice/tests/eismint2/restart_test/namelist.restart.rst b/compass/landice/tests/eismint2/restart_test/namelist.restart.rst new file mode 100644 index 0000000000..2b2ae9d5bd --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/namelist.restart.rst @@ -0,0 +1,6 @@ +config_run_duration = '1000-00-00_00:00:00' +config_start_time = 'file' +config_dt = '0100-00-00_00:00:00' +config_adaptive_timestep = .false. +config_write_output_on_startup = .true. +config_do_restart = .true. diff --git a/compass/landice/tests/eismint2/restart_test/streams.full b/compass/landice/tests/eismint2/restart_test/streams.full new file mode 100644 index 0000000000..5ccb3cd861 --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/streams.full @@ -0,0 +1,11 @@ + + + + + + + diff --git a/compass/landice/tests/eismint2/restart_test/streams.restart b/compass/landice/tests/eismint2/restart_test/streams.restart new file mode 100644 index 0000000000..8c853a819d --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/streams.restart @@ -0,0 +1,12 @@ + + + + + + + diff --git a/compass/landice/tests/eismint2/restart_test/streams.restart.rst b/compass/landice/tests/eismint2/restart_test/streams.restart.rst new file mode 100644 index 0000000000..f046cfb592 --- /dev/null +++ b/compass/landice/tests/eismint2/restart_test/streams.restart.rst @@ -0,0 +1,12 @@ + + + + + + + diff --git a/compass/landice/tests/eismint2/run_experiment.py b/compass/landice/tests/eismint2/run_experiment.py new file mode 
100644 index 0000000000..971f0d7ed9 --- /dev/null +++ b/compass/landice/tests/eismint2/run_experiment.py @@ -0,0 +1,296 @@ +import numpy +from netCDF4 import Dataset as NetCDFFile +import shutil + +from mpas_tools.logging import check_call + +from compass.model import run_model +from compass.step import Step + + +class RunExperiment(Step): + """ + A step for performing forward MALI runs as part of eismint2 test cases. + + Attributes + ---------- + experiment : {'a', 'b', 'c', 'd', 'f', 'g'} + The EISMINT2 experiment (a-d or f-g) to perform + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + def __init__(self, test_case, experiment, name='run_model', subdir=None, + cores=1, min_cores=None, threads=1, suffixes=None): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + experiment : {'a', 'b', 'c', 'd', 'f', 'g'} + The EISMINT2 experiment (a-d or f-g) to perform + + name : str, optional + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. 
Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + self.experiment = experiment + if suffixes is None: + suffixes = ['landice'] + self.suffixes = suffixes + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + for suffix in suffixes: + self.add_namelist_file( + 'compass.landice.tests.eismint2', 'namelist.landice', + out_name='namelist.{}'.format(suffix)) + + self.add_streams_file( + 'compass.landice.tests.eismint2', 'streams.landice', + out_name='streams.{}'.format(suffix)) + + if experiment in ('a', 'f', 'g'): + self.add_input_file(filename='landice_grid.nc', + target='../setup_mesh/landice_grid.nc') + else: + self.add_input_file(filename='experiment_a_output.nc', + target='../experiment_a/output.nc') + + self.add_input_file(filename='graph.info', + target='../setup_mesh/graph.info') + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + _setup_eismint2_initial_conditions(self.logger, self.experiment, + filename='initial_condition.nc') + + for suffix in self.suffixes: + run_model(self, namelist='namelist.{}'.format(suffix), + streams='streams.{}'.format(suffix)) + + +def _setup_eismint2_initial_conditions(logger, experiment, filename): + """ + Add the initial condition for the given EISMINT2 experiment to the given + MPAS mesh file + + Parameters + ---------- + logger : logging.Logger + A logger for output from the step + + experiment : {'a', 'b', 'c', 'd', 'f', 'g'} + The name of the experiment + + filename : str + file to add the initial 
condition to + + """ + if experiment in ('a', 'b', 'c', 'd', 'f', 'g'): + logger.info('Setting up EISMINT2 Experiment {}'.format(experiment)) + else: + raise ValueError("Invalid experiment specified: {}. Please specify " + "an experiment between 'a' and 'g', excluding " + "'e'".format(experiment)) + + # Setup dictionaries of parameter values for each experiment + # Mmax: Maximum SMB at center of domain (m a-1) + # Sb: gradient of SMB with horizontal distance (m a-1 km-1) + # Rel: radial distance from summit where SMB = 0 (km) + # Tmin: surface temperature at summit (K) + # ST: gradient of air temperature with horizontal distance (K km-1) + # beta: basal traction coefficient (Pa m-1 a) + # Note: beta is the inverse of parameter B in Payne et al. (2000) + exp_params = {'a': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0, + 'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8}, + 'b': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0, + 'Tmin': 243.15, 'ST': 1.67e-2, 'beta': 1.0e8}, + 'c': {'Mmax': 0.25, 'Sb': 10.0**-2, 'Rel': 425.0, + 'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8}, + 'd': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 425.0, + 'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e8}, + 'f': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0, + 'Tmin': 223.15, 'ST': 1.67e-2, 'beta': 1.0e8}, + 'g': {'Mmax': 0.5, 'Sb': 10.0**-2, 'Rel': 450.0, + 'Tmin': 238.15, 'ST': 1.67e-2, 'beta': 1.0e3}} + xsummit = 750000.0 + ysummit = 750000.0 + rhoi = 910.0 + scyr = 3600.0 * 24.0 * 365.0 + + # Some experiments start from scratch, others start from the SS of a previous experiment + if experiment in ('a', 'f', 'g'): + # we will build the mesh from scratch + shutil.copyfile('landice_grid.nc', filename) + else: + # use the final state of experiment A + args = ['ncks', '-O', '-d', 'Time,-1', 'experiment_a_output.nc', + filename] + check_call(args, logger) + + # Open the new input file, get needed dimensions & variables + gridfile = NetCDFFile(filename, 'r+') + nVertLevels = len(gridfile.dimensions['nVertLevels']) + # 
Get variables + xCell = gridfile.variables['xCell'][:] + yCell = gridfile.variables['yCell'][:] + xEdge = gridfile.variables['xEdge'][:] + yEdge = gridfile.variables['yEdge'][:] + xVertex = gridfile.variables['xVertex'][:] + yVertex = gridfile.variables['yVertex'][:] + + # =================== + # initial conditions + # =================== + # If starting from scratch, setup dimension variables and initial condition + # variables + if experiment in ('a', 'f', 'g'): + # Find center of domain + x0 = xCell[:].min() + 0.5 * (xCell[:].max() - xCell[:].min()) + y0 = yCell[:].min() + 0.5 * (yCell[:].max() - yCell[:].min()) + # Calculate distance of each cell center from dome center + r = ((xCell[:] - x0)**2 + (yCell[:] - y0)**2)**0.5 + + # Center the dome in the center of the cell that is closest to the + # center of the domain. + centerCellIndex = numpy.abs(r[:]).argmin() + # EISMINT-2 puts the center of the domain at 750,750 km instead of 0,0. + # Adjust to use that origin. + + xShift = -1.0 * xCell[centerCellIndex] + xsummit + yShift = -1.0 * yCell[centerCellIndex] + ysummit + xCell[:] = xCell[:] + xShift + yCell[:] = yCell[:] + yShift + xEdge[:] = xEdge[:] + xShift + yEdge[:] = yEdge[:] + yShift + xVertex[:] = xVertex[:] + xShift + yVertex[:] = yVertex[:] + yShift + gridfile.variables['xCell'][:] = xCell[:] + gridfile.variables['yCell'][:] = yCell[:] + gridfile.variables['xEdge'][:] = xEdge[:] + gridfile.variables['yEdge'][:] = yEdge[:] + gridfile.variables['xVertex'][:] = xVertex[:] + gridfile.variables['yVertex'][:] = yVertex[:] + + # Assign initial condition variable values for EISMINT-2 experiment + # Start with no ice + gridfile.variables['thickness'][:] = 0.0 + # flat bed at sea level + gridfile.variables['bedTopography'][:] = 0.0 + # constant, arbitrary temperature, degrees K (doesn't matter since + # there is no ice initially) + gridfile.variables['temperature'][:] = 273.15 + # Setup layerThicknessFractions + gridfile.variables['layerThicknessFractions'][:] = 
1.0 / nVertLevels + else: + StrLen = len(gridfile.dimensions['StrLen']) + gridfile.variables['xtime'][0, :] = list( + '000000-01-01_00:00:00'.ljust(StrLen, ' ')) + + # Now update/set origin location and distance array + r = ((xCell[:] - xsummit)**2 + (yCell[:] - ysummit)**2)**0.5 + + # =================== + # boundary conditions + # =================== + # Define values prescribed by Payne et al. 2000 paper. + + params = exp_params[experiment] + logger.info("Parameters for this experiment: {}".format(params)) + + # SMB field specified by EISMINT, constant in time for EISMINT2 + # It is a function of geographical position (not elevation) + + # maximum accumulation rate [m/yr] converted to [m/s] + Mmax = params['Mmax'] / scyr + # gradient of accumulation rate change with horizontal distance [m/a/km] + # converted to [m/s/m] + Sb = params['Sb'] / scyr / 1000.0 + # accumulation rate at 0 position [km] converted to [m] + Rel = params['Rel'] * 1000.0 + + SMB = numpy.minimum(Mmax, Sb * (Rel - r)) # [m ice/s] + SMB = SMB * rhoi # in kg/m2/s + if 'sfcMassBal' in gridfile.variables: + sfcMassBalVar = gridfile.variables['sfcMassBal'] + else: + datatype = gridfile.variables[ + 'xCell'].dtype # Get the datatype for double precision float + sfcMassBalVar = gridfile.createVariable('sfcMassBal', datatype, + ('Time', 'nCells')) + sfcMassBalVar[0, :] = SMB + + # Surface temperature + + # minimum surface air temperature [K] + Tmin = params['Tmin'] + # gradient of air temperature change with horizontal distance [K/km] + # converted to [K/m] + ST = params['ST'] / 1000.0 + + if 'surfaceAirTemperature' in gridfile.variables: + surfaceAirTemperatureVar = gridfile.variables['surfaceAirTemperature'] + else: + datatype = gridfile.variables[ + 'xCell'].dtype # Get the datatype for double precision float + surfaceAirTemperatureVar = gridfile.createVariable( + 'surfaceAirTemperature', datatype, ('Time', 'nCells')) + surfaceAirTemperatureVar[0, :] = Tmin + ST * r + + # beta + beta = 
params['beta'] + if 'beta' in gridfile.variables: + betaVar = gridfile.variables['beta'] + else: + datatype = gridfile.variables[ + 'xCell'].dtype # Get the datatype for double precision float + betaVar = gridfile.createVariable('beta', datatype, ('Time', 'nCells')) + betaVar[0, :] = beta + + gridfile.close() + logger.info('Successfully added initial conditions for EISMINT2, ' + 'experiment {} to the file: {}'.format(experiment, filename)) diff --git a/compass/landice/tests/eismint2/setup_mesh.py b/compass/landice/tests/eismint2/setup_mesh.py new file mode 100644 index 0000000000..d3fb7d635e --- /dev/null +++ b/compass/landice/tests/eismint2/setup_mesh.py @@ -0,0 +1,74 @@ +import xarray + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull +from mpas_tools.logging import check_call + +from compass.model import make_graph_file +from compass.step import Step + + +class SetupMesh(Step): + """ + A step for creating a mesh and initial condition for EISMINT2 test cases + """ + def __init__(self, test_case): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + """ + super().__init__(test_case=test_case, name='setup_mesh') + + self.add_output_file(filename='graph.info') + self.add_output_file(filename='landice_grid.nc') + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + logger = self.logger + section = self.config['eismint2'] + + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False, + nonperiodic_y=False) + + dsMesh = convert(dsMesh, logger=logger) + write_netcdf(dsMesh, 'mpas_grid.nc') + dsMesh.close() + + radius = section.get('radius') + args = ['define_cullMask.py', + '-f', 'mpas_grid.nc', + '-m', 'radius', + '-d', radius] + + check_call(args, logger) + 
+ dsMesh = xarray.open_dataset('mpas_grid.nc') + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, logger=logger) + write_netcdf(dsMesh, 'mpas_grid2.nc') + + levels = section.get('levels') + args = ['create_landice_grid_from_generic_MPAS_grid.py', + '-i', 'mpas_grid2.nc', + '-o', 'landice_grid.nc', + '-l', levels, + '--thermal', + '--beta'] + + check_call(args, logger) + + make_graph_file(mesh_filename='landice_grid.nc', + graph_filename='graph.info') diff --git a/compass/landice/tests/eismint2/standard_experiments/__init__.py b/compass/landice/tests/eismint2/standard_experiments/__init__.py new file mode 100644 index 0000000000..65cb0bd2f8 --- /dev/null +++ b/compass/landice/tests/eismint2/standard_experiments/__init__.py @@ -0,0 +1,48 @@ +from compass.testcase import TestCase +from compass.landice.tests.eismint2.setup_mesh import SetupMesh +from compass.landice.tests.eismint2.run_experiment import RunExperiment +from compass.landice.tests.eismint2.standard_experiments.visualize import \ + Visualize + + +class StandardExperiments(TestCase): + """ + A test case for performing the standard EISMINT2 experiments. 
+ """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.eismint2.Eismint2 + The test group that this test case belongs to + + mesh_type : str + The resolution or tye of mesh of the test case + """ + name = 'standard_experiments' + super().__init__(test_group=test_group, name=name) + + self.add_step( + SetupMesh(test_case=self)) + + for experiment in ['a', 'b', 'c', 'd', 'f', 'g']: + name = 'experiment_{}'.format(experiment) + self.add_step( + RunExperiment(test_case=self, name=name, subdir=name, cores=4, + threads=1, experiment=experiment)) + + self.add_step( + Visualize(test_case=self)) + + def configure(self): + """ + Modify the configuration options for this test case + """ + # We want to visualize all test cases by default + self.config.set('eismint2_viz', 'experiment', 'a, b, c, d, f, g') + + # no run() method is needed because we will just do the default: run all + # the steps diff --git a/compass/landice/tests/eismint2/standard_experiments/visualize.py b/compass/landice/tests/eismint2/standard_experiments/visualize.py new file mode 100644 index 0000000000..20e3a7b991 --- /dev/null +++ b/compass/landice/tests/eismint2/standard_experiments/visualize.py @@ -0,0 +1,481 @@ +import datetime +import netCDF4 +import matplotlib.pyplot as plt +import numpy as np +from scipy.interpolate import griddata + +from compass.step import Step + + +class Visualize(Step): + """ + A step for visualizing the output from a EISMINT2 test case + """ + def __init__(self, test_case): + """ + Create the step + + Parameters + ---------- + test_case : compass.landice.tests.eismint2.standard_experiments.StandardExperiments + The test case this step belongs to + """ + super().__init__(test_case=test_case, name='visualize') + + # depending on settings, this may produce no outputs, so we won't add + # any + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + config = 
self.config + logger = self.logger + experiment = config.get('eismint2_viz', 'experiment') + + if ',' in experiment: + experiments = [exp.strip() for exp in experiment.split(',')] + else: + experiments = [experiment] + + for experiment in experiments: + logger.info('Plotting Experiment {}'.format(experiment)) + visualize_eismint2(config, logger, experiment) + + +def visualize_eismint2(config, logger, experiment): + """ + Plot the output from an EISMINT2 experiment + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults + for the machine, core and configuration + + logger : logging.Logger + A logger for output from the step + + experiment : {'a', 'b', 'c', 'd', 'f', 'g'} + The name of the experiment + """ + + section = config['eismint2_viz'] + save_images = section.getboolean('save_images') + hide_figs = section.getboolean('hide_figs') + + filename = '../experiment_{}/output.nc'.format(experiment) + + # open supplied MPAS output file and get variables needed + filein = netCDF4.Dataset(filename, 'r') + xCell = filein.variables['xCell'][:]/1000.0 + yCell = filein.variables['yCell'][:]/1000.0 + xtime = filein.variables['xtime'][:] + nCells = len(filein.dimensions['nCells']) + nVertLevels = len(filein.dimensions['nVertLevels']) + years = _xtime_get_year(xtime) + + thickness = filein.variables['thickness'] + basalTemperature = filein.variables['basalTemperature'] + basalPmpTemperature = filein.variables['basalPmpTemperature'] + flwa = filein.variables['flowParamA'] + uReconstructX = filein.variables['uReconstructX'] + uReconstructY = filein.variables['uReconstructY'] + areaCell = filein.variables['areaCell'][:] + layerThicknessFractions = filein.variables['layerThicknessFractions'][:] + + # Use final time + timelev = -1 + logger.info('Using final model time of {} \n'.format( + xtime[timelev, :].tostring().strip().decode('utf-8'))) + + # ================ + # ================ + # Plot the 
results + # ================ + # ================ + + # ================ + # BASAL TEMPERATURE MAP + # ================ + + # make an educated guess about how big the markers should be. + if nCells**0.5 < 100.0: + markersize = max(int(round(3600.0/(nCells**0.5))), 1) + # use hexes if the points are big enough, otherwise just dots + markershape = 'h' + else: + markersize = max(int(round(1800.0/(nCells**0.5))), 1) + markershape = '.' + logger.info('Using a markersize of {}'.format(markersize)) + + fig = plt.figure(1, facecolor='w') + fig.suptitle('Payne et al. Fig. 1, 3, 6, 9, or 11', fontsize=10, fontweight='bold') + + iceIndices = np.where(thickness[timelev, :] > 10.0)[0] + plt.scatter(xCell[iceIndices], yCell[iceIndices], markersize, + c=np.array([[0.8, 0.8, 0.8], ]), marker=markershape, + edgecolors='none') + + # add contours of ice temperature over the top + basalTemp = basalTemperature[timelev, :] + # fill places below dynamic limit with non-ice value of 273.15 + basalTemp[np.where(thickness[timelev, :] < 10.0)] = 273.15 + _contour_mpas(basalTemp, nCells, xCell, yCell, + contour_levs=np.linspace(240.0, 275.0, 8)) + + plt.axis('equal') + plt.title('Modeled basal temperature (K) \n at time {}'.format( + netCDF4.chartostring(xtime)[timelev].strip())) + plt.xlim((0.0, 1500.0)) + plt.ylim((0.0, 1500.0)) + plt.xlabel('X position (km)') + plt.ylabel('Y position (km)') + + if save_images: + plt.savefig('EISMINT2-{}-basaltemp.png'.format(experiment), dpi=150) + + # ================ + # STEADY STATE MAPS - panels b and c are switched and with incorrect units in the paper + # ================ + fig = plt.figure(2, facecolor='w', figsize=(12, 6), dpi=72) + fig.suptitle('Payne et al. Fig. 
2 or 4', fontsize=10, fontweight='bold') + + # ================ + # panel a - thickness + ax1 = fig.add_subplot(131) + + plt.scatter(xCell[iceIndices], yCell[iceIndices], markersize, + c=np.array([[0.8, 0.8, 0.8], ]), marker=markershape, + edgecolors='none') + + # add contours of ice thickness over the top + contour_intervals = np.linspace(0.0, 5000.0, int(5000.0/250.0)+1) + _contour_mpas(thickness[timelev, :], nCells, xCell, yCell, + contour_levs=contour_intervals) + + plt.title('Final thickness (m)') + ax1.set_aspect('equal') + plt.xlabel('X position (km)') + plt.ylabel('Y position (km)') + + # ================ + # panel c - flux + ax = fig.add_subplot(133, sharex=ax1, sharey=ax1) + + flux = np.zeros((nCells,)) + for k in range(nVertLevels): + speedLevel = (uReconstructX[timelev, :, k:k+2].mean(axis=1)**2 + + uReconstructY[timelev, :, k:k+2].mean(axis=1)**2)**0.5 + flux += speedLevel * thickness[timelev, :] * layerThicknessFractions[k] + + plt.scatter(xCell[iceIndices], yCell[iceIndices], markersize, + c=np.array([[0.8, 0.8, 0.8], ]), marker=markershape, + edgecolors='none') + + # add contours over the top + contour_intervals = np.linspace(0.0, 20.0, 11) + _contour_mpas(flux * 3600.0*24.0*365.0 / 10000.0, nCells, xCell, yCell, + contour_levs=contour_intervals) + ax.set_aspect('equal') + plt.title('Final flux (m$^2$ a$^{-1}$ / 10000)') + plt.xlabel('X position (km)') + plt.ylabel('Y position (km)') + + # ================ + # panel b - flow factor + ax = fig.add_subplot(132, sharex=ax1, sharey=ax1) + + plt.scatter(xCell[iceIndices], yCell[iceIndices], markersize, + c=np.array([[0.8, 0.8, 0.8], ]), marker=markershape, + edgecolors='none') + + # add contours over the top + # contour_intervals = np.linspace(0.0, 16.0, int(16.0/0.5)+1) + + # this is not used if FO velo solver is used + if flwa[timelev, :, :].max() > 0.0: + # NOT SURE WHICH LEVEL FLWA SHOULD COME FROM - so taking column average + _contour_mpas( + flwa[timelev, :, :].mean(axis=1) * 3600.0*24.0*365.0 / 
1.0e-17, + nCells, xCell, yCell) + ax.set_aspect('equal') + # Note: the paper's figure claims units of 10$^{-25}$ Pa$^{-3}$ a$^{-1}$ + # but the time unit appears to be 10^-17 + plt.title('Final flow factor (10$^{-17}$ Pa$^{-3}$ a$^{-1}$)') + plt.xlabel('X position (km)') + plt.ylabel('Y position (km)') + + if save_images: + plt.savefig('EISMINT2-{}-steady.png'.format(experiment), dpi=150) + + # ================ + # DIVIDE EVOLUTION TIME SERIES + # ================ + fig = plt.figure(3, facecolor='w') + fig.suptitle('Payne et al. Fig. 5, 7, or 8', fontsize=10, fontweight='bold') + + # get indices for given time + if experiment == 'b': + endTime = 40000.0 + elif experiment == 'g': + # WHL - Might change later to 80000 + endTime = 40000.0 + else: + endTime = 80000.0 + + # get index at divide - we set this up to be 750,750 + divideIndex = np.logical_and(xCell == 750.0, yCell == 750.0) + + # panel a - thickness + fig.add_subplot(211) + timeInd = np.nonzero(years <= endTime)[0][0:] + plt.plot(years[timeInd]/1000.0, thickness[timeInd, divideIndex], 'k.-') + plt.ylabel('Thickness (m)') + + # panel b - basal temperature + fig.add_subplot(212) + # skip the first index cause basalTemperature isn't calculated then + timeInd = np.nonzero(years <= endTime)[0][1:] + plt.plot(years[timeInd]/1000.0, basalTemperature[timeInd, divideIndex], 'k.-') + plt.ylabel('Basal temperature (K)') + plt.xlabel('Time (kyr)') + + if save_images: + plt.savefig('EISMINT2-{}-divide.png'.format(experiment), dpi=150) + + # ================ + # TABLES + # ================ + # Setup dictionaries of benchmark results for each experiment - values are + # mean, min, max from Tables in Payne et al. 
2000 + benchmarks = {'a': {'stattype': 'absolute', + 'volume': (2.128, 2.060, 2.205), + 'area': (1.034, 1.011, 1.097), + 'meltfraction': (0.718, 0.587, 0.877), + 'dividethickness': (3688.342, 3644.0, 3740.74), + 'dividebasaltemp': (255.605, 254.16, 257.089)}, + 'b': {'stattype': 'relative', + 'volume': (-2.589, -3.079, -2.132), + 'area': (0.0, 0.0, 0.0), + 'meltfraction': (11.836, 3.307, 21.976), + 'dividethickness': (-4.927, -5.387, -4.071), + 'dividebasaltemp': (4.623, 4.47, 4.988)}, + 'c': {'stattype': 'relative', + 'volume': (-28.505, -29.226, -28.022), + 'area': (-19.515, -20.369, -16.815), + 'meltfraction': (-27.806, -39.353, -7.982), + 'dividethickness': (-12.928, -13.948, -12.447), + 'dividebasaltemp': (3.707, 3.389, 4.004)}, + 'd': {'stattype': 'relative', + 'volume': (-12.085, -12.890, -11.654), + 'area': (-9.489, -10.184, -6.924), + 'meltfraction': (-1.613, -4.744, 1.001), + 'dividethickness': (-2.181, -2.517, -1.985), + 'dividebasaltemp': (-0.188, -0.209, -0.149)}, + 'f': {'stattype': 'absolute', + 'volume': (0.0, 0.0, 0.0), + 'area': (0.0, 0.0, 0.0), + 'meltfraction': (0.0, 0.0, 0.0), + 'dividethickness': (0.0, 0.0, 0.0), + 'dividebasaltemp': (0.0, 0.0, 0.0)}, + 'g': {'stattype': 'absolute', + 'volume': (1.589, 1.503, 2.205), + 'area': (1.032, 1.016, 1.087), + 'meltfraction': (0.352, 0.250, 0.780), + 'dividethickness': (2365.206, 2212.550, 3681.431), + 'dividebasaltemp': (249.134, 247.700, 255.381)}} + + # Get the benchmark dictionary + bench = benchmarks[experiment] + + fig = plt.figure(4, facecolor='w') + fig.suptitle('Payne et al. 
Table 4, 5, 6, 7, 8, or 9: showing ' + 'min/mean/max of community', fontsize=10, fontweight='bold') + + fig.add_subplot(151) + volume = ((thickness[timelev, iceIndices] * areaCell[iceIndices]).sum() + / 1000.0**3 / 10.0**6) + # benchmark results + plt.plot(np.zeros((3,)), bench['volume'], 'k*') + if bench['stattype'] == 'relative': + initIceIndices = np.where(thickness[0, :] > 0.0)[0] + total_volume = \ + (thickness[0, initIceIndices] * areaCell[initIceIndices]).sum() + volume = (volume / (total_volume / 1000.0**3 / 10.0**6) - 1.0) * 100.0 + plt.ylabel('Volume change (%)') + else: + plt.ylabel('Volume (10$^6$ km$^3$)') + # MPAS results + plt.plot((0.0,), volume, 'ro') + plt.xticks(()) + logger.info("MALI volume = {}".format(volume)) + + fig.add_subplot(152) + area = (areaCell[iceIndices]).sum() / 1000.0**2 / 10.0**6 + areaAbsolute = area + # benchmark results + plt.plot(np.zeros((3,)), bench['area'], 'k*') + if bench['stattype'] == 'relative': + initArea = (areaCell[initIceIndices]).sum() / 1000.0**2 / 10.0**6 + area = (area / initArea - 1.0) * 100.0 + plt.ylabel('Area change (%)') + else: + plt.ylabel('Area (10$^6$ km$^2$)') + # MPAS results + plt.plot((0.0,), area, 'ro') + plt.xticks(()) + logger.info("MALI area = {}".format(area)) + + fig.add_subplot(153) + # using threshold here to identify melted locations + warmBedIndices = np.where( + np.logical_and(thickness[timelev, :] > 0.0, + basalTemperature[timelev, :] >= + (basalPmpTemperature[timelev, :] - 0.01)))[0] + meltfraction = (areaCell[warmBedIndices].sum() / 1000.0**2 / 10.0**6 / + areaAbsolute) + # benchmark results + plt.plot(np.zeros((3,)), bench['meltfraction'], 'k*') + if bench['stattype'] == 'relative': + # use time 1 instead of 0 since these fields aren't fully populated at + # time 0 + initIceIndices = np.where(thickness[1, :] > 0.0)[0] + initArea = (areaCell[initIceIndices].sum() / 1000.0**2 / 10.0**6) + # using threshold here to identify melted locations + initWarmBedIndices = \ + 
np.where(np.logical_and(thickness[1, :] > 0.0, + basalTemperature[1, :] >= + (basalPmpTemperature[1, :] - 0.01)))[0] + initWarmArea = (areaCell[initWarmBedIndices].sum() / 1000.0**2 / + 10.0**6) + initMeltFraction = initWarmArea / initArea + meltfraction = (meltfraction / initMeltFraction - 1.0) * 100.0 + plt.ylabel('Melt fraction change (%)') + else: + plt.ylabel('Melt fraction') + # MPAS results + plt.plot((0.0,), meltfraction, 'ro') + plt.xticks(()) + logger.info("MALI melt fraction = {}".format(meltfraction)) + + fig.add_subplot(154) + dividethickness = thickness[timelev, divideIndex] + # benchmark results + plt.plot(np.zeros((3,)), bench['dividethickness'], 'k*') + if bench['stattype'] == 'relative': + dividethickness = \ + (dividethickness / thickness[0, divideIndex] - 1.0) * 100.0 + plt.ylabel('Divide thickness change (%)') + else: + plt.ylabel('Divide thickness (m)') + plt.plot((0.0,), dividethickness, 'ro') # MPAS results + plt.xticks(()) + logger.info("MALI divide thickness = {}".format(dividethickness[0])) + + fig.add_subplot(155) + dividebasaltemp = basalTemperature[timelev, divideIndex] + # benchmark results + plt.plot(np.zeros((3,)), bench['dividebasaltemp'], 'k*') + if bench['stattype'] == 'relative': + # use time 1 instead of 0 since these fields aren't fully populated at + # time 0 + dividebasaltemp = dividebasaltemp - basalTemperature[1, divideIndex] + plt.ylabel('Divide basal temp. change (K)') + else: + plt.ylabel('Divide basal temp. 
(K)') + plt.plot((0.0,), dividebasaltemp, 'ro') # MPAS results + plt.xticks(()) + logger.info( + "MALI divide basal temperature = {}".format(dividebasaltemp[0])) + + plt.tight_layout() + + plt.draw() + if save_images: + plt.savefig('EISMINT2-{}-table.png'.format(experiment), dpi=150) + + if hide_figs: + logger.info("Plot display disabled with hide_plot config option.") + else: + plt.show() + + plt.close('all') + + +def _xtime_to_numtime(xtime): + """ + Define a function to convert xtime character array to numeric time values + using datetime objects + """ + # First parse the xtime character array into a string + + # convert from the character array to an array of strings using the netCDF4 + # module's function + xtimestr = netCDF4.chartostring(xtime) + + dt = [] + for stritem in xtimestr: + # Get an array of strings that are Y,M,D,h,m,s + itemarray = \ + stritem.strip().replace('_', '-').replace(':', '-').split('-') + results = [int(i) for i in itemarray] + # datetime has a bug where years less than 1900 are invalid on some + # systems + if results[0] < 1900: + results[0] += 1900 + # * notation passes in the array as arguments + dt.append(datetime.datetime(*results)) + + # use the netCDF4 module's function for converting a datetime to a time + # number + numtime = netCDF4.date2num(dt, units='seconds since ' + str(dt[0])) + return numtime + + +def _xtime_get_year(xtime): + """ + Get an array of years from an xtime array, ignoring any partial year + information + """ + # First parse the xtime character array into a string + + # convert from the character array to an array of strings using the netCDF4 + # module's function + xtimestr = netCDF4.chartostring(xtime) + years = np.zeros((len(xtimestr),)) + for i in range(len(xtimestr)): + # Get the year part and make it an integer + years[i] = (int(xtimestr[i].split('-')[0])) + return years + + +def _contour_mpas(field, nCells, xCell, yCell, contour_levs=None): + """Contours irregular MPAS data on cells""" + + if 
contour_levs is None: + contour_levs = np.array([0]) + + # -- Now let's grid your data. + # First we'll make a regular grid to interpolate onto. + + # may want to adjust the density of the regular grid + numcols = int(nCells**0.5 * 4.0) + numrows = numcols + xc = np.linspace(xCell.min(), xCell.max(), numcols) + yc = np.linspace(yCell.min(), yCell.max(), numrows) + xi, yi = np.meshgrid(xc, yc) + # -- Interpolate at the points in xi, yi + zi = griddata((xCell, yCell), field, (xi, yi)) + # -- Display the results + if len(contour_levs) == 1: + im = plt.contour(xi, yi, zi) + else: + im = plt.contour(xi, yi, zi, contour_levs, cmap=plt.cm.jet) + + # to see the raw data on top + # plt.scatter(xCell, yCell, c=temperature[timelev,:,-1], s=100, + # vmin=zi.min(), vmax=zi.max()) + plt.colorbar(im) diff --git a/compass/landice/tests/eismint2/streams.landice b/compass/landice/tests/eismint2/streams.landice new file mode 100644 index 0000000000..70e191e88d --- /dev/null +++ b/compass/landice/tests/eismint2/streams.landice @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/landice/tests/enthalpy_benchmark/A/A.cfg b/compass/landice/tests/enthalpy_benchmark/A/A.cfg new file mode 100644 index 0000000000..7054ce7105 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/A.cfg @@ -0,0 +1,26 @@ +# config options for enthalpy benchmark test cases +[enthalpy_benchmark] + +# number of levels in the mesh +levels = 50 + +# the initial thickness of the ice sheet (in m) +thickness = 1000.0 + +# the basal heat flux (in W m^{-2}) +basal_heat_flux = 0.042 + +# the initial surface air temperature (in K) +surface_air_temperature = 243.15 + +# the initial ice temperature (in K) +temperature = 243.15 + +# the surface air temperature (in K) for the first 100,000 years +phase1_surface_air_temperature = 243.15 + +# the surface air temperature (in K) for the next 50,000 years +phase2_surface_air_temperature = 268.15 + +# the 
surface air temperature (in K) for the final 150,000 years +phase3_surface_air_temperature = 243.15 diff --git a/compass/landice/tests/enthalpy_benchmark/A/__init__.py b/compass/landice/tests/enthalpy_benchmark/A/__init__.py new file mode 100644 index 0000000000..6992513183 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/__init__.py @@ -0,0 +1,71 @@ +from importlib.resources import path + +from compass.io import symlink +from compass.config import add_config +from compass.validate import compare_variables +from compass.landice.tests.enthalpy_benchmark.setup_mesh import SetupMesh +from compass.landice.tests.enthalpy_benchmark.run_model import RunModel +from compass.landice.tests.enthalpy_benchmark.A.visualize import Visualize +from compass.testcase import TestCase + + +class A(TestCase): + """ + The Kleiner enthalpy benchmark test case A + + Attributes + ---------- + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.enthalpy_benchmark.EnthalpyBenchmark + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='A') + module = self.__module__ + + self.add_step( + SetupMesh(test_case=self)) + + restart_filenames = ['../setup_mesh/landice_grid.nc', + '../phase1/restart.100000.nc', + '../phase2/restart.150000.nc'] + for index, restart_filename in enumerate(restart_filenames): + name = 'phase{}'.format(index+1) + step = RunModel(test_case=self, cores=1, threads=1, name=name, + subdir=name, restart_filename=restart_filename) + + suffix = 'landice{}'.format(index+1) + step.add_namelist_file(module, 'namelist.{}'.format(suffix)) + step.add_streams_file(module, 'streams.{}'.format(suffix)) + self.add_step(step) + + self.add_step( + Visualize(test_case=self)) + + def configure(self): + """ + Modify the configuration options for this test case + """ + add_config(self.config, 'compass.landice.tests.enthalpy_benchmark.A', + 'A.cfg', 
exception=True) + + with path('compass.landice.tests.enthalpy_benchmark', 'README') as \ + target: + symlink(str(target), '{}/README'.format(self.work_dir)) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + variables = ['temperature', 'basalWaterThickness', + 'groundedBasalMassBal'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='phase3/output.nc') diff --git a/compass/landice/tests/enthalpy_benchmark/A/enthA_analy_result.mat b/compass/landice/tests/enthalpy_benchmark/A/enthA_analy_result.mat new file mode 100755 index 0000000000..5def1953fd Binary files /dev/null and b/compass/landice/tests/enthalpy_benchmark/A/enthA_analy_result.mat differ diff --git a/compass/landice/tests/enthalpy_benchmark/A/namelist.landice1 b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice1 new file mode 100644 index 0000000000..7aeaf73574 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice1 @@ -0,0 +1 @@ +config_dt = '25-00-00_00:00:00' diff --git a/compass/landice/tests/enthalpy_benchmark/A/namelist.landice2 b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice2 new file mode 100644 index 0000000000..613b076d18 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice2 @@ -0,0 +1,5 @@ +config_surface_air_temperature_value = 268.15 +config_do_restart = .true. +config_start_time = '100000-01-01_00:00:00' +config_run_duration = '050000-00-00_00:00:00' +config_dt = '25-00-00_00:00:00' diff --git a/compass/landice/tests/enthalpy_benchmark/A/namelist.landice3 b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice3 new file mode 100644 index 0000000000..7f5b3020ab --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/namelist.landice3 @@ -0,0 +1,4 @@ +config_do_restart = .true. 
+config_start_time = '150000-01-01_00:00:00' +config_run_duration = '150000-00-00_00:00:00' +config_dt = '25-00-00_00:00:00' diff --git a/compass/landice/tests/enthalpy_benchmark/A/streams.landice1 b/compass/landice/tests/enthalpy_benchmark/A/streams.landice1 new file mode 100644 index 0000000000..d6bbce3081 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/streams.landice1 @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/enthalpy_benchmark/A/streams.landice2 b/compass/landice/tests/enthalpy_benchmark/A/streams.landice2 new file mode 100644 index 0000000000..d6bbce3081 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/streams.landice2 @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/enthalpy_benchmark/A/streams.landice3 b/compass/landice/tests/enthalpy_benchmark/A/streams.landice3 new file mode 100644 index 0000000000..43633e95a3 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/streams.landice3 @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/enthalpy_benchmark/A/visualize.py b/compass/landice/tests/enthalpy_benchmark/A/visualize.py new file mode 100644 index 0000000000..ab015b62b0 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/A/visualize.py @@ -0,0 +1,131 @@ +import numpy as np +from netCDF4 import Dataset +import matplotlib.pyplot as plt +from scipy.io import loadmat +from importlib.resources import path + +from compass.step import Step + + +class Visualize(Step): + """ + A step for visualizing the output from a dome test case + """ + def __init__(self, test_case): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + """ + super().__init__(test_case=test_case, name='visualize') + + for phase in range(1, 4): + self.add_input_file(filename='output{}.nc'.format(phase), + target='../phase{}/output.nc'.format(phase)) + + filename = 'enthA_analy_result.mat' + with 
path('compass.landice.tests.enthalpy_benchmark.A', filename) as \ + target: + self.add_input_file(filename=filename, target=str(target)) + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + logger = self.logger + section = self.config['enthalpy_benchmark_viz'] + + display_image = section.getboolean('display_image') + + if not display_image: + plt.switch_backend('Agg') + + anaData = loadmat('enthA_analy_result.mat') + basalMelt = anaData['basalMelt'] + + SPY = 31556926 + + years = list() + basalMeanTs = list() + basalMeanBmbs = list() + basalMeanWaterThicknesses = list() + + for phase in range(1, 4): + filename = 'output{}.nc'.format(phase) + year, basalMeanT, basalMeanBmb, basalMeanWaterThickness = \ + _get_data(filename, SPY) + years.append(year) + basalMeanTs.append(basalMeanT) + basalMeanBmbs.append(basalMeanBmb) + basalMeanWaterThicknesses.append(basalMeanWaterThickness) + + year = np.concatenate(years)[1::] / 1000.0 + basalMeanT = np.concatenate(basalMeanTs)[1::] + basalMeanBmb = np.concatenate(basalMeanBmbs)[1::] + basalMeanWaterThickness = np.concatenate(basalMeanWaterThicknesses)[1::] + + plt.figure(1) + plt.subplot(311) + plt.plot(year, basalMeanT - 273.15) + plt.ylabel(r'$T_{\rm b}$ ($^\circ \rm C$)') + plt.text(10, -28, '(a)', fontsize=20) + plt.grid(True) + + plt.subplot(312) + plt.plot(year, -basalMeanBmb * SPY) + plt.plot(basalMelt[1, :] / 1000.0, basalMelt[0, :], linewidth=2) + plt.ylabel(r'$a_{\rm b}$ (mm a$^{-1}$ w.e.)') + plt.text(10, -1.6, '(b)', fontsize=20) + plt.grid(True) + + plt.subplot(313) + plt.plot(year, basalMeanWaterThickness * 910.0 / 1000.0) + plt.ylabel(r'$H_{\rm w}$ (m)') + plt.xlabel('Year (ka)') + plt.text(10, 8, '(c)', fontsize=20) + plt.grid(True) + + # Create image plot + plotname = 'enthalpy_A_results.png' + plt.savefig(plotname, dpi=150) + logger.info('Saved plot as {}'.format(plotname)) + + if display_image: + plt.show() + + +def _get_data(filename, SPY): + G = 0.042 + kc = 2.1 
+ rhow = 1000.0 + Lw = 3.34e5 + + dz = 2.5 + with Dataset(filename, 'r') as data: + yr = data.variables['daysSinceStart'][:] / 365.0 + + basalT = data.variables['basalTemperature'][:, :] + basalMeanT = np.mean(basalT, axis=1) + + basalBmb = data.variables['groundedBasalMassBal'][:, :] + basalMeanBmb = np.mean(basalBmb, axis=1) + + basalWaterThickness = data.variables['basalWaterThickness'][:, :] + basalMeanWaterThickness = np.mean(basalWaterThickness, axis=1) + + T = data.variables['temperature'][:, :, :] + TMean = np.mean(T, axis=1) + TMean_nvert = TMean[:, -1] + + basalbmb = SPY * (G + kc * (TMean_nvert - basalMeanT) / dz) / (rhow * Lw) + + Hw = np.copy(basalbmb) + for i in range(len(basalbmb)): + Hw[i] = sum(basalbmb[0:i]) * 10 + + return yr, basalMeanT, basalMeanBmb, basalMeanWaterThickness diff --git a/compass/landice/tests/enthalpy_benchmark/B/B.cfg b/compass/landice/tests/enthalpy_benchmark/B/B.cfg new file mode 100644 index 0000000000..571215db4a --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/B/B.cfg @@ -0,0 +1,17 @@ +# config options for enthalpy benchmark test cases +[enthalpy_benchmark] + +# number of levels in the mesh +levels = 400 + +# the initial thickness of the ice sheet (in m) +thickness = 200.0 + +# the basal heat flux (in W m^{-2}) +basal_heat_flux = 0.0 + +# the initial surface air temperature (in K) +surface_air_temperature = 270.15 + +# the initial ice temperature (in K) +temperature = 270.15 diff --git a/compass/landice/tests/enthalpy_benchmark/B/__init__.py b/compass/landice/tests/enthalpy_benchmark/B/__init__.py new file mode 100644 index 0000000000..04cb229a33 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/B/__init__.py @@ -0,0 +1,48 @@ +from importlib.resources import path + +from compass.io import symlink +from compass.config import add_config +from compass.landice.tests.enthalpy_benchmark.setup_mesh import SetupMesh +from compass.landice.tests.enthalpy_benchmark.run_model import RunModel +from 
compass.landice.tests.enthalpy_benchmark.A.visualize import Visualize +from compass.testcase import TestCase + + +class B(TestCase): + """ + The Kleiner enthalpy benchmark test case B + + Attributes + ---------- + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.enthalpy_benchmark.EnthalpyBenchmark + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='B') + + self.add_step( + SetupMesh(test_case=self)) + self.add_step( + RunModel(test_case=self, name='run_model', cores=1, threads=1)) + self.add_step( + Visualize(test_case=self)) + + def configure(self): + """ + Modify the configuration options for this test case + """ + add_config(self.config, 'compass.landice.tests.enthalpy_benchmark.B', + 'B.cfg', exception=True) + + with path('compass.landice.tests.enthalpy_benchmark', 'README') as \ + target: + symlink(str(target), '{}/README'.format(self.work_dir)) + + # no run() method needed: we just run the steps, the default behavior diff --git a/compass/landice/tests/enthalpy_benchmark/B/enthB_analy_result.mat b/compass/landice/tests/enthalpy_benchmark/B/enthB_analy_result.mat new file mode 100755 index 0000000000..ec12705e44 Binary files /dev/null and b/compass/landice/tests/enthalpy_benchmark/B/enthB_analy_result.mat differ diff --git a/compass/landice/tests/enthalpy_benchmark/B/namelist.landice b/compass/landice/tests/enthalpy_benchmark/B/namelist.landice new file mode 100644 index 0000000000..07fd1b1854 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/B/namelist.landice @@ -0,0 +1 @@ +config_run_duration = 10000-00-00_00:00:00 \ No newline at end of file diff --git a/compass/landice/tests/enthalpy_benchmark/B/streams.landice b/compass/landice/tests/enthalpy_benchmark/B/streams.landice new file mode 100644 index 0000000000..cee6085ae5 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/B/streams.landice @@ -0,0 +1,6 
@@ + + + + + diff --git a/compass/landice/tests/enthalpy_benchmark/B/visualize.py b/compass/landice/tests/enthalpy_benchmark/B/visualize.py new file mode 100644 index 0000000000..d50b55cf99 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/B/visualize.py @@ -0,0 +1,112 @@ +import numpy as np +from netCDF4 import Dataset +import matplotlib.pyplot as plt +from scipy.io import loadmat +from importlib.resources import path + +from compass.step import Step + + +class Visualize(Step): + """ + A step for visualizing the output from a dome test case + """ + def __init__(self, test_case): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + """ + super().__init__(test_case=test_case, name='visualize') + + self.add_input_file(filename='output.nc', + target='../run_model/output.nc') + + filename = 'enthB_analy_result.mat' + with path('compass.landice.tests.enthalpy_benchmark.B', filename) as \ + target: + self.add_input_file(filename=filename, target=str(target)) + + # no setup function is needed + + def run(self): + """ + Run this step of the test case + """ + section = self.config['enthalpy_benchmark_viz'] + + display_image = section.getboolean('display_image') + + if not display_image: + plt.switch_backend('Agg') + + anaData = loadmat('enthB_analy_result.mat') + anaZ = anaData['enthB_analy_z'] + anaE = anaData['enthB_analy_E'] + anaT = anaData['enthB_analy_T'] + anaW = anaData['enthB_analy_omega'] + + cp_ice = 2009.0 + # rho_ice = 910.0 + + data = Dataset('output.nc', 'r') + + T = data.variables['temperature'][-1, :, :] + horiMeanT = np.mean(T, axis=0) + Ts = data.variables['surfaceTemperature'][-1, :] + meanTs = np.mean(Ts) + Tall = np.append(meanTs, horiMeanT) + + E = data.variables['enthalpy'][-1, :, :] + horiMeanE = np.mean(E, axis=0) + + W = data.variables['waterFrac'][-1, :, :] + horiMeanW = np.mean(W, axis=0) + + nz = len(data.dimensions['nVertLevels']) + z = 1.0 - (np.arange(nz) + 1.0) 
/ nz + + fsize = 14 + plt.figure(1) + plt.subplot(1, 3, 1) + plt.plot((horiMeanE / 910.0 + cp_ice * 50) / 1.0e3, z, label='MALI') + plt.plot(anaE / 1000, anaZ, label='analytical') + plt.xlabel(r'$E$ (10$^3$ J kg$^{-1}$)', fontsize=fsize) + plt.ylabel(r'$z/H$', fontsize=fsize) + plt.xticks(np.arange(92, 109, step=4), fontsize=fsize) + plt.yticks(fontsize=fsize) + plt.text(93, 0.05, 'a', fontsize=fsize) + plt.legend() + plt.grid(True) + + plt.subplot(1, 3, 2) + plt.plot(Tall - 273.15, np.append(1, z)) + plt.plot(anaT - 273.15, anaZ) + plt.xlabel(r'$T$ ($^\circ$C)', fontsize=fsize) + # plt.ylabel('$\zeta$', fontsize=20) + plt.xticks(np.arange(-3.5, 0.51, step=1), fontsize=fsize) + plt.yticks(fontsize=fsize) + plt.text(-3.2, 0.05, 'b', fontsize=fsize) + plt.grid(True) + # plt.gca().invert_yaxis() + + plt.subplot(1, 3, 3) + plt.plot(horiMeanW * 100, z) + plt.plot(anaW * 100, anaZ) + plt.xlabel(r'$\omega$ (%)', fontsize=fsize) + # plt.ylabel('$\zeta$',fontsize=20) + # plt.xlim(-0.5,3) + plt.xticks(np.arange(-0.5, 2.51, step=1), fontsize=fsize) + plt.yticks(fontsize=fsize) + plt.text(-0.3, 0.05, 'c', fontsize=fsize) + plt.grid(True) + + plotname = 'enthalpy_B_results.png' + plt.savefig(plotname, dpi=150) + self.logger.info('Saved plot as {}'.format(plotname)) + + if display_image: + plt.show() diff --git a/compass/landice/tests/enthalpy_benchmark/README b/compass/landice/tests/enthalpy_benchmark/README new file mode 100644 index 0000000000..7e1c4bd0be --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/README @@ -0,0 +1,49 @@ +This test case implements the enthalpy benchmarks described here: +Kleiner, T., Rückamp, M., Bondzio, J. H., and Humbert, A.: Enthalpy benchmark +experiments for numerical ice sheet models, The Cryosphere, 9, 217–228, +https://doi.org/10.5194/tc-9-217-2015, 2015. + +For Benchmark A, we don't have to change the code. +By default the Benchmark A visualization script saves the plot as .png file. 
+However, it is possible to have it plot to an interactive python plot by adjusting a flag at the end of the script. +The output frequency for A has been reduced so that the test will run quickly enough to include +in regression testing. If you want more detailed output, you can adjust the output interval in the streams files. + + + +For Benchmark B, we need to make the following code changes: + +1) in mpas_li_thermal.F, in the subroutine enthalpy_matrix_elements, + +change + +subd(2:nVertLevels+1) = -factor * diffusivity(1:nVertLevels) * dsigmaTerm(1:nVertLevels,1) + +to + +subd(2:nVertLevels+1) = -factor * (diffusivity(1:nVertLevels) * dsigmaTerm(1:nVertLevels,1)+0.2/scyr*sqrt(dsigmaTerm(1:nVertLevels,1))*thickness) + + +and change + +rhs(2:nVertLevels+1) = enthalpy(1:nVertLevels) + heatDissipation(1:nVertLevels) * deltat * rhoi * cp_ice + +to + +rhs(2:nVertLevels+1) = enthalpy(1:nVertLevels) + 0*heatDissipation(1:nVertLevels) * deltat * rhoi * cp_ice + 2.0_RKIND*5.3e-24_RKIND*((910.0_RKIND*9.81_RKIND*sin(4.0_RKIND*pii/180.0_RKIND))**4)*((thickness*layerCenterSigma(1:nVertLevels))**4)*deltat + +2) in mpas_li_constants.F + +change + +iceMeltingPointPressureDependence = 9.7456e-8_RKIND + +to + +iceMeltingPointPressureDependence = 0.0_RKIND + + +and recompile the code before testing Benchmark B + +Also, for Benchmark B, you can run with more vertical layers to match the lower row of Kleiner Figure 4. +To do so, change the "levels" config option to 400.
diff --git a/compass/landice/tests/enthalpy_benchmark/__init__.py b/compass/landice/tests/enthalpy_benchmark/__init__.py new file mode 100644 index 0000000000..793dafbeff --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/__init__.py @@ -0,0 +1,18 @@ +from compass.testgroup import TestGroup +from compass.landice.tests.enthalpy_benchmark.A import A +from compass.landice.tests.enthalpy_benchmark.B import B + + +class EnthalpyBenchmark(TestGroup): + """ + A test group for enthalpy benchmark test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.landice.Landice + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='enthalpy_benchmark') + + self.add_test_case(A(test_group=self)) + self.add_test_case(B(test_group=self)) diff --git a/compass/landice/tests/enthalpy_benchmark/enthalpy_benchmark.cfg b/compass/landice/tests/enthalpy_benchmark/enthalpy_benchmark.cfg new file mode 100644 index 0000000000..e0f8e9e73d --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/enthalpy_benchmark.cfg @@ -0,0 +1,15 @@ +# config options for enthalpy benchmark test cases +[enthalpy_benchmark] + +# sizes of the mesh (in cells) +nx = 2 +ny = 2 + +# resolution of the mesh (in m) +dc = 1000.0 + +# config options related to visualization for enthalpy benchmark test cases +[enthalpy_benchmark_viz] + +# whether to show the plot window +display_image = False \ No newline at end of file diff --git a/compass/landice/tests/enthalpy_benchmark/namelist.landice b/compass/landice/tests/enthalpy_benchmark/namelist.landice new file mode 100644 index 0000000000..5971040bae --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/namelist.landice @@ -0,0 +1,11 @@ +config_velocity_solver = 'none' +config_thickness_advection = 'none' +config_thermal_solver = 'enthalpy' +config_max_water_fraction = 0.05 +config_surface_air_temperature_value = 243.15 +config_dt = '0010-00-00_00:00:00' +config_run_duration =
'100000-00-00_00:00:00' +config_block_decomp_file_prefix = 'graph.info.part.' +config_AM_globalStats_enable = .false. +config_year_digits = 6 +config_num_halos = 1 diff --git a/compass/landice/tests/enthalpy_benchmark/run_model.py b/compass/landice/tests/enthalpy_benchmark/run_model.py new file mode 100644 index 0000000000..6c61289f18 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/run_model.py @@ -0,0 +1,95 @@ +import os +from netCDF4 import Dataset + +from compass.model import run_model +from compass.step import Step + + +class RunModel(Step): + """ + A step for performing forward MALI runs as part of enthalpy benchmark test cases. + + Attributes + ---------- + restart_filename : str, optional + The name of a restart file to continue the run from + """ + def __init__(self, test_case, name, restart_filename=None, subdir=None, + cores=1, min_cores=None, threads=1): + """ + Create a new step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str + the name of the test case + + restart_filename : str, optional + The name of a restart file to continue the run from + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires.
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + """ + self.restart_filename = restart_filename + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + self.add_namelist_file('compass.landice.tests.enthalpy_benchmark', + 'namelist.landice') + self.add_streams_file('compass.landice.tests.enthalpy_benchmark', + 'streams.landice') + + self.add_input_file(filename='landice_grid.nc', + target='../setup_mesh/landice_grid.nc') + self.add_input_file(filename='graph.info', + target='../setup_mesh/graph.info') + + if restart_filename is not None: + filename = os.path.basename(restart_filename) + self.add_input_file(filename=filename, target=restart_filename) + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + if self.restart_filename is not None: + self._update_surface_air_temperature() + + run_model(self) + + def _update_surface_air_temperature(self): + section = self.config['enthalpy_benchmark'] + phase = self.name + # set the surface air temperature + option = '{}_surface_air_temperature'.format(phase) + surface_air_temperature = section.getfloat(option) + filename = self.restart_filename + with Dataset(filename, 'r+') as data: + data.variables['surfaceAirTemperature'][0, :] = \ + surface_air_temperature diff --git a/compass/landice/tests/enthalpy_benchmark/setup_mesh.py b/compass/landice/tests/enthalpy_benchmark/setup_mesh.py new file mode 100644 index 0000000000..16e0ae81d5 --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/setup_mesh.py @@ -0,0 +1,84 @@ +from netCDF4 import Dataset as NetCDFFile + +from mpas_tools.planar_hex import make_planar_hex_mesh 
+from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull +from mpas_tools.logging import check_call + +from compass.model import make_graph_file +from compass.step import Step + + +class SetupMesh(Step): + """ + A step for creating a mesh and initial condition for enthalpy benchmark + test cases + """ + def __init__(self, test_case): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + """ + super().__init__(test_case=test_case, name='setup_mesh') + self.add_output_file(filename='graph.info') + self.add_output_file(filename='landice_grid.nc') + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + logger = self.logger + section = self.config['enthalpy_benchmark'] + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + levels = section.get('levels') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=True, + nonperiodic_y=True) + + write_netcdf(dsMesh, 'grid.nc') + + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, logger=logger) + write_netcdf(dsMesh, 'mpas_grid.nc') + + args = ['create_landice_grid_from_generic_MPAS_grid.py', + '-i', 'mpas_grid.nc', + '-o', 'landice_grid.nc', + '-l', levels, + '--thermal'] + + check_call(args, logger) + + make_graph_file(mesh_filename='landice_grid.nc', + graph_filename='graph.info') + + _setup_initial_conditions(section, 'landice_grid.nc') + + +def _setup_initial_conditions(section, filename): + """ Add the initial conditions for enthalpy benchmark A """ + thickness = section.getfloat('thickness') + basal_heat_flux = section.getfloat('basal_heat_flux') + surface_air_temperature = section.getfloat('surface_air_temperature') + temperature = section.getfloat('temperature') + + with NetCDFFile(filename, 'r+') as gridfile: + thicknessVar = gridfile.variables['thickness'] + bedTopography = gridfile.variables['bedTopography'] 
+ basalHeatFlux = gridfile.variables['basalHeatFlux'] + surfaceAirTemperature = gridfile.variables['surfaceAirTemperature'] + temperatureVar = gridfile.variables['temperature'] + + thicknessVar[:] = thickness + bedTopography[:] = 0 + basalHeatFlux[:] = basal_heat_flux + surfaceAirTemperature[:] = surface_air_temperature + temperatureVar[:] = temperature diff --git a/compass/landice/tests/enthalpy_benchmark/streams.landice b/compass/landice/tests/enthalpy_benchmark/streams.landice new file mode 100644 index 0000000000..1a8f650e1d --- /dev/null +++ b/compass/landice/tests/enthalpy_benchmark/streams.landice @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/landice/tests/greenland/__init__.py b/compass/landice/tests/greenland/__init__.py new file mode 100644 index 0000000000..412fda4de6 --- /dev/null +++ b/compass/landice/tests/greenland/__init__.py @@ -0,0 +1,20 @@ +from compass.testgroup import TestGroup +from compass.landice.tests.greenland.smoke_test import SmokeTest +from compass.landice.tests.greenland.decomposition_test import DecompositionTest +from compass.landice.tests.greenland.restart_test import RestartTest + + +class Greenland(TestGroup): + """ + A test group for Greenland test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.landice.Landice + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='greenland') + + self.add_test_case(SmokeTest(test_group=self)) + self.add_test_case(DecompositionTest(test_group=self)) + self.add_test_case(RestartTest(test_group=self)) diff --git a/compass/landice/tests/greenland/decomposition_test/__init__.py b/compass/landice/tests/greenland/decomposition_test/__init__.py new file mode 100644 index 0000000000..cd60e29b0f --- /dev/null +++ b/compass/landice/tests/greenland/decomposition_test/__init__.py @@ -0,0 +1,45 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from 
compass.landice.tests.greenland.run_model import RunModel + + +class DecompositionTest(TestCase): + """ + A test case for performing two MALI runs of the Greenland Ice Sheet setup, + one with one core and one with eight. The test case verifies that the + results of the two runs are identical. + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.greenland.Greenland + The test group that this test case belongs to + """ + name = 'decomposition_test' + super().__init__(test_group=test_group, name=name) + + for procs in [1, 8]: + name = '{}proc_run'.format(procs) + self.add_step( + RunModel(test_case=self, name=name, subdir=name, cores=procs, + threads=1)) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'normalVelocity'] + steps = self.steps_to_run + if '1proc_run' in steps and '8proc_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1proc_run/output.nc', + filename2='8proc_run/output.nc') diff --git a/compass/landice/tests/greenland/namelist.landice b/compass/landice/tests/greenland/namelist.landice new file mode 100644 index 0000000000..d1652fc306 --- /dev/null +++ b/compass/landice/tests/greenland/namelist.landice @@ -0,0 +1,6 @@ +config_dt = '0000-00-01_00:00:00' +config_run_duration = '0000-00-05_00:00:00' +config_block_decomp_file_prefix = 'graph.info.part.' +config_calving = 'thickness_threshold' +config_calving_thickness = 300.0 +config_restore_calving_front = .true. 
diff --git a/compass/landice/tests/greenland/restart_test/__init__.py b/compass/landice/tests/greenland/restart_test/__init__.py new file mode 100644 index 0000000000..ee3b9ea7c6 --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/__init__.py @@ -0,0 +1,71 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.greenland.run_model import RunModel + + +class RestartTest(TestCase): + """ + A test case for performing two MALI runs of the Greenland Ice Sheet setup, + one full run and one run broken into two segments with a restart. The + test case verifies that the results of the two runs are identical. + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.greenland.Greenland + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='restart_test') + + name = 'full_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1) + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.greenland.restart_test', + 'namelist.full', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.greenland.restart_test', + 'streams.full', out_name='streams.landice') + self.add_step(step) + + name = 'restart_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1, suffixes=['landice', 'landice.rst']) + + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.greenland.restart_test', + 'namelist.restart', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.greenland.restart_test', + 'streams.restart', out_name='streams.landice') + + step.add_namelist_file( + 'compass.landice.tests.greenland.restart_test', + 'namelist.restart.rst', out_name='namelist.landice.rst') + # same streams file for both restart stages 
+ step.add_streams_file( + 'compass.landice.tests.greenland.restart_test', + 'streams.restart', out_name='streams.landice.rst') + self.add_step(step) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['thickness', 'normalVelocity'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/landice/tests/greenland/restart_test/namelist.full b/compass/landice/tests/greenland/restart_test/namelist.full new file mode 100644 index 0000000000..19320af51c --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/namelist.full @@ -0,0 +1,5 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '0000-00-05_00:00:00' +config_dt = '0000-00-01_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .false. diff --git a/compass/landice/tests/greenland/restart_test/namelist.restart b/compass/landice/tests/greenland/restart_test/namelist.restart new file mode 100644 index 0000000000..afc7429704 --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/namelist.restart @@ -0,0 +1,6 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '0000-00-03_00:00:00' +config_dt = '0000-00-01_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .false. + diff --git a/compass/landice/tests/greenland/restart_test/namelist.restart.rst b/compass/landice/tests/greenland/restart_test/namelist.restart.rst new file mode 100644 index 0000000000..2ed1226dd7 --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/namelist.restart.rst @@ -0,0 +1,5 @@ +config_start_time = '0001-01-04_00:00:00' +config_run_duration = '0000-00-02_00:00:00' +config_dt = '0000-00-01_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .true. 
diff --git a/compass/landice/tests/greenland/restart_test/streams.full b/compass/landice/tests/greenland/restart_test/streams.full new file mode 100644 index 0000000000..7232078f70 --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/streams.full @@ -0,0 +1,13 @@ + + + + + + + diff --git a/compass/landice/tests/greenland/restart_test/streams.restart b/compass/landice/tests/greenland/restart_test/streams.restart new file mode 100644 index 0000000000..50ab022667 --- /dev/null +++ b/compass/landice/tests/greenland/restart_test/streams.restart @@ -0,0 +1,14 @@ + + + + + + + diff --git a/compass/landice/tests/greenland/run_model.py b/compass/landice/tests/greenland/run_model.py new file mode 100644 index 0000000000..a878a6a931 --- /dev/null +++ b/compass/landice/tests/greenland/run_model.py @@ -0,0 +1,93 @@ +from compass.model import make_graph_file, run_model +from compass.step import Step + + +class RunModel(Step): + """ + A step for performing forward MALI runs as part of dome test cases. + + Attributes + ---------- + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + def __init__(self, test_case, name='run_model', subdir=None, cores=1, + min_cores=None, threads=1, suffixes=None): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str, optional + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. 
If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + if suffixes is None: + suffixes = ['landice'] + self.suffixes = suffixes + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + # download and link the mesh + self.add_input_file(filename='landice_grid.nc', + target='gis20km.150922.nc', database='') + + for suffix in suffixes: + self.add_namelist_file( + 'compass.landice.tests.greenland', 'namelist.landice', + out_name='namelist.{}'.format(suffix)) + + self.add_streams_file( + 'compass.landice.tests.greenland', 'streams.landice', + out_name='streams.{}'.format(suffix)) + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + make_graph_file(mesh_filename='landice_grid.nc', + graph_filename='graph.info') + for suffix in self.suffixes: + run_model(step=self, namelist='namelist.{}'.format(suffix), + streams='streams.{}'.format(suffix)) diff --git a/compass/landice/tests/greenland/smoke_test/__init__.py b/compass/landice/tests/greenland/smoke_test/__init__.py new file mode 100644 index 
0000000000..b19389c51d --- /dev/null +++ b/compass/landice/tests/greenland/smoke_test/__init__.py @@ -0,0 +1,30 @@ +from compass.testcase import TestCase +from compass.landice.tests.greenland.run_model import RunModel + + +class SmokeTest(TestCase): + """ + The default test case for the Greenland test group simply downloads the + mesh and initial condition, then performs a short forward run on 4 cores. + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.greenland.Greenland + The test group that this test case belongs to + """ + name = 'smoke_test' + super().__init__(test_group=test_group, name=name) + + self.add_step( + RunModel(test_case=self, cores=4, threads=1)) + + # no configure() method is needed because we will use the default dome + # config options + + # no run() method is needed because we're doing the default: running all + # steps diff --git a/compass/landice/tests/greenland/streams.landice b/compass/landice/tests/greenland/streams.landice new file mode 100644 index 0000000000..853191ee56 --- /dev/null +++ b/compass/landice/tests/greenland/streams.landice @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/landice/tests/hydro_radial/__init__.py b/compass/landice/tests/hydro_radial/__init__.py new file mode 100644 index 0000000000..c3a56ff9c9 --- /dev/null +++ b/compass/landice/tests/hydro_radial/__init__.py @@ -0,0 +1,24 @@ +from compass.testgroup import TestGroup +from compass.landice.tests.hydro_radial.decomposition_test import \ + DecompositionTest +from compass.landice.tests.hydro_radial.restart_test import RestartTest +from compass.landice.tests.hydro_radial.spinup_test import SpinupTest +from compass.landice.tests.hydro_radial.steady_state_drift_test import \ + SteadyStateDriftTest + + +class HydroRadial(TestGroup): + """ + A test group for radially symmetric hydrology test cases + """ + def __init__(self, mpas_core): + """ + 
mpas_core : compass.landice.Landice + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='hydro_radial') + + self.add_test_case(DecompositionTest(test_group=self)) + self.add_test_case(RestartTest(test_group=self)) + self.add_test_case(SpinupTest(test_group=self)) + self.add_test_case(SteadyStateDriftTest(test_group=self)) diff --git a/compass/landice/tests/hydro_radial/decomposition_test/__init__.py b/compass/landice/tests/hydro_radial/decomposition_test/__init__.py new file mode 100644 index 0000000000..0bca6e67fc --- /dev/null +++ b/compass/landice/tests/hydro_radial/decomposition_test/__init__.py @@ -0,0 +1,55 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.hydro_radial.setup_mesh import SetupMesh +from compass.landice.tests.hydro_radial.run_model import RunModel +from compass.landice.tests.hydro_radial.visualize import Visualize + + +class DecompositionTest(TestCase): + """ + A test case for performing two MALI runs of a radially symmetric + hydrological setup, one with one core and one with three. The test case + verifies that the results of the two runs are identical. 
+ """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.hydro_radial.HydroRadial + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='decomposition_test') + + self.add_step( + SetupMesh(test_case=self, initial_condition='zero')) + + for procs in [1, 3]: + name = '{}proc_run'.format(procs) + self.add_step( + RunModel(test_case=self, name=name, subdir=name, cores=procs, + threads=1)) + + input_dir = name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, name=name, subdir=name, + input_dir=input_dir) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['waterThickness', 'waterPressure'] + steps = self.steps_to_run + if '1proc_run' in steps and '3proc_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1proc_run/output.nc', + filename2='3proc_run/output.nc') diff --git a/compass/landice/tests/hydro_radial/hydro_radial.cfg b/compass/landice/tests/hydro_radial/hydro_radial.cfg new file mode 100644 index 0000000000..d70eb14d62 --- /dev/null +++ b/compass/landice/tests/hydro_radial/hydro_radial.cfg @@ -0,0 +1,25 @@ +# config options for hydro_radial test cases +[hydro_radial] + +# sizes (in cells) for the 1000m uniform mesh +nx = 50 +ny = 58 + +# resolution (in m) for the 1000m uniform mesh +dc = 1000.0 + +# number of levels in the mesh +levels = 3 + + +# config options related to visualization for hydro_radial test cases +[hydro_radial_viz] + +# which time index to visualize +time_slice = -1 + +# whether to save image files +save_images = True + +# whether to hide figures (typically when save_images = True) +hide_figs = True diff --git a/compass/landice/tests/hydro_radial/namelist.landice 
b/compass/landice/tests/hydro_radial/namelist.landice new file mode 100644 index 0000000000..04dc1319d3 --- /dev/null +++ b/compass/landice/tests/hydro_radial/namelist.landice @@ -0,0 +1,20 @@ +config_dt = '0001-00-00_00:00:00' +config_stop_time = '0000-02-01_00:00:00' +config_block_decomp_file_prefix = 'graph.info.part.' +config_velocity_solver = 'none' +config_thickness_advection = 'none' +config_ice_density = 910.0 +config_default_flowParamA = 3.1689e-24 + +config_SGH = .true. +config_SGH_adaptive_timestep_fraction = 0.5 +config_SGH_max_adaptive_timestep = 31536000 +config_SGH_alpha = 1.0 +config_SGH_beta = 2.0 +config_SGH_conduc_coeff = 0.00000101976 +config_SGH_till_drainage = 0.0 +config_SGH_till_max = 0.0 +config_SGH_bed_roughness_max = 1.0 +config_SGH_bed_roughness = 0.5 +config_SGH_englacial_porosity = 0.01 +config_SGH_creep_coefficient = 0.04 diff --git a/compass/landice/tests/hydro_radial/near_exact_solution_r_P_W.txt b/compass/landice/tests/hydro_radial/near_exact_solution_r_P_W.txt new file mode 100644 index 0000000000..7c563a0bcb --- /dev/null +++ b/compass/landice/tests/hydro_radial/near_exact_solution_r_P_W.txt @@ -0,0 +1,1861 @@ +0,4.4618e+06,0.21779 +218.81,4.4615e+06,0.21779 +437.63,4.4604e+06,0.21779 +656.44,4.4587e+06,0.21779 +875.25,4.4563e+06,0.21779 +1140.3,4.4525e+06,0.21779 +1405.4,4.4477e+06,0.21779 +1670.4,4.4419e+06,0.21779 +1935.5,4.4351e+06,0.21779 +2019.5,4.4327e+06,0.21779 +2103.6,4.4302e+06,0.21779 +2187.6,4.4276e+06,0.21779 +2271.7,4.425e+06,0.21779 +2355.7,4.4222e+06,0.21779 +2439.8,4.4193e+06,0.21779 +2523.8,4.4163e+06,0.21779 +2607.9,4.4133e+06,0.21779 +2691.9,4.4101e+06,0.21779 +2776,4.4068e+06,0.21779 +2860,4.4034e+06,0.21779 +2944.1,4.3999e+06,0.21779 +3028.2,4.3963e+06,0.21779 +3112.2,4.3927e+06,0.21779 +3196.3,4.3889e+06,0.21779 +3280.3,4.385e+06,0.21779 +3364.4,4.381e+06,0.21779 +3448.4,4.3769e+06,0.21779 +3532.5,4.3727e+06,0.21779 +3616.5,4.3684e+06,0.21779 +3655,4.3664e+06,0.21779 +3693.5,4.3644e+06,0.21779 
+3731.9,4.3624e+06,0.21779 +3770.4,4.3603e+06,0.21779 +3808.9,4.3582e+06,0.21779 +3847.3,4.3561e+06,0.21779 +3885.8,4.354e+06,0.21779 +3924.2,4.3519e+06,0.21779 +3962.7,4.3497e+06,0.21779 +4001.2,4.3475e+06,0.21779 +4039.6,4.3453e+06,0.21779 +4078.1,4.3431e+06,0.21779 +4116.6,4.3408e+06,0.21779 +4155,4.3386e+06,0.21779 +4193.5,4.3363e+06,0.21779 +4232,4.3339e+06,0.21779 +4270.4,4.3316e+06,0.21779 +4308.9,4.3293e+06,0.21779 +4347.4,4.3269e+06,0.21779 +4385.8,4.3245e+06,0.21779 +4392.2,4.3241e+06,0.21779 +4398.5,4.3237e+06,0.21779 +4404.9,4.3233e+06,0.21779 +4411.3,4.3229e+06,0.21779 +4417.6,4.3225e+06,0.21779 +4424,4.3221e+06,0.21779 +4430.3,4.3217e+06,0.21779 +4436.7,4.3213e+06,0.21779 +4443,4.3209e+06,0.21779 +4449.4,4.3205e+06,0.21779 +4455.8,4.3201e+06,0.21779 +4462.1,4.3197e+06,0.21779 +4468.5,4.3193e+06,0.21779 +4474.8,4.3189e+06,0.21779 +4481.2,4.3184e+06,0.21779 +4487.5,4.318e+06,0.21779 +4493.9,4.3176e+06,0.21779 +4500.2,4.3172e+06,0.21779 +4506.6,4.3168e+06,0.21779 +4513,4.3164e+06,0.21779 +4519.3,4.316e+06,0.21779 +4525.7,4.3156e+06,0.21779 +4532,4.3152e+06,0.21779 +4538.4,4.3148e+06,0.21779 +4541.8,4.3145e+06,0.21779 +4545.3,4.3143e+06,0.21779 +4548.8,4.3141e+06,0.21779 +4552.2,4.3139e+06,0.21779 +4555.7,4.3136e+06,0.21779 +4559.1,4.3134e+06,0.21779 +4562.6,4.3132e+06,0.21779 +4566,4.313e+06,0.21779 +4569.5,4.3127e+06,0.21779 +4572.9,4.3125e+06,0.21779 +4576.4,4.3123e+06,0.21779 +4579.9,4.3121e+06,0.21779 +4583.3,4.3118e+06,0.21779 +4586.8,4.3116e+06,0.21779 +4590.2,4.3114e+06,0.21779 +4593.7,4.3112e+06,0.21779 +4597.1,4.3109e+06,0.21779 +4600.6,4.3107e+06,0.21779 +4604,4.3105e+06,0.21779 +4607.5,4.3103e+06,0.21779 +4611,4.31e+06,0.21779 +4614.4,4.3098e+06,0.21779 +4617.9,4.3096e+06,0.21779 +4621.3,4.3093e+06,0.21779 +4624.8,4.3091e+06,0.21779 +4628.2,4.3089e+06,0.21779 +4631.7,4.3087e+06,0.21779 +4635.1,4.3084e+06,0.21779 +4637.6,4.3083e+06,0.21779 +4640.1,4.3081e+06,0.21779 +4642.6,4.3079e+06,0.21779 +4645.1,4.3078e+06,0.21779 
+4647.6,4.3076e+06,0.21779 +4650.1,4.3074e+06,0.21779 +4652.6,4.3073e+06,0.21779 +4655.1,4.3071e+06,0.21779 +4657.6,4.3069e+06,0.21779 +4660.1,4.3068e+06,0.21779 +4662.6,4.3066e+06,0.21779 +4665.1,4.3064e+06,0.21779 +4667.6,4.3063e+06,0.21779 +4670.1,4.3061e+06,0.21779 +4672.6,4.3059e+06,0.21779 +4675.1,4.3058e+06,0.21779 +4677.6,4.3056e+06,0.21779 +4680.1,4.3054e+06,0.21779 +4682.6,4.3053e+06,0.21779 +4685.1,4.3051e+06,0.21779 +4687.6,4.3049e+06,0.21779 +4690.1,4.3048e+06,0.21779 +4692.6,4.3046e+06,0.21779 +4695.1,4.3044e+06,0.21779 +4697.6,4.3043e+06,0.21779 +4700.2,4.3041e+06,0.21779 +4702.7,4.3039e+06,0.21779 +4705.2,4.3038e+06,0.21779 +4707.1,4.3036e+06,0.21779 +4709,4.3035e+06,0.21779 +4710.9,4.3034e+06,0.21779 +4712.8,4.3032e+06,0.21779 +4714.8,4.3031e+06,0.21779 +4716.7,4.303e+06,0.21779 +4718.6,4.3029e+06,0.21779 +4720.5,4.3027e+06,0.21779 +4722.4,4.3026e+06,0.21779 +4724.4,4.3025e+06,0.21779 +4726.3,4.3023e+06,0.21779 +4728.2,4.3022e+06,0.21779 +4730.1,4.3021e+06,0.21779 +4732,4.3019e+06,0.21779 +4734,4.3018e+06,0.21779 +4735.9,4.3017e+06,0.21778 +4737.8,4.3016e+06,0.21778 +4739.7,4.3014e+06,0.21778 +4741.7,4.3013e+06,0.21778 +4743.6,4.3012e+06,0.21778 +4745.5,4.301e+06,0.21778 +4747.4,4.3009e+06,0.21778 +4749.3,4.3008e+06,0.21778 +4751.3,4.3006e+06,0.21778 +4753.2,4.3005e+06,0.21778 +4755.1,4.3004e+06,0.21778 +4757,4.3003e+06,0.21778 +4758.9,4.3001e+06,0.21778 +4760.5,4.3e+06,0.21778 +4762,4.2999e+06,0.21778 +4763.6,4.2998e+06,0.21778 +4765.1,4.2997e+06,0.21778 +4766.7,4.2996e+06,0.21778 +4768.2,4.2995e+06,0.21778 +4769.7,4.2994e+06,0.21778 +4771.3,4.2993e+06,0.21778 +4772.8,4.2992e+06,0.21778 +4774.4,4.2991e+06,0.21778 +4775.9,4.299e+06,0.21778 +4777.5,4.2989e+06,0.21778 +4779,4.2988e+06,0.21778 +4780.5,4.2987e+06,0.21778 +4782.1,4.2985e+06,0.21778 +4783.6,4.2984e+06,0.21778 +4785.2,4.2983e+06,0.21778 +4786.7,4.2982e+06,0.21778 +4788.3,4.2981e+06,0.21778 +4789.8,4.298e+06,0.21778 +4791.3,4.2979e+06,0.21777 +4792.9,4.2978e+06,0.21777 
+4794.4,4.2977e+06,0.21777 +4796,4.2976e+06,0.21777 +4797.5,4.2975e+06,0.21777 +4799.1,4.2974e+06,0.21777 +4800.6,4.2973e+06,0.21777 +4802.1,4.2972e+06,0.21777 +4803.4,4.2971e+06,0.21777 +4804.7,4.297e+06,0.21777 +4806,4.2969e+06,0.21777 +4807.3,4.2968e+06,0.21777 +4808.6,4.2967e+06,0.21777 +4809.8,4.2966e+06,0.21777 +4811.1,4.2966e+06,0.21776 +4812.4,4.2965e+06,0.21776 +4813.7,4.2964e+06,0.21776 +4815,4.2963e+06,0.21776 +4816.2,4.2962e+06,0.21776 +4817.5,4.2961e+06,0.21776 +4818.8,4.296e+06,0.21776 +4820.1,4.2959e+06,0.21776 +4821.4,4.2959e+06,0.21776 +4822.7,4.2958e+06,0.21775 +4823.9,4.2957e+06,0.21775 +4825.2,4.2956e+06,0.21775 +4826.5,4.2955e+06,0.21775 +4827.8,4.2954e+06,0.21775 +4829.1,4.2953e+06,0.21775 +4830.3,4.2952e+06,0.21774 +4831.6,4.2951e+06,0.21774 +4832.9,4.2951e+06,0.21774 +4834.2,4.295e+06,0.21774 +4835.5,4.2949e+06,0.21774 +4836.8,4.2948e+06,0.21774 +4838,4.2947e+06,0.21773 +4839.1,4.2946e+06,0.21773 +4840.2,4.2946e+06,0.21773 +4841.3,4.2945e+06,0.21773 +4842.4,4.2944e+06,0.21772 +4843.4,4.2943e+06,0.21772 +4844.5,4.2943e+06,0.21772 +4845.6,4.2942e+06,0.21772 +4846.7,4.2941e+06,0.21772 +4847.7,4.294e+06,0.21771 +4848.8,4.294e+06,0.21771 +4849.9,4.2939e+06,0.21771 +4851,4.2938e+06,0.2177 +4852.1,4.2937e+06,0.2177 +4853.1,4.2937e+06,0.2177 +4854.2,4.2936e+06,0.2177 +4855.3,4.2935e+06,0.21769 +4856.4,4.2934e+06,0.21769 +4857.5,4.2934e+06,0.21769 +4858.5,4.2933e+06,0.21768 +4859.6,4.2932e+06,0.21768 +4860.7,4.2931e+06,0.21767 +4861.8,4.2931e+06,0.21767 +4862.9,4.293e+06,0.21767 +4863.9,4.2929e+06,0.21766 +4865,4.2928e+06,0.21766 +4866.1,4.2928e+06,0.21765 +4867.2,4.2927e+06,0.21765 +4868.2,4.2926e+06,0.21764 +4869.2,4.2925e+06,0.21764 +4870.2,4.2925e+06,0.21763 +4871.1,4.2924e+06,0.21763 +4872.1,4.2923e+06,0.21762 +4873.1,4.2923e+06,0.21762 +4874,4.2922e+06,0.21761 +4875,4.2921e+06,0.21761 +4876,4.2921e+06,0.2176 +4876.9,4.292e+06,0.2176 +4877.9,4.2919e+06,0.21759 +4878.8,4.2919e+06,0.21758 +4879.8,4.2918e+06,0.21758 +4880.8,4.2917e+06,0.21757 
+4881.7,4.2917e+06,0.21756 +4882.7,4.2916e+06,0.21756 +4883.7,4.2915e+06,0.21755 +4884.6,4.2915e+06,0.21754 +4885.6,4.2914e+06,0.21753 +4886.5,4.2913e+06,0.21752 +4887.5,4.2913e+06,0.21752 +4888.5,4.2912e+06,0.21751 +4889.4,4.2911e+06,0.2175 +4890.4,4.2911e+06,0.21749 +4891.4,4.291e+06,0.21748 +4892.3,4.2909e+06,0.21747 +4893.3,4.2909e+06,0.21746 +4894.2,4.2908e+06,0.21745 +4895.2,4.2907e+06,0.21744 +4896,4.2907e+06,0.21743 +4896.8,4.2906e+06,0.21742 +4897.6,4.2906e+06,0.21741 +4898.4,4.2905e+06,0.2174 +4899.2,4.2905e+06,0.21739 +4900.1,4.2904e+06,0.21738 +4900.9,4.2903e+06,0.21737 +4901.7,4.2903e+06,0.21736 +4902.5,4.2902e+06,0.21734 +4903.3,4.2902e+06,0.21733 +4904.1,4.2901e+06,0.21732 +4904.9,4.2901e+06,0.21731 +4905.7,4.29e+06,0.21729 +4906.5,4.2899e+06,0.21728 +4907.3,4.2899e+06,0.21727 +4908.1,4.2898e+06,0.21725 +4908.9,4.2898e+06,0.21724 +4909.7,4.2897e+06,0.21723 +4910.5,4.2897e+06,0.21721 +4911.3,4.2896e+06,0.21719 +4912.1,4.2895e+06,0.21718 +4913,4.2895e+06,0.21716 +4913.8,4.2894e+06,0.21715 +4914.6,4.2894e+06,0.21713 +4915.4,4.2893e+06,0.21711 +4916.2,4.2893e+06,0.21709 +4917,4.2892e+06,0.21707 +4917.8,4.2892e+06,0.21705 +4918.5,4.2891e+06,0.21704 +4919.2,4.2891e+06,0.21702 +4919.9,4.289e+06,0.217 +4920.6,4.289e+06,0.21698 +4921.3,4.2889e+06,0.21697 +4921.9,4.2889e+06,0.21695 +4922.6,4.2888e+06,0.21693 +4923.3,4.2888e+06,0.21691 +4924,4.2887e+06,0.21689 +4924.7,4.2887e+06,0.21687 +4925.4,4.2886e+06,0.21684 +4926.1,4.2886e+06,0.21682 +4926.8,4.2885e+06,0.2168 +4927.5,4.2885e+06,0.21678 +4928.2,4.2884e+06,0.21675 +4928.9,4.2884e+06,0.21673 +4929.5,4.2883e+06,0.21671 +4930.2,4.2883e+06,0.21668 +4930.9,4.2882e+06,0.21666 +4931.6,4.2882e+06,0.21663 +4932.3,4.2881e+06,0.2166 +4933,4.2881e+06,0.21657 +4933.7,4.288e+06,0.21655 +4934.4,4.288e+06,0.21652 +4935.1,4.2879e+06,0.21649 +4935.8,4.2879e+06,0.21646 +4936.5,4.2878e+06,0.21643 +4937.2,4.2878e+06,0.2164 +4937.8,4.2877e+06,0.21637 +4938.4,4.2877e+06,0.21634 +4939,4.2877e+06,0.21631 +4939.6,4.2876e+06,0.21628 
+4940.2,4.2876e+06,0.21625 +4940.8,4.2875e+06,0.21621 +4941.5,4.2875e+06,0.21618 +4942.1,4.2874e+06,0.21615 +4942.7,4.2874e+06,0.21611 +4943.3,4.2874e+06,0.21608 +4943.9,4.2873e+06,0.21604 +4944.5,4.2873e+06,0.21601 +4945.1,4.2872e+06,0.21597 +4945.8,4.2872e+06,0.21593 +4946.4,4.2871e+06,0.2159 +4947,4.2871e+06,0.21586 +4947.6,4.2871e+06,0.21582 +4948.2,4.287e+06,0.21578 +4948.8,4.287e+06,0.21573 +4949.5,4.2869e+06,0.21569 +4950.1,4.2869e+06,0.21565 +4950.7,4.2868e+06,0.2156 +4951.3,4.2868e+06,0.21556 +4951.9,4.2867e+06,0.21551 +4952.5,4.2867e+06,0.21546 +4953.1,4.2867e+06,0.21542 +4953.8,4.2866e+06,0.21537 +4954.4,4.2866e+06,0.21532 +4954.9,4.2865e+06,0.21527 +4955.4,4.2865e+06,0.21523 +4955.9,4.2865e+06,0.21518 +4956.5,4.2864e+06,0.21514 +4957,4.2864e+06,0.21509 +4957.5,4.2864e+06,0.21504 +4958,4.2863e+06,0.21499 +4958.6,4.2863e+06,0.21494 +4959.1,4.2862e+06,0.21489 +4959.6,4.2862e+06,0.21484 +4960.1,4.2862e+06,0.21479 +4960.7,4.2861e+06,0.21474 +4961.2,4.2861e+06,0.21468 +4961.7,4.2861e+06,0.21463 +4962.2,4.286e+06,0.21457 +4962.7,4.286e+06,0.21451 +4963.3,4.2859e+06,0.21446 +4963.8,4.2859e+06,0.2144 +4964.3,4.2859e+06,0.21434 +4964.8,4.2858e+06,0.21427 +4965.4,4.2858e+06,0.21421 +4965.9,4.2858e+06,0.21415 +4966.4,4.2857e+06,0.21408 +4966.9,4.2857e+06,0.21402 +4967.5,4.2856e+06,0.21395 +4968,4.2856e+06,0.21388 +4968.5,4.2856e+06,0.21381 +4969,4.2855e+06,0.21374 +4969.5,4.2855e+06,0.21367 +4970,4.2855e+06,0.2136 +4970.5,4.2854e+06,0.21353 +4971,4.2854e+06,0.21346 +4971.5,4.2854e+06,0.21339 +4972,4.2853e+06,0.21331 +4972.5,4.2853e+06,0.21324 +4973,4.2853e+06,0.21316 +4973.5,4.2852e+06,0.21308 +4974,4.2852e+06,0.213 +4974.5,4.2851e+06,0.21292 +4974.9,4.2851e+06,0.21284 +4975.4,4.2851e+06,0.21276 +4975.9,4.285e+06,0.21267 +4976.4,4.285e+06,0.21259 +4976.9,4.285e+06,0.2125 +4977.4,4.2849e+06,0.21241 +4977.9,4.2849e+06,0.21232 +4978.4,4.2849e+06,0.21222 +4978.9,4.2848e+06,0.21213 +4979.4,4.2848e+06,0.21203 +4979.9,4.2848e+06,0.21193 +4980.4,4.2847e+06,0.21184 
+4980.9,4.2847e+06,0.21173 +4981.4,4.2847e+06,0.21163 +4981.8,4.2846e+06,0.21153 +4982.3,4.2846e+06,0.21142 +4982.8,4.2846e+06,0.21131 +4983.2,4.2845e+06,0.21123 +4983.5,4.2845e+06,0.21115 +4983.9,4.2845e+06,0.21107 +4984.3,4.2845e+06,0.21099 +4984.6,4.2844e+06,0.2109 +4985,4.2844e+06,0.21082 +4985.3,4.2844e+06,0.21073 +4985.7,4.2844e+06,0.21065 +4986,4.2843e+06,0.21056 +4986.4,4.2843e+06,0.21047 +4986.7,4.2843e+06,0.21038 +4987.1,4.2843e+06,0.21029 +4987.4,4.2842e+06,0.21019 +4987.8,4.2842e+06,0.2101 +4988.2,4.2842e+06,0.21001 +4988.5,4.2841e+06,0.20991 +4988.9,4.2841e+06,0.20981 +4989.2,4.2841e+06,0.20971 +4989.6,4.2841e+06,0.20961 +4989.9,4.284e+06,0.20951 +4990.3,4.284e+06,0.20941 +4990.6,4.284e+06,0.20931 +4991,4.284e+06,0.2092 +4991.4,4.2839e+06,0.20909 +4991.7,4.2839e+06,0.20899 +4992.1,4.2839e+06,0.20888 +4992.4,4.2839e+06,0.20877 +4992.8,4.2838e+06,0.20865 +4993,4.2838e+06,0.20859 +4993.2,4.2838e+06,0.20853 +4993.4,4.2838e+06,0.20846 +4993.6,4.2838e+06,0.2084 +4993.8,4.2838e+06,0.20833 +4994,4.2838e+06,0.20827 +4994.2,4.2837e+06,0.2082 +4994.3,4.2837e+06,0.20814 +4994.5,4.2837e+06,0.20807 +4994.7,4.2837e+06,0.208 +4994.9,4.2837e+06,0.20794 +4995.1,4.2837e+06,0.20787 +4995.3,4.2837e+06,0.2078 +4995.5,4.2836e+06,0.20773 +4995.7,4.2836e+06,0.20766 +4995.9,4.2836e+06,0.20759 +4996.1,4.2836e+06,0.20752 +4996.3,4.2836e+06,0.20745 +4996.5,4.2836e+06,0.20738 +4996.7,4.2836e+06,0.20731 +4996.9,4.2836e+06,0.20724 +4997.1,4.2835e+06,0.20716 +4997.3,4.2835e+06,0.20709 +4997.5,4.2835e+06,0.20701 +4997.6,4.2835e+06,0.20698 +4997.7,4.2835e+06,0.20695 +4997.7,4.2835e+06,0.20692 +4997.8,4.2835e+06,0.20689 +4997.9,4.2835e+06,0.20686 +4998,4.2835e+06,0.20682 +4998.1,4.2835e+06,0.20679 +4998.2,4.2835e+06,0.20676 +4998.2,4.2835e+06,0.20673 +4998.3,4.2835e+06,0.20669 +4998.4,4.2834e+06,0.20666 +4998.5,4.2834e+06,0.20663 +4998.6,4.2834e+06,0.20659 +4998.7,4.2834e+06,0.20656 +4998.7,4.2834e+06,0.20653 +4998.8,4.2834e+06,0.2065 +4998.9,4.2834e+06,0.20646 +4999,4.2834e+06,0.20643 
+4999.1,4.2834e+06,0.20639 +4999.2,4.2834e+06,0.20636 +4999.2,4.2834e+06,0.20635 +4999.2,4.2834e+06,0.20633 +4999.3,4.2834e+06,0.20632 +4999.3,4.2834e+06,0.20631 +4999.3,4.2834e+06,0.20629 +4999.4,4.2834e+06,0.20628 +4999.4,4.2834e+06,0.20627 +4999.4,4.2834e+06,0.20625 +4999.5,4.2834e+06,0.20624 +4999.5,4.2834e+06,0.20622 +4999.5,4.2834e+06,0.20621 +4999.6,4.2834e+06,0.2062 +4999.6,4.2834e+06,0.20618 +4999.6,4.2834e+06,0.20617 +4999.7,4.2834e+06,0.20615 +4999.7,4.2834e+06,0.20614 +4999.7,4.2834e+06,0.20614 +4999.7,4.2834e+06,0.20613 +4999.7,4.2833e+06,0.20613 +4999.7,4.2833e+06,0.20612 +4999.8,4.2833e+06,0.20612 +4999.8,4.2833e+06,0.20612 +4999.8,4.2833e+06,0.20611 +4999.8,4.2833e+06,0.20611 +4999.8,4.2833e+06,0.2061 +4999.8,4.2833e+06,0.2061 +4999.8,4.2833e+06,0.2061 +4999.8,4.2833e+06,0.20609 +4999.8,4.2833e+06,0.20609 +4999.8,4.2833e+06,0.20609 +4999.8,4.2833e+06,0.20608 +4999.9,4.2833e+06,0.20608 +4999.9,4.2833e+06,0.20607 +4999.9,4.2833e+06,0.20607 +4999.9,4.2833e+06,0.20607 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20606 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20605 +4999.9,4.2833e+06,0.20604 +4999.9,4.2833e+06,0.20604 +4999.9,4.2833e+06,0.20604 +4999.9,4.2833e+06,0.20604 +4999.9,4.2833e+06,0.20604 +4999.9,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20604 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 
+5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20603 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20602 +5000,4.2833e+06,0.20601 +5000,4.2833e+06,0.20601 +5000,4.2833e+06,0.20601 +5000,4.2833e+06,0.20601 +5000,4.2833e+06,0.206 +5000.1,4.2833e+06,0.206 +5000.1,4.2833e+06,0.20599 +5000.1,4.2833e+06,0.20599 +5000.1,4.2833e+06,0.20598 +5000.1,4.2833e+06,0.20598 +5000.1,4.2833e+06,0.20597 +5000.1,4.2833e+06,0.20597 +5000.1,4.2833e+06,0.20596 +5000.2,4.2833e+06,0.20595 +5000.2,4.2833e+06,0.20594 +5000.2,4.2833e+06,0.20593 +5000.2,4.2833e+06,0.20593 +5000.3,4.2833e+06,0.20592 +5000.3,4.2833e+06,0.20591 +5000.3,4.2833e+06,0.2059 +5000.3,4.2833e+06,0.20589 +5000.4,4.2833e+06,0.20587 +5000.4,4.2833e+06,0.20586 +5000.4,4.2833e+06,0.20585 +5000.5,4.2833e+06,0.20584 +5000.5,4.2833e+06,0.20582 +5000.5,4.2833e+06,0.20581 +5000.6,4.2833e+06,0.20579 +5000.6,4.2833e+06,0.20578 +5000.7,4.2833e+06,0.20576 +5000.7,4.2833e+06,0.20574 +5000.7,4.2833e+06,0.20572 +5000.8,4.2833e+06,0.20571 +5000.9,4.2833e+06,0.20568 +5000.9,4.2833e+06,0.20566 +5001,4.2833e+06,0.20563 +5001,4.2833e+06,0.20561 +5001.1,4.2833e+06,0.20558 +5001.2,4.2832e+06,0.20556 +5001.2,4.2832e+06,0.20554 +5001.3,4.2832e+06,0.20551 +5001.4,4.2832e+06,0.20548 +5001.5,4.2832e+06,0.20545 +5001.6,4.2832e+06,0.20542 +5001.6,4.2832e+06,0.20539 +5001.7,4.2832e+06,0.20536 +5001.8,4.2832e+06,0.20533 +5001.9,4.2832e+06,0.2053 +5002,4.2832e+06,0.20527 +5002.1,4.2832e+06,0.20523 +5002.2,4.2832e+06,0.20519 +5002.3,4.2832e+06,0.20515 +5002.4,4.2832e+06,0.20511 +5002.5,4.2832e+06,0.20508 +5002.6,4.2831e+06,0.20504 +5002.7,4.2831e+06,0.205 +5002.8,4.2831e+06,0.20496 +5003,4.2831e+06,0.20492 
+5003.1,4.2831e+06,0.20487 +5003.2,4.2831e+06,0.20483 +5003.4,4.2831e+06,0.20478 +5003.5,4.2831e+06,0.20473 +5003.6,4.2831e+06,0.20469 +5003.8,4.2831e+06,0.20464 +5003.9,4.2831e+06,0.2046 +5004,4.283e+06,0.20454 +5004.2,4.283e+06,0.20449 +5004.4,4.283e+06,0.20444 +5004.5,4.283e+06,0.20438 +5004.7,4.283e+06,0.20433 +5004.8,4.283e+06,0.20428 +5005,4.283e+06,0.20422 +5005.2,4.283e+06,0.20417 +5005.4,4.2829e+06,0.2041 +5005.6,4.2829e+06,0.20403 +5005.8,4.2829e+06,0.20397 +5006,4.2829e+06,0.2039 +5006.2,4.2829e+06,0.20383 +5006.4,4.2829e+06,0.20377 +5006.6,4.2829e+06,0.2037 +5006.8,4.2828e+06,0.20363 +5007,4.2828e+06,0.20357 +5007.3,4.2828e+06,0.2035 +5007.5,4.2828e+06,0.20343 +5007.7,4.2828e+06,0.20337 +5007.9,4.2828e+06,0.20329 +5008.2,4.2827e+06,0.2032 +5008.5,4.2827e+06,0.20312 +5008.7,4.2827e+06,0.20304 +5009,4.2827e+06,0.20296 +5009.3,4.2827e+06,0.20288 +5009.5,4.2826e+06,0.2028 +5009.8,4.2826e+06,0.20272 +5010.1,4.2826e+06,0.20264 +5010.3,4.2826e+06,0.20256 +5010.6,4.2826e+06,0.20248 +5010.9,4.2825e+06,0.2024 +5011.2,4.2825e+06,0.20231 +5011.5,4.2825e+06,0.20221 +5011.9,4.2825e+06,0.20211 +5012.2,4.2824e+06,0.20202 +5012.5,4.2824e+06,0.20192 +5012.8,4.2824e+06,0.20183 +5013.2,4.2824e+06,0.20173 +5013.5,4.2823e+06,0.20164 +5013.8,4.2823e+06,0.20154 +5014.2,4.2823e+06,0.20145 +5014.5,4.2823e+06,0.20136 +5014.8,4.2822e+06,0.20127 +5015.2,4.2822e+06,0.20115 +5015.6,4.2822e+06,0.20104 +5016,4.2822e+06,0.20093 +5016.4,4.2821e+06,0.20082 +5016.9,4.2821e+06,0.20071 +5017.3,4.2821e+06,0.2006 +5017.7,4.282e+06,0.20049 +5018.1,4.282e+06,0.20039 +5018.5,4.282e+06,0.20028 +5018.9,4.2819e+06,0.20017 +5019.3,4.2819e+06,0.20006 +5019.7,4.2819e+06,0.19996 +5020.2,4.2818e+06,0.19982 +5020.7,4.2818e+06,0.19969 +5021.2,4.2818e+06,0.19955 +5021.8,4.2817e+06,0.19942 +5022.3,4.2817e+06,0.19928 +5022.8,4.2816e+06,0.19915 +5023.3,4.2816e+06,0.19902 +5023.8,4.2816e+06,0.19889 +5024.3,4.2815e+06,0.19876 +5024.9,4.2815e+06,0.19863 +5025.4,4.2814e+06,0.1985 +5025.9,4.2814e+06,0.19837 
+5026.4,4.2814e+06,0.19824 +5026.9,4.2813e+06,0.19812 +5027.5,4.2813e+06,0.19799 +5028,4.2812e+06,0.19786 +5028.6,4.2812e+06,0.1977 +5029.3,4.2811e+06,0.19755 +5030,4.2811e+06,0.19739 +5030.6,4.281e+06,0.19723 +5031.3,4.281e+06,0.19708 +5031.9,4.2809e+06,0.19693 +5032.6,4.2809e+06,0.19677 +5033.2,4.2808e+06,0.19662 +5033.9,4.2808e+06,0.19647 +5034.5,4.2807e+06,0.19632 +5035.2,4.2807e+06,0.19617 +5035.9,4.2806e+06,0.19602 +5036.5,4.2806e+06,0.19587 +5037.2,4.2805e+06,0.19572 +5037.8,4.2805e+06,0.19558 +5038.5,4.2804e+06,0.19543 +5039.3,4.2804e+06,0.19525 +5040.1,4.2803e+06,0.19507 +5040.9,4.2802e+06,0.19489 +5041.7,4.2802e+06,0.19471 +5042.5,4.2801e+06,0.19454 +5043.4,4.2801e+06,0.19436 +5044.2,4.28e+06,0.19419 +5045,4.2799e+06,0.19401 +5045.8,4.2799e+06,0.19384 +5046.6,4.2798e+06,0.19367 +5047.4,4.2797e+06,0.1935 +5048.2,4.2797e+06,0.19333 +5049,4.2796e+06,0.19316 +5049.9,4.2795e+06,0.19299 +5050.7,4.2795e+06,0.19282 +5051.5,4.2794e+06,0.19265 +5052.5,4.2793e+06,0.19245 +5053.5,4.2793e+06,0.19225 +5054.5,4.2792e+06,0.19205 +5055.5,4.2791e+06,0.19185 +5056.5,4.279e+06,0.19165 +5057.5,4.2789e+06,0.19145 +5058.5,4.2789e+06,0.19126 +5059.4,4.2788e+06,0.19106 +5060.4,4.2787e+06,0.19087 +5061.4,4.2786e+06,0.19068 +5062.4,4.2785e+06,0.19048 +5063.4,4.2785e+06,0.19029 +5064.4,4.2784e+06,0.1901 +5065.4,4.2783e+06,0.18992 +5066.4,4.2782e+06,0.18973 +5067.4,4.2781e+06,0.18954 +5068.6,4.278e+06,0.18932 +5069.8,4.2779e+06,0.18909 +5071,4.2778e+06,0.18887 +5072.2,4.2777e+06,0.18865 +5073.4,4.2776e+06,0.18843 +5074.6,4.2775e+06,0.18821 +5075.8,4.2774e+06,0.188 +5077,4.2773e+06,0.18778 +5078.2,4.2772e+06,0.18757 +5079.4,4.2771e+06,0.18735 +5080.7,4.277e+06,0.18714 +5081.9,4.2769e+06,0.18693 +5083.1,4.2768e+06,0.18672 +5084.3,4.2767e+06,0.18651 +5085.5,4.2766e+06,0.1863 +5086.7,4.2765e+06,0.1861 +5088.1,4.2764e+06,0.18585 +5089.6,4.2763e+06,0.18561 +5091,4.2762e+06,0.18536 +5092.5,4.2761e+06,0.18512 +5093.9,4.2759e+06,0.18488 +5095.3,4.2758e+06,0.18464 +5096.8,4.2757e+06,0.1844 
+5098.2,4.2756e+06,0.18417 +5099.7,4.2755e+06,0.18393 +5101.1,4.2753e+06,0.1837 +5102.6,4.2752e+06,0.18347 +5104,4.2751e+06,0.18324 +5105.5,4.275e+06,0.18301 +5106.9,4.2748e+06,0.18278 +5108.4,4.2747e+06,0.18255 +5109.8,4.2746e+06,0.18233 +5111.6,4.2744e+06,0.18205 +5113.3,4.2743e+06,0.18178 +5115.1,4.2741e+06,0.18152 +5116.8,4.274e+06,0.18125 +5118.6,4.2738e+06,0.18099 +5120.3,4.2737e+06,0.18072 +5122.1,4.2735e+06,0.18046 +5123.8,4.2734e+06,0.1802 +5125.6,4.2732e+06,0.17994 +5127.3,4.2731e+06,0.17969 +5129.1,4.2729e+06,0.17943 +5130.8,4.2728e+06,0.17918 +5132.6,4.2726e+06,0.17893 +5134.3,4.2725e+06,0.17868 +5136.1,4.2723e+06,0.17843 +5137.8,4.2722e+06,0.17818 +5139.9,4.272e+06,0.17788 +5142.1,4.2718e+06,0.17758 +5144.2,4.2716e+06,0.17729 +5146.4,4.2714e+06,0.17699 +5148.5,4.2712e+06,0.1767 +5150.6,4.271e+06,0.17641 +5152.8,4.2708e+06,0.17612 +5154.9,4.2706e+06,0.17583 +5157,4.2704e+06,0.17555 +5159.2,4.2702e+06,0.17527 +5161.3,4.2701e+06,0.17499 +5163.5,4.2699e+06,0.17471 +5165.6,4.2697e+06,0.17443 +5167.7,4.2695e+06,0.17415 +5169.9,4.2693e+06,0.17388 +5172,4.2691e+06,0.17361 +5174.1,4.2689e+06,0.17334 +5176.3,4.2687e+06,0.17307 +5178.4,4.2685e+06,0.1728 +5180.6,4.2683e+06,0.17254 +5183.1,4.2681e+06,0.17222 +5185.7,4.2678e+06,0.17191 +5188.3,4.2676e+06,0.17159 +5190.8,4.2673e+06,0.17128 +5193.4,4.2671e+06,0.17098 +5196,4.2669e+06,0.17067 +5198.5,4.2666e+06,0.17037 +5201.1,4.2664e+06,0.17006 +5203.7,4.2661e+06,0.16977 +5206.2,4.2659e+06,0.16947 +5208.8,4.2657e+06,0.16917 +5211.4,4.2654e+06,0.16888 +5213.9,4.2652e+06,0.16859 +5216.5,4.2649e+06,0.1683 +5219.1,4.2647e+06,0.16801 +5221.7,4.2644e+06,0.16772 +5224.8,4.2641e+06,0.16738 +5227.9,4.2638e+06,0.16703 +5231.1,4.2635e+06,0.16669 +5234.2,4.2632e+06,0.16635 +5237.4,4.2629e+06,0.16602 +5240.5,4.2626e+06,0.16568 +5243.6,4.2623e+06,0.16535 +5246.8,4.262e+06,0.16502 +5249.9,4.2617e+06,0.1647 +5253,4.2614e+06,0.16437 +5256.2,4.2611e+06,0.16405 +5259.3,4.2608e+06,0.16373 +5262.5,4.2605e+06,0.16342 
+5265.6,4.2601e+06,0.1631 +5268.7,4.2598e+06,0.16279 +5271.9,4.2595e+06,0.16248 +5275,4.2592e+06,0.16217 +5278.2,4.2589e+06,0.16187 +5281.3,4.2586e+06,0.16156 +5284.4,4.2583e+06,0.16126 +5288.2,4.2579e+06,0.1609 +5292,4.2575e+06,0.16054 +5295.8,4.2571e+06,0.16019 +5299.6,4.2567e+06,0.15983 +5303.4,4.2563e+06,0.15948 +5307.2,4.2559e+06,0.15914 +5311,4.2555e+06,0.15879 +5314.8,4.2552e+06,0.15845 +5318.6,4.2548e+06,0.15811 +5322.4,4.2544e+06,0.15777 +5326.2,4.254e+06,0.15744 +5330,4.2536e+06,0.15711 +5333.8,4.2532e+06,0.15678 +5337.6,4.2528e+06,0.15646 +5341.4,4.2524e+06,0.15613 +5345.2,4.252e+06,0.15581 +5349,4.2516e+06,0.15549 +5352.8,4.2512e+06,0.15518 +5356.6,4.2508e+06,0.15486 +5360.4,4.2504e+06,0.15455 +5365.2,4.2498e+06,0.15417 +5369.9,4.2493e+06,0.15378 +5374.7,4.2488e+06,0.15341 +5379.4,4.2483e+06,0.15303 +5384.2,4.2478e+06,0.15266 +5388.9,4.2473e+06,0.15229 +5393.6,4.2468e+06,0.15193 +5398.4,4.2462e+06,0.15156 +5403.1,4.2457e+06,0.15121 +5407.9,4.2452e+06,0.15085 +5412.6,4.2447e+06,0.1505 +5417.4,4.2441e+06,0.15015 +5422.1,4.2436e+06,0.1498 +5426.8,4.2431e+06,0.14946 +5431.6,4.2425e+06,0.14912 +5436.3,4.242e+06,0.14878 +5441.1,4.2415e+06,0.14845 +5445.8,4.2409e+06,0.14812 +5450.6,4.2404e+06,0.14779 +5455.3,4.2399e+06,0.14746 +5460.1,4.2393e+06,0.14714 +5464.8,4.2388e+06,0.14682 +5469.5,4.2382e+06,0.1465 +5474.3,4.2377e+06,0.14618 +5480.2,4.237e+06,0.14579 +5486.1,4.2363e+06,0.14541 +5492.1,4.2356e+06,0.14502 +5498,4.2349e+06,0.14464 +5503.9,4.2342e+06,0.14427 +5509.8,4.2335e+06,0.1439 +5515.7,4.2328e+06,0.14353 +5521.7,4.2321e+06,0.14316 +5527.6,4.2314e+06,0.1428 +5533.5,4.2307e+06,0.14245 +5539.4,4.23e+06,0.14209 +5545.4,4.2292e+06,0.14174 +5551.3,4.2285e+06,0.14139 +5557.2,4.2278e+06,0.14105 +5563.1,4.2271e+06,0.14071 +5569.1,4.2264e+06,0.14037 +5575,4.2256e+06,0.14004 +5580.9,4.2249e+06,0.13971 +5586.8,4.2242e+06,0.13938 +5592.8,4.2234e+06,0.13905 +5598.7,4.2227e+06,0.13873 +5604.6,4.2219e+06,0.13841 +5610.5,4.2212e+06,0.13809 +5616.5,4.2205e+06,0.13778 
+5623.6,4.2195e+06,0.1374 +5630.8,4.2186e+06,0.13703 +5638,4.2177e+06,0.13666 +5645.2,4.2168e+06,0.1363 +5652.4,4.2159e+06,0.13593 +5659.5,4.215e+06,0.13558 +5666.7,4.214e+06,0.13522 +5673.9,4.2131e+06,0.13487 +5681.1,4.2122e+06,0.13453 +5688.3,4.2112e+06,0.13419 +5695.4,4.2103e+06,0.13385 +5702.6,4.2093e+06,0.13351 +5709.8,4.2084e+06,0.13318 +5717,4.2074e+06,0.13285 +5724.2,4.2064e+06,0.13252 +5731.3,4.2055e+06,0.1322 +5738.5,4.2045e+06,0.13188 +5745.7,4.2035e+06,0.13157 +5752.9,4.2026e+06,0.13125 +5760.1,4.2016e+06,0.13094 +5767.2,4.2006e+06,0.13064 +5774.4,4.1996e+06,0.13033 +5781.6,4.1986e+06,0.13003 +5788.8,4.1977e+06,0.12973 +5797.7,4.1964e+06,0.12937 +5806.6,4.1952e+06,0.12901 +5815.5,4.1939e+06,0.12865 +5824.4,4.1927e+06,0.12829 +5833.3,4.1914e+06,0.12794 +5842.2,4.1902e+06,0.1276 +5851.1,4.1889e+06,0.12726 +5860.1,4.1876e+06,0.12692 +5869,4.1863e+06,0.12659 +5877.9,4.1851e+06,0.12626 +5886.8,4.1838e+06,0.12593 +5895.7,4.1825e+06,0.12561 +5904.6,4.1812e+06,0.12529 +5913.5,4.1799e+06,0.12497 +5922.4,4.1786e+06,0.12466 +5931.4,4.1772e+06,0.12435 +5940.3,4.1759e+06,0.12405 +5949.2,4.1746e+06,0.12375 +5958.1,4.1733e+06,0.12345 +5967,4.1719e+06,0.12315 +5975.9,4.1706e+06,0.12286 +5984.8,4.1692e+06,0.12257 +5993.7,4.1679e+06,0.12228 +6002.6,4.1665e+06,0.122 +6011.6,4.1652e+06,0.12172 +6020.5,4.1638e+06,0.12144 +6029.4,4.1624e+06,0.12117 +6038.3,4.161e+06,0.1209 +6049.2,4.1593e+06,0.12057 +6060.1,4.1576e+06,0.12024 +6071.1,4.1559e+06,0.11992 +6082,4.1542e+06,0.11961 +6092.9,4.1525e+06,0.11929 +6103.8,4.1507e+06,0.11898 +6114.8,4.149e+06,0.11868 +6125.7,4.1472e+06,0.11838 +6136.6,4.1455e+06,0.11808 +6147.5,4.1437e+06,0.11778 +6158.5,4.1419e+06,0.11749 +6169.4,4.1402e+06,0.1172 +6180.3,4.1384e+06,0.11692 +6191.2,4.1366e+06,0.11664 +6202.2,4.1348e+06,0.11636 +6213.1,4.133e+06,0.11609 +6224,4.1311e+06,0.11582 +6234.9,4.1293e+06,0.11555 +6245.9,4.1275e+06,0.11528 +6256.8,4.1256e+06,0.11502 +6267.7,4.1238e+06,0.11476 +6278.6,4.1219e+06,0.11451 +6289.6,4.1201e+06,0.11425 
+6300.5,4.1182e+06,0.114 +6311.4,4.1163e+06,0.11376 +6322.3,4.1144e+06,0.11351 +6333.3,4.1125e+06,0.11327 +6344.2,4.1106e+06,0.11303 +6357.5,4.1083e+06,0.11274 +6370.8,4.106e+06,0.11246 +6384.2,4.1036e+06,0.11218 +6397.5,4.1013e+06,0.1119 +6410.8,4.0989e+06,0.11163 +6424.1,4.0965e+06,0.11136 +6437.5,4.0941e+06,0.11109 +6450.8,4.0917e+06,0.11083 +6464.1,4.0893e+06,0.11057 +6477.5,4.0869e+06,0.11031 +6490.8,4.0844e+06,0.11006 +6504.1,4.082e+06,0.10981 +6517.4,4.0795e+06,0.10956 +6530.8,4.0771e+06,0.10932 +6544.1,4.0746e+06,0.10908 +6557.4,4.0721e+06,0.10884 +6570.7,4.0696e+06,0.10861 +6584.1,4.0671e+06,0.10837 +6597.4,4.0646e+06,0.10814 +6610.7,4.062e+06,0.10792 +6624,4.0595e+06,0.1077 +6637.4,4.0569e+06,0.10747 +6650.7,4.0544e+06,0.10726 +6664,4.0518e+06,0.10704 +6677.4,4.0492e+06,0.10683 +6690.7,4.0466e+06,0.10662 +6704,4.044e+06,0.10641 +6717.3,4.0414e+06,0.10621 +6733.6,4.0382e+06,0.10596 +6749.8,4.035e+06,0.10572 +6766,4.0318e+06,0.10548 +6782.2,4.0285e+06,0.10524 +6798.4,4.0253e+06,0.10501 +6814.7,4.022e+06,0.10478 +6830.9,4.0187e+06,0.10455 +6847.1,4.0154e+06,0.10433 +6863.3,4.0121e+06,0.10411 +6879.6,4.0088e+06,0.10389 +6895.8,4.0054e+06,0.10368 +6912,4.0021e+06,0.10346 +6928.2,3.9987e+06,0.10326 +6944.4,3.9953e+06,0.10305 +6960.7,3.9919e+06,0.10285 +6976.9,3.9885e+06,0.10265 +6993.1,3.985e+06,0.10245 +7009.3,3.9816e+06,0.10225 +7025.5,3.9781e+06,0.10206 +7041.8,3.9747e+06,0.10187 +7058,3.9712e+06,0.10168 +7074.2,3.9677e+06,0.1015 +7090.4,3.9641e+06,0.10132 +7106.7,3.9606e+06,0.10113 +7122.9,3.957e+06,0.10096 +7139.1,3.9535e+06,0.10078 +7155.3,3.9499e+06,0.10061 +7171.5,3.9463e+06,0.10044 +7191.2,3.9419e+06,0.10023 +7210.8,3.9376e+06,0.10003 +7230.4,3.9332e+06,0.099833 +7250,3.9287e+06,0.099637 +7269.6,3.9243e+06,0.099445 +7289.3,3.9198e+06,0.099256 +7308.9,3.9153e+06,0.099069 +7328.5,3.9108e+06,0.098886 +7348.1,3.9063e+06,0.098704 +7367.7,3.9017e+06,0.098526 +7387.4,3.8972e+06,0.09835 +7407,3.8926e+06,0.098177 +7426.6,3.888e+06,0.098007 
+7446.2,3.8833e+06,0.097838 +7465.8,3.8787e+06,0.097673 +7485.5,3.874e+06,0.097509 +7505.1,3.8693e+06,0.097348 +7524.7,3.8646e+06,0.09719 +7544.3,3.8599e+06,0.097033 +7563.9,3.8551e+06,0.096879 +7583.6,3.8503e+06,0.096727 +7603.2,3.8456e+06,0.096577 +7622.8,3.8407e+06,0.096429 +7642.4,3.8359e+06,0.096284 +7662,3.8311e+06,0.09614 +7681.7,3.8262e+06,0.095998 +7701.3,3.8213e+06,0.095859 +7720.9,3.8164e+06,0.095721 +7740.5,3.8115e+06,0.095585 +7760.1,3.8065e+06,0.095451 +7779.8,3.8015e+06,0.095319 +7799.4,3.7966e+06,0.095189 +7819,3.7915e+06,0.095061 +7838.6,3.7865e+06,0.094934 +7858.2,3.7815e+06,0.094809 +7877.9,3.7764e+06,0.094686 +7897.5,3.7713e+06,0.094564 +7917.1,3.7662e+06,0.094444 +7936.7,3.7611e+06,0.094326 +7956.3,3.756e+06,0.09421 +7976,3.7508e+06,0.094095 +7995.6,3.7456e+06,0.093981 +8015.2,3.7404e+06,0.093869 +8034.8,3.7352e+06,0.093759 +8054.5,3.73e+06,0.09365 +8074.1,3.7247e+06,0.093543 +8093.7,3.7194e+06,0.093437 +8113.3,3.7141e+06,0.093332 +8132.2,3.709e+06,0.093233 +8151,3.7039e+06,0.093135 +8169.9,3.6988e+06,0.093038 +8188.8,3.6936e+06,0.092943 +8207.6,3.6884e+06,0.092849 +8226.5,3.6832e+06,0.092756 +8245.4,3.678e+06,0.092664 +8264.2,3.6728e+06,0.092574 +8283.1,3.6676e+06,0.092484 +8302,3.6623e+06,0.092396 +8320.9,3.657e+06,0.092309 +8339.7,3.6517e+06,0.092223 +8358.6,3.6464e+06,0.092139 +8377.5,3.6411e+06,0.092055 +8396.3,3.6357e+06,0.091973 +8415.2,3.6304e+06,0.091891 +8434.1,3.625e+06,0.091811 +8452.9,3.6196e+06,0.091731 +8471.8,3.6142e+06,0.091653 +8490.7,3.6087e+06,0.091576 +8509.5,3.6033e+06,0.0915 +8528.4,3.5978e+06,0.091425 +8547.3,3.5924e+06,0.09135 +8566.1,3.5869e+06,0.091277 +8589,3.5802e+06,0.09119 +8611.9,3.5735e+06,0.091104 +8634.8,3.5667e+06,0.091019 +8657.7,3.56e+06,0.090936 +8680.5,3.5532e+06,0.090854 +8703.4,3.5464e+06,0.090774 +8726.3,3.5395e+06,0.090695 +8749.2,3.5327e+06,0.090617 +8772.1,3.5258e+06,0.09054 +8794.9,3.5189e+06,0.090465 +8817.8,3.512e+06,0.090391 +8840.7,3.505e+06,0.090319 +8863.6,3.498e+06,0.090248 
+8886.5,3.491e+06,0.090177 +8909.3,3.484e+06,0.090109 +8932.2,3.477e+06,0.090041 +8955.1,3.4699e+06,0.089975 +8978,3.4628e+06,0.089909 +9000.9,3.4557e+06,0.089845 +9023.8,3.4485e+06,0.089783 +9046.6,3.4414e+06,0.089721 +9069.5,3.4342e+06,0.08966 +9092.4,3.427e+06,0.089601 +9115.3,3.4198e+06,0.089543 +9138.2,3.4125e+06,0.089486 +9161,3.4052e+06,0.08943 +9183.9,3.3979e+06,0.089375 +9206.8,3.3906e+06,0.089321 +9234.5,3.3817e+06,0.089258 +9262.2,3.3728e+06,0.089196 +9289.9,3.3638e+06,0.089136 +9317.6,3.3548e+06,0.089077 +9345.3,3.3458e+06,0.08902 +9373,3.3367e+06,0.088964 +9400.7,3.3277e+06,0.08891 +9428.4,3.3185e+06,0.088858 +9456.1,3.3094e+06,0.088807 +9483.8,3.3002e+06,0.088758 +9511.5,3.291e+06,0.08871 +9539.2,3.2817e+06,0.088663 +9566.9,3.2724e+06,0.088619 +9594.6,3.2631e+06,0.088575 +9622.3,3.2538e+06,0.088533 +9650,3.2444e+06,0.088493 +9684.1,3.2328e+06,0.088446 +9718.2,3.2212e+06,0.0884 +9752.3,3.2095e+06,0.088357 +9786.4,3.1978e+06,0.088317 +9820.5,3.186e+06,0.088278 +9854.6,3.1742e+06,0.088242 +9888.7,3.1623e+06,0.088208 +9922.8,3.1504e+06,0.088176 +9970.4,3.1337e+06,0.088136 +10018,3.1169e+06,0.0881 +10066,3.1001e+06,0.088069 +10113,3.0831e+06,0.088042 +10148,3.0707e+06,0.088025 +10183,3.0582e+06,0.08801 +10218,3.0456e+06,0.087998 +10252,3.0331e+06,0.087989 +10287,3.0204e+06,0.087982 +10322,3.0078e+06,0.087977 +10357,2.9951e+06,0.087975 +10391,2.9823e+06,0.087975 +10426,2.9695e+06,0.087978 +10461,2.9567e+06,0.087984 +10496,2.9438e+06,0.087992 +10531,2.9309e+06,0.088002 +10565,2.9179e+06,0.088016 +10600,2.9049e+06,0.088032 +10635,2.8918e+06,0.08805 +10670,2.8788e+06,0.088072 +10705,2.8656e+06,0.088096 +10739,2.8525e+06,0.088123 +10774,2.8393e+06,0.088152 +10809,2.826e+06,0.088185 +10844,2.8128e+06,0.08822 +10878,2.7994e+06,0.088259 +10913,2.7861e+06,0.0883 +10948,2.7727e+06,0.088344 +10983,2.7593e+06,0.088392 +11018,2.7458e+06,0.088442 +11052,2.7323e+06,0.088495 +11087,2.7188e+06,0.088552 +11122,2.7052e+06,0.088612 +11157,2.6917e+06,0.088675 
+11191,2.678e+06,0.088741 +11226,2.6644e+06,0.088811 +11261,2.6507e+06,0.088884 +11296,2.637e+06,0.08896 +11330,2.6233e+06,0.08904 +11365,2.6095e+06,0.089123 +11400,2.5957e+06,0.08921 +11435,2.5819e+06,0.089301 +11469,2.5681e+06,0.089395 +11504,2.5542e+06,0.089493 +11539,2.5403e+06,0.089595 +11574,2.5264e+06,0.089701 +11608,2.5124e+06,0.089811 +11643,2.4984e+06,0.089924 +11678,2.4844e+06,0.090042 +11713,2.4704e+06,0.090164 +11747,2.4564e+06,0.09029 +11782,2.4423e+06,0.090421 +11817,2.4282e+06,0.090555 +11852,2.4141e+06,0.090695 +11886,2.4e+06,0.090838 +11921,2.3859e+06,0.090987 +11956,2.3717e+06,0.09114 +11990,2.3575e+06,0.091297 +12025,2.3433e+06,0.09146 +12060,2.3291e+06,0.091627 +12095,2.3149e+06,0.0918 +12129,2.3006e+06,0.091977 +12164,2.2864e+06,0.09216 +12199,2.2721e+06,0.092348 +12234,2.2579e+06,0.092541 +12268,2.2436e+06,0.09274 +12303,2.2293e+06,0.092944 +12338,2.215e+06,0.093154 +12373,2.2007e+06,0.093369 +12407,2.1863e+06,0.093591 +12442,2.172e+06,0.093818 +12477,2.1577e+06,0.094052 +12511,2.1434e+06,0.094291 +12546,2.129e+06,0.094537 +12581,2.1147e+06,0.094789 +12616,2.1003e+06,0.095048 +12650,2.086e+06,0.095313 +12685,2.0717e+06,0.095585 +12720,2.0573e+06,0.095864 +12755,2.043e+06,0.09615 +12789,2.0287e+06,0.096442 +12824,2.0144e+06,0.096742 +12859,2e+06,0.09705 +12894,1.9857e+06,0.097365 +12928,1.9714e+06,0.097687 +12963,1.9571e+06,0.098017 +12998,1.9428e+06,0.098355 +13033,1.9286e+06,0.098701 +13067,1.9143e+06,0.099056 +13102,1.9001e+06,0.099418 +13137,1.8858e+06,0.099789 +13171,1.8716e+06,0.10017 +13206,1.8574e+06,0.10056 +13241,1.8433e+06,0.10095 +13276,1.8291e+06,0.10136 +13310,1.815e+06,0.10178 +13345,1.8008e+06,0.1022 +13380,1.7867e+06,0.10264 +13415,1.7727e+06,0.10308 +13449,1.7586e+06,0.10353 +13484,1.7446e+06,0.104 +13519,1.7306e+06,0.10447 +13554,1.7166e+06,0.10496 +13588,1.7027e+06,0.10545 +13609,1.6942e+06,0.10576 +13631,1.6858e+06,0.10607 +13652,1.6773e+06,0.10639 +13673,1.6689e+06,0.10671 +13694,1.6605e+06,0.10703 
+13715,1.6521e+06,0.10736 +13736,1.6437e+06,0.10769 +13757,1.6353e+06,0.10803 +13779,1.6269e+06,0.10837 +13800,1.6186e+06,0.10871 +13821,1.6102e+06,0.10906 +13842,1.6019e+06,0.10942 +13863,1.5936e+06,0.10977 +13884,1.5853e+06,0.11014 +13905,1.577e+06,0.11051 +13927,1.5687e+06,0.11088 +13948,1.5605e+06,0.11126 +13969,1.5522e+06,0.11164 +13990,1.544e+06,0.11203 +14011,1.5358e+06,0.11242 +14032,1.5276e+06,0.11281 +14053,1.5194e+06,0.11322 +14075,1.5113e+06,0.11362 +14096,1.5031e+06,0.11404 +14121,1.4934e+06,0.11454 +14147,1.4836e+06,0.11505 +14172,1.4739e+06,0.11557 +14197,1.4642e+06,0.11609 +14223,1.4546e+06,0.11663 +14248,1.4449e+06,0.11717 +14274,1.4353e+06,0.11772 +14299,1.4257e+06,0.11828 +14325,1.4162e+06,0.11884 +14350,1.4067e+06,0.11942 +14375,1.3972e+06,0.12 +14401,1.3877e+06,0.12059 +14426,1.3783e+06,0.12119 +14452,1.3689e+06,0.1218 +14477,1.3595e+06,0.12242 +14503,1.3501e+06,0.12305 +14528,1.3408e+06,0.12368 +14554,1.3316e+06,0.12433 +14579,1.3223e+06,0.12499 +14604,1.3131e+06,0.12565 +14626,1.3051e+06,0.12623 +14648,1.2972e+06,0.12682 +14670,1.2893e+06,0.12742 +14692,1.2814e+06,0.12803 +14714,1.2736e+06,0.12864 +14736,1.2658e+06,0.12926 +14758,1.258e+06,0.12989 +14780,1.2502e+06,0.13053 +14802,1.2425e+06,0.13117 +14824,1.2347e+06,0.13182 +14846,1.2271e+06,0.13248 +14869,1.2194e+06,0.13315 +14891,1.2118e+06,0.13383 +14913,1.2042e+06,0.13451 +14935,1.1966e+06,0.1352 +14957,1.189e+06,0.13591 +14979,1.1815e+06,0.13662 +15001,1.174e+06,0.13733 +15023,1.1665e+06,0.13806 +15045,1.1591e+06,0.1388 +15067,1.1517e+06,0.13954 +15089,1.1443e+06,0.1403 +15111,1.1369e+06,0.14106 +15133,1.1296e+06,0.14183 +15155,1.1223e+06,0.14261 +15177,1.1151e+06,0.14341 +15199,1.1078e+06,0.14421 +15221,1.1006e+06,0.14502 +15241,1.0941e+06,0.14577 +15261,1.0875e+06,0.14652 +15281,1.081e+06,0.14729 +15301,1.0745e+06,0.14806 +15321,1.0681e+06,0.14884 +15341,1.0616e+06,0.14963 +15361,1.0552e+06,0.15043 +15382,1.0489e+06,0.15124 +15402,1.0425e+06,0.15206 +15422,1.0362e+06,0.15288 
+15442,1.0299e+06,0.15371 +15462,1.0236e+06,0.15456 +15482,1.0174e+06,0.15541 +15502,1.0111e+06,0.15627 +15522,1.0049e+06,0.15714 +15542,9.9876e+05,0.15802 +15562,9.9261e+05,0.15891 +15583,9.865e+05,0.15981 +15603,9.804e+05,0.16071 +15623,9.7434e+05,0.16163 +15643,9.683e+05,0.16256 +15663,9.6229e+05,0.16349 +15683,9.563e+05,0.16444 +15703,9.5034e+05,0.1654 +15723,9.4441e+05,0.16636 +15743,9.3851e+05,0.16734 +15764,9.3263e+05,0.16833 +15784,9.2678e+05,0.16932 +15804,9.2099e+05,0.17033 +15824,9.1523e+05,0.17134 +15844,9.0949e+05,0.17236 +15864,9.0379e+05,0.17339 +15884,8.981e+05,0.17443 +15904,8.9245e+05,0.17549 +15924,8.8682e+05,0.17655 +15943,8.8122e+05,0.17763 +15963,8.7565e+05,0.17871 +15983,8.701e+05,0.17981 +16003,8.6458e+05,0.18092 +16023,8.5909e+05,0.18204 +16043,8.5362e+05,0.18317 +16063,8.4818e+05,0.18431 +16083,8.4277e+05,0.18547 +16103,8.3738e+05,0.18663 +16123,8.3202e+05,0.18781 +16143,8.2669e+05,0.189 +16163,8.2139e+05,0.1902 +16183,8.1611e+05,0.19141 +16203,8.1086e+05,0.19263 +16223,8.0563e+05,0.19387 +16243,8.0043e+05,0.19512 +16263,7.9526e+05,0.19638 +16283,7.9012e+05,0.19765 +16303,7.85e+05,0.19894 +16323,7.7991e+05,0.20024 +16343,7.7484e+05,0.20155 +16363,7.6981e+05,0.20287 +16383,7.648e+05,0.20421 +16403,7.5981e+05,0.20556 +16423,7.5485e+05,0.20692 +16443,7.4992e+05,0.20829 +16463,7.4502e+05,0.20968 +16483,7.4014e+05,0.21108 +16503,7.3529e+05,0.2125 +16523,7.3046e+05,0.21393 +16543,7.2566e+05,0.21537 +16563,7.2089e+05,0.21682 +16583,7.1614e+05,0.21829 +16603,7.1142e+05,0.21978 +16623,7.0672e+05,0.22127 +16643,7.0205e+05,0.22278 +16663,6.9741e+05,0.22431 +16683,6.9279e+05,0.22585 +16703,6.882e+05,0.2274 +16723,6.8363e+05,0.22897 +16743,6.7909e+05,0.23055 +16763,6.7458e+05,0.23215 +16783,6.7009e+05,0.23376 +16803,6.6563e+05,0.23538 +16823,6.6119e+05,0.23702 +16843,6.5677e+05,0.23868 +16863,6.5239e+05,0.24035 +16883,6.4802e+05,0.24203 +16903,6.4369e+05,0.24373 +16927,6.3839e+05,0.24584 +16952,6.3313e+05,0.24797 +16976,6.2791e+05,0.25012 
+17001,6.2273e+05,0.2523 +17025,6.1758e+05,0.2545 +17050,6.1247e+05,0.25672 +17074,6.074e+05,0.25897 +17099,6.0236e+05,0.26124 +17124,5.9736e+05,0.26354 +17148,5.9239e+05,0.26586 +17173,5.8747e+05,0.2682 +17197,5.8257e+05,0.27056 +17231,5.7599e+05,0.27382 +17264,5.6946e+05,0.27711 +17297,5.6301e+05,0.28045 +17331,5.5661e+05,0.28384 +17346,5.5362e+05,0.28545 +17362,5.5064e+05,0.28708 +17378,5.4767e+05,0.28871 +17393,5.4472e+05,0.29036 +17409,5.4178e+05,0.29201 +17425,5.3886e+05,0.29367 +17441,5.3594e+05,0.29535 +17456,5.3305e+05,0.29703 +17472,5.3016e+05,0.29873 +17488,5.2729e+05,0.30043 +17503,5.2444e+05,0.30215 +17519,5.216e+05,0.30387 +17535,5.1877e+05,0.30561 +17551,5.1595e+05,0.30735 +17566,5.1315e+05,0.30911 +17582,5.1036e+05,0.31088 +17598,5.0758e+05,0.31265 +17614,5.0482e+05,0.31444 +17629,5.0207e+05,0.31624 +17645,4.9933e+05,0.31805 +17661,4.9661e+05,0.31986 +17676,4.939e+05,0.32169 +17692,4.912e+05,0.32353 +17708,4.8851e+05,0.32538 +17724,4.8584e+05,0.32724 +17739,4.8318e+05,0.32911 +17755,4.8053e+05,0.33099 +17771,4.779e+05,0.33288 +17784,4.7575e+05,0.33443 +17797,4.7361e+05,0.33599 +17809,4.7149e+05,0.33756 +17822,4.6937e+05,0.33913 +17835,4.6725e+05,0.34071 +17848,4.6515e+05,0.3423 +17861,4.6305e+05,0.34389 +17874,4.6097e+05,0.34549 +17887,4.5889e+05,0.3471 +17899,4.5681e+05,0.34872 +17912,4.5475e+05,0.35034 +17925,4.5269e+05,0.35197 +17938,4.5065e+05,0.3536 +17951,4.4861e+05,0.35524 +17964,4.4657e+05,0.35689 +17977,4.4455e+05,0.35854 +17989,4.4253e+05,0.36021 +18002,4.4052e+05,0.36187 +18015,4.3852e+05,0.36355 +18028,4.3653e+05,0.36523 +18041,4.3454e+05,0.36692 +18054,4.3256e+05,0.36861 +18067,4.3059e+05,0.37032 +18079,4.2863e+05,0.37202 +18095,4.2626e+05,0.3741 +18110,4.2391e+05,0.37619 +18126,4.2156e+05,0.37828 +18142,4.1923e+05,0.38039 +18157,4.1691e+05,0.3825 +18173,4.146e+05,0.38462 +18188,4.123e+05,0.38676 +18204,4.1001e+05,0.3889 +18219,4.0773e+05,0.39105 +18235,4.0546e+05,0.39321 +18250,4.032e+05,0.39538 +18266,4.0095e+05,0.39756 
+18288,3.9782e+05,0.40062 +18310,3.9471e+05,0.4037 +18331,3.9162e+05,0.4068 +18353,3.8855e+05,0.40992 +18371,3.8608e+05,0.41245 +18388,3.8362e+05,0.41499 +18406,3.8118e+05,0.41755 +18423,3.7875e+05,0.42011 +18441,3.7634e+05,0.42269 +18459,3.7393e+05,0.42527 +18476,3.7154e+05,0.42787 +18494,3.6916e+05,0.43047 +18511,3.6679e+05,0.43309 +18529,3.6443e+05,0.43572 +18547,3.6208e+05,0.43836 +18564,3.5975e+05,0.441 +18582,3.5743e+05,0.44366 +18599,3.5512e+05,0.44633 +18617,3.5282e+05,0.44901 +18635,3.5053e+05,0.45169 +18652,3.4825e+05,0.45439 +18670,3.4599e+05,0.45709 +18687,3.4373e+05,0.45981 +18705,3.4149e+05,0.46253 +18722,3.3925e+05,0.46527 +18740,3.3703e+05,0.46801 +18758,3.3482e+05,0.47076 +18775,3.3262e+05,0.47352 +18793,3.3043e+05,0.47628 +18810,3.2825e+05,0.47906 +18828,3.2608e+05,0.48184 +18846,3.2392e+05,0.48463 +18863,3.2177e+05,0.48743 +18881,3.1963e+05,0.49024 +18898,3.175e+05,0.49305 +18916,3.1539e+05,0.49587 +18934,3.1328e+05,0.4987 +18951,3.1118e+05,0.50153 +18969,3.0909e+05,0.50438 +18986,3.0701e+05,0.50722 +19004,3.0494e+05,0.51008 +19021,3.0288e+05,0.51294 +19039,3.0083e+05,0.5158 +19057,2.9878e+05,0.51868 +19074,2.9675e+05,0.52155 +19092,2.9473e+05,0.52444 +19109,2.9271e+05,0.52733 +19127,2.9071e+05,0.53022 +19145,2.8871e+05,0.53312 +19162,2.8672e+05,0.53602 +19180,2.8474e+05,0.53893 +19197,2.8277e+05,0.54184 +19215,2.8081e+05,0.54475 +19232,2.7886e+05,0.54767 +19250,2.7691e+05,0.55059 +19268,2.7498e+05,0.55352 +19285,2.7305e+05,0.55645 +19303,2.7113e+05,0.55938 +19320,2.6922e+05,0.56232 +19338,2.6731e+05,0.56525 +19356,2.6542e+05,0.56819 +19373,2.6353e+05,0.57114 +19391,2.6165e+05,0.57408 +19408,2.5978e+05,0.57703 +19426,2.5791e+05,0.57997 +19444,2.5605e+05,0.58292 +19461,2.5421e+05,0.58587 +19479,2.5236e+05,0.58882 +19496,2.5053e+05,0.59177 +19514,2.487e+05,0.59472 +19531,2.4688e+05,0.59767 +19549,2.4507e+05,0.60062 +19567,2.4326e+05,0.60357 +19584,2.4147e+05,0.60652 +19602,2.3967e+05,0.60947 +19619,2.3789e+05,0.61242 +19637,2.3611e+05,0.61537 
+19655,2.3434e+05,0.61831 +19672,2.3258e+05,0.62125 +19690,2.3082e+05,0.6242 +19707,2.2907e+05,0.62713 +19725,2.2733e+05,0.63007 +19743,2.2559e+05,0.63301 +19760,2.2386e+05,0.63594 +19778,2.2213e+05,0.63886 +19795,2.2041e+05,0.64179 +19813,2.187e+05,0.64471 +19830,2.17e+05,0.64763 +19845,2.1562e+05,0.64999 +19859,2.1424e+05,0.65235 +19873,2.1287e+05,0.6547 +19887,2.1151e+05,0.65705 +19902,2.1014e+05,0.6594 +19916,2.0879e+05,0.66175 +19930,2.0743e+05,0.66409 +19944,2.0608e+05,0.66643 +19959,2.0473e+05,0.66877 +19973,2.0339e+05,0.6711 +19987,2.0205e+05,0.67343 +20001,2.0071e+05,0.67575 +20016,1.9938e+05,0.67807 +20030,1.9805e+05,0.68038 +20044,1.9672e+05,0.6827 +20059,1.954e+05,0.685 +20073,1.9408e+05,0.6873 +20087,1.9277e+05,0.6896 +20101,1.9146e+05,0.69189 +20116,1.9015e+05,0.69418 +20130,1.8884e+05,0.69646 +20144,1.8754e+05,0.69874 +20158,1.8624e+05,0.70101 +20173,1.8495e+05,0.70328 +20187,1.8366e+05,0.70554 +20201,1.8237e+05,0.7078 +20215,1.8108e+05,0.71005 +20230,1.798e+05,0.71229 +20241,1.7879e+05,0.71406 +20252,1.7778e+05,0.71582 +20263,1.7678e+05,0.71759 +20275,1.7577e+05,0.71934 +20286,1.7477e+05,0.7211 +20297,1.7377e+05,0.72285 +20308,1.7277e+05,0.72459 +20320,1.7177e+05,0.72633 +20331,1.7078e+05,0.72807 +20342,1.6978e+05,0.7298 +20353,1.6879e+05,0.73153 +20365,1.678e+05,0.73325 +20376,1.6681e+05,0.73497 +20387,1.6583e+05,0.73669 +20398,1.6484e+05,0.7384 +20410,1.6386e+05,0.74011 +20421,1.6288e+05,0.74181 +20432,1.619e+05,0.74351 +20443,1.6092e+05,0.7452 +20455,1.5994e+05,0.74689 +20466,1.5897e+05,0.74858 +20477,1.58e+05,0.75025 +20488,1.5703e+05,0.75193 +20500,1.5606e+05,0.7536 +20514,1.5486e+05,0.75565 +20527,1.5367e+05,0.7577 +20541,1.5248e+05,0.75974 +20555,1.513e+05,0.76177 +20569,1.5011e+05,0.76379 +20583,1.4893e+05,0.7658 +20597,1.4775e+05,0.76781 +20611,1.4657e+05,0.76981 +20625,1.454e+05,0.7718 +20639,1.4423e+05,0.77378 +20652,1.4306e+05,0.77576 +20666,1.4189e+05,0.77773 +20680,1.4072e+05,0.77969 +20694,1.3956e+05,0.78164 +20708,1.384e+05,0.78358 
+20722,1.3724e+05,0.78551 +20739,1.3579e+05,0.78792 +20757,1.3435e+05,0.79032 +20774,1.329e+05,0.7927 +20791,1.3146e+05,0.79507 +20808,1.3008e+05,0.79734 +20825,1.2869e+05,0.7996 +20842,1.2731e+05,0.80185 +20859,1.2594e+05,0.80408 +20875,1.2456e+05,0.8063 +20892,1.2319e+05,0.80851 +20909,1.2182e+05,0.81071 +20926,1.2045e+05,0.81289 +20943,1.1909e+05,0.81505 +20959,1.1773e+05,0.81721 +20976,1.1637e+05,0.81935 +20993,1.1501e+05,0.82147 +21010,1.1366e+05,0.82358 +21027,1.123e+05,0.82568 +21043,1.1096e+05,0.82776 +21060,1.0961e+05,0.82983 +21077,1.0826e+05,0.83189 +21094,1.0692e+05,0.83393 +21111,1.0558e+05,0.83595 +21127,1.0425e+05,0.83797 +21144,1.0291e+05,0.83996 +21161,1.0158e+05,0.84195 +21178,1.0025e+05,0.84392 +21195,98918,0.84587 +21211,97591,0.84781 +21228,96267,0.84973 +21245,94944,0.85164 +21262,93624,0.85354 +21278,92306,0.85542 +21295,90989,0.85728 +21312,89675,0.85914 +21329,88362,0.86097 +21349,86788,0.86316 +21369,85217,0.86532 +21389,83648,0.86747 +21410,82081,0.86959 +21430,80518,0.87169 +21450,78956,0.87377 +21470,77397,0.87582 +21490,75841,0.87786 +21510,74286,0.87987 +21531,72734,0.88187 +21551,71185,0.88384 +21571,69637,0.88579 +21597,67652,0.88826 +21623,65671,0.8907 +21649,63693,0.8931 +21675,61718,0.89547 +21701,59746,0.8978 +21726,57778,0.9001 +21752,55812,0.90236 +21778,53849,0.90459 +21804,51889,0.90678 +21830,49932,0.90894 +21856,47977,0.91107 +21882,46025,0.91316 +21895,45062,0.91418 +21907,44100,0.91519 +21920,43138,0.91619 +21933,42177,0.91719 +21946,41216,0.91817 +21959,40256,0.91915 +21971,39297,0.92012 +21984,38338,0.92109 +21997,37379,0.92204 +22010,36421,0.92299 +22023,35464,0.92393 +22035,34507,0.92486 +22048,33550,0.92578 +22061,32594,0.92669 +22074,31639,0.9276 +22087,30684,0.9285 +22099,29729,0.92939 +22112,28775,0.93027 +22125,27821,0.93115 +22138,26868,0.93202 +22150,25915,0.93288 +22163,24962,0.93373 +22176,24010,0.93457 +22189,23058,0.93541 +22202,22107,0.93624 +22214,21155,0.93706 +22227,20205,0.93788 +22240,19254,0.93868 
+22248,18698,0.93915 +22255,18142,0.93962 +22262,17586,0.94008 +22270,17030,0.94054 +22277,16474,0.941 +22285,15919,0.94146 +22292,15363,0.94191 +22300,14808,0.94236 +22307,14252,0.94281 +22315,13697,0.94326 +22322,13142,0.9437 +22330,12587,0.94414 +22337,12032,0.94458 +22345,11477,0.94502 +22352,10922,0.94545 +22360,10367,0.94588 +22367,9812.9,0.94631 +22375,9258.4,0.94674 +22382,8703.9,0.94716 +22390,8149.6,0.94758 +22397,7595.4,0.948 +22405,7041.2,0.94842 +22412,6487.1,0.94883 +22420,5933.1,0.94924 +22423,5682.6,0.94943 +22427,5432.2,0.94961 +22430,5181.8,0.9498 +22433,4931.4,0.94998 +22437,4681,0.95017 +22440,4430.6,0.95035 +22443,4180.2,0.95053 +22447,3929.9,0.95071 +22450,3679.5,0.95089 +22454,3429.2,0.95108 +22457,3178.9,0.95126 +22460,2928.6,0.95144 +22464,2678.3,0.95162 +22467,2428.1,0.95179 +22471,2177.8,0.95197 +22474,1927.6,0.95215 +22477,1677.4,0.95233 +22481,1427.1,0.95251 +22484,1177,0.95268 +22487,926.77,0.95286 +22488,885.47,0.95289 +22489,844.17,0.95292 +22489,802.88,0.95295 +22490,761.58,0.95298 +22490,720.28,0.953 +22491,678.98,0.95303 +22491,637.69,0.95306 +22492,596.39,0.95309 +22492,555.1,0.95312 +22493,513.8,0.95315 +22494,472.51,0.95318 +22494,431.21,0.95321 +22495,389.92,0.95324 +22495,348.62,0.95326 +22496,307.33,0.95329 +22496,266.04,0.95332 +22497,251.85,0.95333 +22497,237.66,0.95334 +22497,223.48,0.95335 +22497,209.29,0.95336 +22497,195.1,0.95337 +22498,180.92,0.95338 +22498,166.73,0.95339 +22498,152.54,0.9534 +22498,138.36,0.95341 +22498,124.17,0.95342 +22499,109.98,0.95343 +22499,95.798,0.95344 +22499,81.613,0.95345 +22499,67.427,0.95346 +22499,53.242,0.95347 +22499,39.058,0.95348 +22500,35.802,0.95348 +22500,32.547,0.95349 +22500,29.292,0.95349 +22500,26.038,0.95349 +22500,22.783,0.95349 +22500,19.528,0.95349 +22500,16.273,0.9535 +22500,13.019,0.9535 +22500,9.7635,0.9535 +22500,6.5085,0.9535 +22500,3.254,0.95351 +22500,6.9849e-10,0.95351 diff --git a/compass/landice/tests/hydro_radial/restart_test/__init__.py 
b/compass/landice/tests/hydro_radial/restart_test/__init__.py new file mode 100644 index 0000000000..79ea1dc287 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/__init__.py @@ -0,0 +1,89 @@ +from compass.validate import compare_variables +from compass.testcase import TestCase +from compass.landice.tests.hydro_radial.setup_mesh import SetupMesh +from compass.landice.tests.hydro_radial.run_model import RunModel +from compass.landice.tests.hydro_radial.visualize import Visualize + + +class RestartTest(TestCase): + """ + A test case for performing two MALI runs of a radially symmetric + hydrological setup, one full run and one run broken into two segments with + a restart. The test case verifies that the results of the two runs are + identical. + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.hydro_radial.Dome + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='restart_test') + + self.add_step( + SetupMesh(test_case=self, initial_condition='zero')) + + name = 'full_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1) + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.hydro_radial.restart_test', + 'namelist.full', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.hydro_radial.restart_test', + 'streams.full', out_name='streams.landice') + self.add_step(step) + + input_dir = name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, name=name, subdir=name, + input_dir=input_dir) + self.add_step(step, run_by_default=False) + + name = 'restart_run' + step = RunModel(test_case=self, name=name, subdir=name, cores=4, + threads=1, + suffixes=['landice', 'landice.rst']) + + # modify the namelist options and streams file + step.add_namelist_file( + 'compass.landice.tests.hydro_radial.restart_test', + 
'namelist.restart', out_name='namelist.landice') + step.add_streams_file( + 'compass.landice.tests.hydro_radial.restart_test', + 'streams.restart', out_name='streams.landice') + + step.add_namelist_file( + 'compass.landice.tests.hydro_radial.restart_test', + 'namelist.restart.rst', out_name='namelist.landice.rst') + step.add_streams_file( + 'compass.landice.tests.hydro_radial.restart_test', + 'streams.restart.rst', out_name='streams.landice.rst') + self.add_step(step) + + input_dir = name + name = 'visualize_{}'.format(name) + step = Visualize(test_case=self, name=name, subdir=name, + input_dir=input_dir) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + variables = ['waterThickness', 'waterPressure'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/landice/tests/hydro_radial/restart_test/namelist.full b/compass/landice/tests/hydro_radial/restart_test/namelist.full new file mode 100644 index 0000000000..ddc45adb09 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/namelist.full @@ -0,0 +1,4 @@ +config_stop_time = '0002-01-01_00:00:00' +config_run_duration = 'none' +config_write_output_on_startup = .true. +config_do_restart = .false. diff --git a/compass/landice/tests/hydro_radial/restart_test/namelist.restart b/compass/landice/tests/hydro_radial/restart_test/namelist.restart new file mode 100644 index 0000000000..bc19a9c86e --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/namelist.restart @@ -0,0 +1,4 @@ +config_stop_time = '0001-01-01_00:00:00' +config_run_duration = 'none' +config_write_output_on_startup = .true. +config_do_restart = .false. 
diff --git a/compass/landice/tests/hydro_radial/restart_test/namelist.restart.rst b/compass/landice/tests/hydro_radial/restart_test/namelist.restart.rst new file mode 100644 index 0000000000..68ad16a403 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/namelist.restart.rst @@ -0,0 +1,4 @@ +config_start_time = '0001-01-01_00:00:00' +config_stop_time = '0002-01-01_00:00:00' +config_write_output_on_startup = .true. +config_do_restart = .true. diff --git a/compass/landice/tests/hydro_radial/restart_test/streams.full b/compass/landice/tests/hydro_radial/restart_test/streams.full new file mode 100644 index 0000000000..3b36468ca7 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/streams.full @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/hydro_radial/restart_test/streams.restart b/compass/landice/tests/hydro_radial/restart_test/streams.restart new file mode 100644 index 0000000000..3b36468ca7 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/streams.restart @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/hydro_radial/restart_test/streams.restart.rst b/compass/landice/tests/hydro_radial/restart_test/streams.restart.rst new file mode 100644 index 0000000000..4926498030 --- /dev/null +++ b/compass/landice/tests/hydro_radial/restart_test/streams.restart.rst @@ -0,0 +1,10 @@ + + + + + + + + diff --git a/compass/landice/tests/hydro_radial/run_model.py b/compass/landice/tests/hydro_radial/run_model.py new file mode 100644 index 0000000000..b782582e03 --- /dev/null +++ b/compass/landice/tests/hydro_radial/run_model.py @@ -0,0 +1,91 @@ +from compass.model import run_model +from compass.step import Step + + +class RunModel(Step): + """ + A step for performing forward MALI runs as part of radially symmetric + hydrological test cases. + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. 
Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + def __init__(self, test_case, name='run_model', subdir=None, + cores=1, min_cores=None, threads=1, suffixes=None): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str, optional + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + suffixes : list of str, optional + a list of suffixes for namelist and streams files produced + for this step. 
Most steps most runs will just have a + ``namelist.landice`` and a ``streams.landice`` (the default) but + the ``restart_run`` step of the ``restart_test`` runs the model + twice, the second time with ``namelist.landice.rst`` and + ``streams.landice.rst`` + """ + if suffixes is None: + suffixes = ['landice'] + self.suffixes = suffixes + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + for suffix in suffixes: + self.add_namelist_file( + 'compass.landice.tests.hydro_radial', 'namelist.landice', + out_name='namelist.{}'.format(suffix)) + + self.add_streams_file( + 'compass.landice.tests.hydro_radial', 'streams.landice', + out_name='streams.{}'.format(suffix)) + + self.add_input_file(filename='landice_grid.nc', + target='../setup_mesh/landice_grid.nc') + self.add_input_file(filename='graph.info', + target='../setup_mesh/graph.info') + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + for suffix in self.suffixes: + run_model(step=self, namelist='namelist.{}'.format(suffix), + streams='streams.{}'.format(suffix)) diff --git a/compass/landice/tests/hydro_radial/setup_mesh.py b/compass/landice/tests/hydro_radial/setup_mesh.py new file mode 100644 index 0000000000..babb222b07 --- /dev/null +++ b/compass/landice/tests/hydro_radial/setup_mesh.py @@ -0,0 +1,220 @@ +import numpy as np +from netCDF4 import Dataset as NetCDFFile +from importlib.resources import path + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull +from mpas_tools.logging import check_call + +from compass.model import make_graph_file +from compass.step import Step + + +class SetupMesh(Step): + 
""" + A step for creating a mesh and initial condition for dome test cases + + Attributes + ---------- + initial_condition : {'zero', 'exact'} + The type of initial condition to set up. 'zero' means nearly zero ice + thickness. 'exact' uses a precomputed near exact solution from a file. + """ + def __init__(self, test_case, initial_condition): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + initial_condition : {'zero', 'exact'} + The type of initial condition to set up. 'zero' means nearly zero + ice thickness. 'exact' uses a precomputed near exact solution from + a file. + """ + super().__init__(test_case=test_case, name='setup_mesh') + + self.initial_condition = initial_condition + + if initial_condition == 'exact': + filename = 'near_exact_solution_r_P_W.txt' + with path('compass.landice.tests.hydro_radial', filename) as target: + self.add_input_file(filename=filename, target=str(target)) + elif initial_condition != 'zero': + raise ValueError("Unknown initial condition type specified " + "{}.".format(initial_condition)) + + self.add_output_file(filename='graph.info') + self.add_output_file(filename='landice_grid.nc') + + # no setup() method is needed + + def run(self): + """ + Run this step of the test case + """ + initial_condition = self.initial_condition + logger = self.logger + section = self.config['hydro_radial'] + + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=True, + nonperiodic_y=True) + + write_netcdf(dsMesh, 'grid.nc') + + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, logger=logger) + write_netcdf(dsMesh, 'mpas_grid.nc') + + levels = section.get('levels') + args = ['create_landice_grid_from_generic_MPAS_grid.py', + '-i', 'mpas_grid.nc', + '-o', 'landice_grid.nc', + '-l', levels, + '--hydro', + '--diri'] + + check_call(args, logger) + + 
make_graph_file(mesh_filename='landice_grid.nc', + graph_filename='graph.info') + + _setup_hydro_radial_initial_conditions( + logger, filename='landice_grid.nc', + initial_condition=initial_condition) + + +def _setup_hydro_radial_initial_conditions(logger, filename, + initial_condition): + """ + Add the initial condition to the given MPAS mesh file + + Parameters + ---------- + logger : logging.Logger + A logger for output from the step + + filename : str + file to setup hydro_radial + + initial_condition : {'zero', 'exact'} + the type of initial condition + """ + # Open the file, get needed dimensions + gridfile = NetCDFFile(filename, 'r+') + nVertLevels = len(gridfile.dimensions['nVertLevels']) + # Get variables + xCell = gridfile.variables['xCell'] + yCell = gridfile.variables['yCell'] + xEdge = gridfile.variables['xEdge'] + yEdge = gridfile.variables['yEdge'] + xVertex = gridfile.variables['xVertex'] + yVertex = gridfile.variables['yVertex'] + thickness = gridfile.variables['thickness'] + bedTopography = gridfile.variables['bedTopography'] + layerThicknessFractions = gridfile.variables['layerThicknessFractions'] + + # Find center of domain + x0 = xCell[:].min() + 0.5 * (xCell[:].max() - xCell[:].min()) + y0 = yCell[:].min() + 0.5 * (yCell[:].max() - yCell[:].min()) + # Calculate distance of each cell center from dome center + r = ((xCell[:] - x0)**2 + (yCell[:] - y0)**2)**0.5 + + # Center the dome in the center of the cell that is closest to the center + # of the domain. + # NOTE: for some meshes, maybe we don't want to do this - could add + # command-line argument controlling this later. 
+ putOriginOnACell = True + if putOriginOnACell: + centerCellIndex = np.abs(r[:]).argmin() + xShift = -1.0 * xCell[centerCellIndex] + yShift = -1.0 * yCell[centerCellIndex] + xCell[:] = xCell[:] + xShift + yCell[:] = yCell[:] + yShift + xEdge[:] = xEdge[:] + xShift + yEdge[:] = yEdge[:] + yShift + xVertex[:] = xVertex[:] + xShift + yVertex[:] = yVertex[:] + yShift + # Now update origin location and distance array + x0 = 0.0 + y0 = 0.0 + r = ((xCell[:] - x0)**2 + (yCell[:] - y0)**2)**0.5 + + # center thickness (m) + h0 = 500.0 + # sliding velocity at margin (m/s) + v0 = 100.0 / (3600.0 * 24.0 * 365.0) + # ideal ice cap radius (m) + R0 = 25.0e3 + # onset of sliding (m) + R1 = 5.0e3 + # actual margin location (m) + L = 0.9 * R0 + + thickness[0, r < R0] = h0 * (1.0 - (r[r < R0] / R0)**2) + thickness[0, r > L] = 0.0 + + # flat bed + bedTopography[:] = 0.0 + + # Setup layerThicknessFractions + layerThicknessFractions[:] = 1.0 / nVertLevels + + # melt + gridfile.variables['basalMeltInput'][:] = 0.0 + # 20 cm/yr as SI mass rate + gridfile.variables['basalMeltInput'][:] = \ + 0.2 / (365.0 * 24.0 * 3600.0) * 1000.0 + # Use this line to only add a source term to the center cell - useful for + # debugging divergence + + # value from ramp + # gridfile.variables['basalMeltInput'][0,r==0.0] = 4.0e-10 * 1000.0 *100 + + # velocity + gridfile.variables['uReconstructX'][:] = 0.0 + velo = v0 * (r - R1)**5 / (L - R1)**5 + velo[r < R1] = 0.0 + gridfile.variables['uReconstructX'][0, :, -1] = velo + gridfile.variables['uReconstructX'][0, thickness[0, :] == 0.0, :] = 0.0 + + if initial_condition == 'zero': + logger.info("Using 'zero' option for initial condition.") + # set some small initial value to keep adaptive time stepper from + # taking a huge time step initially + gridfile.variables['waterThickness'][0, :] = 0.01 + gridfile.variables['waterPressure'][0, :] = 0.0 + elif initial_condition == 'exact': + logger.info("Using 'exact' option for initial condition.") + # IC on thickness + # 
import exact solution + fnameSoln = 'near_exact_solution_r_P_W.txt' + soln = np.loadtxt(fnameSoln, delimiter=',') + rsoln = soln[:, 0] + Psoln = soln[:, 1] + Wsoln = soln[:, 2] + + Wmpas = np.interp(r, rsoln, Wsoln) # apply exact solution + Wmpas[np.isnan(Wmpas)] = 0.0 + gridfile.variables['waterThickness'][0, :] = Wmpas + + # IC on water pressure + # apply exact solution + Pmpas = np.interp(r, rsoln, Psoln) + Pmpas[np.isnan(Pmpas)] = 0.0 + gridfile.variables['waterPressure'][0, :] = Pmpas + else: + raise ValueError("Unknown initial condition type specified " + "{}.".format(initial_condition)) + + gridfile.close() + + logger.info('Successfully added hydro_radial initial conditions to: ' + '{}'.format(filename)) diff --git a/compass/landice/tests/hydro_radial/spinup_test/__init__.py b/compass/landice/tests/hydro_radial/spinup_test/__init__.py new file mode 100644 index 0000000000..13c4467cda --- /dev/null +++ b/compass/landice/tests/hydro_radial/spinup_test/__init__.py @@ -0,0 +1,43 @@ +from compass.testcase import TestCase +from compass.landice.tests.hydro_radial.setup_mesh import SetupMesh +from compass.landice.tests.hydro_radial.run_model import RunModel +from compass.landice.tests.hydro_radial.visualize import Visualize + + +class SpinupTest(TestCase): + """ + A spin-up test case for the radially symmetric hydrological test group that + creates the mesh and initial condition, then performs a long short forward + run on 4 cores until a quasi-steady state is reached. 
+ """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.hydro_radial.HydroRadial + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='spinup_test') + + self.add_step( + SetupMesh(test_case=self, initial_condition='zero')) + step = RunModel(test_case=self, cores=4, threads=1) + step.add_namelist_file( + 'compass.landice.tests.hydro_radial.spinup_test', + 'namelist.landice') + + step.add_streams_file( + 'compass.landice.tests.hydro_radial.spinup_test', + 'streams.landice') + self.add_step(step) + + step = Visualize(test_case=self) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + # no run() method is needed because we're doing the default: running all + # steps diff --git a/compass/landice/tests/hydro_radial/spinup_test/namelist.landice b/compass/landice/tests/hydro_radial/spinup_test/namelist.landice new file mode 100644 index 0000000000..c9226b14e1 --- /dev/null +++ b/compass/landice/tests/hydro_radial/spinup_test/namelist.landice @@ -0,0 +1,3 @@ +config_stop_time = '10000-01-01_00:00:00' +config_year_digits = 6 +config_SGH_englacial_porosity = 0.1 diff --git a/compass/landice/tests/hydro_radial/spinup_test/streams.landice b/compass/landice/tests/hydro_radial/spinup_test/streams.landice new file mode 100644 index 0000000000..7dc69e1efe --- /dev/null +++ b/compass/landice/tests/hydro_radial/spinup_test/streams.landice @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/landice/tests/hydro_radial/steady_state_drift_test/__init__.py b/compass/landice/tests/hydro_radial/steady_state_drift_test/__init__.py new file mode 100644 index 0000000000..ea6b1008fe --- /dev/null +++ b/compass/landice/tests/hydro_radial/steady_state_drift_test/__init__.py @@ -0,0 +1,34 @@ +from compass.testcase import TestCase +from compass.landice.tests.hydro_radial.setup_mesh import SetupMesh +from 
compass.landice.tests.hydro_radial.run_model import RunModel +from compass.landice.tests.hydro_radial.visualize import Visualize + + +class SteadyStateDriftTest(TestCase): + """ + This test case assesses the drift of the model away from an initial + condition that is a quasi-exact solution. + """ + + def __init__(self, test_group): + """ + Create the test case + + Parameters + ---------- + test_group : compass.landice.tests.hydro_radial.HydroRadial + The test group that this test case belongs to + """ + super().__init__(test_group=test_group, name='steady_state_drift_test') + + self.add_step( + SetupMesh(test_case=self, initial_condition='exact')) + self.add_step( + RunModel(test_case=self, cores=4, threads=1)) + step = Visualize(test_case=self) + self.add_step(step, run_by_default=False) + + # no configure() method is needed + + # no run() method is needed because we're doing the default: running all + # steps diff --git a/compass/landice/tests/hydro_radial/streams.landice b/compass/landice/tests/hydro_radial/streams.landice new file mode 100644 index 0000000000..8eb91b93a3 --- /dev/null +++ b/compass/landice/tests/hydro_radial/streams.landice @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + diff --git a/compass/landice/tests/hydro_radial/visualize.py b/compass/landice/tests/hydro_radial/visualize.py new file mode 100644 index 0000000000..97a899675f --- /dev/null +++ b/compass/landice/tests/hydro_radial/visualize.py @@ -0,0 +1,300 @@ +import numpy as np +import netCDF4 +import matplotlib.pyplot as plt +from importlib.resources import path + +from compass.step import Step + + +class Visualize(Step): + """ + A step for visualizing the output from a dome test case + """ + def __init__(self, test_case, name='visualize', subdir=None, + input_dir='run_model'): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str, optional + the name of the test case + + subdir : str, optional + the 
subdirectory for the step. The default is ``name`` + + input_dir : str, optional + The input directory within the test case with a file ``output.nc`` + to visualize + """ + super().__init__(test_case=test_case, name=name, subdir=subdir) + + self.add_input_file(filename='output.nc', + target='../{}/output.nc'.format(input_dir)) + + self.add_input_file(filename='landice_grid.nc', + target='../{}/landice_grid.nc'.format(input_dir)) + + filename = 'near_exact_solution_r_P_W.txt' + with path('compass.landice.tests.hydro_radial', filename) as target: + self.add_input_file(filename=filename, target=str(target)) + + # depending on settings, this will produce no outputs, so we won't add any + + # no setup method is needed + + def run(self): + """ + Run this step of the test case + """ + visualize_hydro_radial(self.config, self.logger) + + +def visualize_hydro_radial(config, logger): + """ + Plot the output from a hydro_radial test case + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults + for the machine, core and configuration + + logger : logging.Logger + A logger for output from the step + """ + section = config['hydro_radial_viz'] + + time_slice = section.getint('time_slice') + save_images = section.getboolean('save_images') + hide_figs = section.getboolean('hide_figs') + + filename = 'output.nc' + grid_filename = 'landice_grid.nc' + + f = netCDF4.Dataset(filename, 'r') + xCell = f.variables['xCell'][:] + yCell = f.variables['yCell'][:] + xEdge = f.variables['xEdge'][:] + yEdge = f.variables['yEdge'][:] + h = f.variables['waterThickness'][time_slice, :] + u = f.variables['waterVelocityCellX'][time_slice, :] + P = f.variables['waterPressure'][time_slice, :] + N = f.variables['effectivePressure'][time_slice, :] + div = f.variables['divergence'][time_slice, :] + opening = f.variables['openingRate'][time_slice, :] + closing = f.variables['closingRate'][time_slice, :] + melt = 
f.variables['basalMeltInput'][time_slice, :] + sliding = f.variables['basalSpeed'][time_slice, :] + days = f.variables['daysSinceStart'][:] + + logger.info("Total number of time levels={}".format(len(days))) + logger.info("Using time slice {}, which is year {}".format( + time_slice, days[time_slice] / 365.0)) + + logger.info("Attempting to read thickness field from " + "{}.".format(grid_filename)) + fin = netCDF4.Dataset(grid_filename, 'r') + H = fin.variables['thickness'][0, :] + + # Find center row - currently files are set up to have central row at y=0 + unique_ys = np.unique(yCell[:]) + centerY = unique_ys[len(unique_ys) // 2] + logger.info("number of ys={}, center y index={}, center Y value={}".format( + len(unique_ys), len(unique_ys) // 2, centerY)) + ind = np.nonzero(yCell[:] == centerY) + x = xCell[ind] / 1000.0 + + logger.info("start plotting.") + + fig = plt.figure(1, facecolor='w') + + # import exact solution + fnameSoln = 'near_exact_solution_r_P_W.txt' + soln = np.loadtxt(fnameSoln, delimiter=',') + rsoln = soln[:, 0] / 1000.0 + Psoln = soln[:, 1] / 1.0e5 + Wsoln = soln[:, 2] + + # water thickness + ax1 = fig.add_subplot(121) + plt.plot(rsoln, Wsoln, 'k-', label='W exact') + plt.plot(x, h[ind], 'r.--', label='W model') + plt.xlabel('X-position (km)') + plt.ylabel('water depth (m)') + plt.legend() + plt.plot([5.0, 5.0], [0.0, 1.0], ':k') + plt.grid(True) + + # water pressure + fig.add_subplot(122, sharex=ax1) + plt.plot(x, H[ind] * 910.0 * 9.80616 / 1.0e5, 'g:', label='P_o') + plt.plot(rsoln, Psoln, 'k-', label='P_w exact') + plt.plot(x, P[ind] / 1.0e5, 'r.--', label='P_w model') + plt.xlabel('X-position (km)') + plt.ylabel('water pressure (bar)') + plt.legend() + plt.plot([5.0, 5.0], [0.0, 45.0], ':k') + plt.grid(True) + if save_images: + plt.savefig('hydro_radial_vs_exact.png', dpi=150) + + # plot how close to SS we are + fig = plt.figure(2, facecolor='w') + ax1 = fig.add_subplot(211) + for i in ind: + plt.plot(days / 365.0, 
f.variables['waterThickness'][:, i]) + plt.xlabel('Years since start') + plt.ylabel('water thickness (m)') + plt.grid(True) + + fig.add_subplot(212, sharex=ax1) + for i in ind: + plt.plot(days / 365.0, f.variables['effectivePressure'][:, i] / 1.0e6) + plt.xlabel('Years since start') + plt.ylabel('effective pressure (MPa)') + plt.grid(True) + + if save_images: + plt.savefig('hydro_radial_steady_state.png', dpi=150) + + # plot opening/closing rates + fig = plt.figure(3, facecolor='w') + + nplt = 5 + + fig.add_subplot(nplt, 1, 1) + plt.plot(x, opening[ind], 'r', label='opening') + plt.plot(x, closing[ind], 'b', label='closing') + plt.plot(x, melt[ind] / 1000.0, 'g', label='melt') + plt.xlabel('X-position (km)') + plt.ylabel('rate (m/s)') + plt.legend() + plt.grid(True) + + # SS N=f(h) + fig.add_subplot(nplt, 1, 2) + plt.plot(x, N[ind] / 1.0e6, '.-', label='modeled transient to SS') + # steady state N=f(h) from the cavity evolution eqn + N = (opening[ind] / (0.04 * 3.1709792e-24 * h[ind]))**0.3333333 / 1.0e6 + plt.plot(x, N, '.--r', label='SS N=f(h)') + plt.xlabel('X-position (km)') + plt.ylabel('effective pressure (MPa)') + plt.grid(True) + plt.legend() + + fig.add_subplot(nplt, 1, 3) + plt.plot(x, u[ind]) + plt.ylabel('water velocity (m/s)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 4) + plt.plot(x, u[ind] * h[ind]) + plt.ylabel('water flux (m2/s)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 5) + plt.plot(x, div[ind]) + plt.plot(x, melt[ind] / 1000.0, 'g', label='melt') + plt.ylabel('divergence (m/s)') + plt.grid(True) + + if save_images: + plt.savefig('hydro_radial_opening_closing.png', dpi=150) + + # plot some edge quantities + inde = np.nonzero(yEdge[:] == centerY) + xe = xEdge[inde] / 1000.0 + ve = f.variables['waterVelocity'][time_slice, :] + dphie = f.variables['hydropotentialBaseSlopeNormal'][time_slice, :] + he = f.variables['waterThicknessEdgeUpwind'][time_slice, :] + fluxe = f.variables['waterFluxAdvec'][time_slice, :] + + fig = plt.figure(5, 
facecolor='w') + nplt = 5 + + ax1 = fig.add_subplot(nplt, 1, 1) + plt.plot(xe, dphie[inde], '.') + plt.ylabel('dphidx edge)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 2, sharex=ax1) + plt.plot(x, P[ind], 'x') + plt.ylabel('dphidx edge)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 3, sharex=ax1) + plt.plot(xe, ve[inde], '.') + plt.ylabel('vel edge)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 4, sharex=ax1) + plt.plot(xe, he[inde], '.') + plt.plot(x, h[ind], 'x') + plt.ylabel('h edge)') + plt.grid(True) + + fig.add_subplot(nplt, 1, 5, sharex=ax1) + plt.plot(xe, fluxe[inde], '.') + plt.ylabel('flux edge)') + plt.grid(True) + + # ========== + # Make plot similar to Bueler and van Pelt Fig. 5 + + # get thickness/pressure at time 0 - this should be the nearly-exact + # solution interpolated onto the MPAS mesh + h0 = f.variables['waterThickness'][0, :] + P0 = f.variables['waterPressure'][0, :] + # assuming sliding has been zeroed where there is no ice, so we don't need + # to get the thickness field + hasice = sliding > 0.0 + + Werr = np.absolute(h - h0) + Perr = np.absolute(P - P0) + dcEdge = f.variables['dcEdge'][:] + # ideally should restrict this to edges with ice + dx = dcEdge.mean() + + if save_images: + plt.savefig('hydro_radial_edge.png', dpi=150) + + fig = plt.figure(6, facecolor='w') + + ax = fig.add_subplot(2, 1, 1) + plt.plot(dx, Werr[hasice].mean(), 's', label='avg W err') + plt.plot(dx, Werr[hasice].max(), 'x', label='max W err') + ax.set_yscale('log') + plt.grid(True) + plt.legend() + plt.xlabel('delta x (m)') + plt.ylabel('error in W (m)') + logger.info("avg W err={}".format(Werr[hasice].mean())) + logger.info("max W err={}".format(Werr[hasice].max())) + + ax = fig.add_subplot(2, 1, 2) + plt.plot(dx, Perr[hasice].mean() / 1.0e5, 's', label='avg P err') + plt.plot(dx, Perr[hasice].max() / 1.0e5, 'x', label='max P err') + ax.set_yscale('log') + plt.grid(True) + plt.legend() + plt.xlabel('delta x (m)') + plt.ylabel('error in P (bar)') + 
logger.info("avg P err={}".format(Perr[hasice].mean() / 1.0e5)) + logger.info("max P err={}".format(Perr[hasice].max() / 1.0e5)) + + logger.info("plotting complete") + + plt.draw() + if save_images: + plt.savefig('hydro_radial_error.png', dpi=150) + + if hide_figs: + logger.info("Plot display disabled with hide_plot config option.") + else: + plt.show() + + f.close() diff --git a/compass/list.py b/compass/list.py new file mode 100755 index 0000000000..de3b313713 --- /dev/null +++ b/compass/list.py @@ -0,0 +1,126 @@ +import argparse +import re +import sys +import os +from importlib.resources import contents + +from compass.mpas_cores import get_mpas_cores + + +def list_cases(test_expr=None, number=None, verbose=False): + """ + List the available test cases + + Parameters + ---------- + test_expr : str, optional + A regular expression for a test path name to search for + + number : int, optional + The number of the test to list + + verbose : bool, optional + Whether to print details of each test or just the subdirectories + """ + mpas_cores = get_mpas_cores() + + if number is None: + print('Testcases:') + + test_cases = [] + for mpas_core in mpas_cores: + for test_group in mpas_core.test_groups.values(): + for test_case in test_group.test_cases.values(): + test_cases.append(test_case) + + for test_number, test_case in enumerate(test_cases): + print_number = False + print_test = False + if number is not None: + if number == test_number: + print_test = True + elif test_expr is None or re.match(test_expr, test_case.path): + print_test = True + print_number = True + + if print_test: + number_string = '{:d}: '.format(test_number).rjust(6) + if print_number: + prefix = number_string + else: + prefix = '' + if verbose: + lines = list() + to_print = {'path': test_case.path, + 'name': test_case.name, + 'MPAS core': test_case.mpas_core.name, + 'test group': test_case.test_group.name, + 'subdir': test_case.subdir} + for key in to_print: + key_string = '{}: 
'.format(key).ljust(15) + lines.append('{}{}{}'.format(prefix, key_string, + to_print[key])) + if print_number: + prefix = ' ' + lines.append('{}steps:'.format(prefix)) + for step in test_case.steps.values(): + if step.name == step.subdir: + lines.append('{} - {}'.format(prefix, step.name)) + else: + lines.append('{} - {}: {}'.format(prefix, step.name, + step.subdir)) + lines.append('') + print_string = '\n'.join(lines) + else: + print_string = '{}{}'.format(prefix, test_case.path) + + print(print_string) + + +def list_machines(): + machine_configs = contents('compass.machines') + print('Machines:') + for config in machine_configs: + if config.endswith('.cfg'): + print(' {}'.format(os.path.splitext(config)[0])) + + +def list_suites(cores=None): + if cores is None: + cores = [mpas_core.name for mpas_core in get_mpas_cores()] + print('Suites:') + for core in cores: + try: + suites = contents('compass.{}.suites'.format(core)) + except FileNotFoundError: + continue + for suite in suites: + if suite.endswith('.txt'): + print(' -c {} -t {}'.format(core, os.path.splitext(suite)[0])) + + +def main(): + parser = argparse.ArgumentParser( + description='List the available test cases or machines', + prog='compass list') + parser.add_argument("-t", "--test_expr", dest="test_expr", + help="A regular expression for a test path name to " + "search for", + metavar="TEST") + parser.add_argument("-n", "--number", dest="number", type=int, + help="The number of the test to list") + parser.add_argument("--machines", dest="machines", action="store_true", + help="List supported machines (instead of test cases)") + parser.add_argument("--suites", dest="suites", action="store_true", + help="List test suites (instead of test cases)") + parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", + help="List details of each test case, not just the " + "path") + args = parser.parse_args(sys.argv[2:]) + if args.machines: + list_machines() + elif args.suites: + list_suites() + 
else: + list_cases(test_expr=args.test_expr, number=args.number, + verbose=args.verbose) diff --git a/compass/machines/__init__.py b/compass/machines/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/machines/anvil.cfg b/compass/machines/anvil.cfg new file mode 100644 index 0000000000..ae08f518c5 --- /dev/null +++ b/compass/machines/anvil.cfg @@ -0,0 +1,32 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. +[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /lcrc/group/e3sm/public_html/mpas_standalonedata/mpas-ocean + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /lcrc/group/e3sm/public_html/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /lcrc/soft/climate/e3sm-unified/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 36 + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/badger.cfg b/compass/machines/badger.cfg new file mode 100644 index 0000000000..22b3246043 --- /dev/null +++ b/compass/machines/badger.cfg @@ -0,0 +1,35 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/ + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /usr/projects/regionalclimate/COMMON_MPAS/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 36 + +# the slurm account +account = e3sm + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/chrysalis.cfg b/compass/machines/chrysalis.cfg new file mode 100644 index 0000000000..70136ef3ad --- /dev/null +++ b/compass/machines/chrysalis.cfg @@ -0,0 +1,32 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /lcrc/group/e3sm/public_html/mpas_standalonedata/mpas-ocean + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /lcrc/group/e3sm/public_html/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /lcrc/soft/climate/e3sm-unified/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 64 + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/compy.cfg b/compass/machines/compy.cfg new file mode 100644 index 0000000000..19667f46c5 --- /dev/null +++ b/compass/machines/compy.cfg @@ -0,0 +1,32 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /compyfs/mpas_standalonedata/mpas-ocean + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /compyfs/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /share/apps/E3SM/conda_envs/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 36 + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/cori-haswell.cfg b/compass/machines/cori-haswell.cfg new file mode 100644 index 0000000000..20c7c525f0 --- /dev/null +++ b/compass/machines/cori-haswell.cfg @@ -0,0 +1,32 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-ocean + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /global/cfs/cdirs/e3sm/software/anaconda_envs/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 32 + +# the number of multiprocessing or dask threads to use +threads = 16 diff --git a/compass/machines/cori-knl.cfg b/compass/machines/cori-knl.cfg new file mode 100644 index 0000000000..29b9212b32 --- /dev/null +++ b/compass/machines/cori-knl.cfg @@ -0,0 +1,32 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-ocean + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /global/cfs/cdirs/e3sm/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /global/cfs/cdirs/e3sm/software/anaconda_envs/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 68 + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/default.cfg b/compass/machines/default.cfg new file mode 100644 index 0000000000..4c242c3471 --- /dev/null +++ b/compass/machines/default.cfg @@ -0,0 +1,15 @@ + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = single_node + +# whether to use mpirun or srun to run the model +parallel_executable = mpirun + +# cores per node on the machine +cores_per_node = 4 + +# the number of multiprocessing or dask threads to use +threads = 4 diff --git a/compass/machines/grizzly.cfg b/compass/machines/grizzly.cfg new file mode 100644 index 0000000000..22b3246043 --- /dev/null +++ b/compass/machines/grizzly.cfg @@ -0,0 +1,35 @@ + +# The paths section describes paths that are used within the ocean core test +# cases. 
+[paths] + +# The root to a location where the mesh_database, initial_condition_database, +# and bathymetry_database for MPAS-Ocean will be cached +ocean_database_root = /usr/projects/regionalclimate/COMMON_MPAS/ocean/grids/ + +# The root to a location where the mesh_database and initial_condition_database +# for MALI will be cached +landice_database_root = /usr/projects/regionalclimate/COMMON_MPAS/mpas_standalonedata/mpas-albany-landice + +# the path to the base conda environment where compass environments have +# been created +compass_envs = /usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base + + +# The parallel section describes options related to running tests in parallel +[parallel] + +# parallel system of execution: slurm or single_node +system = slurm + +# whether to use mpirun or srun to run the model +parallel_executable = srun + +# cores per node on the machine +cores_per_node = 36 + +# the slurm account +account = e3sm + +# the number of multiprocessing or dask threads to use +threads = 18 diff --git a/compass/machines/job_script.slurm.template b/compass/machines/job_script.slurm.template new file mode 100644 index 0000000000..042ff988a0 --- /dev/null +++ b/compass/machines/job_script.slurm.template @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +#SBATCH --nodes={{ job.nodes }} +#SBATCH --time={{ job.time }} +#SBATCH --account={{ machine.account }} +#SBATCH --job-name={{ job.name }} +#SBATCH --output={{ job.name }}.o%j +#SBATCH --error={{ job.name }}.e%j +#SBATCH --qos=interactive + +export OMP_NUM_THREADS=1 + +source {{ machine.compass_envs }}/etc/profile.d/conda.sh +conda activate compass_{{ compass.version }} +export HDF5_USE_FILE_LOCKING=FALSE + +./run.py \ No newline at end of file diff --git a/compass/model.py b/compass/model.py new file mode 100644 index 0000000000..ea6d8c6a3d --- /dev/null +++ b/compass/model.py @@ -0,0 +1,195 @@ +import os +import numpy +import xarray + +from mpas_tools.logging import check_call + +from compass.namelist import 
update + + +def run_model(step, update_pio=True, partition_graph=True, + graph_file='graph.info', namelist=None, streams=None): + """ + Run the model after determining the number of cores + + Parameters + ---------- + step : compass.Step + a step + + update_pio : bool, optional + Whether to modify the namelist so the number of PIO tasks and the + stride between them is consistent with the number of nodes and cores + (one PIO task per node). + + partition_graph : bool, optional + Whether to partition the domain for the requested number of cores. If + so, the partitioning executable is taken from the ``partition`` option + of the ``[executables]`` config section. + + graph_file : str, optional + The name of the graph file to partition + + namelist : str, optional + The name of the namelist file, default is ``namelist.`` + + streams : str, optional + The name of the streams file, default is ``streams.`` + """ + mpas_core = step.mpas_core.name + cores = step.cores + threads = step.threads + step_dir = step.work_dir + config = step.config + logger = step.logger + + if namelist is None: + namelist = 'namelist.{}'.format(mpas_core) + + if streams is None: + streams = 'streams.{}'.format(mpas_core) + + if update_pio: + update_namelist_pio(namelist, config, cores, step_dir) + + if partition_graph: + partition(cores, config, logger, graph_file=graph_file) + + os.environ['OMP_NUM_THREADS'] = '{}'.format(threads) + + parallel_executable = config.get('parallel', 'parallel_executable') + model = config.get('executables', 'model') + model_basename = os.path.basename(model) + + args = [parallel_executable, + '-n', '{}'.format(cores), + './{}'.format(model_basename), + '-n', namelist, + '-s', streams] + + check_call(args, logger) + + +def partition(cores, config, logger, graph_file='graph.info'): + """ + Partition the domain for the requested number of cores + + Parameters + ---------- + cores : int + The number of cores that the model should be run on + + config : 
configparser.ConfigParser + Configuration options for the test case, used to get the partitioning + executable + + logger : logging.Logger + A logger for output from the step that is calling this function + + graph_file : str, optional + The name of the graph file to partition + + """ + if cores > 1: + executable = config.get('parallel', 'partition_executable') + args = [executable, graph_file, '{}'.format(cores)] + check_call(args, logger) + + +def update_namelist_pio(namelist, config, cores, step_dir): + """ + Modify the namelist so the number of PIO tasks and the stride between them + is consistent with the number of nodes and cores (one PIO task per node). + + Parameters + ---------- + namelist : str + The name of the namelist file + + config : configparser.ConfigParser + Configuration options for this test case + + cores : int + The number of cores + + step_dir : str + The work directory for this step of the test case + """ + + cores_per_node = config.getint('parallel', 'cores_per_node') + + # update PIO tasks based on the machine settings and the available number + # or cores + pio_num_iotasks = int(numpy.ceil(cores/cores_per_node)) + pio_stride = cores//pio_num_iotasks + if pio_stride > cores_per_node: + raise ValueError('Not enough nodes for the number of cores. 
cores: '
+                         '{}, cores per node: {}'.format(cores,
+                                                         cores_per_node))
+
+    replacements = {'config_pio_num_iotasks': '{}'.format(pio_num_iotasks),
+                    'config_pio_stride': '{}'.format(pio_stride)}
+
+    update(replacements=replacements, step_work_dir=step_dir,
+           out_name=namelist)
+
+
+def make_graph_file(mesh_filename, graph_filename='graph.info',
+                    weight_field=None):
+    """
+    Make a graph file from the MPAS mesh for use in the Metis graph
+    partitioning software
+
+    Parameters
+    ----------
+    mesh_filename : str
+        The name of the input MPAS mesh file
+
+    graph_filename : str, optional
+        The name of the output graph file
+
+    weight_field : str
+        The name of a variable in the MPAS mesh file to use as a field of
+        weights
+    """
+
+    with xarray.open_dataset(mesh_filename) as ds:
+
+        nCells = ds.sizes['nCells']
+
+        nEdgesOnCell = ds.nEdgesOnCell.values
+        cellsOnCell = ds.cellsOnCell.values - 1
+        if weight_field is not None:
+            if weight_field not in ds:
+                raise ValueError('weight_field {} not found in {}'.format(
+                    weight_field, mesh_filename))
+            weights = ds[weight_field].values
+        else:
+            weights = None
+
+    nEdges = 0
+    for i in range(nCells):
+        for j in range(nEdgesOnCell[i]):
+            if cellsOnCell[i][j] != -1:
+                nEdges = nEdges + 1
+
+    nEdges = nEdges//2
+
+    with open(graph_filename, 'w+') as graph:
+        if weights is None:
+            graph.write('{} {}\n'.format(nCells, nEdges))
+
+            for i in range(nCells):
+                for j in range(0, nEdgesOnCell[i]):
+                    if cellsOnCell[i][j] >= 0:
+                        graph.write('{} '.format(cellsOnCell[i][j]+1))
+                graph.write('\n')
+        else:
+            graph.write('{} {} 010\n'.format(nCells, nEdges))
+
+            for i in range(nCells):
+                graph.write('{} '.format(int(weights[i])))
+                for j in range(0, nEdgesOnCell[i]):
+                    if cellsOnCell[i][j] >= 0:
+                        graph.write('{} '.format(cellsOnCell[i][j] + 1))
+                graph.write('\n')
diff --git a/compass/mpas_core.py b/compass/mpas_core.py
new file mode 100644
index 0000000000..2f82cd2ae4
--- /dev/null
+++ b/compass/mpas_core.py
@@ -0,0 +1,38 @@
+class MpasCore:
+    """
+    
The base class for housing all the tests for a given MPAS core, such as + ocean, landice or sw (shallow water) + + Attributes + ---------- + name : str + the name of the MPAS core + + test_groups : dict + A dictionary of test groups for the MPAS core with their names as keys + """ + + def __init__(self, name): + """ + Create a new container for the test groups for a given MPAS core + + Parameters + ---------- + name : str + the name of the MPAS core + """ + self.name = name + + # test groups are added with add_test_groups() + self.test_groups = dict() + + def add_test_group(self, test_group): + """ + Add a test group to the MPAS core + + Parameters + ---------- + test_group : compass.TestGroup + the test group to add + """ + self.test_groups[test_group.name] = test_group diff --git a/compass/mpas_cores.py b/compass/mpas_cores.py new file mode 100644 index 0000000000..f8b8f5632c --- /dev/null +++ b/compass/mpas_cores.py @@ -0,0 +1,17 @@ +# import new MPAS cores here +from compass.landice import Landice +from compass.ocean import Ocean + + +def get_mpas_cores(): + """ + Get a list of all collections of tests for MPAS cores + + Returns + ------- + mpas_cores : list of compass.MpasCore + A list of MPAS cores containing all available tests + """ + # add new MPAS cores here + mpas_cores = [Landice(), Ocean()] + return mpas_cores diff --git a/compass/namelist.py b/compass/namelist.py new file mode 100644 index 0000000000..de9db7cb9d --- /dev/null +++ b/compass/namelist.py @@ -0,0 +1,101 @@ +from importlib import resources + + +def update(replacements, step_work_dir, out_name): + """ + Update an existing namelist file with additional ``replacements``. This + would typically be used for namelist options that are only known at + runtime, not during setup. For example, the number of PIO tasks and the + stride between tasks, which are related to the number of nodes and cores. 
+ + Parameters + ---------- + replacements : dict + A dictionary of options and value to replace namelist options with new + values + + step_work_dir : str + The path for the work directory for the step that this namelist is + being generated for + + out_name : str + The name of the namelist file (without a path) + """ + + filename = '{}/{}'.format(step_work_dir, out_name) + + namelist = ingest(filename) + + namelist = replace(namelist, replacements) + + write(namelist, filename) + + +def parse_replacements(package, namelist): + """ + Parse the replacement namelist options from the given file + + Parameters + ---------- + package : Package + The package name or module object that contains ``namelist`` + + namelist : str + The name of the namelist replacements file to read from + + Returns + ------- + replacements : dict + A dictionary of replacement namelist options + """ + + lines = resources.read_text(package, namelist).split('\n') + replacements = dict() + for line in lines: + if '=' in line: + opt, val = line.split('=') + replacements[opt.strip()] = val.strip() + + return replacements + + +def ingest(defaults_filename): + """ Read the defaults file """ + with open(defaults_filename, 'r') as f: + lines = f.readlines() + + namelist = dict() + record = None + for line in lines: + if '&' in line: + record = line.strip('&').strip('\n').strip() + namelist[record] = dict() + elif '=' in line: + if record is not None: + opt, val = line.strip('\n').split('=') + namelist[record][opt.strip()] = val.strip() + + return namelist + + +def replace(namelist, replacements): + """ Replace entries in the namelist using the replacements dict """ + new = dict(namelist) + for record in new: + for key in replacements: + if key in new[record]: + new[record][key] = replacements[key] + + return new + + +def write(namelist, filename): + """ Write the namelist out """ + + with open(filename, 'w') as f: + for record in namelist: + f.write('&{}\n'.format(record)) + rec = namelist[record] + 
for key in rec: + f.write(' {} = {}\n'.format(key.strip(), rec[key].strip())) + f.write('/\n') diff --git a/compass/ocean/__init__.py b/compass/ocean/__init__.py new file mode 100644 index 0000000000..da250e5edc --- /dev/null +++ b/compass/ocean/__init__.py @@ -0,0 +1,22 @@ +from compass.mpas_core import MpasCore +from compass.ocean.tests.baroclinic_channel import BaroclinicChannel +from compass.ocean.tests.global_ocean import GlobalOcean +from compass.ocean.tests.ice_shelf_2d import IceShelf2d +from compass.ocean.tests.ziso import Ziso + + +class Ocean(MpasCore): + """ + The collection of all test case for the MPAS-Ocean core + """ + + def __init__(self): + """ + Construct the collection of MPAS-Ocean test cases + """ + super().__init__(name='ocean') + + self.add_test_group(BaroclinicChannel(mpas_core=self)) + self.add_test_group(GlobalOcean(mpas_core=self)) + self.add_test_group(IceShelf2d(mpas_core=self)) + self.add_test_group(Ziso(mpas_core=self)) diff --git a/compass/ocean/iceshelf.py b/compass/ocean/iceshelf.py new file mode 100644 index 0000000000..42d5030291 --- /dev/null +++ b/compass/ocean/iceshelf.py @@ -0,0 +1,166 @@ +import numpy +from netCDF4 import Dataset +import shutil + +from mpas_tools.cime.constants import constants +from compass.io import symlink +from compass.model import update_namelist_pio, partition, run_model + + +def compute_land_ice_pressure_and_draft(ssh, modify_mask, ref_density): + """ + Compute the pressure from and overlying ice shelf and the ice-shelf draft + + Parameters + ---------- + ssh : xarray.DataArray + The sea surface height (the ice draft) + + modify_mask : xarray.DataArray + A mask that is 1 where ``landIcePressure`` can be deviate from 0 + + ref_density : float + A reference density for seawater displaced by the ice shelf + + Returns + ------- + landIcePressure : xarray.DataArray + The pressure from the overlying land ice on the ocean + + landIceDraft : xarray.DataArray + The ice draft, equal to the initial ``ssh`` + 
""" + gravity = constants['SHR_CONST_G'] + landIcePressure = \ + modify_mask*numpy.maximum(-ref_density * gravity * ssh, 0.) + landIceDraft = ssh + return landIcePressure, landIceDraft + + +def adjust_ssh(variable, iteration_count, step): + """ + Adjust the sea surface height or land-ice pressure to be dynamically + consistent with one another. A series of short model runs are performed, + each with + + Parameters + ---------- + variable : {'ssh', 'landIcePressure'} + The variable to adjust + + iteration_count : int + The number of iterations of adjustment + + step : compass.Step + the step for performing SSH or land-ice pressure adjustment + """ + cores = step.cores + step_dir = step.work_dir + config = step.config + logger = step.logger + + if variable not in ['ssh', 'landIcePressure']: + raise ValueError("Unknown variable to modify: {}".format(variable)) + + update_namelist_pio('namelist.ocean', config, cores, step_dir) + partition(cores, config, logger) + + for iterIndex in range(iteration_count): + logger.info(" * Iteration {}/{}".format(iterIndex + 1, + iteration_count)) + + symlink('adjusting_init{}.nc'.format(iterIndex), 'adjusting_init.nc') + + logger.info(" * Running forward model") + run_model(step, update_pio=False, partition_graph=False) + logger.info(" - Complete") + + logger.info(" * Updating SSH or land-ice pressure") + + # copy the init file first + shutil.copy('adjusting_init{}.nc'.format(iterIndex), + 'adjusting_init{}.nc'.format(iterIndex+1)) + + symlink('adjusting_init{}.nc'.format(iterIndex+1), + 'adjusting_init.nc') + + with Dataset('adjusting_init.nc', 'r+') as ds: + + on_a_sphere = ds.on_a_sphere.lower() == 'yes' + + nVertLevels = len(ds.dimensions['nVertLevels']) + initSSH = ds.variables['ssh'][0, :] + bottomDepth = ds.variables['bottomDepth'][:] + modifyLandIcePressureMask = ds.variables['modifyLandIcePressureMask'][0, :] + landIcePressure = ds.variables['landIcePressure'][0, :] + lonCell = ds.variables['lonCell'][:] + latCell = 
ds.variables['latCell'][:] + xCell = ds.variables['xCell'][:] + yCell = ds.variables['yCell'][:] + maxLevelCell = ds.variables['maxLevelCell'][:] + + with Dataset('output_ssh.nc', 'r') as ds_ssh: + nTime = len(ds_ssh.dimensions['Time']) + finalSSH = ds_ssh.variables['ssh'][nTime - 1, :] + topDensity = ds_ssh.variables['density'][nTime - 1, :, 0] + + mask = numpy.logical_and(maxLevelCell > 0, + modifyLandIcePressureMask == 1) + + deltaSSH = mask * (finalSSH - initSSH) + + # then, modify the SSH or land-ice pressure + if variable == 'ssh': + ds.variables['ssh'][0, :] = finalSSH + # also update the landIceDraft variable, which will be used to + # compensate for the SSH due to land-ice pressure when + # computing sea-surface tilt + ds.variables['landIceDraft'][0, :] = finalSSH + # we also need to stretch layerThickness to be compatible with + # the new SSH + stretch = (finalSSH + bottomDepth) / (initSSH + bottomDepth) + layerThickness = ds.variables['layerThickness'] + for k in range(nVertLevels): + layerThickness[0, :, k] *= stretch + else: + # Moving the SSH up or down by deltaSSH would change the + # land-ice pressure by density(SSH)*g*deltaSSH. If deltaSSH is + # positive (moving up), it means the land-ice pressure is too + # small and if deltaSSH is negative (moving down), it means + # land-ice pressure is too large, the sign of the second term + # makes sense. 
+ gravity = constants['SHR_CONST_G'] + deltaLandIcePressure = topDensity * gravity * deltaSSH + + landIcePressure = numpy.maximum( + 0.0, landIcePressure + deltaLandIcePressure) + + ds.variables['landIcePressure'][0, :] = landIcePressure + + finalSSH = initSSH + + # Write the largest change in SSH and its lon/lat to a file + with open('maxDeltaSSH_{:03d}.log'.format(iterIndex), 'w') as log_file: + + indices = numpy.nonzero(landIcePressure)[0] + index = numpy.argmax(numpy.abs(deltaSSH[indices])) + iCell = indices[index] + if on_a_sphere: + coords = 'lon/lat: {:f} {:f}'.format( + numpy.rad2deg(lonCell[iCell]), + numpy.rad2deg(latCell[iCell])) + else: + coords = 'x/y: {:f} {:f}'.format(1e-3 * xCell[iCell], + 1e-3 * yCell[iCell]) + string = 'deltaSSHMax: {:g}, {}'.format( + deltaSSH[iCell], coords) + logger.info(' {}'.format(string)) + log_file.write('{}\n'.format(string)) + string = 'ssh: {:g}, landIcePressure: {:g}'.format( + finalSSH[iCell], landIcePressure[iCell]) + logger.info(' {}'.format(string)) + log_file.write('{}\n'.format(string)) + + logger.info(" - Complete\n") + + shutil.copy('adjusting_init.nc', 'adjusted_init.nc') diff --git a/compass/ocean/namelists/__init__.py b/compass/ocean/namelists/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/ocean/namelists/namelist.ssh_adjust b/compass/ocean/namelists/namelist.ssh_adjust new file mode 100644 index 0000000000..a01d9763f1 --- /dev/null +++ b/compass/ocean/namelists/namelist.ssh_adjust @@ -0,0 +1,2 @@ +config_run_duration = '0000_01:00:00' +config_land_ice_flux_mode = 'pressure_only' diff --git a/compass/ocean/ocean.cfg b/compass/ocean/ocean.cfg new file mode 100644 index 0000000000..d1b47295b3 --- /dev/null +++ b/compass/ocean/ocean.cfg @@ -0,0 +1,49 @@ +# This config file has default config options for the ocean core + +# The paths section points compass to external paths +[paths] + +# the relative or absolute path to the root of a branch where MPAS-Ocean +# has been built 
+mpas_model = MPAS-Model/ocean/develop + +# The namelists section defines paths to example_compact namelists that will be used +# to generate specific namelists. By default, these point to the forward and +# init namelists in the default_inputs directory after a successful build of +# the ocean model. Change these in a custom config file if you need a different +# example_compact. +[namelists] +forward = ${paths:mpas_model}/default_inputs/namelist.ocean.forward +init = ${paths:mpas_model}/default_inputs/namelist.ocean.init + +# The streams section defines paths to example_compact streams files that will be used +# to generate specific streams files. By default, these point to the forward and +# init streams files in the default_inputs directory after a successful build of +# the ocean model. Change these in a custom config file if you need a different +# example_compact. +[streams] +forward = ${paths:mpas_model}/default_inputs/streams.ocean.forward +init = ${paths:mpas_model}/default_inputs/streams.ocean.init + + +# The executables section defines paths to required executables. These +# executables are provided for use by specific test cases. Most tools that +# compass needs should be in the conda environment, so this is only the path +# to the MPAS-Ocean executable by default. 
+[executables]
+model = ${paths:mpas_model}/ocean_model
+
+
+# Options related to downloading files
+[download]
+
+# the path on the server for MPAS-Ocean
+core_path = mpas-ocean
+
+
+# Options related to adjusting the sea-surface height or land-ice pressure
+# below ice shelves so they are dynamically consistent with one another
+[ssh_adjustment]
+
+# the number of iterations of ssh adjustment to perform
+iterations = 10
diff --git a/compass/ocean/particles.py b/compass/ocean/particles.py
new file mode 100755
index 0000000000..671f5360f7
--- /dev/null
+++ b/compass/ocean/particles.py
@@ -0,0 +1,695 @@
+import netCDF4
+import numpy as np
+from pyamg.classical import interpolate as amginterp
+from pyamg.classical import split
+from scipy import sparse, spatial
+
+
+VERTICAL_TREATMENTS = {"indexLevel": 1,
+                       "fixedZLevel": 2,
+                       "passiveFloat": 3,
+                       "buoyancySurface": 4,
+                       "argoFloat": 5}
+DEFAULTS = {"dt": 300, "resettime": 1.0 * 24.0 * 60.0 * 60.0}
+TYPELIST = ["buoyancy", "passive", "surface", "all"]
+VERTSEEDTYPE = ["linear", "denseCenter", "log"]
+SPATIAL_FILTER = ["SouthernOceanPlanar", "SouthernOceanXYZ"]
+
+
+def write(init_filename, graph_filename, particle_filename, types='all',
+          n_vert_levels=10, vert_seed_type='linear', n_buoy_surf=11,
+          pot_dens_min=1028.5, pot_dens_max=1030.0, spatial_filter=None,
+          downsample=0, seed_center=True, seed_vertex=False,
+          add_noise=False, cfl_min=0.005):
+    """
+    Write an initial condition for particles partitioned across cores
+
+    Parameters
+    ----------
+    init_filename : str
+        path of netCDF init/mesh file
+
+    graph_filename : str
+        path of graph partition file of form */*.info.part
+
+    particle_filename : str
+        path of output netCDF particle file
+
+    types : {"buoyancy", "passive", "surface", "all"}, optional
+        types of particles
+
+    n_vert_levels : int, optional
+        number of vertical levels for passive, 3D floats
+
+    vert_seed_type : {"linear", "denseCenter", "log"}, optional
+        method for seeding in the vertical
+
+    
n_buoy_surf : int, optional + number of buoyancy surfaces for isopycnally-constrained particles + + pot_dens_min : float, optional + minimum value of potential density surface for isopycnally-constrained + particles + + pot_dens_max : float, optional + maximum value of potential density surface for isopycnally-constrained + particles + + spatial_filter : {"SouthernOceanPlanar", "SouthernOceanXYZ"}, optional + apply a certain type of spatial filter + + downsample : int, optional + downsample particle positions using AMG a number of times + + seed_center : bool, optional + seed particles on cell centers + + seed_vertex : bool, optional + seed three particles by a fixed epsilon off each cell vertex + + add_noise : bool, optional + add gaussian noise to generate three particles around the cell center + + cfl_min : float, optional + minimum assumed CFL, which is used in perturbing particles if + ``seed_vertex=True`` or ``add_noise=True`` + """ + + buoy_surf = np.linspace(pot_dens_min, pot_dens_max, n_buoy_surf) + cpts, xCell, yCell, zCell = _particle_coords( + init_filename, downsample, seed_center, seed_vertex, add_noise, + cfl_min) + + # build particles + particlelist = [] + if "buoyancy" in types or "all" in types: + particlelist.append(_build_isopycnal_particles( + cpts, xCell, yCell, zCell, buoy_surf, spatial_filter)) + if "passive" in types or "all" in types: + particlelist.append(_build_passive_floats( + cpts, xCell, yCell, zCell, init_filename, n_vert_levels, + spatial_filter, vert_seed_type)) + # apply surface particles everywhere to ensure that LIGHT works + # (allow for some load-imbalance for filters) + if "surface" in types or "all" in types: + particlelist.append(_build_surface_floats( + cpts, xCell, yCell, zCell, spatial_filter)) + + # write particles to disk + ParticleList(particlelist).write(particle_filename, graph_filename) + + +def remap_particles(init_filename, particle_filename, graph_filename): + """ + Remap particles onto a new grid 
decomposition. + + Load in particle positions, locations of grid cell centers, and + decomposition corresponding to ``init_filename``. + + The goal is to update particle field ``currentBlock`` to comply with the + new grid as defined by ``init_filename`` and ``particle_filename``. + NOTE: ``init_filename`` and ``graph_filename`` must be compatible! + + We assume that all particles will be within the domain such that a nearest + neighbor search is sufficient to make the remap. + + Parameters + ---------- + init_filename : str + path of netCDF init/mesh file + + graph_filename : str + path of graph partition file of form */*.info.part + + particle_filename : str + path of input/output netCDF particle file + """ + # load the files + with netCDF4.Dataset(init_filename, "r") as f_in, \ + netCDF4.Dataset(particle_filename, "r+") as f_part: + + # get the particle data + xpart = f_part.variables["xParticle"] + ypart = f_part.variables["yParticle"] + zpart = f_part.variables["zParticle"] + currentBlock = f_part.variables["currentBlock"] + try: + currentCell = f_part.variables["currentCell"] + currentCellGlobalID = f_part.variables["currentCellGlobalID"] + except KeyError: + currentCell = f_part.createVariable("currentCell", "i", + ("nParticles",)) + currentCellGlobalID = f_part.createVariable( + "currentCellGlobalID", "i", ("nParticles",)) + + # get the cell positions + xcell = f_in.variables["xCell"] + ycell = f_in.variables["yCell"] + zcell = f_in.variables["zCell"] + + # build the spatial tree + tree = spatial.cKDTree(np.vstack((xcell, ycell, zcell)).T) + + # get nearest cell for each particle + dvEdge = f_in.variables["dvEdge"] + maxdist = 2.0 * max(dvEdge[:]) + _, cellIndices = tree.query( + np.vstack((xpart, ypart, zpart)).T, distance_upper_bound=maxdist, + k=1) + + # load the decomposition (apply to latest time step) + decomp = np.genfromtxt(graph_filename) + currentBlock[-1, :] = decomp[cellIndices] + currentCell[-1, :] = -1 + currentCellGlobalID[-1, :] = 
cellIndices + 1
+
+
+def _use_defaults(name, val):
+    if (val is not None) and (val is not np.nan):
+        return val
+    else:
+        return DEFAULTS[name]
+
+
+def _ensure_shape(start, new):
+    if isinstance(new, (int, float)):
+        new *= np.ones_like(start)
+    return new
+
+
+def _southern_ocean_only_xyz(x, y, z, maxNorth=-45.0):
+    sq = np.sqrt(x ** 2 + y ** 2 + z ** 2)
+    lat = np.arcsin(z / sq)
+    ok = np.pi / 180.0 * maxNorth
+    ids = lat < ok
+    return ids
+
+
+def _southern_ocean_only_planar(x, y, z, maxy=1000.0 * 1e3):
+    ids = y < maxy
+    return ids
+
+
+def _downsample_points(x, y, z, tri, nsplit):
+    """
+    Downsample points using algebraic multigrid splitting.
+
+    Note, currently assumes that all points on grid are equidistant, which does
+    a numeric (not area-weighted) downsampling.
+
+    Phillip Wolfram
+    LANL
+    Origin: 03/09/2015, Updated: 01/14/2019
+    """
+    # reference on cleanest way to do this calculation:
+    # https://www.mathworks.com/matlabcentral/answers/
+    # 369143-how-to-do-delaunay-triangulation-and-return-an-adjacency-matrix
+
+    # allocate the memory
+    Np = x.shape[0]
+    A = sparse.lil_matrix((Np, Np))
+
+    # cleanup impartial cells (don't include the triangles on boundary)
+    tri = tri[np.logical_not(np.any(tri == -1, axis=1)), :]
+
+    # handle one direction for triangles
+    A[tri[:, 0], tri[:, 1]] = 1
+    A[tri[:, 1], tri[:, 2]] = 1
+    A[tri[:, 2], tri[:, 0]] = 1
+
+    # handle other direction (bi-directional graph)
+    A[tri[:, 1], tri[:, 0]] = 1
+    A[tri[:, 2], tri[:, 1]] = 1
+    A[tri[:, 0], tri[:, 2]] = 1
+
+    A = A.tocsr()
+
+    Cpts = np.arange(Np)
+    # Grab root-nodes (i.e., Coarse / Fine splitting)
+    for ii in np.arange(nsplit):
+        splitting = split.PMIS(A)
+        # convert to index for subsetting particles
+        Cpts = Cpts[np.asarray(splitting, dtype=bool)]
+
+        if ii < nsplit - 1:
+            P = amginterp.direct_interpolation(A, A, splitting)
+            R = P.T.tocsr()
+            A = R * A * P
+
+    return Cpts, x[Cpts], y[Cpts], z[Cpts]
+
+
+class Particles:
+    def __init__(
+        self,
+        x,
+        y,
+        z,
+        cellindices,
+ verticaltreatment, + dt=np.nan, + zlevel=np.nan, + indexlevel=np.nan, + buoypart=np.nan, + buoysurf=None, + spatialfilter=None, + resettime=np.nan, + xreset=np.nan, + yreset=np.nan, + zreset=np.nan, + zlevelreset=np.nan, + ): + + # start with all the indices and restrict + ids = np.ones((len(x)), dtype=bool) + if type(spatialfilter) is str: + spatialfilter = [spatialfilter] + if spatialfilter: + if np.max(["SouthernOceanXYZ" == afilter for afilter in + spatialfilter]): + ids = np.logical_and(ids, _southern_ocean_only_xyz(x, y, z)) + if np.max(["SouthernOceanPlanar" == afilter for afilter in + spatialfilter]): + ids = np.logical_and(ids, _southern_ocean_only_planar(x, y, z)) + + self.x = x[ids] + self.y = y[ids] + self.z = z[ids] + self.verticaltreatment = _ensure_shape( + self.x, VERTICAL_TREATMENTS[verticaltreatment]) + self.nparticles = len(self.x) + + self.dt = dt + + # 3D passive floats + self.zlevel = _ensure_shape(x, zlevel)[ids] + + # isopycnal floats + if buoysurf is not None: + self.buoysurf = buoysurf + self.buoypart = _ensure_shape(x, buoypart)[ids] + self.cellindices = cellindices[ids] + self.cellGlobalID = cellindices[ids] + + # index level following floats + self.indexlevel = _ensure_shape(x, indexlevel)[ids] + + # reset features + self.resettime = _ensure_shape(x, resettime)[ids] + self.xreset = _ensure_shape(x, xreset)[ids] + self.yreset = _ensure_shape(x, yreset)[ids] + self.zreset = _ensure_shape(x, zreset)[ids] + self.zlevelreset = _ensure_shape(x, zlevelreset)[ids] + + def compute_lat_lon(self): + """ + Ripped out whole-sale from latlon_coordinate_transforms.py + PJW 01/15/2019 + """ + + x = self.x + y = self.y + z = self.z + + self.latParticle = np.arcsin(z / np.sqrt(x ** 2 + y ** 2 + z ** 2)) + self.lonParticle = np.arctan2(y, x) + + +class ParticleList: + def __init__(self, particlelist): + self.particlelist = particlelist + + def aggregate(self): + self.len() + + # buoyancysurf + buoysurf = np.array([]) + for alist in self.particlelist: + 
if "buoysurf" in dir(alist): + buoysurf = np.unique( + np.setdiff1d(np.append(buoysurf, alist.buoysurf), None) + ) + if len(buoysurf) > 0: + self.buoysurf = np.asarray(buoysurf, dtype="f8") + else: + self.buoysurf = None + + def __getattr__(self, name): + # __getattr__ ensures self.x is concatenated properly + return self.concatenate(name) + + def concatenate(self, varname): + var = getattr(self.particlelist[0], varname) + for alist in self.particlelist[1:]: + var = np.append(var, getattr(alist, varname)) + return var + + def append(self, particlelist): + self.particlelist.append(particlelist[:]) + + def len(self): + self.nparticles = 0 + for alist in self.particlelist: + self.nparticles += alist.nparticles + + return self.nparticles + + # probably a cleaner way to have this "fall through" to the particle + # instances themselves, but didn't have time to sort this all out so this + # isn't general for now + def compute_lat_lon(self): + for alist in self.particlelist: + alist.compute_lat_lon() + + def write(self, f_name, f_decomp): + + decomp = np.genfromtxt(f_decomp) + + self.aggregate() + assert ( + max(decomp) < self.nparticles + ), "Number of particles must be larger than decomposition!" 
+ + f_out = netCDF4.Dataset(f_name, "w", format="NETCDF3_64BIT_OFFSET") + + f_out.createDimension("Time") + f_out.createDimension("nParticles", self.nparticles) + + f_out.createVariable("xParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("yParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("zParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("lonParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("latParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("zLevelParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("dtParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("buoyancyParticle", "f8", ("Time", "nParticles")) + f_out.createVariable("currentBlock", "i", ("Time", "nParticles")) + f_out.createVariable("currentCell", "i", ("Time", "nParticles")) + f_out.createVariable("currentCellGlobalID", "i", ("Time", + "nParticles")) + f_out.createVariable("indexToParticleID", "i", ("nParticles",)) + f_out.createVariable("verticalTreatment", "i", ("Time", "nParticles")) + f_out.createVariable("indexLevel", "i", ("Time", "nParticles")) + f_out.createVariable("resetTime", "i", ("nParticles",)) + f_out.createVariable("currentBlockReset", "i", ("nParticles",)) + f_out.createVariable("currentCellReset", "i", ("nParticles",)) + f_out.createVariable("xParticleReset", "f8", ("nParticles",)) + f_out.createVariable("yParticleReset", "f8", ("nParticles",)) + f_out.createVariable("zParticleReset", "f8", ("nParticles",)) + f_out.createVariable("zLevelParticleReset", "f8", ("nParticles",)) + + f_out.variables["xParticle"][0, :] = self.x + f_out.variables["yParticle"][0, :] = self.y + f_out.variables["zParticle"][0, :] = self.z + + self.compute_lat_lon() + f_out.variables["lonParticle"][0, :] = self.lonParticle + f_out.variables["latParticle"][0, :] = self.latParticle + + f_out.variables["verticalTreatment"][0, :] = self.verticaltreatment + + f_out.variables["zLevelParticle"][0, :] = self.zlevel + + if 
self.buoysurf is not None and len(self.buoysurf) > 0: + f_out.createDimension("nBuoyancySurfaces", len(self.buoysurf)) + f_out.createVariable("buoyancySurfaceValues", "f8", + ("nBuoyancySurfaces")) + f_out.variables["buoyancyParticle"][0, :] = self.buoypart + f_out.variables["buoyancySurfaceValues"][:] = self.buoysurf + + f_out.variables["dtParticle"][0, :] = DEFAULTS["dt"] + # assume single-processor mode for now + f_out.variables["currentBlock"][:] = 0 + # reset each day + f_out.variables["resetTime"][:] = DEFAULTS["resettime"] + f_out.variables["indexLevel"][:] = 1 + f_out.variables["indexToParticleID"][:] = np.arange(self.nparticles) + + # resets + f_out.variables["currentBlock"][0, :] = decomp[self.cellindices] + f_out.variables["currentBlockReset"][:] = decomp[self.cellindices] + f_out.variables["currentCell"][0, :] = -1 + f_out.variables["currentCellGlobalID"][0, :] = self.cellGlobalID + 1 + f_out.variables["currentCellReset"][:] = -1 + f_out.variables["xParticleReset"][:] = \ + f_out.variables["xParticle"][0, :] + f_out.variables["yParticleReset"][:] = \ + f_out.variables["yParticle"][0, :] + f_out.variables["zParticleReset"][:] = \ + f_out.variables["zParticle"][0, :] + f_out.variables["zLevelParticleReset"][:] = \ + f_out.variables["zLevelParticle"][0, :] + + f_out.close() + + +def _rescale_for_shell(f_init, x, y, z): + rearth = f_init.sphere_radius + r = np.sqrt(x * x + y * y + z * z) + x *= rearth / r + y *= rearth / r + z *= rearth / r + return x, y, z + + +def _get_particle_coords(f_init, seed_center=True, seed_vertex=False, + add_noise=False, CFLmin=None): + xCell = f_init.variables["xCell"][:] + yCell = f_init.variables["yCell"][:] + zCell = f_init.variables["zCell"][:] + + # Case of only cell-center seeding a single particle. + if seed_center and not add_noise: + cells_center = (xCell, yCell, zCell) + cpts_center = np.arange(len(xCell)) + + # Case of cell-center seeding with 3 particles distributed around the + # center by noise. 
+ elif seed_center and add_noise: + cellsOnCell = f_init.variables["cellsOnCell"][:, :] + + nCells = len(f_init.dimensions["nCells"]) + perturbation = CFLmin * np.ones((nCells,)) + + allx = [] + ally = [] + allz = [] + allcpts = [] + # There are six potential cell neighbors to perturb the particles for. + # This selects three random directions (without replacement) at every + # cell. + cellDirs = np.stack( + [ + np.random.choice(np.arange(6), size=3, replace=False) + for _ in range(nCells) + ] + ) + for ci in np.arange(3): + epsilon = np.abs(np.random.normal(size=nCells)) + epsilon /= epsilon.max() + # Adds gaussian noise at each cell, creating range of + # [CFLMin, 2*CFLMin] + theta = perturbation * epsilon + perturbation + + x = (1.0 - theta) * xCell + theta * xCell[ + cellsOnCell[range(nCells), cellDirs[:, ci]] - 1 + ] + y = (1.0 - theta) * yCell + theta * yCell[ + cellsOnCell[range(nCells), cellDirs[:, ci]] - 1 + ] + z = (1.0 - theta) * zCell + theta * zCell[ + cellsOnCell[range(nCells), cellDirs[:, ci]] - 1 + ] + + x, y, z = _rescale_for_shell(f_init, x, y, z) + + allx.append(x) + ally.append(y) + allz.append(z) + allcpts.append(cellsOnCell[:, ci] - 1) + cells_center = ( + np.concatenate(allx), + np.concatenate(ally), + np.concatenate(allz), + ) + cpts_center = np.concatenate(allcpts) + + # Case of seeding 3 particles by a small epsilon around the vertices. 
+ if seed_vertex: + cellsOnVertex = f_init.variables["cellsOnVertex"][:, :] + xVertex = f_init.variables["xVertex"][:] + yVertex = f_init.variables["yVertex"][:] + zVertex = f_init.variables["zVertex"][:] + + nVertices = len(f_init.dimensions["nVertices"]) + perturbation = CFLmin * np.ones((nVertices,)) + + allx = [] + ally = [] + allz = [] + allcpts = [] + for vi in np.arange(3): + ids = np.where(cellsOnVertex[:, vi] != 0)[0] + theta = perturbation[ids] + + x = (1.0 - theta) * xVertex[ids] + \ + theta * xCell[cellsOnVertex[ids, vi] - 1] + y = (1.0 - theta) * yVertex[ids] + \ + theta * yCell[cellsOnVertex[ids, vi] - 1] + z = (1.0 - theta) * zVertex[ids] + \ + theta * zCell[cellsOnVertex[ids, vi] - 1] + + x, y, z = _rescale_for_shell(f_init, x, y, z) + + allx.append(x) + ally.append(y) + allz.append(z) + allcpts.append(cellsOnVertex[ids, vi] - 1) + cells_vertex = ( + np.concatenate(allx), + np.concatenate(ally), + np.concatenate(allz), + ) + cpts_vertex = np.concatenate(allcpts) + + # Allows for both cell-center and cell-vertex seeding. 
+ if seed_center and not seed_vertex: + cells = cells_center + cpts = cpts_center + elif not seed_center and seed_vertex: + cells = cells_vertex + cpts = cpts_vertex + else: + cpts = np.concatenate((cpts_vertex, cpts_center)) + cells = ( + np.concatenate((cells_vertex[0], cells_center[0])), + np.concatenate((cells_vertex[1], cells_center[1])), + np.concatenate((cells_vertex[2], cells_center[2])), + ) + return cells, cpts + + +def _expand_nlevels(x, n): + return np.tile(x, (n)) + + +def _particle_coords( + f_init, downsample, seed_center, seed_vertex, add_noise, CFLmin +): + + f_init = netCDF4.Dataset(f_init, "r") + cells, cpts = _get_particle_coords( + f_init, seed_center, seed_vertex, add_noise, CFLmin + ) + xCell, yCell, zCell = cells + if downsample: + tri = f_init.variables["cellsOnVertex"][:, :] - 1 + cpts, xCell, yCell, zCell = _downsample_points( + xCell, yCell, zCell, tri, downsample + ) + f_init.close() + + return cpts, xCell, yCell, zCell + + +def _build_isopycnal_particles(cpts, xCell, yCell, zCell, buoysurf, afilter): + + nparticles = len(xCell) + nbuoysurf = buoysurf.shape[0] + + x = _expand_nlevels(xCell, nbuoysurf) + y = _expand_nlevels(yCell, nbuoysurf) + z = _expand_nlevels(zCell, nbuoysurf) + + buoypart = ( + (np.tile(buoysurf, (nparticles, 1))) + .reshape(nparticles * nbuoysurf, order="F") + .copy()) + cellindices = np.tile(cpts, (nbuoysurf)) + + return Particles(x, y, z, cellindices, "buoyancySurface", + buoypart=buoypart, buoysurf=buoysurf, + spatialfilter=afilter) + + +def _build_passive_floats(cpts, xCell, yCell, zCell, f_init, nvertlevels, + afilter, vertseedtype): + + x = _expand_nlevels(xCell, nvertlevels) + y = _expand_nlevels(yCell, nvertlevels) + z = _expand_nlevels(zCell, nvertlevels) + f_init = netCDF4.Dataset(f_init, "r") + if vertseedtype == "linear": + wgts = np.linspace(0, 1, nvertlevels + 2)[1:-1] + elif vertseedtype == "log": + wgts = np.geomspace(1.0 / (nvertlevels - 1), 1, nvertlevels + 1)[0:-1] + elif vertseedtype == 
"denseCenter": + wgts = _dense_center_seeding(nvertlevels) + else: + raise ValueError( + "Must designate `vertseedtype` as one of the following: " + + f"{VERTSEEDTYPE}" + ) + zlevel = -np.kron(wgts, f_init.variables["bottomDepth"][cpts]) + cellindices = np.tile(cpts, (nvertlevels)) + f_init.close() + + return Particles( + x, y, z, cellindices, "passiveFloat", zlevel=zlevel, + spatialfilter=afilter) + + +def _dense_center_seeding(nVert): + """ + Distributes passive floats with 50% of them occurring between 40% and 60% + of the bottom depth. + """ + nMid = np.ceil((1 / 2) * nVert) + nRem = nVert - nMid + if nRem % 2 != 0: + nMid += 1 + nRem -= 1 + upper = np.linspace(0, 0.4, (int(nRem) // 2) + 1) + center = np.linspace(0.4, 0.6, int(nMid) + 2) + lower = np.linspace(0.6, 1, (int(nRem) // 2) + 1) + c_wgts = np.concatenate([upper[1:], center[1:-1], lower[0:-1]]) + return c_wgts + + +def _build_surface_floats(cpts, xCell, yCell, zCell, afilter): + + x = _expand_nlevels(xCell, 1) + y = _expand_nlevels(yCell, 1) + z = _expand_nlevels(zCell, 1) + cellindices = cpts + + return Particles(x, y, z, cellindices, "indexLevel", indexlevel=1, + zlevel=0, spatialfilter=afilter) + + +def _build_particle_file(f_init, f_name, f_decomp, types, spatialfilter, + buoySurf, nVertLevels, downsample, vertseedtype, + seed_center, seed_vertex, add_noise, CFLmin): + + cpts, xCell, yCell, zCell = _particle_coords( + f_init, downsample, seed_center, seed_vertex, add_noise, CFLmin) + + # build particles + particlelist = [] + if "buoyancy" in types or "all" in types: + particlelist.append( + _build_isopycnal_particles( + cpts, xCell, yCell, zCell, buoySurf, spatialfilter)) + if "passive" in types or "all" in types: + particlelist.append( + _build_passive_floats( + cpts, xCell, yCell, zCell, f_init, nVertLevels, spatialfilter, + vertseedtype)) + # apply surface particles everywhere to ensure that LIGHT works + # (allow for some load-imbalance for filters) + if "surface" in types or "all" in types: + 
particlelist.append( + _build_surface_floats(cpts, xCell, yCell, zCell, spatialfilter)) + + # write particles to disk + ParticleList(particlelist).write(f_name, f_decomp) diff --git a/compass/ocean/plot.py b/compass/ocean/plot.py new file mode 100644 index 0000000000..14b7fd3d78 --- /dev/null +++ b/compass/ocean/plot.py @@ -0,0 +1,202 @@ +import xarray +import xarray.plot +import numpy as np +import datetime +import matplotlib.pyplot as plt +from matplotlib.font_manager import FontProperties + + +def plot_initial_state(input_file_name='initial_state.nc', + output_file_name='initial_state.png'): + """ + creates histogram plots of the initial condition + + Parameters + ---------- + input_file_name : str, optional + The path to a NetCDF file with the initial state + + output_file_name: str, optional + The path to the output image file + """ + + # load mesh variables + chunks = {'nCells': 32768, 'nEdges': 32768} + ds = xarray.open_dataset(input_file_name, chunks=chunks) + nCells = ds.sizes['nCells'] + nEdges = ds.sizes['nEdges'] + nVertLevels = ds.sizes['nVertLevels'] + + fig = plt.figure() + fig.set_size_inches(16.0, 12.0) + plt.clf() + + print('plotting histograms of the initial condition') + print('see: init/initial_state/initial_state.png') + d = datetime.datetime.today() + txt = \ + 'MPAS-Ocean initial state\n' + \ + 'date: {}\n'.format(d.strftime('%m/%d/%Y')) + \ + 'number cells: {}\n'.format(nCells) + \ + 'number cells, millions: {:6.3f}\n'.format(nCells / 1.e6) + \ + 'number layers: {}\n\n'.format(nVertLevels) + \ + ' min val max val variable name\n' + + plt.subplot(3, 3, 2) + varName = 'maxLevelCell' + var = ds[varName] + maxLevelCell = var.values - 1 + xarray.plot.hist(var, bins=nVertLevels - 4) + plt.ylabel('frequency') + plt.xlabel(varName) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + plt.subplot(3, 3, 3) + varName = 'bottomDepth' + var = ds[varName] + xarray.plot.hist(var, bins=nVertLevels - 4) + 
plt.xlabel(varName) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + cellsOnEdge = ds['cellsOnEdge'].values - 1 + cellMask = np.zeros((nCells, nVertLevels), bool) + edgeMask = np.zeros((nEdges, nVertLevels), bool) + for k in range(nVertLevels): + cellMask[:, k] = k <= maxLevelCell + cell0 = cellsOnEdge[:, 0] + cell1 = cellsOnEdge[:, 1] + edgeMask[:, k] = np.logical_and(np.logical_and(cellMask[cell0, k], + cellMask[cell1, k]), + np.logical_and(cell0 >= 0, + cell1 >= 0)) + cellMask = xarray.DataArray(data=cellMask, dims=('nCells', 'nVertLevels')) + edgeMask = xarray.DataArray(data=edgeMask, dims=('nEdges', 'nVertLevels')) + + plt.subplot(3, 3, 4) + varName = 'temperature' + var = ds[varName].isel(Time=0).where(cellMask) + xarray.plot.hist(var, bins=100, log=True) + plt.ylabel('frequency') + plt.xlabel(varName) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + plt.subplot(3, 3, 5) + varName = 'salinity' + var = ds[varName].isel(Time=0).where(cellMask) + xarray.plot.hist(var, bins=100, log=True) + plt.xlabel(varName) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + plt.subplot(3, 3, 6) + varName = 'layerThickness' + var = ds[varName].isel(Time=0).where(cellMask) + xarray.plot.hist(var, bins=100, log=True) + plt.xlabel(varName) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + plt.subplot(3, 3, 7) + varName = 'rx1Edge' + var = ds[varName].isel(Time=0).where(edgeMask) + maxRx1Edge = var.max().values + xarray.plot.hist(var, bins=100, log=True) + plt.ylabel('frequency') + plt.xlabel('Haney Number, max={:4.2f}'.format(maxRx1Edge)) + txt = '{}{:9.2e} {:9.2e} {}\n'.format(txt, var.min().values, + var.max().values, varName) + + font = FontProperties() + font.set_family('monospace') + font.set_size(12) + print(txt) + plt.subplot(3, 3, 1) + plt.text(0, 1, txt, verticalalignment='top', 
fontproperties=font) + plt.axis('off') + + plt.tight_layout(pad=4.0) + + plt.savefig(output_file_name, bbox_inches='tight', pad_inches=0.1) + + +def plot_vertical_grid(grid_filename, config, + out_filename='vertical_grid.png'): + """ + Plot the vertical grid + + Parameters + ---------- + grid_filename : str + The name of the NetCDF file containing the vertical grid + + config : configparser.ConfigParser + Configuration options for the vertical grid + + out_filename : str, optional + The name of the image file to write to + """ + + ds = xarray.open_dataset(grid_filename) + nVertLevels = ds.sizes['nVertLevels'] + midDepth = ds.refMidDepth.values + layerThickness = ds.refLayerThickness.values + botDepth = ds.refBottomDepth.values + + fig = plt.figure() + fig.set_size_inches(16.0, 8.0) + zInd = np.arange(1, nVertLevels + 1) + plt.clf() + + plt.subplot(2, 2, 1) + plt.plot(zInd, midDepth, '.') + plt.gca().invert_yaxis() + plt.xlabel('vertical index (one-based)') + plt.ylabel('layer mid-depth [m]') + plt.grid() + + plt.subplot(2, 2, 2) + plt.plot(layerThickness, midDepth, '.') + plt.gca().invert_yaxis() + plt.xlabel('layer thickness [m]') + plt.ylabel('layer mid-depth [m]') + plt.grid() + + plt.subplot(2, 2, 3) + plt.plot(zInd, layerThickness, '.') + plt.xlabel('vertical index (one-based)') + plt.ylabel('layer thickness [m]') + plt.grid() + + txt = ['number layers: {}'.format(nVertLevels)] + + if config.has_option('vertical_grid', 'bottom_depth'): + bottom_depth = config.getfloat('vertical_grid', 'bottom_depth') + txt.extend( + ['bottom depth requested: {:8.2f}'.format(bottom_depth), + 'bottom depth actual: {:8.2f}'.format(np.amax(botDepth))]) + + if config.has_option('vertical_grid', 'min_layer_thickness'): + min_layer_thickness = config.getfloat('vertical_grid', + 'min_layer_thickness') + txt.extend( + ['min thickness requested: {:8.2f}'.format(min_layer_thickness), + 'min thickness actual: {:8.2f}'.format( + np.amin(layerThickness[:]))]) + + if 
config.has_option('vertical_grid', 'max_layer_thickness'): + max_layer_thickness = config.getfloat('vertical_grid', + 'max_layer_thickness') + txt.extend( + ['max thickness requested: {:8.2f}'.format(max_layer_thickness), + 'max thickness actual: {:8.2f}'.format( + np.amax(layerThickness[:]))]) + + txt = '\n'.join(txt) + print(txt) + plt.subplot(2, 2, 4) + plt.text(0, 0, txt, fontsize=12) + plt.axis('off') + plt.savefig(out_filename) diff --git a/compass/ocean/streams/__init__.py b/compass/ocean/streams/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/ocean/streams/streams.frazil b/compass/ocean/streams/streams.frazil new file mode 100644 index 0000000000..db295838e6 --- /dev/null +++ b/compass/ocean/streams/streams.frazil @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/streams/streams.land_ice_fluxes b/compass/ocean/streams/streams.land_ice_fluxes new file mode 100644 index 0000000000..16d86283a8 --- /dev/null +++ b/compass/ocean/streams/streams.land_ice_fluxes @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/streams/streams.ssh_adjust b/compass/ocean/streams/streams.ssh_adjust new file mode 100644 index 0000000000..b128f8cce1 --- /dev/null +++ b/compass/ocean/streams/streams.ssh_adjust @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/suites/__init__.py b/compass/ocean/suites/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/ocean/suites/ec30to60.txt b/compass/ocean/suites/ec30to60.txt new file mode 100644 index 0000000000..bad9cad863 --- /dev/null +++ b/compass/ocean/suites/ec30to60.txt @@ -0,0 +1,5 @@ +ocean/global_ocean/EC30to60/mesh +ocean/global_ocean/EC30to60/PHC/init +ocean/global_ocean/EC30to60/PHC/performance_test +ocean/global_ocean/EC30to60/PHC/dynamic_adjustment +ocean/global_ocean/EC30to60/PHC/files_for_e3sm diff --git a/compass/ocean/suites/ecwisc30to60.txt 
b/compass/ocean/suites/ecwisc30to60.txt new file mode 100644 index 0000000000..a6bf9238ed --- /dev/null +++ b/compass/ocean/suites/ecwisc30to60.txt @@ -0,0 +1,5 @@ +ocean/global_ocean/ECwISC30to60/mesh +ocean/global_ocean/ECwISC30to60/PHC/init +ocean/global_ocean/ECwISC30to60/PHC/performance_test +ocean/global_ocean/ECwISC30to60/PHC/dynamic_adjustment +ocean/global_ocean/ECwISC30to60/PHC/files_for_e3sm diff --git a/compass/ocean/suites/nightly.txt b/compass/ocean/suites/nightly.txt new file mode 100644 index 0000000000..8dc98b4bb8 --- /dev/null +++ b/compass/ocean/suites/nightly.txt @@ -0,0 +1,28 @@ +ocean/baroclinic_channel/10km/default +ocean/baroclinic_channel/10km/threads_test +ocean/baroclinic_channel/10km/decomp_test +ocean/baroclinic_channel/10km/restart_test + +ocean/global_ocean/QU240/mesh +ocean/global_ocean/QU240/PHC/init +ocean/global_ocean/QU240/PHC/performance_test +ocean/global_ocean/QU240/PHC/restart_test +ocean/global_ocean/QU240/PHC/decomp_test +ocean/global_ocean/QU240/PHC/threads_test +ocean/global_ocean/QU240/PHC/analysis_test + +ocean/global_ocean/QU240/PHC/RK4/performance_test +ocean/global_ocean/QU240/PHC/RK4/restart_test +ocean/global_ocean/QU240/PHC/RK4/decomp_test +ocean/global_ocean/QU240/PHC/RK4/threads_test + +ocean/global_ocean/QU240/EN4_1900/init +ocean/global_ocean/QU240/EN4_1900/performance_test + +ocean/global_ocean/QU240/PHC_BGC/init +ocean/global_ocean/QU240/PHC_BGC/performance_test + +ocean/ice_shelf_2d/5km/restart_test + +ocean/ziso/20km/default +ocean/ziso/20km/with_frazil diff --git a/compass/ocean/suites/qu240_for_e3sm.txt b/compass/ocean/suites/qu240_for_e3sm.txt new file mode 100644 index 0000000000..e030af0006 --- /dev/null +++ b/compass/ocean/suites/qu240_for_e3sm.txt @@ -0,0 +1,4 @@ +ocean/global_ocean/QU240/mesh +ocean/global_ocean/QU240/PHC/init +ocean/global_ocean/QU240/PHC/dynamic_adjustment +ocean/global_ocean/QU240/PHC/files_for_e3sm diff --git a/compass/ocean/suites/quwisc240.txt 
b/compass/ocean/suites/quwisc240.txt new file mode 100644 index 0000000000..27b7ef4091 --- /dev/null +++ b/compass/ocean/suites/quwisc240.txt @@ -0,0 +1,15 @@ +ocean/global_ocean/QUwISC240/mesh +ocean/global_ocean/QUwISC240/PHC/init +ocean/global_ocean/QUwISC240/PHC/performance_test +ocean/global_ocean/QUwISC240/PHC/restart_test +ocean/global_ocean/QUwISC240/PHC/decomp_test +ocean/global_ocean/QUwISC240/PHC/threads_test +ocean/global_ocean/QUwISC240/PHC/analysis_test +ocean/global_ocean/QUwISC240/PHC/RK4/performance_test +ocean/global_ocean/QUwISC240/PHC/RK4/restart_test +ocean/global_ocean/QUwISC240/PHC/RK4/decomp_test +ocean/global_ocean/QUwISC240/PHC/RK4/threads_test +ocean/global_ocean/QUwISC240/EN4_1900/init +ocean/global_ocean/QUwISC240/EN4_1900/performance_test +ocean/global_ocean/QUwISC240/PHC_BGC/init +ocean/global_ocean/QUwISC240/PHC_BGC/performance_test diff --git a/compass/ocean/suites/quwisc240_for_e3sm.txt b/compass/ocean/suites/quwisc240_for_e3sm.txt new file mode 100644 index 0000000000..2a5caab8da --- /dev/null +++ b/compass/ocean/suites/quwisc240_for_e3sm.txt @@ -0,0 +1,4 @@ +ocean/global_ocean/QUwISC240/mesh +ocean/global_ocean/QUwISC240/PHC/init +ocean/global_ocean/QUwISC240/PHC/dynamic_adjustment +ocean/global_ocean/QUwISC240/PHC/files_for_e3sm diff --git a/compass/ocean/tests/__init__.py b/compass/ocean/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/ocean/tests/baroclinic_channel/__init__.py b/compass/ocean/tests/baroclinic_channel/__init__.py new file mode 100644 index 0000000000..8bde84f10f --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/__init__.py @@ -0,0 +1,61 @@ +from compass.testgroup import TestGroup +from compass.ocean.tests.baroclinic_channel.decomp_test import DecompTest +from compass.ocean.tests.baroclinic_channel.default import Default +from compass.ocean.tests.baroclinic_channel.restart_test import RestartTest +from compass.ocean.tests.baroclinic_channel.rpe_test import RpeTest 
+from compass.ocean.tests.baroclinic_channel.threads_test import ThreadsTest + + +class BaroclinicChannel(TestGroup): + """ + A test group for baroclinic channel test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.MpasCore + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='baroclinic_channel') + + for resolution in ['1km', '4km', '10km']: + self.add_test_case( + RpeTest(test_group=self, resolution=resolution)) + for resolution in ['10km']: + self.add_test_case( + DecompTest(test_group=self, resolution=resolution)) + self.add_test_case( + Default(test_group=self, resolution=resolution)) + self.add_test_case( + RestartTest(test_group=self, resolution=resolution)) + self.add_test_case( + ThreadsTest(test_group=self, resolution=resolution)) + + +def configure(resolution, config): + """ + Modify the configuration options for one of the baroclinic test cases + + Parameters + ---------- + resolution : str + The resolution of the test case + + config : configparser.ConfigParser + Configuration options for this test case + """ + res_params = {'10km': {'nx': 16, + 'ny': 50, + 'dc': 10e3}, + '4km': {'nx': 40, + 'ny': 126, + 'dc': 4e3}, + '1km': {'nx': 160, + 'ny': 500, + 'dc': 1e3}} + + if resolution not in res_params: + raise ValueError('Unsupported resolution {}. 
Supported values are: ' + '{}'.format(resolution, list(res_params))) + res_params = res_params[resolution] + for param in res_params: + config.set('baroclinic_channel', param, '{}'.format(res_params[param])) diff --git a/compass/ocean/tests/baroclinic_channel/baroclinic_channel.cfg b/compass/ocean/tests/baroclinic_channel/baroclinic_channel.cfg new file mode 100644 index 0000000000..cdbe6b24cc --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/baroclinic_channel.cfg @@ -0,0 +1,43 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = uniform + +# Number of vertical levels +vert_levels = 20 + +# Depth of the bottom of the ocean +bottom_depth = 1000.0 + + +# config options for baroclinic channel testcases +[baroclinic_channel] + +# Logical flag that determines if locations of features are defined by distance +# or fractions. False means fractions. +use_distances = False + +# Temperature of the surface in the northern half of the domain. +surface_temperature = 13.1 + +# Temperature of the bottom in the northern half of the domain. +bottom_temperature = 10.1 + +# Difference in the temperature field between the northern and southern halves +# of the domain. +temperature_difference = 1.2 + +# Fraction of domain in Y direction the temperature gradient should be linear +# over. +gradient_width_frac = 0.08 + +# Width of the temperature gradient around the center sin wave. Default value +# is relative to a 500km domain in Y. +gradient_width_dist = 40e3 + +# Salinity of the water in the entire domain. +salinity = 35.0 + +# Coriolis parameter for entire domain. 
+coriolis_parameter = -1.2e-4 \ No newline at end of file diff --git a/compass/ocean/tests/baroclinic_channel/decomp_test/__init__.py b/compass/ocean/tests/baroclinic_channel/decomp_test/__init__.py new file mode 100644 index 0000000000..985a643cb2 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/decomp_test/__init__.py @@ -0,0 +1,66 @@ +from compass.testcase import TestCase +from compass.ocean.tests.baroclinic_channel.initial_state import InitialState +from compass.ocean.tests.baroclinic_channel.forward import Forward +from compass.ocean.tests import baroclinic_channel +from compass.validate import compare_variables + + +class DecompTest(TestCase): + """ + A decomposition test case for the baroclinic channel test group, which + makes sure the model produces identical results on 4 and 8 cores. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.baroclinic_channel.BaroclinicChannel + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'decomp_test' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + + for procs in [4, 8]: + name = '{}proc'.format(procs) + self.add_step( + Forward(test_case=self, name=name, subdir=name, cores=procs, + threads=1, resolution=resolution)) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + baroclinic_channel.configure(self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if '4proc' in steps and '8proc' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='4proc/output.nc', + filename2='8proc/output.nc') diff --git a/compass/ocean/tests/baroclinic_channel/default/__init__.py b/compass/ocean/tests/baroclinic_channel/default/__init__.py new file mode 100644 index 0000000000..bcfbcfd417 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/default/__init__.py @@ -0,0 +1,48 @@ +from compass.testcase import TestCase +from compass.ocean.tests.baroclinic_channel.initial_state import InitialState +from compass.ocean.tests.baroclinic_channel.forward import Forward +from compass.ocean.tests import baroclinic_channel + + +class Default(TestCase): + """ + The default test case for the baroclinic channel test group simply creates + the mesh and initial condition, then performs a short forward run on 4 + cores. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.baroclinic_channel.BaroclinicChannel + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'default' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + self.add_step( + Forward(test_case=self, cores=4, threads=1, resolution=resolution)) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + baroclinic_channel.configure(self.resolution, self.config) + + # no run() is needed because we're doing the default: running all steps diff --git a/compass/ocean/tests/baroclinic_channel/forward.py b/compass/ocean/tests/baroclinic_channel/forward.py new file mode 100644 index 0000000000..843da5497f --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/forward.py @@ -0,0 +1,84 @@ +from compass.model import run_model +from compass.step import Step + + +class Forward(Step): + """ + A step for performing forward MPAS-Ocean runs as part of baroclinic + channel test cases. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + def __init__(self, test_case, resolution, name='forward', subdir=None, + cores=1, min_cores=None, threads=1, nu=None): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. 
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + nu : float, optional + the viscosity (if different from the default for the test group) + """ + self.resolution = resolution + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + self.add_namelist_file('compass.ocean.tests.baroclinic_channel', + 'namelist.forward') + self.add_namelist_file('compass.ocean.tests.baroclinic_channel', + 'namelist.{}.forward'.format(resolution)) + if nu is not None: + # update the viscosity to the requested value + options = {'config_mom_del2': '{}'.format(nu)} + self.add_namelist_options(options) + + self.add_streams_file('compass.ocean.tests.baroclinic_channel', + 'streams.forward') + + self.add_input_file(filename='init.nc', + target='../initial_state/ocean.nc') + self.add_input_file(filename='graph.info', + target='../initial_state/culled_graph.info') + + self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + run_model(self) diff --git a/compass/ocean/tests/baroclinic_channel/initial_state.py b/compass/ocean/tests/baroclinic_channel/initial_state.py new file mode 100644 index 0000000000..4368f41079 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/initial_state.py @@ -0,0 +1,156 @@ +import xarray +import numpy + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull + +from compass.ocean.vertical import generate_grid +from compass.step import Step + + +class InitialState(Step): + """ + A step for creating a mesh and initial condition for baroclinic channel + test cases + + 
Attributes + ---------- + resolution : str + The resolution of the test case + """ + def __init__(self, test_case, resolution): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + """ + super().__init__(test_case=test_case, name='initial_state') + self.resolution = resolution + + for file in ['base_mesh.nc', 'culled_mesh.nc', 'culled_graph.info', + 'ocean.nc']: + self.add_output_file(file) + + def run(self): + """ + Run this step of the test case + """ + config = self.config + logger = self.logger + + section = config['baroclinic_channel'] + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False, + nonperiodic_y=True) + write_netcdf(dsMesh, 'base_mesh.nc') + + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, graphInfoFileName='culled_graph.info', + logger=logger) + write_netcdf(dsMesh, 'culled_mesh.nc') + + section = config['baroclinic_channel'] + use_distances = section.getboolean('use_distances') + gradient_width_dist = section.getfloat('gradient_width_dist') + gradient_width_frac = section.getfloat('gradient_width_frac') + bottom_temperature = section.getfloat('bottom_temperature') + surface_temperature = section.getfloat('surface_temperature') + temperature_difference = section.getfloat('temperature_difference') + salinity = section.getfloat('salinity') + coriolis_parameter = section.getfloat('coriolis_parameter') + + ds = dsMesh.copy() + + interfaces = generate_grid(config=config) + + bottom_depth = interfaces[-1] + vert_levels = len(interfaces) - 1 + + ds['refBottomDepth'] = ('nVertLevels', interfaces[1:]) + ds['refZMid'] = ('nVertLevels', -0.5 * (interfaces[1:] + interfaces[0:-1])) + ds['vertCoordMovementWeights'] = xarray.ones_like(ds.refBottomDepth) + + xCell = ds.xCell + yCell = ds.yCell + + xMin = 
xCell.min().values + xMax = xCell.max().values + yMin = yCell.min().values + yMax = yCell.max().values + + yMid = 0.5*(yMin + yMax) + xPerturbMin = xMin + 4.0 * (xMax - xMin) / 6.0 + xPerturbMax = xMin + 5.0 * (xMax - xMin) / 6.0 + + if use_distances: + perturbationWidth = gradient_width_dist + else: + perturbationWidth = (yMax - yMin) * gradient_width_frac + + yOffset = perturbationWidth * numpy.sin( + 6.0 * numpy.pi * (xCell - xMin) / (xMax - xMin)) + + temp_vert = (bottom_temperature + + (surface_temperature - bottom_temperature) * + ((ds.refZMid + bottom_depth) / bottom_depth)) + + frac = xarray.where(yCell < yMid - yOffset, 1., 0.) + + mask = numpy.logical_and(yCell >= yMid - yOffset, + yCell < yMid - yOffset + perturbationWidth) + frac = xarray.where(mask, + 1. - (yCell - (yMid - yOffset)) / perturbationWidth, + frac) + + temperature = temp_vert - temperature_difference * frac + temperature = temperature.transpose('nCells', 'nVertLevels') + + # Determine yOffset for 3rd crest in sin wave + yOffset = 0.5 * perturbationWidth * numpy.sin( + numpy.pi * (xCell - xPerturbMin) / (xPerturbMax - xPerturbMin)) + + mask = numpy.logical_and( + numpy.logical_and(yCell >= yMid - yOffset - 0.5 * perturbationWidth, + yCell <= yMid - yOffset + 0.5 * perturbationWidth), + numpy.logical_and(xCell >= xPerturbMin, + xCell <= xPerturbMax)) + + temperature = (temperature + + mask * 0.3 * (1. 
- ((yCell - (yMid - yOffset)) / + (0.5 * perturbationWidth)))) + + temperature = temperature.expand_dims(dim='Time', axis=0) + + layerThickness = xarray.DataArray(data=interfaces[1:] - interfaces[0:-1], + dims='nVertLevels') + _, layerThickness = xarray.broadcast(xCell, layerThickness) + layerThickness = layerThickness.transpose('nCells', 'nVertLevels') + layerThickness = layerThickness.expand_dims(dim='Time', axis=0) + + normalVelocity = xarray.zeros_like(ds.xEdge) + normalVelocity, _ = xarray.broadcast(normalVelocity, ds.refBottomDepth) + normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels') + normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0) + + ds['temperature'] = temperature + ds['salinity'] = salinity * xarray.ones_like(temperature) + ds['normalVelocity'] = normalVelocity + ds['layerThickness'] = layerThickness + ds['restingThickness'] = layerThickness + ds['bottomDepth'] = bottom_depth * xarray.ones_like(xCell) + ds['maxLevelCell'] = vert_levels * xarray.ones_like(xCell, dtype=int) + ds['fCell'] = coriolis_parameter * xarray.ones_like(xCell) + ds['fEdge'] = coriolis_parameter * xarray.ones_like(ds.xEdge) + ds['fVertex'] = coriolis_parameter * xarray.ones_like(ds.xVertex) + + write_netcdf(ds, 'ocean.nc') diff --git a/compass/ocean/tests/baroclinic_channel/namelist.10km.forward b/compass/ocean/tests/baroclinic_channel/namelist.10km.forward new file mode 100644 index 0000000000..04e0bf0ace --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/namelist.10km.forward @@ -0,0 +1,3 @@ +config_dt = '00:05:00' +config_btr_dt = '00:00:15' +config_mom_del2 = 10.0 diff --git a/compass/ocean/tests/baroclinic_channel/namelist.1km.forward b/compass/ocean/tests/baroclinic_channel/namelist.1km.forward new file mode 100644 index 0000000000..5c73113daf --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/namelist.1km.forward @@ -0,0 +1,3 @@ +config_dt = '00:00:30' +config_btr_dt = '00:00:02' +config_mom_del2 = 10.0 diff --git 
a/compass/ocean/tests/baroclinic_channel/namelist.4km.forward b/compass/ocean/tests/baroclinic_channel/namelist.4km.forward new file mode 100644 index 0000000000..f25666ce8c --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/namelist.4km.forward @@ -0,0 +1,3 @@ +config_dt = '00:02:00' +config_btr_dt = '00:00:06' +config_mom_del2 = 10.0 diff --git a/compass/ocean/tests/baroclinic_channel/namelist.forward b/compass/ocean/tests/baroclinic_channel/namelist.forward new file mode 100644 index 0000000000..4bf703a53c --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/namelist.forward @@ -0,0 +1,7 @@ +config_write_output_on_startup = .false. +config_run_duration = '0000_00:15:00' +config_use_mom_del2 = .true. +config_implicit_bottom_drag_coeff = 1.0e-2 +config_use_cvmix_background = .true. +config_cvmix_background_diffusion = 0.0 +config_cvmix_background_viscosity = 1.0e-4 diff --git a/compass/ocean/tests/baroclinic_channel/restart_test/__init__.py b/compass/ocean/tests/baroclinic_channel/restart_test/__init__.py new file mode 100644 index 0000000000..1fa7cd29b8 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/restart_test/__init__.py @@ -0,0 +1,74 @@ +from compass.testcase import TestCase +from compass.ocean.tests.baroclinic_channel.initial_state import InitialState +from compass.ocean.tests.baroclinic_channel.forward import Forward +from compass.ocean.tests import baroclinic_channel +from compass.validate import compare_variables + + +class RestartTest(TestCase): + """ + A restart test case for the baroclinic channel test group, which makes sure + the model produces identical results with one longer run and two shorter + runs with a restart in between. 
+ + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.baroclinic_channel.BaroclinicChannel + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'restart_test' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + + for part in ['full', 'restart']: + name = '{}_run'.format(part) + step = Forward(test_case=self, name=name, subdir=name, cores=4, + threads=1, resolution=resolution) + + step.add_namelist_file( + 'compass.ocean.tests.baroclinic_channel.restart_test', + 'namelist.{}'.format(part)) + step.add_streams_file( + 'compass.ocean.tests.baroclinic_channel.restart_test', + 'streams.{}'.format(part)) + self.add_step(step) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + baroclinic_channel.configure(self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/ocean/tests/baroclinic_channel/restart_test/namelist.full b/compass/ocean/tests/baroclinic_channel/restart_test/namelist.full new file mode 100644 index 0000000000..d99b25960d --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/restart_test/namelist.full @@ -0,0 +1,3 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '0000_00:10:00' +config_write_output_on_startup = .false. diff --git a/compass/ocean/tests/baroclinic_channel/restart_test/namelist.restart b/compass/ocean/tests/baroclinic_channel/restart_test/namelist.restart new file mode 100644 index 0000000000..620cd94498 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/restart_test/namelist.restart @@ -0,0 +1,4 @@ +config_start_time = '0001-01-01_00:05:00' +config_run_duration = '0000_00:05:00' +config_write_output_on_startup = .false. +config_do_restart = .true. 
diff --git a/compass/ocean/tests/baroclinic_channel/restart_test/streams.full b/compass/ocean/tests/baroclinic_channel/restart_test/streams.full new file mode 100644 index 0000000000..5d80d1abb5 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/restart_test/streams.full @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/baroclinic_channel/restart_test/streams.restart b/compass/ocean/tests/baroclinic_channel/restart_test/streams.restart new file mode 100644 index 0000000000..c99ec09db0 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/restart_test/streams.restart @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/baroclinic_channel/rpe_test/__init__.py b/compass/ocean/tests/baroclinic_channel/rpe_test/__init__.py new file mode 100644 index 0000000000..a1dd9ea2ff --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/rpe_test/__init__.py @@ -0,0 +1,79 @@ +from compass.testcase import TestCase +from compass.ocean.tests.baroclinic_channel.initial_state import InitialState +from compass.ocean.tests.baroclinic_channel.forward import Forward +from compass.ocean.tests.baroclinic_channel.rpe_test.analysis import Analysis +from compass.ocean.tests import baroclinic_channel + + +class RpeTest(TestCase): + """ + The reference potential energy (RPE) test case for the baroclinic channel + test group performs a 20-day integration of the model forward in time at + 5 different values of the viscosity at the given resolution. 
+ + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.baroclinic_channel.BaroclinicChannel + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'rpe_test' + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + nus = [1, 5, 10, 20, 200] + + res_params = {'1km': {'cores': 144, 'min_cores': 36}, + '4km': {'cores': 36, 'min_cores': 8}, + '10km': {'cores': 8, 'min_cores': 4}} + + if resolution not in res_params: + raise ValueError( + 'Unsupported resolution {}. Supported values are: ' + '{}'.format(resolution, list(res_params))) + + params = res_params[resolution] + + self.resolution = resolution + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + + for index, nu in enumerate(nus): + name = 'rpe_test_{}_nu_{}'.format(index + 1, nu) + step = Forward( + test_case=self, name=name, subdir=name, cores=params['cores'], + min_cores=params['min_cores'], resolution=resolution, + nu=float(nu)) + + step.add_namelist_file( + 'compass.ocean.tests.baroclinic_channel.rpe_test', + 'namelist.forward') + step.add_streams_file( + 'compass.ocean.tests.baroclinic_channel.rpe_test', + 'streams.forward') + self.add_step(step) + + self.add_step( + Analysis(test_case=self, resolution=resolution, nus=nus)) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + baroclinic_channel.configure(self.resolution, self.config) + + # no run() is needed because we're doing the default: running all steps diff --git a/compass/ocean/tests/baroclinic_channel/rpe_test/analysis.py b/compass/ocean/tests/baroclinic_channel/rpe_test/analysis.py new file mode 100644 index 0000000000..47ed31c329 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/rpe_test/analysis.py @@ -0,0 +1,128 @@ +import numpy as np +from netCDF4 import Dataset +import matplotlib.pyplot as plt +import cmocean + +from compass.step import Step + + +class Analysis(Step): + """ + A step for plotting the results of a series of RPE runs in the baroclinic + channel test group + + Attributes + ---------- + resolution : str + The resolution of the test case + + nus : list of float + A list of viscosities + """ + def __init__(self, test_case, resolution, nus): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + + nus : list of float + A list of viscosities + """ + super().__init__(test_case=test_case, name='analysis') + self.resolution = resolution + self.nus = nus + + for index, nu in enumerate(nus): + self.add_input_file( + filename='output_{}.nc'.format(index+1), + target='../rpe_test_{}_nu_{}/output.nc'.format(index+1, nu)) + + self.add_output_file( + filename='sections_baroclinic_channel_{}.png'.format(resolution)) + + def run(self): + """ + Run this step of the test case + """ + section = self.config['baroclinic_channel'] + nx = section.getint('nx') + ny = section.getint('ny') + _plot(nx, ny, self.outputs[0], self.nus) + + +def _plot(nx, ny, filename, nus): + """ + Plot section of the baroclinic channel at different viscosities + + Parameters + ---------- + nx : int + The number of cells in the x direction + + ny : int + The number of cells in the y direction (before culling) + + filename : str + The output file name + + nus : list of 
float + The viscosity values + """ + + plt.switch_backend('Agg') + + nRow = 1 + nCol = 5 + iTime = [0] + time = ['20'] + + fig, axs = plt.subplots(nRow, nCol, figsize=( + 2.1 * nCol, 5.0 * nRow), constrained_layout=True) + + for iCol in range(nCol): + for iRow in range(nRow): + ncfile = Dataset('output_{}.nc'.format(iCol + 1), 'r') + var = ncfile.variables['temperature'] + var1 = np.reshape(var[iTime[iRow], :, 0], [ny, nx]) + # flip in y-dir + var = np.flipud(var1) + + # Every other row in y needs to average two neighbors in x on + # planar hex mesh + var_avg = var + for j in range(0, ny, 2): + for i in range(1, nx - 2): + var_avg[j, i] = (var[j, i + 1] + var[j, i]) / 2.0 + + if nRow == 1: + ax = axs[iCol] + else: + ax = axs[iRow, iCol] + dis = ax.imshow( + var_avg, + extent=[0, 160, 0, 500], + cmap='cmo.thermal', + vmin=11.8, + vmax=13.0) + ax.set_title("day {}, $\\nu_h=${}".format(time[iRow], nus[iCol])) + ax.set_xticks(np.arange(0, 161, step=40)) + ax.set_yticks(np.arange(0, 501, step=50)) + + if iRow == nRow - 1: + ax.set_xlabel('x, km') + if iCol == 0: + ax.set_ylabel('y, km') + if iCol == nCol - 1: + if nRow == 1: + fig.colorbar(dis, ax=axs[nCol - 1], aspect=40) + else: + fig.colorbar(dis, ax=axs[iRow, nCol - 1], aspect=40) + ncfile.close() + + plt.savefig(filename) diff --git a/compass/ocean/tests/baroclinic_channel/rpe_test/namelist.forward b/compass/ocean/tests/baroclinic_channel/rpe_test/namelist.forward new file mode 100644 index 0000000000..e3e9d43636 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/rpe_test/namelist.forward @@ -0,0 +1 @@ +config_run_duration = '20_00:00:00' diff --git a/compass/ocean/tests/baroclinic_channel/rpe_test/streams.forward b/compass/ocean/tests/baroclinic_channel/rpe_test/streams.forward new file mode 100644 index 0000000000..1e3455b13c --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/rpe_test/streams.forward @@ -0,0 +1,16 @@ + + + + + + + + + + + + diff --git 
a/compass/ocean/tests/baroclinic_channel/streams.forward b/compass/ocean/tests/baroclinic_channel/streams.forward new file mode 100644 index 0000000000..ea4b7b5f55 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/streams.forward @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/baroclinic_channel/threads_test/__init__.py b/compass/ocean/tests/baroclinic_channel/threads_test/__init__.py new file mode 100644 index 0000000000..fe8c541942 --- /dev/null +++ b/compass/ocean/tests/baroclinic_channel/threads_test/__init__.py @@ -0,0 +1,66 @@ +from compass.testcase import TestCase +from compass.ocean.tests.baroclinic_channel.initial_state import InitialState +from compass.ocean.tests.baroclinic_channel.forward import Forward +from compass.ocean.tests import baroclinic_channel +from compass.validate import compare_variables + + +class ThreadsTest(TestCase): + """ + A thread test case for the baroclinic channel test group, which makes sure + the model produces identical results with 1 and 2 threads. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.baroclinic_channel.BaroclinicChannel + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'threads_test' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + + for threads in [1, 2]: + name = '{}thread'.format(threads) + self.add_step( + Forward(test_case=self, name=name, subdir=name, cores=4, + threads=threads, resolution=resolution)) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + baroclinic_channel.configure(self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if '1thread' in steps and '2thread' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1thread/output.nc', + filename2='2thread/output.nc') diff --git a/compass/ocean/tests/global_ocean/__init__.py b/compass/ocean/tests/global_ocean/__init__.py new file mode 100644 index 0000000000..9e66501e67 --- /dev/null +++ b/compass/ocean/tests/global_ocean/__init__.py @@ -0,0 +1,169 @@ +from compass.testgroup import TestGroup + +from compass.ocean.tests.global_ocean.mesh import Mesh +from compass.ocean.tests.global_ocean.mesh.qu240.dynamic_adjustment import \ + QU240DynamicAdjustment +from compass.ocean.tests.global_ocean.mesh.ec30to60.dynamic_adjustment import \ + EC30to60DynamicAdjustment +from compass.ocean.tests.global_ocean.mesh.so12to60.dynamic_adjustment import \ + SO12to60DynamicAdjustment +from compass.ocean.tests.global_ocean.init import Init +from compass.ocean.tests.global_ocean.performance_test import PerformanceTest +from compass.ocean.tests.global_ocean.restart_test import RestartTest +from compass.ocean.tests.global_ocean.decomp_test import DecompTest +from compass.ocean.tests.global_ocean.threads_test import ThreadsTest +from compass.ocean.tests.global_ocean.analysis_test import AnalysisTest +from compass.ocean.tests.global_ocean.daily_output_test import DailyOutputTest +from compass.ocean.tests.global_ocean.files_for_e3sm import FilesForE3SM + + +class GlobalOcean(TestGroup): + """ + A test group for setting up global initial conditions and performing + regression testing and dynamic adjustment for MPAS-Ocean + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.MpasCore + the MPAS core that this test group 
belongs to + """ + super().__init__(mpas_core=mpas_core, name='global_ocean') + + # we do a lot of tests for QU240/QUwISC240 + for mesh_name in ['QU240', 'QUwISC240']: + mesh = Mesh(test_group=self, mesh_name=mesh_name) + self.add_test_case(mesh) + + init = Init(test_group=self, mesh=mesh, + initial_condition='PHC', + with_bgc=False) + self.add_test_case(init) + + time_integrator = 'split_explicit' + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + RestartTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + DecompTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + ThreadsTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + AnalysisTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + DailyOutputTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + + dynamic_adjustment = QU240DynamicAdjustment( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator) + self.add_test_case(dynamic_adjustment) + self.add_test_case( + FilesForE3SM( + test_group=self, mesh=mesh, init=init, + dynamic_adjustment=dynamic_adjustment)) + + time_integrator = 'RK4' + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + RestartTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + DecompTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + self.add_test_case( + ThreadsTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + + # EN4_1900 tests + time_integrator = 'split_explicit' + init = Init(test_group=self, mesh=mesh, + initial_condition='EN4_1900', 
+ with_bgc=False) + self.add_test_case(init) + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + dynamic_adjustment = QU240DynamicAdjustment( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator) + self.add_test_case(dynamic_adjustment) + self.add_test_case( + FilesForE3SM( + test_group=self, mesh=mesh, init=init, + dynamic_adjustment=dynamic_adjustment)) + + # BGC tests + init = Init(test_group=self, mesh=mesh, + initial_condition='PHC', + with_bgc=True) + self.add_test_case(init) + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + + # for other meshes, we do fewer tests + for mesh_name in ['EC30to60', 'ECwISC30to60']: + mesh = Mesh(test_group=self, mesh_name=mesh_name) + self.add_test_case(mesh) + + init = Init(test_group=self, mesh=mesh, + initial_condition='PHC', + with_bgc=False) + self.add_test_case(init) + + time_integrator = 'split_explicit' + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + dynamic_adjustment = EC30to60DynamicAdjustment( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator) + self.add_test_case(dynamic_adjustment) + self.add_test_case( + FilesForE3SM( + test_group=self, mesh=mesh, init=init, + dynamic_adjustment=dynamic_adjustment)) + + # SOwISC12to60: just the version with cavities for now + for mesh_name in ['SOwISC12to60']: + mesh = Mesh(test_group=self, mesh_name=mesh_name) + self.add_test_case(mesh) + + init = Init(test_group=self, mesh=mesh, + initial_condition='PHC', + with_bgc=False) + self.add_test_case(init) + time_integrator = 'split_explicit' + self.add_test_case( + PerformanceTest( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator)) + dynamic_adjustment = SO12to60DynamicAdjustment( + test_group=self, mesh=mesh, init=init, + time_integrator=time_integrator) + 
self.add_test_case(dynamic_adjustment) + self.add_test_case( + FilesForE3SM( + test_group=self, mesh=mesh, init=init, + dynamic_adjustment=dynamic_adjustment)) diff --git a/compass/ocean/tests/global_ocean/analysis_test/__init__.py b/compass/ocean/tests/global_ocean/analysis_test/__init__.py new file mode 100644 index 0000000000..b85146be39 --- /dev/null +++ b/compass/ocean/tests/global_ocean/analysis_test/__init__.py @@ -0,0 +1,143 @@ +import traceback +from compass.validate import compare_variables, compare_timers +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class AnalysisTest(ForwardTestCase): + """ + A test case for performing a short forward run with an MPAS-Ocean global + initial condition and check nearly all MPAS-Ocean analysis members to make + sure they run successfully and output is identical to a baseline (if one + is provided). + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='analysis_test') + + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, cores=4, + threads=1) + + module = self.__module__ + step.add_namelist_file(module, 'namelist.forward') + step.add_streams_file(module, 'streams.forward') + self.add_step(step) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + 
super().run() + + config = self.config + work_dir = self.work_dir + + variables = { + 'forward/output.nc': + ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'], + 'forward/analysis_members/globalStats.0001-01-01_00.00.00.nc': + ['kineticEnergyCellMax', 'kineticEnergyCellMin', + 'kineticEnergyCellAvg', 'temperatureAvg', 'salinityAvg'], + 'forward/analysis_members/debugDiagnostics.0001-01-01.nc': + ['rx1MaxCell'], + 'forward/analysis_members/highFrequencyOutput.0001-01-01.nc': + ['temperatureAt250m'], + 'forward/analysis_members/mixedLayerDepths.0001-01-01.nc': + ['dThreshMLD', 'tThreshMLD'], + 'forward/analysis_members/waterMassCensus.0001-01-01_00.00.00.nc': + ['waterMassCensusTemperatureValues'], + 'forward/analysis_members/eliassenPalm.0001-01-01.nc': + ['EPFT'], + 'forward/analysis_members/' + 'layerVolumeWeightedAverage.0001-01-01_00.00.00.nc': + ['avgVolumeTemperature', 'avgVolumeRelativeVorticityCell'], + 'forward/analysis_members/okuboWeiss.0001-01-01_00.00.00.nc': + ['okuboWeiss'], + 'forward/analysis_members/zonalMeans.0001-01-01_00.00.00.nc': + ['velocityZonalZonalMean', 'temperatureZonalMean'], + 'forward/analysis_members/' + 'meridionalHeatTransport.0001-01-01_00.00.00.nc': + ['meridionalHeatTransportLat'], + 'forward/analysis_members/' + 'surfaceAreaWeightedAverages.0001-01-01_00.00.00.nc': + ['avgSurfaceSalinity', 'avgSeaSurfacePressure'], + 'forward/analysis_members/' + 'eddyProductVariables.0001-01-01.nc': + ['SSHSquared', 'velocityZonalSquared', + 'velocityZonalTimesTemperature'], + 'forward/analysis_members/oceanHeatContent.0001-01-01.nc': + ['oceanHeatContentSfcToBot', 'oceanHeatContentSfcTo700m', + 'oceanHeatContent700mTo2000m', 'oceanHeatContent2000mToBot'], + 'forward/analysis_members/mixedLayerHeatBudget.0001-01-01.nc': + ['temperatureHorAdvectionMLTend', 'salinityHorAdvectionMLTend', + 'temperatureML', 'salinityML', 'bruntVaisalaFreqML']} + + failed = list() + for filename, variables in variables.items(): + try: + 
compare_variables(variables, config, work_dir=work_dir, + filename1=filename) + except ValueError: + traceback.print_exc() + failed.append(filename) + + if len(failed) > 0: + raise ValueError('Comparison failed, see above, for the following ' + 'files:\n{}.'.format('\n'.join(failed))) + + timers = ['compute_globalStats', 'write_globalStats', + 'compute_debugDiagnostics', 'write_debugDiagnostics', + 'compute_eliassenPalm', 'write_eliassenPalm', + 'compute_highFrequency', 'write_highFrequency', + 'compute_layerVolumeWeightedAverage', + 'write_layerVolumeWeightedAverage', + 'compute_meridionalHeatTransport', + 'write_meridionalHeatTransport', 'compute_mixedLayerDepths', + 'write_mixedLayerDepths', 'compute_okuboWeiss', + 'write_okuboWeiss', 'compute_surfaceAreaWeightedAverages', + 'write_surfaceAreaWeightedAverages', + 'compute_waterMassCensus', + 'write_waterMassCensus', 'compute_zonalMean', + 'write_zonalMean', + 'compute_eddyProductVariables', 'write_eddyProductVariables', + 'compute_oceanHeatContent', 'write_oceanHeatContent', + 'compute_mixedLayerHeatBudget', 'write_mixedLayerHeatBudget'] + compare_timers(timers, config, work_dir, rundir1='forward') + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + if self.init.with_bgc: + variables.extend( + ['PO4', 'NO3', 'SiO3', 'NH4', 'Fe', 'O2', 'DIC', 'DIC_ALT_CO2', + 'ALK', 'DOC', 'DON', 'DOFe', 'DOP', 'DOPr', 'DONr', 'zooC', + 'spChl', 'spC', 'spFe', 'spCaCO3', 'diatChl', 'diatC', + 'diatFe', 'diatSi', 'diazChl', 'diazC', 'diazFe', 'phaeoChl', + 'phaeoC', 'phaeoFe']) + + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='forward/output.nc') + + timers = ['time integration'] + compare_timers(timers, self.config, self.work_dir, rundir1='forward') diff --git a/compass/ocean/tests/global_ocean/analysis_test/namelist.forward b/compass/ocean/tests/global_ocean/analysis_test/namelist.forward new file mode 100644 index 0000000000..5ab325911b --- /dev/null +++ 
b/compass/ocean/tests/global_ocean/analysis_test/namelist.forward @@ -0,0 +1,19 @@ +config_AM_globalStats_enable = .true. +config_AM_globalStats_compute_on_startup = .true. +config_AM_globalStats_write_on_startup = .true. +config_AM_globalStats_text_file = .true. +config_AM_surfaceAreaWeightedAverages_enable = .true. +config_AM_waterMassCensus_enable = .true. +config_AM_layerVolumeWeightedAverage_enable = .true. +config_AM_zonalMean_enable = .true. +config_AM_okuboWeiss_enable = .true. +config_AM_meridionalHeatTransport_enable = .true. +config_AM_highFrequencyOutput_enable = .true. +config_AM_eliassenPalm_enable = .true. +config_AM_mixedLayerDepths_enable = .true. +config_AM_debugDiagnostics_enable = .true. +config_AM_eddyProductVariables_enable = .true. +config_AM_eddyProductVariables_compute_interval = 'output_interval' +config_AM_eddyProductVariables_write_on_startup = .true. +config_AM_oceanHeatContent_enable = .true. +config_AM_mixedLayerHeatBudget_enable = .true. diff --git a/compass/ocean/tests/global_ocean/analysis_test/streams.forward b/compass/ocean/tests/global_ocean/analysis_test/streams.forward new file mode 100644 index 0000000000..d2f2c96a69 --- /dev/null +++ b/compass/ocean/tests/global_ocean/analysis_test/streams.forward @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/configure.py b/compass/ocean/tests/global_ocean/configure.py new file mode 100644 index 0000000000..b02b93b476 --- /dev/null +++ b/compass/ocean/tests/global_ocean/configure.py @@ -0,0 +1,53 @@ +from compass.config import add_config + + +def configure_global_ocean(test_case, mesh, init=None): + """ + Modify the configuration options for this test case + + Parameters + ---------- + test_case : compass.TestCase + The test case to configure + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init, optional 
+ The test case that produces the initial condition for this run + """ + config = test_case.config + mesh_step = mesh.mesh_step + add_config(config, mesh_step.package, mesh_step.mesh_config_filename, + exception=True) + + if mesh.with_ice_shelf_cavities: + config.set('global_ocean', 'prefix', '{}wISC'.format( + config.get('global_ocean', 'prefix'))) + + add_config(config, test_case.__module__, '{}.cfg'.format(test_case.name), + exception=False) + + # add a description of the initial condition + if init is not None: + initial_condition = init.initial_condition + descriptions = {'PHC': 'Polar science center Hydrographic ' + 'Climatology (PHC)', + 'EN4_1900': + "Met Office Hadley Centre's EN4 dataset from 1900"} + config.set('global_ocean', 'init_description', + descriptions[initial_condition]) + + # a description of the bathymetry + config.set('global_ocean', 'bathy_description', + 'Bathymetry is from GEBCO 2019, combined with BedMachine ' + 'Antarctica around Antarctica.') + + if init is not None and init.with_bgc: + # todo: this needs to be filled in! + config.set('global_ocean', 'bgc_description', + '<<>>') + + if mesh.with_ice_shelf_cavities: + config.set('global_ocean', 'wisc_description', + 'Includes cavities under the ice shelves around Antarctica') diff --git a/compass/ocean/tests/global_ocean/daily_output_test/__init__.py b/compass/ocean/tests/global_ocean/daily_output_test/__init__.py new file mode 100644 index 0000000000..c8158f5554 --- /dev/null +++ b/compass/ocean/tests/global_ocean/daily_output_test/__init__.py @@ -0,0 +1,63 @@ +from compass.validate import compare_variables +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class DailyOutputTest(ForwardTestCase): + """ + A test case to test the output for the TimeSeriesStatMonthly analysis + member in E3SM. In this test, the analysis member for daily output is used + instead for efficiency. 
+ """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='daily_output_test') + + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, cores=4, + threads=1) + + module = self.__module__ + step.add_namelist_file(module, 'namelist.forward') + step.add_streams_file(module, 'streams.forward') + self.add_step(step) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + config = self.config + work_dir = self.work_dir + + variables = [ + 'timeDaily_avg_activeTracers_temperature', + 'timeDaily_avg_activeTracers_salinity', + 'timeDaily_avg_layerThickness', 'timeDaily_avg_normalVelocity', + 'timeDaily_avg_ssh'] + + compare_variables( + variables, config, work_dir=work_dir, + filename1='forward/analysis_members/' + 'mpaso.hist.am.timeSeriesStatsDaily.0001-01-01.nc') diff --git a/compass/ocean/tests/global_ocean/daily_output_test/namelist.forward b/compass/ocean/tests/global_ocean/daily_output_test/namelist.forward new file mode 100644 index 0000000000..a10f1dcab1 --- /dev/null +++ b/compass/ocean/tests/global_ocean/daily_output_test/namelist.forward @@ -0,0 +1,3 @@ +config_run_duration = '0001_00:00:00' +config_AM_timeSeriesStatsDaily_enable = .true. 
+config_AM_timeSeriesStatsDaily_restart_stream = 'none' diff --git a/compass/ocean/tests/global_ocean/daily_output_test/streams.forward b/compass/ocean/tests/global_ocean/daily_output_test/streams.forward new file mode 100644 index 0000000000..8394f9120e --- /dev/null +++ b/compass/ocean/tests/global_ocean/daily_output_test/streams.forward @@ -0,0 +1,100 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/decomp_test/__init__.py b/compass/ocean/tests/global_ocean/decomp_test/__init__.py new file mode 100644 index 0000000000..7604e0860c --- /dev/null +++ b/compass/ocean/tests/global_ocean/decomp_test/__init__.py @@ -0,0 +1,53 @@ +from compass.validate import compare_variables +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class DecompTest(ForwardTestCase): + """ + A test case for performing two short forward runs to make sure the results + are identical with 4 and 8 cores + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='decomp_test') + for procs in [4, 8]: + name = '{}proc'.format(procs) + self.add_step( + ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=name, + subdir=name, 
cores=procs, threads=1)) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if '4proc' in steps and '8proc' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='4proc/output.nc', + filename2='8proc/output.nc') diff --git a/compass/ocean/tests/global_ocean/dynamic_adjustment.py b/compass/ocean/tests/global_ocean/dynamic_adjustment.py new file mode 100644 index 0000000000..86bb377083 --- /dev/null +++ b/compass/ocean/tests/global_ocean/dynamic_adjustment.py @@ -0,0 +1,59 @@ +from compass.validate import compare_variables +from compass.ocean.tests.global_ocean.forward import ForwardTestCase + + +class DynamicAdjustment(ForwardTestCase): + """ + A parent test case for performing dynamic adjustment (dissipating + fast-moving waves) from an MPAS-Ocean initial condition. + + The final stage of the dynamic adjustment is assumed to be called + ``simulation``, and is expected to have a file ``output.nc`` that can be + compared against a baseline. 
+ + Attributes + ---------- + restart_filenames : list of str + A list of restart files from each dynamic-adjustment step + """ + + def __init__(self, test_group, mesh, init, time_integrator, + restart_filenames): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + + restart_filenames : list of str + A list of restart files from each dynamic-adjustment step + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='dynamic_adjustment') + + self.restart_filenames = restart_filenames + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='simulation/output.nc') diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/README b/compass/ocean/tests/global_ocean/files_for_e3sm/README new file mode 100644 index 0000000000..e5b9d79b20 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/README @@ -0,0 +1,16 @@ +***********************WARNING***************************** +The files_for_e3sm step is a work in progress, and produces +only a subset of the files needed for E3SM support of a new +ocean and sea-ice mesh. +*********************************************************** + +After running run.py, the directory assembled_files is populated with links. 
+The directory structure is identical to the E3SM inputdata and diagnostics +directories found here: +https://web.lcrc.anl.gov/public/e3sm/ + +E3SM members should NOT attempt to upload these files. The files produced here +are only a subset of those needed to support a new mesh in E3SM and should not +be uploaded on their own. E3SM experts who know how to produce the other +coupling files may upload these files along with other required datasets to the +LCRC server. \ No newline at end of file diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py new file mode 100644 index 0000000000..5c771ef756 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/__init__.py @@ -0,0 +1,104 @@ +import os +from importlib.resources import path + +from compass.io import symlink +from compass.testcase import TestCase +from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_initial_condition \ + import OceanInitialCondition +from compass.ocean.tests.global_ocean.files_for_e3sm.seaice_initial_condition \ + import SeaiceInitialCondition +from compass.ocean.tests.global_ocean.files_for_e3sm.ocean_graph_partition \ + import OceanGraphPartition +from compass.ocean.tests.global_ocean.files_for_e3sm.scrip import Scrip +from compass.ocean.tests.global_ocean.files_for_e3sm.diagnostics_files \ + import DiagnosticsFiles +from compass.ocean.tests.global_ocean.forward import get_forward_subdir +from compass.ocean.tests.global_ocean.configure import configure_global_ocean + + +class FilesForE3SM(TestCase): + """ + A test case for assembling files needed for MPAS-Ocean and MPAS-Seaice + initial conditions in E3SM as well as files needed for diagnostics from + the Meridional Overturning Circulation analysis member and MPAS-Analysis + + Attributes + ---------- + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : 
compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + dynamic_adjustment : compass.ocean.tests.global_ocean.dynamic_adjustment.DynamicAdjustment + The test case that performs dynamic adjustment to dissipate + fast-moving waves from the initial condition + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to use + as the basis for an E3SM initial condition + """ + def __init__(self, test_group, mesh, init, dynamic_adjustment): + """ + Create test case for creating a global MPAS-Ocean mesh + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + dynamic_adjustment : compass.ocean.tests.global_ocean.dynamic_adjustment.DynamicAdjustment + The test case that performs dynamic adjustment to dissipate + fast-moving waves from the initial condition + """ + name = 'files_for_e3sm' + time_integrator = dynamic_adjustment.time_integrator + subdir = get_forward_subdir(init.init_subdir, time_integrator, name) + + super().__init__(test_group=test_group, name=name, subdir=subdir) + self.mesh = mesh + self.init = init + self.dynamic_adjustment = dynamic_adjustment + + restart_filename = os.path.join( + '..', 'dynamic_adjustment', + dynamic_adjustment.restart_filenames[-1]) + self.restart_filename = restart_filename + + self.add_step( + OceanInitialCondition(test_case=self, + restart_filename=restart_filename)) + + self.add_step( + OceanGraphPartition(test_case=self, mesh=mesh, + restart_filename=restart_filename)) + + self.add_step( + SeaiceInitialCondition( + test_case=self, restart_filename=restart_filename, + with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) + + 
self.add_step( + Scrip( + test_case=self, restart_filename=restart_filename, + with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) + + self.add_step( + DiagnosticsFiles( + test_case=self, restart_filename=restart_filename, + with_ice_shelf_cavities=mesh.with_ice_shelf_cavities)) + + def configure(self): + """ + Modify the configuration options for this test case + """ + configure_global_ocean(test_case=self, mesh=self.mesh, init=self.init) + with path('compass.ocean.tests.global_ocean.files_for_e3sm', + 'README') as target: + symlink(str(target), '{}/README'.format(self.work_dir)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py new file mode 100644 index 0000000000..7dd87873e8 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/diagnostics_files.py @@ -0,0 +1,222 @@ +import os +import xarray +import glob + +from pyremap import get_lat_lon_descriptor, get_polar_descriptor, \ + MpasMeshDescriptor, Remapper +from geometric_features import GeometricFeatures +from geometric_features.aggregation import get_aggregator_by_name +from mpas_tools.mesh.conversion import mask +from mpas_tools.io import write_netcdf +from mpas_tools.ocean.moc import make_moc_basins_and_transects + +from compass.io import symlink +from compass.step import Step + + +class DiagnosticsFiles(Step): + """ + A step for creating files needed for the Meridional Overturning Circulation + analysis member and diagnostics from MPAS-Analysis + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + def __init__(self, test_case, restart_filename, with_ice_shelf_cavities): + """ + Create a step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to + use as the basis for an E3SM 
initial condition + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, name='diagnostics_files', cores=18, + min_cores=1, threads=1) + + self.add_input_file(filename='README', target='../README') + self.add_input_file(filename='restart.nc', + target='../{}'.format(restart_filename)) + + self.with_ice_shelf_cavities = with_ice_shelf_cavities + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. + + def run(self): + """ + Run this step of the testcase + """ + with_ice_shelf_cavities = self.with_ice_shelf_cavities + cores = self.cores + config = self.config + logger = self.logger + + restart_filename = 'restart.nc' + + with xarray.open_dataset(restart_filename) as ds: + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + + for directory in [ + '../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name), + '../assembled_files/diagnostics/mpas_analysis/region_masks', + '../assembled_files/diagnostics/mpas_analysis/maps']: + try: + os.makedirs(directory) + except OSError: + pass + + _make_moc_masks(mesh_short_name, logger) + + gf = GeometricFeatures() + + region_groups = ['Antarctic Regions', 'Arctic Ocean Regions', + 'Arctic Sea Ice Regions', 'Ocean Basins', + 'Ocean Subbasins', 'ISMIP6 Regions', + 'Transport Transects'] + + if with_ice_shelf_cavities: + region_groups.append('Ice Shelves') + + for region_group in region_groups: + function, prefix, date = get_aggregator_by_name(region_group) + + suffix = '{}{}'.format(prefix, date) + + fcMask = function(gf) + _make_region_masks(mesh_short_name, suffix=suffix, fcMask=fcMask, + logger=logger) + + _make_analysis_lat_lon_map(config, mesh_short_name, cores, logger) + _make_analysis_polar_map(config, mesh_short_name, + projection='antarctic', cores=cores, + logger=logger) + 
 _make_analysis_polar_map(config, mesh_short_name, projection='arctic', + cores=cores, logger=logger) + + # gather the mapping files that were just created + files = glob.glob('map_*') + + # make links in output directory + output_dir = '../assembled_files/diagnostics/mpas_analysis/maps' + for filename in files: + symlink('../../../../diagnostics_files/{}'.format(filename), + '{}/{}'.format(output_dir, filename)) + + +def _make_region_masks(mesh_name, suffix, fcMask, logger): + mesh_filename = 'restart.nc' + + geojson_filename = '{}.geojson'.format(suffix) + mask_filename = '{}_{}.nc'.format(mesh_name, suffix) + + fcMask.to_geojson(geojson_filename) + + dsMesh = xarray.open_dataset(mesh_filename) + + dsMask = mask(dsMesh, fcMask=fcMask, logger=logger) + + write_netcdf(dsMask, mask_filename) + + # make links in output directory + output_dir = '../assembled_files/diagnostics/mpas_analysis/' \ + 'region_masks' + symlink('../../../../diagnostics_files/{}'.format(mask_filename), + '{}/{}'.format(output_dir, mask_filename)) + + +def _make_analysis_lat_lon_map(config, mesh_name, cores, logger): + mesh_filename = 'restart.nc' + + inDescriptor = MpasMeshDescriptor(mesh_filename, mesh_name) + + comparisonLatResolution = config.getfloat('files_for_e3sm', + 'comparisonLatResolution') + comparisonLonResolution = config.getfloat('files_for_e3sm', + 'comparisonLonResolution') + + # modify the resolution of the global lat-lon grid as desired + outDescriptor = get_lat_lon_descriptor(dLon=comparisonLonResolution, + dLat=comparisonLatResolution) + outGridName = outDescriptor.meshName + + mappingFileName = 'map_{}_to_{}_bilinear.nc'.format(mesh_name, outGridName) + + remapper = Remapper(inDescriptor, outDescriptor, mappingFileName) + + remapper.build_mapping_file(method='bilinear', mpiTasks=cores, tempdir='.', + logger=logger) + + +def _make_analysis_polar_map(config, mesh_name, projection, cores, logger): + mesh_filename = 'restart.nc' + + upperProj = projection[0].upper() + projection[1:] + 
inDescriptor = MpasMeshDescriptor(mesh_filename, mesh_name) + + comparisonStereoWidth = config.getfloat( + 'files_for_e3sm', 'comparison{}StereoWidth'.format(upperProj)) + comparisonStereoResolution = config.getfloat( + 'files_for_e3sm', 'comparison{}StereoResolution'.format(upperProj)) + + outDescriptor = get_polar_descriptor(Lx=comparisonStereoWidth, + Ly=comparisonStereoWidth, + dx=comparisonStereoResolution, + dy=comparisonStereoResolution, + projection=projection) + + outGridName = '{}x{}km_{}km_{}_stereo'.format( + comparisonStereoWidth, comparisonStereoWidth, + comparisonStereoResolution, upperProj) + + mappingFileName = 'map_{}_to_{}_bilinear.nc'.format(mesh_name, outGridName) + + remapper = Remapper(inDescriptor, outDescriptor, mappingFileName) + + remapper.build_mapping_file(method='bilinear', mpiTasks=cores, tempdir='.', + logger=logger) + + +def _make_moc_masks(mesh_short_name, logger): + gf = GeometricFeatures() + + mesh_filename = 'restart.nc' + + mask_filename = '{}_moc_masks.nc'.format(mesh_short_name) + mask_and_transect_filename = '{}_moc_masks_and_transects.nc'.format( + mesh_short_name) + + geojson_filename = 'moc_basins.geojson' + + make_moc_basins_and_transects(gf, mesh_filename, + mask_and_transect_filename, + geojson_filename=geojson_filename, + mask_filename=mask_filename, + logger=logger) + + # make links in output directories (both inputdata and diagnostics) + output_dir = '../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name) + symlink( + '../../../../../diagnostics_files/{}'.format( + mask_and_transect_filename), + '{}/{}'.format(output_dir, mask_and_transect_filename)) + + output_dir = '../assembled_files/diagnostics/mpas_analysis/' \ + 'region_masks' + symlink( + '../../../../diagnostics_files/{}'.format( + mask_and_transect_filename), + '{}/{}'.format(output_dir, mask_and_transect_filename)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py 
b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py new file mode 100644 index 0000000000..da707911d3 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_graph_partition.py @@ -0,0 +1,94 @@ +import os +import xarray +import numpy as np +from glob import glob + +from mpas_tools.logging import check_call + +from compass.io import symlink +from compass.step import Step + + +class OceanGraphPartition(Step): + """ + A step for creating an E3SM ocean initial condition from the results of + a dynamic-adjustment process to dissipate fast waves + """ + def __init__(self, test_case, mesh, restart_filename): + """ + Create a new step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that creates the mesh used by this test case + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to + use as the basis for an E3SM initial condition + """ + + super().__init__(test_case, name='ocean_graph_partition', cores=1, + min_cores=1, threads=1) + + self.add_input_file(filename='README', target='../README') + self.add_input_file(filename='restart.nc', + target='../{}'.format(restart_filename)) + + mesh_path = mesh.mesh_step.path + self.add_input_file( + filename='graph.info', + work_dir_target='{}/culled_graph.info'.format(mesh_path)) + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. 
+ + def run(self): + """ + Run this step of the testcase + """ + logger = self.logger + + with xarray.open_dataset('restart.nc') as ds: + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + mesh_prefix = ds.attrs['MPAS_Mesh_Prefix'] + prefix = 'MPAS_Mesh_{}'.format(mesh_prefix) + creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)] + + try: + os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name)) + except OSError: + pass + + symlink('graph.info', 'mpas-o.graph.info.{}'.format(creation_date)) + + nCells = sum(1 for _ in open('graph.info')) + min_graph_size = int(nCells / 6000) + max_graph_size = int(nCells / 100) + logger.info('Creating graph files between {} and {}'.format( + min_graph_size, max_graph_size)) + n_power2 = 2**np.arange(1, 21) + n_multiples12 = 12 * np.arange(1, 9) + + n = n_power2 + for power10 in range(3): + n = np.concatenate([n, 10**power10 * n_multiples12]) + + for index in range(len(n)): + if min_graph_size <= n[index] <= max_graph_size: + args = ['gpmetis', 'mpas-o.graph.info.{}'.format(creation_date), + '{}'.format(n[index])] + check_call(args, logger) + + # create link in assembled files directory + files = glob('mpas-o.graph.info.*') + dest_path = '../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name) + for file in files: + symlink('../../../../../ocean_graph_partition/{}'.format(file), + '{}/{}'.format(dest_path, file)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py new file mode 100644 index 0000000000..b7dc300770 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/ocean_initial_condition.py @@ -0,0 +1,67 @@ +import os +import xarray + +from mpas_tools.io import write_netcdf + +from compass.io import symlink +from compass.step import Step + + +class OceanInitialCondition(Step): + """ + A step for creating an E3SM ocean initial condition from the 
results of + a dynamic-adjustment process to dissipate fast waves + """ + def __init__(self, test_case, restart_filename): + """ + Create a new step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to + use as the basis for an E3SM initial condition + """ + + super().__init__(test_case, name='ocean_initial_condition', cores=1, + min_cores=1, threads=1) + + self.add_input_file(filename='README', target='../README') + self.add_input_file(filename='restart.nc', + target='../{}'.format(restart_filename)) + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. + + def run(self): + """ + Run this step of the testcase + """ + with xarray.open_dataset('restart.nc') as ds: + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + mesh_prefix = ds.attrs['MPAS_Mesh_Prefix'] + prefix = 'MPAS_Mesh_{}'.format(mesh_prefix) + creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)] + + try: + os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name)) + except OSError: + pass + + source_filename = 'restart.nc' + dest_filename = 'mpaso.{}.{}.nc'.format(mesh_short_name, creation_date) + + with xarray.open_dataset(source_filename) as ds: + ds.load() + ds = ds.drop_vars('xtime') + write_netcdf(ds, dest_filename) + + symlink( + '../../../../../ocean_initial_condition/{}'.format(dest_filename), + '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format( + mesh_short_name, dest_filename)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py b/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py new file mode 100644 index 0000000000..116eee673d --- /dev/null +++ 
b/compass/ocean/tests/global_ocean/files_for_e3sm/scrip.py @@ -0,0 +1,89 @@ +import os +import xarray + +from mpas_tools.scrip.from_mpas import scrip_from_mpas + +from compass.io import symlink +from compass.step import Step + + +class Scrip(Step): + """ + A step for creating SCRIP files from the MPAS-Ocean mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + def __init__(self, test_case, restart_filename, with_ice_shelf_cavities): + """ + Create a new step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to + use as the basis for an E3SM initial condition + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, name='scrip', cores=1, + min_cores=1, threads=1) + + self.add_input_file(filename='README', target='../README') + self.add_input_file(filename='restart.nc', + target='../{}'.format(restart_filename)) + + self.with_ice_shelf_cavities = with_ice_shelf_cavities + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. 
+ + def run(self): + """ + Run this step of the testcase + """ + with_ice_shelf_cavities = self.with_ice_shelf_cavities + + with xarray.open_dataset('restart.nc') as ds: + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + mesh_prefix = ds.attrs['MPAS_Mesh_Prefix'] + prefix = 'MPAS_Mesh_{}'.format(mesh_prefix) + creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)] + + try: + os.makedirs('../assembled_files/inputdata/ocn/mpas-o/{}'.format( + mesh_short_name)) + except OSError: + pass + + if with_ice_shelf_cavities: + nomask_str = '.nomask' + else: + nomask_str = '' + + scrip_filename = 'ocean.{}{}.scrip.{}.nc'.format( + mesh_short_name, nomask_str, creation_date) + + scrip_from_mpas('restart.nc', scrip_filename) + + symlink('../../../../../scrip/{}'.format(scrip_filename), + '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format( + mesh_short_name, scrip_filename)) + + if with_ice_shelf_cavities: + scrip_mask_filename = 'ocean.{}.mask.scrip.{}.nc'.format( + mesh_short_name, creation_date) + scrip_from_mpas('restart.nc', scrip_mask_filename, + useLandIceMask=True) + + symlink( + '../../../../../scrip/{}'.format( + scrip_mask_filename), + '../assembled_files/inputdata/ocn/mpas-o/{}/{}'.format( + mesh_short_name, scrip_mask_filename)) diff --git a/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py new file mode 100644 index 0000000000..7706038869 --- /dev/null +++ b/compass/ocean/tests/global_ocean/files_for_e3sm/seaice_initial_condition.py @@ -0,0 +1,90 @@ +import os +import xarray + +from mpas_tools.io import write_netcdf + +from compass.io import symlink +from compass.step import Step + + +class SeaiceInitialCondition(Step): + """ + A step for creating an E3SM sea-ice initial condition from variables from + an MPAS-Ocean restart file + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + def __init__(self, 
test_case, restart_filename, with_ice_shelf_cavities): + """ + Create a new step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.files_for_e3sm.FilesForE3SM + The test case this step belongs to + + restart_filename : str + A restart file from the end of the dynamic adjustment test case to + use as the basis for an E3SM initial condition + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, name='seaice_initial_condition', cores=1, + min_cores=1, threads=1) + + self.add_input_file(filename='README', target='../README') + self.add_input_file(filename='restart.nc', + target='../{}'.format(restart_filename)) + + self.with_ice_shelf_cavities = with_ice_shelf_cavities + + # for now, we won't define any outputs because they include the mesh + # short name, which is not known at setup time. Currently, this is + # safe because no other steps depend on the outputs of this one. + + def run(self): + """ + Run this step of the testcase + """ + with_ice_shelf_cavities = self.with_ice_shelf_cavities + + with xarray.open_dataset('restart.nc') as ds: + mesh_short_name = ds.attrs['MPAS_Mesh_Short_Name'] + mesh_prefix = ds.attrs['MPAS_Mesh_Prefix'] + prefix = 'MPAS_Mesh_{}'.format(mesh_prefix) + creation_date = ds.attrs['{}_Version_Creation_Date'.format(prefix)] + + try: + os.makedirs('../assembled_files/inputdata/ocn/mpas-seaice/{}'.format( + mesh_short_name)) + except OSError: + pass + + dest_filename = 'mpassi.{}.{}.nc'.format(mesh_short_name, + creation_date) + + keep_vars = [ + 'areaCell', 'cellsOnCell', 'edgesOnCell', 'fCell', 'indexToCellID', + 'latCell', 'lonCell', 'meshDensity', 'nEdgesOnCell', + 'verticesOnCell', 'xCell', 'yCell', 'zCell', 'angleEdge', + 'cellsOnEdge', 'dcEdge', 'dvEdge', 'edgesOnEdge', 'fEdge', + 'indexToEdgeID', 'latEdge', 'lonEdge', 'nEdgesOnCell', + 'nEdgesOnEdge', 'verticesOnEdge', 'weightsOnEdge', 'xEdge', + 'yEdge', 'zEdge', 'areaTriangle', 'cellsOnVertex', 
'edgesOnVertex', + 'fVertex', 'indexToVertexID', 'kiteAreasOnVertex', 'latVertex', + 'lonVertex', 'xVertex', 'yVertex', 'zVertex'] + + if with_ice_shelf_cavities: + keep_vars.append('landIceMask') + + with xarray.open_dataset('restart.nc') as ds: + ds.load() + ds = ds[keep_vars] + write_netcdf(ds, dest_filename) + + symlink('../../../../../seaice_initial_condition/{}'.format(dest_filename), + '../assembled_files/inputdata/ocn/mpas-seaice/{}/{}'.format( + mesh_short_name, dest_filename)) diff --git a/compass/ocean/tests/global_ocean/forward.py b/compass/ocean/tests/global_ocean/forward.py new file mode 100644 index 0000000000..279f222705 --- /dev/null +++ b/compass/ocean/tests/global_ocean/forward.py @@ -0,0 +1,229 @@ +import os +from importlib.resources import contents + +from compass.ocean.tests.global_ocean.configure import configure_global_ocean +from compass.ocean.tests.global_ocean.metadata import \ + add_mesh_and_init_metadata +from compass.model import run_model +from compass.testcase import TestCase +from compass.step import Step + + +class ForwardStep(Step): + """ + A step for performing forward MPAS-Ocean runs as part of global ocean test + cases. 
+ + Attributes + ---------- + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + def __init__(self, test_case, mesh, init, time_integrator, name='forward', + subdir=None, cores=None, min_cores=None, threads=None): + """ + Create a new step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + + name : str, optional + the name of the step + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. 
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + """ + self.mesh = mesh + self.init = init + self.time_integrator = time_integrator + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + self.add_namelist_file( + 'compass.ocean.tests.global_ocean', 'namelist.forward') + self.add_streams_file( + 'compass.ocean.tests.global_ocean', 'streams.forward') + + if mesh.with_ice_shelf_cavities: + self.add_namelist_file( + 'compass.ocean.tests.global_ocean', 'namelist.wisc') + + if init.with_bgc: + self.add_namelist_file( + 'compass.ocean.tests.global_ocean', 'namelist.bgc') + self.add_streams_file( + 'compass.ocean.tests.global_ocean', 'streams.bgc') + + mesh_package = mesh.mesh_step.package + mesh_package_contents = list(contents(mesh_package)) + mesh_namelists = ['namelist.forward', + 'namelist.{}'.format(time_integrator.lower())] + for mesh_namelist in mesh_namelists: + if mesh_namelist in mesh_package_contents: + self.add_namelist_file(mesh_package, mesh_namelist) + + mesh_streams = ['streams.forward', + 'streams.{}'.format(time_integrator.lower())] + for mesh_stream in mesh_streams: + if mesh_stream in mesh_package_contents: + self.add_streams_file(mesh_package, mesh_stream) + + mesh_path = mesh.mesh_step.path + + if mesh.with_ice_shelf_cavities: + initial_state_target = '{}/ssh_adjustment/adjusted_init.nc'.format( + init.path) + else: + initial_state_target = '{}/initial_state/initial_state.nc'.format( + init.path) + self.add_input_file(filename='init.nc', + work_dir_target=initial_state_target) + self.add_input_file( + filename='forcing_data.nc', + work_dir_target='{}/initial_state/init_mode_forcing_data.nc' + ''.format(init.path)) + self.add_input_file( + filename='graph.info', + work_dir_target='{}/culled_graph.info'.format(mesh_path)) + + 
self.add_output_file(filename='output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + if self.cores is None: + self.cores = self.config.getint( + 'global_ocean', 'forward_cores') + if self.min_cores is None: + self.min_cores = self.config.getint( + 'global_ocean', 'forward_min_cores') + if self.threads is None: + self.threads = self.config.getint( + 'global_ocean', 'forward_threads') + + def run(self): + """ + Run this step of the testcase + """ + run_model(self) + add_mesh_and_init_metadata(self.outputs, self.config, + init_filename='init.nc') + + +class ForwardTestCase(TestCase): + """ + A parent class for test cases for forward runs with global MPAS-Ocean mesh + + Attributes + ---------- + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + + def __init__(self, test_group, mesh, init, time_integrator, name): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + + name : str + the name of the test case + """ + self.mesh = mesh + self.init = init + self.time_integrator = time_integrator + subdir = get_forward_subdir(init.init_subdir, time_integrator, name) + super().__init__(test_group=test_group, name=name, subdir=subdir) + + def configure(self): + """ 
+ Modify the configuration options for this test case + """ + configure_global_ocean(test_case=self, mesh=self.mesh, init=self.init) + + def run(self): + """ + Run each step of the testcase + """ + config = self.config + # get the these properties from the config options + for step_name in self.steps_to_run: + step = self.steps[step_name] + # get the these properties from the config options + step.cores = config.getint('global_ocean', 'forward_cores') + step.min_cores = config.getint('global_ocean', 'forward_min_cores') + step.threads = config.getint('global_ocean', 'forward_threads') + + # run the steps + super().run() + + +def get_forward_subdir(init_subdir, time_integrator, name): + """ + Get the subdirectory for a test case that is based on a forward run with + a time integrator + """ + if time_integrator == 'split_explicit': + # this is the default so we won't make a subdir for the time + # integrator + subdir = os.path.join(init_subdir, name) + elif time_integrator == 'RK4': + subdir = os.path.join(init_subdir, time_integrator, name) + else: + raise ValueError('Unexpected time integrator {}'.format( + time_integrator)) + + return subdir diff --git a/compass/ocean/tests/global_ocean/global_ocean.cfg b/compass/ocean/tests/global_ocean/global_ocean.cfg new file mode 100644 index 0000000000..99cda63312 --- /dev/null +++ b/compass/ocean/tests/global_ocean/global_ocean.cfg @@ -0,0 +1,106 @@ +# options for global ocean testcases +[global_ocean] + +## each mesh should replace these with appropriate values in its config file + +## config options related to the mesh step +# number of cores to use +mesh_cores = 1 +# minimum of cores, below which the step fails +mesh_min_cores = 1 +# maximum memory usage allowed (in MB) +mesh_max_memory = 1000 +# maximum disk usage allowed (in MB) +mesh_max_disk = 1000 + +## config options related to the initial_state step +# number of cores to use +init_cores = 4 +# minimum of cores, below which the step fails +init_min_cores = 1 +# 
maximum memory usage allowed (in MB) +init_max_memory = 1000 +# maximum disk usage allowed (in MB) +init_max_disk = 1000 +# number of threads +init_threads = 1 + +## config options related to the forward steps +# number of cores to use +forward_cores = 4 +# minimum of cores, below which the step fails +forward_min_cores = 1 +# number of threads +forward_threads = 1 +# maximum memory usage allowed (in MB) +forward_max_memory = 1000 +# maximum disk usage allowed (in MB) +forward_max_disk = 1000 + +## metadata related to the mesh +# whether to add metadata to output files +add_metadata = True +# the prefix (e.g. QU, EC, WC, SO) +prefix = PREFIX +# a description of the mesh +mesh_description = <<>> +# a description of the bathymetry +bathy_description = <<>> +# a description of the mesh with ice-shelf cavities +init_description = <<>> +# E3SM version that the mesh is intended for +e3sm_version = 2 +# The revision number of the mesh, which should be incremented each time the +# mesh is revised +mesh_revision = <> +# the minimum (finest) resolution in the mesh +min_res = <<>> +# the maximum (coarsest) resolution in the mesh, can be the same as min_res +max_res = <<>> +# the maximum depth of the ocean, always detected automatically +max_depth = autodetect +# the number of vertical levels, always detected automatically +levels = autodetect + +# the date the mesh was created as YYMMDD, typically detected automatically +creation_date = autodetect +# The following options are detected from .gitconfig if not explicitly entered +author = autodetect +email = autodetect +# The URL of the pull request documenting the creation of the mesh +pull_request = <<>> + + +# config options related to initial condition and diagnostics support files +# for E3SM +[files_for_e3sm] + +# whether to generate an ocean initial condition in E3SM +enable_ocean_initial_condition = true +# whether to generate graph partitions for different numbers of ocean cores in +# E3SM +enable_ocean_graph_partition 
= true +# whether to generate a sea-ice initial condition in E3SM +enable_seaice_initial_condition = true +# whether to generate SCRIP files for later use in creating E3SM mapping files +enable_scrip = true +# whether to generate region masks, transects and mapping files for use in both +# online analysis members and offline with MPAS-Analysis +enable_diagnostics_files = true + +## the following relate to the comparison grids in MPAS-Analysis to generate +## mapping files for. The default values are also the defaults in +## MPAS-Analysis. Coarser or finer resolution may be desirable for some MPAS +## meshes. + +# The comparison lat/lon grid resolution in degrees +comparisonLatResolution = 0.5 +comparisonLonResolution = 0.5 + +# The comparison Antarctic polar stereographic grid size and resolution in km +comparisonAntarcticStereoWidth = 6000. +comparisonAntarcticStereoResolution = 10. + +# The comparison Arctic polar stereographic grid size and resolution in km +comparisonArcticStereoWidth = 6000. +comparisonArcticStereoResolution = 10. 
diff --git a/compass/ocean/tests/global_ocean/init/__init__.py b/compass/ocean/tests/global_ocean/init/__init__.py new file mode 100644 index 0000000000..4e7b50a88d --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/__init__.py @@ -0,0 +1,120 @@ +import os + +from compass.testcase import TestCase +from compass.ocean.tests.global_ocean.init.initial_state import InitialState +from compass.ocean.tests.global_ocean.init.ssh_adjustment import SshAdjustment +from compass.ocean.tests.global_ocean.configure import configure_global_ocean +from compass.validate import compare_variables + + +class Init(TestCase): + """ + A test case for creating initial conditions on a global MPAS-Ocean mesh + + Attributes + ---------- + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that creates the mesh used by this test case + + initial_condition : {'PHC', 'EN4_1900'} + The initial condition dataset to use + + with_bgc : bool + Whether to include biogeochemistry (BGC) in the initial condition + + init_subdir : str + The subdirectory within the test group for all test cases with this + initial condition + """ + def __init__(self, test_group, mesh, initial_condition, with_bgc): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that creates the mesh used by this test case + + initial_condition : {'PHC', 'EN4_1900'} + The initial condition dataset to use + + with_bgc : bool + Whether to include biogeochemistry (BGC) in the initial condition + """ + name = 'init' + mesh_name = mesh.mesh_name + if with_bgc: + ic_dir = '{}_BGC'.format(initial_condition) + else: + ic_dir = initial_condition + self.init_subdir = os.path.join(mesh_name, ic_dir) + subdir = os.path.join(self.init_subdir, name) + super().__init__(test_group=test_group, name=name, subdir=subdir) + + self.mesh = mesh 
+ self.initial_condition = initial_condition + self.with_bgc = with_bgc + + self.add_step( + InitialState( + test_case=self, mesh=mesh, + initial_condition=initial_condition, with_bgc=with_bgc)) + + if mesh.with_ice_shelf_cavities: + self.add_step( + SshAdjustment(test_case=self, cores=4)) + + def configure(self): + """ + Modify the configuration options for this test case + """ + configure_global_ocean(test_case=self, mesh=self.mesh, init=self) + + def run(self): + """ + Run each step of the testcase + """ + config = self.config + steps = self.steps_to_run + work_dir = self.work_dir + if 'initial_state' in steps: + step = self.steps['initial_state'] + # get the these properties from the config options + step.cores = config.getint('global_ocean', 'init_cores') + step.min_cores = config.getint('global_ocean', 'init_min_cores') + step.threads = config.getint('global_ocean', 'init_threads') + + if 'ssh_adjustment' in steps: + step = self.steps['ssh_adjustment'] + # get the these properties from the config options + step.cores = config.getint('global_ocean', 'forward_cores') + step.min_cores = config.getint('global_ocean', 'forward_min_cores') + step.threads = config.getint('global_ocean', 'forward_threads') + + # run the steps + super().run() + + if 'initial_state' in steps: + variables = ['temperature', 'salinity', 'layerThickness'] + compare_variables(variables, config, work_dir, + filename1='initial_state/initial_state.nc') + + if self.with_bgc: + variables = [ + 'temperature', 'salinity', 'layerThickness', 'PO4', 'NO3', + 'SiO3', 'NH4', 'Fe', 'O2', 'DIC', 'DIC_ALT_CO2', 'ALK', + 'DOC', 'DON', 'DOFe', 'DOP', 'DOPr', 'DONr', 'zooC', + 'spChl', 'spC', 'spFe', 'spCaCO3', 'diatChl', 'diatC', + 'diatFe', 'diatSi', 'diazChl', 'diazC', 'diazFe', + 'phaeoChl', 'phaeoC', 'phaeoFe', 'DMS', 'DMSP', 'PROT', + 'POLY', 'LIP'] + compare_variables(variables, config, work_dir, + filename1='initial_state/initial_state.nc') + + if 'ssh_adjustment' in steps: + variables = ['ssh', 
'landIcePressure'] + compare_variables(variables, config, work_dir, + filename1='ssh_adjustment/adjusted_init.nc') diff --git a/compass/ocean/tests/global_ocean/init/initial_state.py b/compass/ocean/tests/global_ocean/init/initial_state.py new file mode 100644 index 0000000000..dfa813f84d --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/initial_state.py @@ -0,0 +1,167 @@ +from compass.ocean.tests.global_ocean.metadata import \ + add_mesh_and_init_metadata +from compass.model import run_model +from compass.ocean.vertical import generate_grid, write_grid +from compass.ocean.plot import plot_vertical_grid, plot_initial_state +from compass.step import Step + + +class InitialState(Step): + """ + A step for creating a mesh and initial condition for baroclinic channel + test cases + + Attributes + ---------- + mesh : compass.ocean.tests.global_ocean.mesh.mesh.MeshStep + The step for creating the mesh + + initial_condition : {'PHC', 'EN4_1900'} + The initial condition dataset to use + + with_bgc : bool + Whether to include biogeochemistry (BGC) in the initial condition + """ + def __init__(self, test_case, mesh, initial_condition, with_bgc): + """ + Create the step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.init.Init + The test case this step belongs to + + initial_condition : {'PHC', 'EN4_1900'} + The initial condition dataset to use + + with_bgc : bool + Whether to include biogeochemistry (BGC) in the initial condition + """ + if initial_condition not in ['PHC', 'EN4_1900']: + raise ValueError('Unknown initial_condition {}'.format( + initial_condition)) + + super().__init__(test_case=test_case, name='initial_state') + self.mesh = mesh + self.initial_condition = initial_condition + self.with_bgc = with_bgc + + package = 'compass.ocean.tests.global_ocean.init' + + # generate the namelist, replacing a few default options + self.add_namelist_file(package, 'namelist.init', mode='init') + self.add_namelist_file( + package, 
'namelist.{}'.format(initial_condition.lower()), + mode='init') + if mesh.with_ice_shelf_cavities: + self.add_namelist_file(package, 'namelist.wisc', mode='init') + if with_bgc: + self.add_namelist_file(package, 'namelist.bgc', mode='init') + + # generate the streams file + self.add_streams_file(package, 'streams.init', mode='init') + + if mesh.with_ice_shelf_cavities: + self.add_streams_file(package, 'streams.wisc', mode='init') + + self.add_input_file( + filename='topography.nc', + target='BedMachineAntarctica_and_GEBCO_2019_0.05_degree.200128.nc', + database='bathymetry_database') + + self.add_input_file( + filename='wind_stress.nc', + target='windStress.ncep_1958-2000avg.interp3600x2431.151106.nc', + database='initial_condition_database') + + self.add_input_file( + filename='swData.nc', + target='chlorophyllA_monthly_averages_1deg.151201.nc', + database='initial_condition_database') + + if initial_condition == 'PHC': + self.add_input_file( + filename='temperature.nc', + target='PotentialTemperature.01.filled.60levels.PHC.151106.nc', + database='initial_condition_database') + self.add_input_file( + filename='salinity.nc', + target='Salinity.01.filled.60levels.PHC.151106.nc', + database='initial_condition_database') + else: + # EN4_1900 + self.add_input_file( + filename='temperature.nc', + target='PotentialTemperature.100levels.Levitus.' + 'EN4_1900estimate.200813.nc', + database='initial_condition_database') + self.add_input_file( + filename='salinity.nc', + target='Salinity.100levels.Levitus.EN4_1900estimate.200813.nc', + database='initial_condition_database') + + if with_bgc: + self.add_input_file( + filename='ecosys.nc', + target='ecosys_jan_IC_360x180x60_corrO2_Dec2014phaeo.nc', + database='initial_condition_database') + self.add_input_file( + filename='ecosys_forcing.nc', + target='ecoForcingAllSurface.forMPASO.interp360x180.' 
+ '1timeLevel.nc', + database='initial_condition_database') + + mesh_path = mesh.mesh_step.path + + self.add_input_file( + filename='mesh.nc', + work_dir_target='{}/culled_mesh.nc'.format(mesh_path)) + + self.add_input_file( + filename='critical_passages.nc', + work_dir_target='{}/critical_passages_mask_final.nc'.format( + mesh_path)) + + self.add_input_file( + filename='graph.info', + work_dir_target='{}/culled_graph.info'.format(mesh_path)) + + if mesh.with_ice_shelf_cavities: + self.add_input_file( + filename='land_ice_mask.nc', + work_dir_target='{}/land_ice_mask.nc'.format(mesh_path)) + + for file in ['initial_state.nc', 'init_mode_forcing_data.nc']: + self.add_output_file(filename=file) + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies. + """ + # get the these properties from the config options + config = self.config + self.cores = config.getint('global_ocean', 'init_cores') + self.min_cores = config.getint('global_ocean', 'init_min_cores') + self.threads = config.getint('global_ocean', 'init_threads') + + self.add_model_as_input() + + def run(self): + """ + Run this step of the testcase + """ + config = self.config + interfaces = generate_grid(config=config) + + write_grid(interfaces=interfaces, out_filename='vertical_grid.nc') + plot_vertical_grid(grid_filename='vertical_grid.nc', config=config, + out_filename='vertical_grid.png') + + run_model(self) + + add_mesh_and_init_metadata(self.outputs, config, + init_filename='initial_state.nc') + + plot_initial_state(input_file_name='initial_state.nc', + output_file_name='initial_state.png') diff --git a/compass/ocean/tests/global_ocean/init/namelist.bgc b/compass/ocean/tests/global_ocean/init/namelist.bgc new file mode 100644 index 0000000000..31900ac8a8 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/namelist.bgc @@ -0,0 +1,4 @@ +config_use_ecosysTracers = .true. +config_use_DMSTracers = .true. +config_use_MacroMoleculesTracers = .true. 
+config_global_ocean_tracer_depth_conversion_factor = 0.01 diff --git a/compass/ocean/tests/global_ocean/init/namelist.en4_1900 b/compass/ocean/tests/global_ocean/init/namelist.en4_1900 new file mode 100644 index 0000000000..0680ab5bde --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/namelist.en4_1900 @@ -0,0 +1,12 @@ +config_global_ocean_temperature_file = 'temperature.nc' +config_global_ocean_salinity_file = 'salinity.nc' +config_global_ocean_tracer_nlat_dimname = 't_lat' +config_global_ocean_tracer_nlon_dimname = 't_lon' +config_global_ocean_tracer_ndepth_dimname = 'depth_t' +config_global_ocean_tracer_depth_conversion_factor = 1.0 +config_global_ocean_temperature_varname = 'TEMP' +config_global_ocean_salinity_varname = 'SALT' +config_global_ocean_tracer_latlon_degrees = .true. +config_global_ocean_tracer_lat_varname = 't_lat' +config_global_ocean_tracer_lon_varname = 't_lon' +config_global_ocean_tracer_depth_varname = 'depth_t' diff --git a/compass/ocean/tests/global_ocean/init/namelist.init b/compass/ocean/tests/global_ocean/init/namelist.init new file mode 100644 index 0000000000..17bf5ddcbc --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/namelist.init @@ -0,0 +1,65 @@ +config_init_configuration = 'global_ocean' +config_vert_levels = -1 +config_ocean_run_mode = 'init' +config_write_cull_cell_mask = .false. +config_eos_type = 'jm' +config_expand_sphere = .true. +config_realistic_coriolis_parameter = .true. +config_alter_ICs_for_pcs = .true. +config_pc_alteration_type = 'partial_cell' +config_use_activeTracers_surface_restoring = .true. +config_use_bulk_wind_stress = .true. 
+config_global_ocean_tracer_method = 'bilinear_interpolation' +config_global_ocean_smooth_TS_iterations = 5 +config_global_ocean_piston_velocity = 5.0e-5 +config_global_ocean_swData_file = 'swData.nc' +config_global_ocean_swData_nlat_dimname = 't_lat' +config_global_ocean_swData_nlon_dimname = 't_lon' +config_global_ocean_swData_lat_varname = 't_lat' +config_global_ocean_swData_lon_varname = 't_lon' +config_global_ocean_swData_latlon_degrees = .true. +config_global_ocean_swData_method = 'bilinear_interpolation' +config_global_ocean_chlorophyll_varname = 'Chlorophyll' +config_global_ocean_zenithAngle_varname = 'zenithAngle' +config_global_ocean_clearSky_varname = 'clearSky' +config_global_ocean_interior_restore_rate = 1.0e-7 +config_global_ocean_topography_file = 'topography.nc' +config_global_ocean_topography_nlat_dimname = 'lat' +config_global_ocean_topography_nlon_dimname = 'lon' +config_global_ocean_topography_latlon_degrees = .true. +config_global_ocean_topography_lat_varname = 'lat' +config_global_ocean_topography_lon_varname = 'lon' +config_global_ocean_topography_varname = 'bathymetry' +config_global_ocean_topography_has_ocean_frac = .false. +config_global_ocean_topography_ocean_frac_varname = 'ocean_mask' +config_global_ocean_topography_method = 'bilinear_interpolation' +config_global_ocean_fill_bathymetry_holes = .true. +config_global_ocean_cull_inland_seas = .false. +config_global_ocean_windstress_file = 'wind_stress.nc' +config_global_ocean_windstress_nlat_dimname = 'u_lat' +config_global_ocean_windstress_nlon_dimname = 'u_lon' +config_global_ocean_windstress_latlon_degrees = .true. 
+config_global_ocean_windstress_lat_varname = 'u_lat' +config_global_ocean_windstress_lon_varname = 'u_lon' +config_global_ocean_windstress_zonal_varname = 'TAUX' +config_global_ocean_windstress_meridional_varname = 'TAUY' +config_global_ocean_windstress_method = 'bilinear_interpolation' +config_global_ocean_windstress_conversion_factor = 1 +config_global_ocean_ecosys_file = 'ecosys.nc' +config_global_ocean_ecosys_forcing_file = 'ecosys_forcing.nc' +config_global_ocean_ecosys_nlat_dimname = 't_lat' +config_global_ocean_ecosys_nlon_dimname = 't_lon' +config_global_ocean_ecosys_ndepth_dimname = 'depth_t' +config_global_ocean_ecosys_lat_varname = 't_lat' +config_global_ocean_ecosys_lon_varname = 't_lon' +config_global_ocean_ecosys_depth_varname = 'depth_t' +config_global_ocean_ecosys_latlon_degrees = .true. +config_global_ocean_ecosys_method = 'bilinear_interpolation' +config_global_ecosys_forcing_time_dimname = 'record' +config_global_ocean_depth_file = 'vertical_grid.nc' +config_global_ocean_depth_dimname = 'nVertLevels' +config_global_ocean_depth_varname = 'refMidDepth' +config_global_ocean_depth_conversion_factor = 1.0 +config_global_ocean_minimum_depth = 10 +config_global_ocean_deepen_critical_passages = .false. +config_block_decomp_file_prefix = 'graph.info.part.' 
diff --git a/compass/ocean/tests/global_ocean/init/namelist.phc b/compass/ocean/tests/global_ocean/init/namelist.phc new file mode 100644 index 0000000000..d986c97866 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/namelist.phc @@ -0,0 +1,12 @@ +config_global_ocean_temperature_file = 'temperature.nc' +config_global_ocean_salinity_file = 'salinity.nc' +config_global_ocean_tracer_nlat_dimname = 't_lat' +config_global_ocean_tracer_nlon_dimname = 't_lon' +config_global_ocean_tracer_ndepth_dimname = 'depth_t' +config_global_ocean_tracer_depth_conversion_factor = 0.01 +config_global_ocean_temperature_varname = 'TEMP' +config_global_ocean_salinity_varname = 'SALT' +config_global_ocean_tracer_latlon_degrees = .true. +config_global_ocean_tracer_lat_varname = 't_lat' +config_global_ocean_tracer_lon_varname = 't_lon' +config_global_ocean_tracer_depth_varname = 'depth_t' diff --git a/compass/ocean/tests/global_ocean/init/namelist.wisc b/compass/ocean/tests/global_ocean/init/namelist.wisc new file mode 100644 index 0000000000..f1c6b9ff60 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/namelist.wisc @@ -0,0 +1,18 @@ +config_land_ice_flux_mode = 'standalone' +config_init_vertical_grid_type = 'haney-number' +config_rx1_max = 5.0 +config_global_ocean_depress_by_land_ice = .true. +config_global_ocean_land_ice_topo_file = 'topography.nc' +config_global_ocean_land_ice_topo_nlat_dimname = 'lat' +config_global_ocean_land_ice_topo_nlon_dimname = 'lon' +config_global_ocean_land_ice_topo_latlon_degrees = .true. 
+config_global_ocean_land_ice_topo_lat_varname = 'lat' +config_global_ocean_land_ice_topo_lon_varname = 'lon' +config_global_ocean_land_ice_topo_draft_varname = 'ice_draft' +config_global_ocean_land_ice_topo_thickness_varname = 'thickness' +config_global_ocean_land_ice_topo_ice_frac_varname = 'ice_mask' +config_global_ocean_land_ice_topo_grounded_frac_varname = 'grounded_mask' +config_global_ocean_land_ice_topo_method = 'bilinear_interpolation' +config_global_ocean_use_constant_land_ice_cavity_temperature = .true. +config_global_ocean_constant_land_ice_cavity_temperature = -1.8 + diff --git a/compass/ocean/tests/global_ocean/init/ssh_adjustment.py b/compass/ocean/tests/global_ocean/init/ssh_adjustment.py new file mode 100644 index 0000000000..6033750830 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/ssh_adjustment.py @@ -0,0 +1,86 @@ +from compass.step import Step +from compass.ocean.iceshelf import adjust_ssh + + +class SshAdjustment(Step): + """ + A step for iteratively adjusting the pressure from the weight of the ice + shelf to match the sea-surface height as part of ice-shelf 2D test cases + """ + def __init__(self, test_case, cores=None, min_cores=None, threads=None): + """ + Create the step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.init.Init + The test case this step belongs to + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. 
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + """ + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name='ssh_adjustment', + cores=cores, min_cores=min_cores, threads=threads) + + self.add_namelist_file( + 'compass.ocean.tests.global_ocean', 'namelist.forward') + self.add_namelist_options({'config_AM_globalStats_enable': '.false.'}) + self.add_namelist_file('compass.ocean.namelists', + 'namelist.ssh_adjust') + + self.add_streams_file('compass.ocean.streams', 'streams.ssh_adjust') + self.add_streams_file('compass.ocean.tests.global_ocean.init', + 'streams.ssh_adjust') + + mesh_path = test_case.mesh.mesh_step.path + init_path = test_case.steps['initial_state'].path + + self.add_input_file( + filename='adjusting_init0.nc', + work_dir_target='{}/initial_state.nc'.format(init_path)) + self.add_input_file( + filename='forcing_data.nc', + work_dir_target='{}/init_mode_forcing_data.nc'.format(init_path)) + self.add_input_file( + filename='graph.info', + work_dir_target='{}/culled_graph.info'.format(mesh_path)) + + self.add_output_file(filename='adjusted_init.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + if self.cores is None: + self.cores = self.config.getint( + 'global_ocean', 'forward_cores') + if self.min_cores is None: + self.min_cores = self.config.getint( + 'global_ocean', 'forward_min_cores') + if self.threads is None: + self.threads = self.config.getint( + 'global_ocean', 'forward_threads') + + def run(self): + """ + Run this step of the testcase + """ + config = self.config + iteration_count = config.getint('ssh_adjustment', 'iterations') + adjust_ssh(variable='landIcePressure', iteration_count=iteration_count, + step=self) diff --git a/compass/ocean/tests/global_ocean/init/streams.init 
b/compass/ocean/tests/global_ocean/init/streams.init new file mode 100644 index 0000000000..befb0b6c67 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/streams.init @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/init/streams.ssh_adjust b/compass/ocean/tests/global_ocean/init/streams.ssh_adjust new file mode 100644 index 0000000000..b7bfc83915 --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/streams.ssh_adjust @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/ocean/tests/global_ocean/init/streams.wisc b/compass/ocean/tests/global_ocean/init/streams.wisc new file mode 100644 index 0000000000..f747f41b5e --- /dev/null +++ b/compass/ocean/tests/global_ocean/init/streams.wisc @@ -0,0 +1,11 @@ + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/mesh/__init__.py b/compass/ocean/tests/global_ocean/mesh/__init__.py new file mode 100644 index 0000000000..9153069c1d --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/__init__.py @@ -0,0 +1,80 @@ +from compass.testcase import TestCase +from compass.ocean.tests.global_ocean.mesh.qu240 import QU240Mesh +from compass.ocean.tests.global_ocean.mesh.ec30to60 import EC30to60Mesh +from compass.ocean.tests.global_ocean.mesh.so12to60 import SO12to60Mesh +from compass.ocean.tests.global_ocean.configure import configure_global_ocean +from compass.validate import compare_variables + + +class Mesh(TestCase): + """ + A test case for creating a global MPAS-Ocean mesh + + Attributes + ---------- + mesh_step : compass.ocean.tests.global_ocean.mesh.mesh.MeshStep + The step for creating the mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + def __init__(self, test_group, mesh_name): + """ + Create test case for creating a global MPAS-Ocean mesh + + Parameters + ---------- + test_group : 
compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh_name : str + The name of the mesh + """ + name = 'mesh' + subdir = '{}/{}'.format(mesh_name, name) + super().__init__(test_group=test_group, name=name, subdir=subdir) + if mesh_name in 'QU240': + self.mesh_step = QU240Mesh(self, mesh_name, + with_ice_shelf_cavities=False) + elif mesh_name in 'QUwISC240': + self.mesh_step = QU240Mesh(self, mesh_name, + with_ice_shelf_cavities=True) + elif mesh_name in 'EC30to60': + self.mesh_step = EC30to60Mesh(self, mesh_name, + with_ice_shelf_cavities=False) + elif mesh_name in 'ECwISC30to60': + self.mesh_step = EC30to60Mesh(self, mesh_name, + with_ice_shelf_cavities=True) + elif mesh_name in 'SOwISC12to60': + self.mesh_step = SO12to60Mesh(self, mesh_name, + with_ice_shelf_cavities=True) + else: + raise ValueError('Unknown mesh name {}'.format(mesh_name)) + + self.add_step(self.mesh_step) + + self.mesh_name = mesh_name + self.with_ice_shelf_cavities = self.mesh_step.with_ice_shelf_cavities + + def configure(self): + """ + Modify the configuration options for this test case + """ + configure_global_ocean(test_case=self, mesh=self) + + def run(self): + """ + Run each step of the testcase + """ + step = self.mesh_step + config = self.config + # get the these properties from the config options + step.cores = config.getint('global_ocean', 'mesh_cores') + step.min_cores = config.getint('global_ocean', 'mesh_min_cores') + + # run the step + super().run() + + variables = ['xCell', 'yCell', 'zCell'] + compare_variables(variables, config, self.work_dir, + filename1='mesh/culled_mesh.nc') diff --git a/compass/ocean/tests/global_ocean/mesh/cull.py b/compass/ocean/tests/global_ocean/mesh/cull.py new file mode 100755 index 0000000000..c01609f781 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/cull.py @@ -0,0 +1,236 @@ +import xarray + +from geometric_features import GeometricFeatures, FeatureCollection, \ + 
read_feature_collection +from mpas_tools.mesh import conversion +from mpas_tools.io import write_netcdf +from mpas_tools.ocean.coastline_alteration import widen_transect_edge_masks, \ + add_critical_land_blockages, add_land_locked_cells_to_mask +from mpas_tools.viz.paraview_extractor import extract_vtk +from mpas_tools.logging import LoggingContext + + +def cull_mesh(with_cavities=False, with_critical_passages=False, + custom_critical_passages=None, custom_land_blockages=None, + preserve_floodplain=False, logger=None, use_progress_bar=True): + """ + First step of initializing the global ocean: + + 1. combining Natural Earth land coverage north of 60S with Antarctic + ice coverage or grounded ice coverage from BedMachineAntarctica + + 2. combining transects defining critical passages (if + ``with_critical_passages=True``) + + 3. combining points used to seed a flood fill of the global ocean. + + 4. create masks from land coverage + + 5. add land-locked cells to land coverage mask. + + 6. create masks from transects (if + ``with_critical_passages=True``) + + 7. cull cells based on land coverage but with transects present + + 8. create flood-fill mask based on seeds + + 9. cull cells based on flood-fill mask + + 10. create masks from transects on the final culled mesh (if + ``with_critical_passages=True``) + + Parameters + ---------- + with_cavities : bool, optional + Whether the mesh should include Antarctic ice-shelf cavities from + BedMachine Antarctica + + with_critical_passages : bool, optional + Whether the mesh should open the standard critical passages and close + land blockages from geometric_features + + custom_critical_passages : str, optional + The name of geojson file with critical passages to open. This file may + be supplied in addition to or instead of the default passages + (``with_critical_passages=True``) + + custom_land_blockages : str, optional + The name of a geojson file with critical land blockages to close. 
This + file may be supplied in addition to or instead of the default blockages + (``with_critical_passages=True``) + + preserve_floodplain : bool, optional + Whether to use the ``cellSeedMask`` field in the base mesh to preserve + a floodplain at elevations above z=0 + + logger : logging.Logger, optional + A logger for the output if not stdout + + use_progress_bar : bool, optional + Whether to display progress bars (problematic in logging to a file) + """ + with LoggingContext(name=__name__, logger=logger) as logger: + _cull_mesh_with_logging( + logger, with_cavities, with_critical_passages, + custom_critical_passages, custom_land_blockages, + preserve_floodplain, use_progress_bar) + + +def _cull_mesh_with_logging(logger, with_cavities, with_critical_passages, + custom_critical_passages, custom_land_blockages, + preserve_floodplain, use_progress_bar): + """ Cull the mesh once the logger is defined for sure """ + + # required for compatibility with MPAS + netcdf_format = 'NETCDF3_64BIT' + + critical_passages = with_critical_passages or \ + (custom_critical_passages is not None) + + land_blockages = with_critical_passages or \ + (custom_land_blockages is not None) + + gf = GeometricFeatures() + + # start with the land coverage from Natural Earth + fcLandCoverage = gf.read(componentName='natural_earth', + objectType='region', + featureNames=['Land Coverage']) + + # remove the region south of 60S so we can replace it based on ice-sheet + # topography + fcSouthMask = gf.read(componentName='ocean', objectType='region', + featureNames=['Global Ocean 90S to 60S']) + + fcLandCoverage = fcLandCoverage.difference(fcSouthMask) + + # Add "land" coverage from either the full ice sheet or just the grounded + # part + if with_cavities: + fcAntarcticLand = gf.read( + componentName='bedmachine', objectType='region', + featureNames=['AntarcticGroundedIceCoverage']) + else: + fcAntarcticLand = gf.read( + componentName='bedmachine', objectType='region', + 
featureNames=['AntarcticIceCoverage']) + + fcLandCoverage.merge(fcAntarcticLand) + + # save the feature collection to a geojson file + fcLandCoverage.to_geojson('land_coverage.geojson') + + # Create the land mask based on the land coverage, i.e. coastline data + dsBaseMesh = xarray.open_dataset('base_mesh.nc') + dsLandMask = conversion.mask(dsBaseMesh, fcMask=fcLandCoverage, + logger=logger) + + dsLandMask = add_land_locked_cells_to_mask(dsLandMask, dsBaseMesh, + latitude_threshold=43.0, + nSweeps=20) + + # create seed points for a flood fill of the ocean + # use all points in the ocean directory, on the assumption that they are, + # in fact, in the ocean + fcSeed = gf.read(componentName='ocean', objectType='point', + tags=['seed_point']) + + if land_blockages: + if with_critical_passages: + # merge transects for critical land blockages into + # critical_land_blockages.geojson + fcCritBlockages = gf.read( + componentName='ocean', objectType='transect', + tags=['Critical_Land_Blockage']) + else: + fcCritBlockages = FeatureCollection() + + if custom_land_blockages is not None: + fcCritBlockages.merge(read_feature_collection( + custom_land_blockages)) + + # create masks from the transects + dsCritBlockMask = conversion.mask(dsBaseMesh, fcMask=fcCritBlockages, + logger=logger) + + dsLandMask = add_critical_land_blockages(dsLandMask, dsCritBlockMask) + + fcCritPassages = FeatureCollection() + dsPreserve = [] + + if critical_passages: + if with_critical_passages: + # merge transects for critical passages into fcCritPassages + fcCritPassages.merge(gf.read(componentName='ocean', + objectType='transect', + tags=['Critical_Passage'])) + + if custom_critical_passages is not None: + fcCritPassages.merge(read_feature_collection( + custom_critical_passages)) + + # create masks from the transects + dsCritPassMask = conversion.mask(dsBaseMesh, fcMask=fcCritPassages, + logger=logger) + + # Alter critical passages to be at least two cells wide, to avoid sea + # ice blockage + 
dsCritPassMask = widen_transect_edge_masks(dsCritPassMask, dsBaseMesh, + latitude_threshold=43.0) + + dsPreserve.append(dsCritPassMask) + + if preserve_floodplain: + dsPreserve.append(dsBaseMesh) + + # cull the mesh based on the land mask + dsCulledMesh = conversion.cull(dsBaseMesh, dsMask=dsLandMask, + dsPreserve=dsPreserve, logger=logger) + + # create a mask for the flood fill seed points + dsSeedMask = conversion.mask(dsCulledMesh, fcSeed=fcSeed, logger=logger) + + # cull the mesh a second time using a flood fill from the seed points + dsCulledMesh = conversion.cull(dsCulledMesh, dsInverse=dsSeedMask, + graphInfoFileName='culled_graph.info', + logger=logger) + write_netcdf(dsCulledMesh, 'culled_mesh.nc', format=netcdf_format) + + if critical_passages: + # make a new version of the critical passages mask on the culled mesh + dsCritPassMask = conversion.mask(dsCulledMesh, fcMask=fcCritPassages, + logger=logger) + write_netcdf(dsCritPassMask, 'critical_passages_mask_final.nc', + format=netcdf_format) + + if with_cavities: + fcAntarcticIce = gf.read( + componentName='bedmachine', objectType='region', + featureNames=['AntarcticIceCoverage']) + fcAntarcticIce.to_geojson('ice_coverage.geojson') + dsMask = conversion.mask(dsCulledMesh, fcMask=fcAntarcticIce, + logger=logger) + landIceMask = dsMask.regionCellMasks.isel(nRegions=0) + dsLandIceMask = xarray.Dataset() + dsLandIceMask['landIceMask'] = landIceMask + + write_netcdf(dsLandIceMask, 'land_ice_mask.nc', format=netcdf_format) + + dsLandIceCulledMesh = conversion.cull(dsCulledMesh, dsMask=dsMask, + logger=logger) + write_netcdf(dsLandIceCulledMesh, 'no_ISC_culled_mesh.nc', + format=netcdf_format) + + extract_vtk(ignore_time=True, dimension_list=['maxEdges='], + variable_list=['allOnCells'], + filename_pattern='culled_mesh.nc', + out_dir='culled_mesh_vtk', + use_progress_bar=use_progress_bar) + + if with_cavities: + extract_vtk(ignore_time=True, dimension_list=['maxEdges='], + variable_list=['allOnCells'], + 
filename_pattern='no_ISC_culled_mesh.nc', + out_dir='no_ISC_culled_mesh_vtk', + use_progress_bar=use_progress_bar) diff --git a/compass/ocean/tests/global_ocean/mesh/ec30to60/__init__.py b/compass/ocean/tests/global_ocean/mesh/ec30to60/__init__.py new file mode 100644 index 0000000000..1a599c7952 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/ec30to60/__init__.py @@ -0,0 +1,58 @@ +import numpy as np +import mpas_tools.mesh.creation.mesh_definition_tools as mdt + +from compass.ocean.tests.global_ocean.mesh.mesh import MeshStep + + +class EC30to60Mesh(MeshStep): + """ + A step for creating EC30to60 and ECwISC30to60 meshes + """ + def __init__(self, test_case, mesh_name, with_ice_shelf_cavities): + """ + Create a new step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_name : str + The name of the mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, mesh_name, with_ice_shelf_cavities, + package=self.__module__, + mesh_config_filename='ec30to60.cfg') + + def build_cell_width_lat_lon(self): + """ + Create cell width array for this mesh on a regular latitude-longitude + grid + + Returns + ------- + cellWidth : numpy.array + m x n array of cell width in km + + lon : numpy.array + longitude in degrees (length n and between -180 and 180) + + lat : numpy.array + latitude in degrees (length m and between -90 and 90) + """ + + dlon = 10.
+ dlat = 0.1 + nlon = int(360./dlon) + 1 + nlat = int(180./dlat) + 1 + lon = np.linspace(-180., 180., nlon) + lat = np.linspace(-90., 90., nlat) + + cellWidthVsLat = mdt.EC_CellWidthVsLat(lat) + cellWidth = np.outer(cellWidthVsLat, np.ones([1, lon.size])) + + return cellWidth, lon, lat diff --git a/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/__init__.py b/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/__init__.py new file mode 100644 index 0000000000..893d865f9e --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/__init__.py @@ -0,0 +1,94 @@ +from compass.ocean.tests.global_ocean.dynamic_adjustment import \ + DynamicAdjustment +from compass.ocean.tests.global_ocean.forward import ForwardStep + + +class EC30to60DynamicAdjustment(DynamicAdjustment): + """ + A test case performing dynamic adjustment (dissipating fast-moving waves) + from an initial condition on the EC30to60 MPAS-Ocean mesh + + Attributes + ---------- + restart_filenames : list of str + A list of restart files from each dynamic-adjustment step + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + if time_integrator != 'split_explicit': + raise ValueError('{} dynamic adjustment not defined for {}'.format( + mesh.mesh_name, time_integrator)) + + restart_times = ['0001-01-11_00:00:00', '0001-01-21_00:00:00'] + restart_filenames = [ + 'restarts/rst.{}.nc'.format(restart_time.replace(':', '.')) + for restart_time in 
restart_times] + + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + restart_filenames=restart_filenames) + + module = self.__module__ + + # first step + step_name = 'damped_adjustment_1' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-10_00:00:00'", + 'config_dt': "'00:20:00'", + 'config_Rayleigh_friction': '.true.', + 'config_Rayleigh_damping_coeff': '1.0e-4'} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-10_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_output_file(filename='../{}'.format(restart_filenames[0])) + self.add_step(step) + + # final step + step_name = 'simulation' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-10_00:00:00'", + 'config_do_restart': '.true.', + 'config_start_time': "'{}'".format(restart_times[0])} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-10_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_input_file(filename='../{}'.format(restart_filenames[0])) + step.add_output_file(filename='../{}'.format(restart_filenames[1])) + self.add_step(step) + + self.restart_filenames = restart_filenames diff --git a/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/streams.template b/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/streams.template new file mode 100644 index 0000000000..66004d2541 --- /dev/null +++ 
b/compass/ocean/tests/global_ocean/mesh/ec30to60/dynamic_adjustment/streams.template @@ -0,0 +1,9 @@ + + + + + + diff --git a/compass/ocean/tests/global_ocean/mesh/ec30to60/ec30to60.cfg b/compass/ocean/tests/global_ocean/mesh/ec30to60/ec30to60.cfg new file mode 100644 index 0000000000..4a623e9c71 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/ec30to60/ec30to60.cfg @@ -0,0 +1,49 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = 60layerPHC + + +# options for global ocean testcases +[global_ocean] + +## config options related to the initial_state step +# number of cores to use +init_cores = 36 +# minimum of cores, below which the step fails +init_min_cores = 8 +# maximum memory usage allowed (in MB) +init_max_memory = 1000 +# maximum disk usage allowed (in MB) +init_max_disk = 1000 + +## config options related to the forward steps +# number of cores to use +forward_cores = 128 +# minimum of cores, below which the step fails +forward_min_cores = 36 +# maximum memory usage allowed (in MB) +forward_max_memory = 1000 +# maximum disk usage allowed (in MB) +forward_max_disk = 1000 + +## metadata related to the mesh +# the prefix (e.g. 
QU, EC, WC, SO) +prefix = EC +# a description of the mesh and initial condition +mesh_description = MPAS Eddy Closure mesh for E3SM version ${e3sm_version} with + enhanced resolution around the equator (30 km), South pole + (35 km), Greenland (${min_res} km), ${max_res}-km resolution + at mid latitudes, and ${levels} vertical levels +# E3SM version that the mesh is intended for +e3sm_version = 2 +# The revision number of the mesh, which should be incremented each time the +# mesh is revised +mesh_revision = 3 +# the minimum (finest) resolution in the mesh +min_res = 30 +# the maximum (coarsest) resolution in the mesh, can be the same as min_res +max_res = 60 +# The URL of the pull request documenting the creation of the mesh +pull_request = <<>> diff --git a/compass/ocean/tests/global_ocean/mesh/ec30to60/namelist.split_explicit b/compass/ocean/tests/global_ocean/mesh/ec30to60/namelist.split_explicit new file mode 100644 index 0000000000..99a109cc77 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/ec30to60/namelist.split_explicit @@ -0,0 +1,8 @@ +config_time_integrator = 'split_explicit' +config_dt = '00:30:00' +config_btr_dt = '00:01:00' +config_run_duration = '0000_01:30:00' +config_mom_del2 = 1000.0 +config_mom_del4 = 1.2e11 +config_hmix_scaleWithMesh = .true. +config_use_GM = .true. 
diff --git a/compass/ocean/tests/global_ocean/mesh/mesh.py b/compass/ocean/tests/global_ocean/mesh/mesh.py new file mode 100644 index 0000000000..2e94566bf8 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/mesh.py @@ -0,0 +1,110 @@ +from mpas_tools.ocean import build_spherical_mesh + +from compass.ocean.tests.global_ocean.mesh.cull import cull_mesh +from compass.step import Step + + +class MeshStep(Step): + """ + A step for creating a global MPAS-Ocean mesh + + Attributes + ---------- + mesh_name : str + The name of the mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + + package : str + The python package for the mesh + + mesh_config_filename : str + The name of the mesh config file + """ + def __init__(self, test_case, mesh_name, with_ice_shelf_cavities, + package, mesh_config_filename, name='mesh', subdir=None): + """ + Create a new step + + Parameters + ---------- + test_case : compass.ocean.tests.global_ocean.mesh.Mesh + The test case this step belongs to + + mesh_name : str + The name of the mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + + package : str + The python package for the mesh + + mesh_config_filename : str + The name of the mesh config file + + name : str, optional + the name of the step + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + """ + super().__init__(test_case, name=name, subdir=subdir, cores=None, + min_cores=None, threads=None) + for file in ['culled_mesh.nc', 'culled_graph.info', + 'critical_passages_mask_final.nc']: + self.add_output_file(filename=file) + + self.mesh_name = mesh_name + self.with_ice_shelf_cavities = with_ice_shelf_cavities + self.package = package + self.mesh_config_filename = mesh_config_filename + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies. 
+ """ + # get these properties from the config options + config = self.config + self.cores = config.getint('global_ocean', 'mesh_cores') + self.min_cores = config.getint('global_ocean', 'mesh_min_cores') + + def run(self): + """ + Run this step of the test case + """ + with_ice_shelf_cavities = self.with_ice_shelf_cavities + logger = self.logger + + # only use progress bars if we're not writing to a log file + use_progress_bar = self.log_filename is None + + # create the base mesh + cellWidth, lon, lat = self.build_cell_width_lat_lon() + build_spherical_mesh(cellWidth, lon, lat, out_filename='base_mesh.nc', + logger=logger, use_progress_bar=use_progress_bar) + + cull_mesh(with_critical_passages=True, logger=logger, + use_progress_bar=use_progress_bar, + with_cavities=with_ice_shelf_cavities) + + def build_cell_width_lat_lon(self): + """ + A function for creating cell width array for this mesh on a regular + latitude-longitude grid. Child classes need to override this function + to return the expected data + + Returns + ------- + cellWidth : numpy.array + m x n array of cell width in km + + lon : numpy.array + longitude in degrees (length n and between -180 and 180) + + lat : numpy.array + latitude in degrees (length m and between -90 and 90) + """ + pass diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/__init__.py b/compass/ocean/tests/global_ocean/mesh/qu240/__init__.py new file mode 100644 index 0000000000..5174e1de3a --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/__init__.py @@ -0,0 +1,56 @@ +import numpy as np + +from compass.ocean.tests.global_ocean.mesh.mesh import MeshStep + + +class QU240Mesh(MeshStep): + """ + A step for creating QU240 and QUwISC240 meshes + """ + def __init__(self, test_case, mesh_name, with_ice_shelf_cavities): + """ + Create a new step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_name : str + The name of the mesh + + with_ice_shelf_cavities : bool
+ Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, mesh_name, with_ice_shelf_cavities, + package=self.__module__, + mesh_config_filename='qu240.cfg') + + def build_cell_width_lat_lon(self): + """ + Create cell width array for this mesh on a regular latitude-longitude + grid + + Returns + ------- + cellWidth : numpy.array + m x n array of cell width in km + + lon : numpy.array + longitude in degrees (length n and between -180 and 180) + + lat : numpy.array + latitude in degrees (length m and between -90 and 90) + """ + dlon = 10. + dlat = dlon + constantCellWidth = 240. + + nlat = int(180/dlat) + 1 + nlon = int(360/dlon) + 1 + lat = np.linspace(-90., 90., nlat) + lon = np.linspace(-180., 180., nlon) + + cellWidth = constantCellWidth * np.ones((lat.size, lon.size)) + return cellWidth, lon, lat diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/__init__.py b/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/__init__.py new file mode 100644 index 0000000000..d305fe94fa --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/__init__.py @@ -0,0 +1,82 @@ +from compass.ocean.tests.global_ocean.dynamic_adjustment import \ + DynamicAdjustment +from compass.ocean.tests.global_ocean.forward import ForwardStep + + +class QU240DynamicAdjustment(DynamicAdjustment): + """ + A test case performing dynamic adjustment (dissipating fast-moving waves) + from an initial condition on the QU240 MPAS-Ocean mesh + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + 
time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + restart_times = ['0001-01-02_00:00:00', '0001-01-03_00:00:00'] + restart_filenames = [ + 'restarts/rst.{}.nc'.format(restart_time.replace(':', '.')) + for restart_time in restart_times] + + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + restart_filenames=restart_filenames) + + module = self.__module__ + + # first step + step_name = 'damped_adjustment_1' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-01_00:00:00'", + 'config_Rayleigh_friction': '.true.', + 'config_Rayleigh_damping_coeff': '1.0e-4'} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-01_00:00:00', + 'restart_interval': '00-00-01_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_output_file(filename='../{}'.format(restart_filenames[0])) + self.add_step(step) + + # final step + step_name = 'simulation' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-01_00:00:00'", + 'config_do_restart': '.true.', + 'config_start_time': "'{}'".format(restart_times[0])} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-01_00:00:00', + 'restart_interval': '00-00-01_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_input_file(filename='../{}'.format(restart_filenames[0])) + step.add_output_file(filename='../{}'.format(restart_filenames[1])) + self.add_step(step) diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/streams.template 
b/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/streams.template new file mode 100644 index 0000000000..66004d2541 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/dynamic_adjustment/streams.template @@ -0,0 +1,9 @@ + + + + + + diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/namelist.rk4 b/compass/ocean/tests/global_ocean/mesh/qu240/namelist.rk4 new file mode 100644 index 0000000000..29f23a55dc --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/namelist.rk4 @@ -0,0 +1,6 @@ +config_time_integrator = 'RK4' +config_dt = '0000_00:05:00' +config_run_duration = '0000_00:10:00' +config_hmix_use_ref_cell_width = .true. +config_write_output_on_startup = .false. +config_use_debugTracers = .true. diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/namelist.split_explicit b/compass/ocean/tests/global_ocean/mesh/qu240/namelist.split_explicit new file mode 100644 index 0000000000..7da661e3de --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/namelist.split_explicit @@ -0,0 +1,7 @@ +config_time_integrator = 'split_explicit' +config_dt = '02:00:00' +config_btr_dt = '00:06:00' +config_run_duration = '0000_06:00:00' +config_hmix_use_ref_cell_width = .true. +config_write_output_on_startup = .false. +config_use_debugTracers = .true. 
\ No newline at end of file diff --git a/compass/ocean/tests/global_ocean/mesh/qu240/qu240.cfg b/compass/ocean/tests/global_ocean/mesh/qu240/qu240.cfg new file mode 100644 index 0000000000..a6c2eacb2f --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/qu240/qu240.cfg @@ -0,0 +1,61 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = tanh_dz + +# Number of vertical levels +vert_levels = 16 + +# Depth of the bottom of the ocean +bottom_depth = 3000.0 + +# The minimum layer thickness +min_layer_thickness = 3.0 + +# The maximum layer thickness +max_layer_thickness = 500.0 + + +# options for global ocean testcases +[global_ocean] + +## config options related to the initial_state step +# number of cores to use +init_cores = 4 +# minimum of cores, below which the step fails +init_min_cores = 1 +# maximum memory usage allowed (in MB) +init_max_memory = 1000 +# maximum disk usage allowed (in MB) +init_max_disk = 1000 + +## config options related to the forward steps +# number of cores to use +forward_cores = 4 +# minimum of cores, below which the step fails +forward_min_cores = 1 +# maximum memory usage allowed (in MB) +forward_max_memory = 1000 +# maximum disk usage allowed (in MB) +forward_max_disk = 1000 + +## metadata related to the mesh +# the prefix (e.g. 
QU, EC, WC, SO) +prefix = QU +# a description of the mesh +mesh_description = MPAS quasi-uniform mesh for E3SM version ${e3sm_version} at + ${min_res}-km global resolution with ${levels} vertical + levels + +# E3SM version that the mesh is intended for +e3sm_version = 2 +# The revision number of the mesh, which should be incremented each time the +# mesh is revised +mesh_revision = 1 +# the minimum (finest) resolution in the mesh +min_res = 240 +# the maximum (coarsest) resolution in the mesh, can be the same as min_res +max_res = 240 +# The URL of the pull request documenting the creation of the mesh +pull_request = <<>> diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py b/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py new file mode 100644 index 0000000000..ac1050f254 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/__init__.py @@ -0,0 +1,128 @@ +import numpy as np +from importlib import resources + +import mpas_tools.mesh.creation.mesh_definition_tools as mdt +from mpas_tools.mesh.creation.signed_distance import \ + signed_distance_from_geojson +from geometric_features import read_feature_collection +from mpas_tools.cime.constants import constants + +from compass.ocean.tests.global_ocean.mesh.mesh import MeshStep + + +class SO12to60Mesh(MeshStep): + """ + A step for creating SOwISC12to60 meshes + """ + def __init__(self, test_case, mesh_name, with_ice_shelf_cavities): + """ + Create a new step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + mesh_name : str + The name of the mesh + + with_ice_shelf_cavities : bool + Whether the mesh includes ice-shelf cavities + """ + + super().__init__(test_case, mesh_name, with_ice_shelf_cavities, + package=self.__module__, + mesh_config_filename='so12to60.cfg') + + def build_cell_width_lat_lon(self): + """ + Create cell width array for this mesh on a regular latitude-longitude + grid + + Returns + ------- + cellWidth : 
numpy.array + m x n array of cell width in km + + lon : numpy.array + longitude in degrees (length n and between -180 and 180) + + lat : numpy.array + latitude in degrees (length m and between -90 and 90) + """ + + dlon = 0.1 + dlat = dlon + earth_radius = constants['SHR_CONST_REARTH'] + nlon = int(360./dlon) + 1 + nlat = int(180./dlat) + 1 + lon = np.linspace(-180., 180., nlon) + lat = np.linspace(-90., 90., nlat) + + cellWidthSouth = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., + cellWidthMidLat=45., + cellWidthPole=45., + latPosEq=7.5, latWidthEq=3.0) + + cellWidthNorth = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., + cellWidthMidLat=60., + cellWidthPole=35., + latPosEq=7.5, latWidthEq=3.0) + + # Transition at Equator + latTransition = 0.0 + latWidthTransition = 2.5 + cellWidthVsLat = mdt.mergeCellWidthVsLat( + lat, + cellWidthSouth, + cellWidthNorth, + latTransition, + latWidthTransition) + + _, cellWidth = np.meshgrid(lon, cellWidthVsLat) + + cellWidthAtlantic = mdt.EC_CellWidthVsLat(lat, cellWidthEq=30., + cellWidthMidLat=30., + cellWidthPole=35., + latPosEq=7.5, latWidthEq=3.0) + + cellWidthAtlantic = mdt.mergeCellWidthVsLat( + lat, + cellWidthSouth, + cellWidthAtlantic, + latTransition, + latWidthTransition) + + _, cellWidthAtlantic = np.meshgrid(lon, cellWidthAtlantic) + + with resources.path(self.package, 'atlantic.geojson') as path: + fc = read_feature_collection(str(path)) + + atlantic_signed_distance = signed_distance_from_geojson( + fc, lon, lat, earth_radius, max_length=0.25) + + trans_width = 400e3 + trans_start = 0.
+ weights = 0.5 * (1 + np.tanh((atlantic_signed_distance - trans_start) / + trans_width)) + + cellWidth = cellWidthAtlantic * (1 - weights) + cellWidth * weights + + with resources.path(self.package, 'high_res_region.geojson') as path: + fc = read_feature_collection(str(path)) + + so_signed_distance = signed_distance_from_geojson(fc, lon, lat, + earth_radius, + max_length=0.25) + + # Equivalent to 20 degrees latitude + trans_width = 1600e3 + trans_start = 500e3 + dx_min = 12. + + weights = 0.5 * (1 + np.tanh((so_signed_distance - trans_start) / + trans_width)) + + cellWidth = dx_min * (1 - weights) + cellWidth * weights + + return cellWidth, lon, lat diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson b/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson new file mode 100644 index 0000000000..1df1af372d --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/atlantic.geojson @@ -0,0 +1,97 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "name": "Atlantic region", + "component": "ocean", + "object": "region", + "author": "Xylar Asay-Davis" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -97.3828125, + 85.05112877979998 + ], + [ + -102.3046875, + 40.17887331434696 + ], + [ + -102.3046875, + 23.241346102386135 + ], + [ + -93.1640625, + 15.623036831528264 + ], + [ + -85.78125, + 13.581920900545844 + ], + [ + -83.583984375, + 9.535748998133627 + ], + [ + -81.2109375, + 8.059229627200192 + ], + [ + -79.013671875, + 9.795677582829743 + ], + [ + -75.9375, + 5.61598581915534 + ], + [ + -77.6953125, + 0 + ], + [ + 16.171875, + 0 + ], + [ + 27.773437499999996, + 26.745610382199022 + ], + [ + 37.96875, + 32.24997445586331 + ], + [ + 39.7265625, + 39.36827914916014 + ], + [ + 32.6953125, + 53.9560855309879 + ], + [ + 37.6171875, + 61.438767493682825 + ], + [ + 25.664062500000004, + 68.26938680456564 + ], + [ + 24.609375, + 85.05112877979998 + ], + [ + 
-97.3828125, + 85.05112877979998 + ] + ] + ] + } + } + ] +} \ No newline at end of file diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/__init__.py b/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/__init__.py new file mode 100644 index 0000000000..c183fae208 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/__init__.py @@ -0,0 +1,148 @@ +from compass.ocean.tests.global_ocean.dynamic_adjustment import \ + DynamicAdjustment +from compass.ocean.tests.global_ocean.forward import ForwardStep + + +class SO12to60DynamicAdjustment(DynamicAdjustment): + """ + A test case performing dynamic adjustment (dissipating fast-moving waves) + from an initial condition on the SO12to60 MPAS-Ocean mesh + + Attributes + ---------- + restart_filenames : list of str + A list of restart files from each dynamic-adjustment step + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + if time_integrator != 'split_explicit': + raise ValueError('{} dynamic adjustment not defined for {}'.format( + mesh.mesh_name, time_integrator)) + + restart_times = ['0001-01-03_00:00:00', '0001-01-07_00:00:00', + '0001-01-11_00:00:00', '0001-01-21_00:00:00'] + restart_filenames = [ + 'restarts/rst.{}.nc'.format(restart_time.replace(':', '.')) + for restart_time in restart_times] + + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + 
restart_filenames=restart_filenames) + + module = self.__module__ + + # first step + step_name = 'damped_adjustment_1' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-02_00:00:00'", + 'config_dt': "'00:05:00'", + 'config_btr_dt': "'00:00:20'", + 'config_Rayleigh_friction': '.true.', + 'config_Rayleigh_damping_coeff': '1.0e-4'} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-02_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_output_file(filename='../{}'.format(restart_filenames[0])) + self.add_step(step) + + # second step + step_name = 'damped_adjustment_2' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-04_00:00:00'", + 'config_dt': "'00:07:30'", + 'config_btr_dt': "'00:00:20'", + 'config_Rayleigh_friction': '.true.', + 'config_Rayleigh_damping_coeff': '4.0e-5', + 'config_do_restart': '.true.', + 'config_start_time': "'{}'".format(restart_times[0])} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-02_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_input_file(filename='../{}'.format(restart_filenames[0])) + step.add_output_file(filename='../{}'.format(restart_filenames[1])) + self.add_step(step) + + # third step + step_name = 'damped_adjustment_3' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-04_00:00:00'", + 
'config_dt': "'00:10:00'", + 'config_btr_dt': "'00:00:20'", + 'config_Rayleigh_friction': '.true.', + 'config_Rayleigh_damping_coeff': '1.0e-5', + 'config_do_restart': '.true.', + 'config_start_time': "'{}'".format(restart_times[1])} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-02_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_input_file(filename='../{}'.format(restart_filenames[1])) + step.add_output_file(filename='../{}'.format(restart_filenames[2])) + self.add_step(step) + + # final step + step_name = 'simulation' + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=step_name, + subdir=step_name) + + namelist_options = { + 'config_run_duration': "'00-00-10_00:00:00'", + 'config_do_restart': '.true.', + 'config_start_time': "'{}'".format(restart_times[2])} + step.add_namelist_options(namelist_options) + + stream_replacements = { + 'output_interval': '00-00-10_00:00:00', + 'restart_interval': '00-00-10_00:00:00'} + step.add_streams_file(module, 'streams.template', + template_replacements=stream_replacements) + + step.add_input_file(filename='../{}'.format(restart_filenames[2])) + step.add_output_file(filename='../{}'.format(restart_filenames[3])) + self.add_step(step) + + self.restart_filenames = restart_filenames diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/streams.template b/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/streams.template new file mode 100644 index 0000000000..66004d2541 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/streams.template @@ -0,0 +1,9 @@ + + + + + + diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson b/compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson new file mode 100644 
index 0000000000..76e93d0886 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/high_res_region.geojson @@ -0,0 +1,73 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "name": "SOwISC12to60 high res region", + "component": "ocean", + "object": "region", + "author": "Xylar Asay-Davis" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -75.5859375, + -48.92249926375823 + ], + [ + -96.50390625, + -54.67383096593114 + ], + [ + -124.45312499999999, + -53.95608553098789 + ], + [ + -180, + -53 + ], + [ + -180, + -90 + ], + [ + 180, + -90 + ], + [ + 180, + -53 + ], + [ + 168.3984375, + -51.17934297928927 + ], + [ + 121.640625, + -45.82879925192133 + ], + [ + 22.148437499999996, + -37.99616267972812 + ], + [ + -61.17187499999999, + -34.30714385628803 + ], + [ + -68.90625, + -40.97989806962013 + ], + [ + -75.5859375, + -48.92249926375823 + ] + ] + ] + } + } + ] +} diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/namelist.split_explicit b/compass/ocean/tests/global_ocean/mesh/so12to60/namelist.split_explicit new file mode 100644 index 0000000000..a8a9c31b83 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/namelist.split_explicit @@ -0,0 +1,7 @@ +config_time_integrator = 'split_explicit' +config_dt = '00:10:00' +config_btr_dt = '00:00:20' +config_run_duration = '0000_01:00:00' +config_mom_del4 = 1.5e10 +config_hmix_scaleWithMesh = .true. +config_use_GM = .true. 
diff --git a/compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg b/compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg new file mode 100644 index 0000000000..1cec7f1ba4 --- /dev/null +++ b/compass/ocean/tests/global_ocean/mesh/so12to60/so12to60.cfg @@ -0,0 +1,53 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = 60layerPHC + + +# options for global ocean testcases +[global_ocean] + +## config options related to the initial_state step +# number of cores to use +init_cores = 36 +# minimum of cores, below which the step fails +init_min_cores = 8 +# maximum memory usage allowed (in MB) +init_max_memory = 1000 +# maximum disk usage allowed (in MB) +init_max_disk = 1000 + +## config options related to the forward steps +# number of cores to use +forward_cores = 1296 +# minimum of cores, below which the step fails +forward_min_cores = 128 +# maximum memory usage allowed (in MB) +forward_max_memory = 1000 +# maximum disk usage allowed (in MB) +forward_max_disk = 1000 + +## metadata related to the mesh +# the prefix (e.g. QU, EC, WC, SO) +prefix = SO +# a description of the mesh and initial condition +mesh_description = MPAS Southern Ocean regionally refined mesh for E3SM version + ${e3sm_version} with enhanced resolution (${min_res} km) around + Antarctica, 45-km resolution in the mid southern latitudes, + 30-km resolution in a 15-degree band around the equator, 60-km + resolution in northern mid latitudes, 30 km in the north + Atlantic and 35 km in the Arctic. This mesh has ${levels} + vertical levels and includes cavities under the ice shelves + around Antarctica. 
+# E3SM version that the mesh is intended for +e3sm_version = 2 +# The revision number of the mesh, which should be incremented each time the +# mesh is revised +mesh_revision = 4 +# the minimum (finest) resolution in the mesh +min_res = 12 +# the maximum (coarsest) resolution in the mesh, can be the same as min_res +max_res = 60 +# The URL of the pull request documenting the creation of the mesh +pull_request = https://github.com/MPAS-Dev/compass/pull/37 diff --git a/compass/ocean/tests/global_ocean/metadata.py b/compass/ocean/tests/global_ocean/metadata.py new file mode 100644 index 0000000000..195ebe9d8e --- /dev/null +++ b/compass/ocean/tests/global_ocean/metadata.py @@ -0,0 +1,181 @@ +import subprocess +from datetime import datetime +import numpy +import xarray +import os +import shutil + + +def get_e3sm_mesh_names(config, levels): + """ + Get short and long E3SM mesh name from config options and the given number + of vertical levels (typically taken from an initial condition or restart + file). 
+ + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for this test case + + levels : int + The number of vertical levels + + Returns + ------- + short_mesh_name : str + The short E3SM name of the ocean and sea-ice mesh + + long_mesh_name : str + The long E3SM name of the ocean and sea-ice mesh + """ + + mesh_prefix = config.get('global_ocean', 'prefix') + min_res = config.get('global_ocean', 'min_res') + max_res = config.get('global_ocean', 'max_res') + config.set('global_ocean', 'levels', '{}'.format(levels)) + e3sm_version = config.get('global_ocean', 'e3sm_version') + mesh_revision = config.get('global_ocean', 'mesh_revision') + + if min_res == max_res: + res = min_res + else: + res = '{}to{}'.format(min_res, max_res) + + short_mesh_name = '{}{}E{}r{}'.format(mesh_prefix, res, e3sm_version, + mesh_revision) + long_mesh_name = '{}{}kmL{}E3SMv{}r{}'.format(mesh_prefix, res, levels, + e3sm_version, mesh_revision) + + return short_mesh_name, long_mesh_name + + +def add_mesh_and_init_metadata(output_filenames, config, init_filename): + """ + Add MPAS mesh and initial condition metadata to NetCDF outputs of the given + step + + Parameters + ---------- + output_filenames : list + A list of output files. 
+ + config : configparser.ConfigParser + Configuration options for this test case + + init_filename : str + The name of an initial condition file to get the number of vertical + levels and maximum depth from + """ + + if config.getboolean('global_ocean', 'add_metadata'): + with xarray.open_dataset(init_filename) as dsInit: + metadata = _get_metadata(dsInit, config) + + for filename in output_filenames: + if filename.endswith('.nc'): + args = ['ncra'] + for key, value in metadata.items(): + args.extend(['--glb_att_add', '{}={}'.format(key, value)]) + name, ext = os.path.splitext(filename) + new_filename = '{}_with_metadata{}'.format(name, ext) + args.extend([filename, new_filename]) + subprocess.check_call(args) + shutil.move(new_filename, filename) + + +def _get_metadata(dsInit, config): + """ add metadata to a given dataset """ + + author = config.get('global_ocean', 'author') + if author == 'autodetect': + author = subprocess.check_output( + ['git', 'config', 'user.name']).decode("utf-8").strip() + config.set('global_ocean', 'author', author) + + email = config.get('global_ocean', 'email') + if email == 'autodetect': + email = subprocess.check_output( + ['git', 'config', 'user.email']).decode("utf-8").strip() + config.set('global_ocean', 'email', email) + + creation_date = config.get('global_ocean', 'creation_date') + if creation_date == 'autodetect': + now = datetime.now() + creation_date = now.strftime("%y%m%d") + config.set('global_ocean', 'creation_date', creation_date) + + max_depth = dsInit.bottomDepth.max().values + # round to the nearest 0.1 m + max_depth = numpy.round(max_depth, 1) + config.set('global_ocean', 'max_depth', '{}'.format(max_depth)) + + mesh_prefix = config.get('global_ocean', 'prefix') + min_res = config.get('global_ocean', 'min_res') + max_res = config.get('global_ocean', 'max_res') + levels = dsInit.sizes['nVertLevels'] + config.set('global_ocean', 'levels', '{}'.format(levels)) + e3sm_version = config.get('global_ocean', 'e3sm_version') 
+ mesh_revision = config.get('global_ocean', 'mesh_revision') + pull_request = config.get('global_ocean', 'pull_request') + + short_name, long_name = get_e3sm_mesh_names(config, levels) + + descriptions = dict() + + for prefix in ['mesh', 'init', 'bathy', 'bgc', 'wisc']: + option = '{}_description'.format(prefix) + if config.has_option('global_ocean', option): + description = config.get('global_ocean', option) + description = ' '.join( + [line.strip() for line in description.split('\n')]) + descriptions[prefix] = description + + prefix = 'MPAS_Mesh_{}'.format(mesh_prefix) + + metadata = {'MPAS_Mesh_Short_Name': short_name, + 'MPAS_Mesh_Long_Name': long_name, + 'MPAS_Mesh_Prefix': mesh_prefix, + 'MPAS_Mesh_E3SM_Version': e3sm_version, + 'MPAS_Mesh_Pull_Request': pull_request, + '{}_Revision'.format(prefix): mesh_revision, + '{}_Version_Author'.format(prefix): author, + '{}_Version_Author_E-mail'.format(prefix): email, + '{}_Version_Creation_Date'.format(prefix): creation_date, + '{}_Minimum_Resolution_km'.format(prefix): min_res, + '{}_Maximum_Resolution_km'.format(prefix): max_res, + '{}_Maximum_Depth_m'.format(prefix): '{}'.format(max_depth), + '{}_Number_of_Levels'.format(prefix): '{}'.format(levels), + 'MPAS_Mesh_Description': descriptions['mesh'], + 'MPAS_Mesh_Bathymetry': descriptions['bathy'], + 'MPAS_Initial_Condition': descriptions['init']} + + if 'wisc' in descriptions: + metadata['MPAS_Mesh_Ice_Shelf_Cavities'] = descriptions['wisc'] + + if 'bgc' in descriptions: + metadata['MPAS_Mesh_Biogeochemistry'] = descriptions['bgc'] + + packages = {'compass': 'compass', 'JIGSAW': 'jigsaw', + 'JIGSAW-Python': 'jigsawpy', 'MPAS-Tools': 'mpas_tools', + 'NCO': 'nco', 'ESMF': 'esmf', + 'geometric_features': 'geometric_features', + 'Metis': 'metis', 'pyremap': 'pyremap'} + + for name in packages: + package = packages[name] + metadata['MPAS_Mesh_{}_Version'.format(name)] = \ + _get_conda_package_version(package) + + return metadata + + +def 
_get_conda_package_version(package): + conda = subprocess.check_output(['conda', 'list', package]).decode("utf-8") + lines = conda.split('\n') + for line in lines: + parts = line.split() + if parts[0] == package: + return parts[1] + + raise ValueError('Package {} not found in the conda environment'.format( + package)) diff --git a/compass/ocean/tests/global_ocean/namelist.bgc b/compass/ocean/tests/global_ocean/namelist.bgc new file mode 100644 index 0000000000..35bd254872 --- /dev/null +++ b/compass/ocean/tests/global_ocean/namelist.bgc @@ -0,0 +1,3 @@ +config_use_ecosysTracers = .true. +config_use_DMSTracers = .true. +config_use_MacroMoleculesTracers = .true. diff --git a/compass/ocean/tests/global_ocean/namelist.forward b/compass/ocean/tests/global_ocean/namelist.forward new file mode 100644 index 0000000000..911fba6624 --- /dev/null +++ b/compass/ocean/tests/global_ocean/namelist.forward @@ -0,0 +1,21 @@ +config_ocean_run_mode = 'forward' +config_block_decomp_file_prefix = 'graph.info.part.' +config_time_integrator = 'split_explicit' +config_use_mom_del2 = .true. +config_use_mom_del4 = .true. +config_use_cvmix = .true. +config_use_cvmix_background = .true. +config_use_cvmix_convection = .true. +config_use_cvmix_shear = .true. +config_use_cvmix_kpp = .true. +config_cvmix_shear_mixing_scheme = 'KPP' +config_pressure_gradient_type = 'Jacobian_from_TS' +config_eos_type = 'jm' +config_implicit_bottom_drag_coeff = 1.0e-3 +config_use_bulk_wind_stress = .true. +config_use_bulk_thickness_flux = .true. +config_use_activeTracers_surface_restoring = .true. +config_use_Redi = .true. +config_use_GM = .true. +config_AM_mixedLayerDepths_enable = .true. 
+config_AM_mixedLayerDepths_compute_interval = 'dt' diff --git a/compass/ocean/tests/global_ocean/namelist.wisc b/compass/ocean/tests/global_ocean/namelist.wisc new file mode 100644 index 0000000000..152ba570b4 --- /dev/null +++ b/compass/ocean/tests/global_ocean/namelist.wisc @@ -0,0 +1,2 @@ +config_check_ssh_consistency = .false. +config_land_ice_flux_mode = 'pressure_only' diff --git a/compass/ocean/tests/global_ocean/performance_test/__init__.py b/compass/ocean/tests/global_ocean/performance_test/__init__.py new file mode 100644 index 0000000000..a9f83e7813 --- /dev/null +++ b/compass/ocean/tests/global_ocean/performance_test/__init__.py @@ -0,0 +1,79 @@ +from compass.validate import compare_variables, compare_timers +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class PerformanceTest(ForwardTestCase): + """ + A test case for performing a short forward run with an MPAS-Ocean global + initial condition assess performance and compare with previous results + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='performance_test') + + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator) + + if mesh.with_ice_shelf_cavities: + module = self.__module__ + step.add_namelist_file(module, 'namelist.wisc') + step.add_streams_file(module, 'streams.wisc') + 
step.add_output_file(filename='land_ice_fluxes.nc') + self.add_step(step) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + if self.init.with_bgc: + variables.extend( + ['PO4', 'NO3', 'SiO3', 'NH4', 'Fe', 'O2', 'DIC', 'DIC_ALT_CO2', + 'ALK', 'DOC', 'DON', 'DOFe', 'DOP', 'DOPr', 'DONr', 'zooC', + 'spChl', 'spC', 'spFe', 'spCaCO3', 'diatChl', 'diatC', + 'diatFe', 'diatSi', 'diazChl', 'diazC', 'diazFe', 'phaeoChl', + 'phaeoC', 'phaeoFe']) + + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='forward/output.nc') + + if self.mesh.with_ice_shelf_cavities: + variables = [ + 'ssh', 'landIcePressure', 'landIceDraft', 'landIceFraction', + 'landIceMask', 'landIceFrictionVelocity', 'topDrag', + 'topDragMagnitude', 'landIceFreshwaterFlux', 'landIceHeatFlux', + 'heatFluxToLandIce', 'landIceBoundaryLayerTemperature', + 'landIceBoundaryLayerSalinity', 'landIceHeatTransferVelocity', + 'landIceSaltTransferVelocity', 'landIceInterfaceTemperature', + 'landIceInterfaceSalinity', 'accumulatedLandIceMass', + 'accumulatedLandIceHeat'] + + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='forward/land_ice_fluxes.nc') + + timers = ['time integration'] + compare_timers(timers, self.config, self.work_dir, rundir1='forward') diff --git a/compass/ocean/tests/global_ocean/performance_test/namelist.wisc b/compass/ocean/tests/global_ocean/performance_test/namelist.wisc new file mode 100644 index 0000000000..98469ee98f --- /dev/null +++ b/compass/ocean/tests/global_ocean/performance_test/namelist.wisc @@ -0,0 +1,2 @@ +config_check_ssh_consistency = .false. 
+config_land_ice_flux_mode = 'standalone' diff --git a/compass/ocean/tests/global_ocean/performance_test/streams.wisc b/compass/ocean/tests/global_ocean/performance_test/streams.wisc new file mode 100644 index 0000000000..e03cfd8cd1 --- /dev/null +++ b/compass/ocean/tests/global_ocean/performance_test/streams.wisc @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/restart_test/__init__.py b/compass/ocean/tests/global_ocean/restart_test/__init__.py new file mode 100644 index 0000000000..3c7cd7420d --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/__init__.py @@ -0,0 +1,69 @@ +from compass.validate import compare_variables +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class RestartTest(ForwardTestCase): + """ + A test case for performing two forward run, one without a restart and one + with to make sure the results are identical + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : {'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='restart_test') + module = __name__ + + restart_time = {'split_explicit': '0001-01-01_04:00:00', + 'RK4': '0001-01-01_00:10:00'} + restart_filename = '../restarts/rst.{}.nc'.format( + restart_time[time_integrator].replace(':', '.')) + input_file = {'restart': restart_filename} + output_file = {'full': restart_filename} + for part in ['full', 
'restart']: + name = '{}_run'.format(part) + step = ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=name, + subdir=name, cores=4, threads=1) + + suffix = '{}.{}'.format(time_integrator.lower(), part) + step.add_namelist_file(module, 'namelist.{}'.format(suffix)) + step.add_streams_file(module, 'streams.{}'.format(suffix)) + if part in input_file: + step.add_input_file(filename=input_file[part]) + if part in output_file: + step.add_output_file(filename=output_file[part]) + self.add_step(step) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') diff --git a/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.full b/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.full new file mode 100644 index 0000000000..73c4f01493 --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.full @@ -0,0 +1,3 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '00:20:00' +config_write_output_on_startup = .false. diff --git a/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.restart b/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.restart new file mode 100644 index 0000000000..3d03516cff --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/namelist.rk4.restart @@ -0,0 +1,4 @@ +config_start_time = '0001-01-01_00:10:00' +config_run_duration = '00:10:00' +config_write_output_on_startup = .false. +config_do_restart = .true. 
diff --git a/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.full b/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.full new file mode 100644 index 0000000000..a9cacbd9fb --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.full @@ -0,0 +1,3 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '08:00:00' +config_write_output_on_startup = .false. diff --git a/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.restart b/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.restart new file mode 100644 index 0000000000..6a6dc68b3d --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/namelist.split_explicit.restart @@ -0,0 +1,4 @@ +config_start_time = '0001-01-01_04:00:00' +config_run_duration = '04:00:00' +config_write_output_on_startup = .false. +config_do_restart = .true. diff --git a/compass/ocean/tests/global_ocean/restart_test/streams.rk4.full b/compass/ocean/tests/global_ocean/restart_test/streams.rk4.full new file mode 100644 index 0000000000..f96a802544 --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/streams.rk4.full @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/restart_test/streams.rk4.restart b/compass/ocean/tests/global_ocean/restart_test/streams.rk4.restart new file mode 100644 index 0000000000..c46c0a8db5 --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/streams.rk4.restart @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.full b/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.full new file mode 100644 index 0000000000..e976b23db8 --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.full @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.restart 
b/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.restart new file mode 100644 index 0000000000..4d57bb314f --- /dev/null +++ b/compass/ocean/tests/global_ocean/restart_test/streams.split_explicit.restart @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/streams.bgc b/compass/ocean/tests/global_ocean/streams.bgc new file mode 100644 index 0000000000..32f0619354 --- /dev/null +++ b/compass/ocean/tests/global_ocean/streams.bgc @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/streams.forward b/compass/ocean/tests/global_ocean/streams.forward new file mode 100644 index 0000000000..d26230477d --- /dev/null +++ b/compass/ocean/tests/global_ocean/streams.forward @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/global_ocean/threads_test/__init__.py b/compass/ocean/tests/global_ocean/threads_test/__init__.py new file mode 100644 index 0000000000..6508de275b --- /dev/null +++ b/compass/ocean/tests/global_ocean/threads_test/__init__.py @@ -0,0 +1,53 @@ +from compass.validate import compare_variables +from compass.ocean.tests.global_ocean.forward import ForwardTestCase, \ + ForwardStep + + +class ThreadsTest(ForwardTestCase): + """ + A test case for performing two short forward runs to make sure the results + are identical with 1 and 2 thread per MPI process + """ + + def __init__(self, test_group, mesh, init, time_integrator): + """ + Create test case + + Parameters + ---------- + test_group : compass.ocean.tests.global_ocean.GlobalOcean + The global ocean test group that this test case belongs to + + mesh : compass.ocean.tests.global_ocean.mesh.Mesh + The test case that produces the mesh for this run + + init : compass.ocean.tests.global_ocean.init.Init + The test case that produces the initial condition for this run + + time_integrator : 
{'split_explicit', 'RK4'} + The time integrator to use for the forward run + """ + super().__init__(test_group=test_group, mesh=mesh, init=init, + time_integrator=time_integrator, + name='threads_test') + for threads in [1, 2]: + name = '{}thread'.format(threads) + self.add_step( + ForwardStep(test_case=self, mesh=mesh, init=init, + time_integrator=time_integrator, name=name, + subdir=name, cores=4, threads=threads)) + + def run(self): + """ + Run each step of the testcase + """ + # get cores, threads from config options and run the steps + super().run() + + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + steps = self.steps_to_run + if '1thread' in steps and '2thread' in steps: + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='1thread/output.nc', + filename2='2thread/output.nc') diff --git a/compass/ocean/tests/ice_shelf_2d/__init__.py b/compass/ocean/tests/ice_shelf_2d/__init__.py new file mode 100644 index 0000000000..61f3074d5e --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/__init__.py @@ -0,0 +1,50 @@ +from compass.config import add_config +from compass.testgroup import TestGroup +from compass.ocean.tests.ice_shelf_2d.default import Default +from compass.ocean.tests.ice_shelf_2d.restart_test import RestartTest + + +class IceShelf2d(TestGroup): + """ + A test group for ice-shelf 2D test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.MpasCore + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='ice_shelf_2d') + + for resolution in ['5km']: + self.add_test_case( + Default(test_group=self, resolution=resolution)) + self.add_test_case( + RestartTest(test_group=self, resolution=resolution)) + + +def configure(name, resolution, config): + """ + Modify the configuration options for this test case + + Parameters + ---------- + name : str + the name of the test case + + resolution : str + The resolution of the test case + + config 
: configparser.ConfigParser + Configuration options for this test case + """ + res_params = {'5km': {'nx': 10, 'ny': 44, 'dc': 5e3}} + + if resolution not in res_params: + raise ValueError('Unsupported resolution {}. Supported values are: ' + '{}'.format(resolution, list(res_params))) + res_params = res_params[resolution] + for param in res_params: + config.set('ice_shelf_2d', param, '{}'.format(res_params[param])) + + add_config(config, 'compass.ocean.tests.ice_shelf_2d.{}'.format(name), + '{}.cfg'.format(name), exception=False) diff --git a/compass/ocean/tests/ice_shelf_2d/default/__init__.py b/compass/ocean/tests/ice_shelf_2d/default/__init__.py new file mode 100644 index 0000000000..fa46f686ac --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/default/__init__.py @@ -0,0 +1,76 @@ +from compass.testcase import TestCase +from compass.ocean.tests.ice_shelf_2d.initial_state import InitialState +from compass.ocean.tests.ice_shelf_2d.ssh_adjustment import SshAdjustment +from compass.ocean.tests.ice_shelf_2d.forward import Forward +from compass.ocean.tests import ice_shelf_2d +from compass.validate import compare_variables + + +class Default(TestCase): + """ + The default ice-shelf 2D test case, which performs a short forward run with + the z-star vertical coordinate and with 15 iterations of adjustment to make + the pressure from the weight of the ice shelf match the sea-surface height + + Attributes + ---------- + resolution : str + The horizontal resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.ice_shelf_2d.IceShelf2d + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'default' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, 
resolution=resolution)) + self.add_step( + SshAdjustment(test_case=self, cores=4, threads=1)) + self.add_step( + Forward(test_case=self, cores=4, threads=1, resolution=resolution, + with_frazil=True)) + + def configure(self): + """ + Modify the configuration options for this test case. + """ + ice_shelf_2d.configure(self.name, self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='forward/output.nc') + + variables = \ + ['ssh', 'landIcePressure', 'landIceDraft', 'landIceFraction', + 'landIceMask', 'landIceFrictionVelocity', 'topDrag', + 'topDragMagnitude', 'landIceFreshwaterFlux', + 'landIceHeatFlux', 'heatFluxToLandIce', + 'landIceBoundaryLayerTemperature', 'landIceBoundaryLayerSalinity', + 'landIceHeatTransferVelocity', 'landIceSaltTransferVelocity', + 'landIceInterfaceTemperature', 'landIceInterfaceSalinity', + 'accumulatedLandIceMass', 'accumulatedLandIceHeat'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='forward/land_ice_fluxes.nc') diff --git a/compass/ocean/tests/ice_shelf_2d/default/default.cfg b/compass/ocean/tests/ice_shelf_2d/default/default.cfg new file mode 100644 index 0000000000..0e6daa54c2 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/default/default.cfg @@ -0,0 +1,6 @@ +# Options relate to adjusting the sea-surface height or land-ice pressure +# below ice shelves to they are dynamically consistent with one another +[ssh_adjustment] + +# the number of iterations of ssh adjustment to perform +iterations = 15 diff --git a/compass/ocean/tests/ice_shelf_2d/forward.py b/compass/ocean/tests/ice_shelf_2d/forward.py new file mode 100644 index 0000000000..1e52990203 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/forward.py @@ -0,0 +1,87 @@ +from 
compass.model import run_model +from compass.step import Step + + +class Forward(Step): + """ + A step for performing forward MPAS-Ocean runs as part of ice-shelf 2D test + cases. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + def __init__(self, test_case, resolution, name='forward', subdir=None, + cores=1, min_cores=None, threads=1, with_frazil=True): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + with_frazil : bool, optional + whether the simulation includes frazil ice formation + """ + self.resolution = resolution + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + self.add_namelist_file('compass.ocean.tests.ice_shelf_2d', + 'namelist.forward') + if with_frazil: + options = {'config_use_frazil_ice_formation': '.true.', + 'config_frazil_maximum_depth': '2000.0'} + self.add_namelist_options(options) + self.add_streams_file('compass.ocean.streams', 'streams.frazil') + + self.add_streams_file('compass.ocean.streams', + 'streams.land_ice_fluxes') + + self.add_streams_file('compass.ocean.tests.ice_shelf_2d', + 'streams.forward') + + self.add_input_file(filename='init.nc', + target='../ssh_adjustment/adjusted_init.nc') + 
self.add_input_file(filename='graph.info', + target='../initial_state/culled_graph.info') + + self.add_output_file('output.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + run_model(self) diff --git a/compass/ocean/tests/ice_shelf_2d/ice_shelf_2d.cfg b/compass/ocean/tests/ice_shelf_2d/ice_shelf_2d.cfg new file mode 100644 index 0000000000..2f6fc4c059 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/ice_shelf_2d.cfg @@ -0,0 +1,39 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = uniform + +# Number of vertical levels +vert_levels = 20 + +# Depth of the bottom of the ocean +bottom_depth = 2000.0 + + +# config options for 2D ice-shelf testcases +[ice_shelf_2d] + +# Vertical thickness of ocean sub-ice cavity +cavity_thickness = 10.0 + +# Vertical thickness of fixed slope +slope_height = 500.0 + +# Horizontal width of angled part of the ice +edge_width = 15.0e3 + +# cavity edge in y +y1 = 30.0e3 + +# shelf edge in y +y2 = 60.0e3 + +# Temperature of the surface in the northern half of the domain +temperature = 1.0 + +# Salinity of the water in the entire domain +surface_salinity = 34.5 + +# Salinity of the water in the entire domain +bottom_salinity = 34.7 diff --git a/compass/ocean/tests/ice_shelf_2d/initial_state.py b/compass/ocean/tests/ice_shelf_2d/initial_state.py new file mode 100644 index 0000000000..1b04b1450d --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/initial_state.py @@ -0,0 +1,149 @@ +import xarray + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull +from mpas_tools.cime.constants import constants + +from compass.step import Step +from compass.ocean.vertical import generate_grid +from compass.ocean.iceshelf import 
compute_land_ice_pressure_and_draft +from compass.ocean.vertical.zstar import compute_layer_thickness_and_zmid + + +class InitialState(Step): + """ + A step for creating a mesh and initial condition for ice-shelf 2D test + cases + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + def __init__(self, test_case, resolution): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + """ + super().__init__(test_case=test_case, name='initial_state') + self.resolution = resolution + + for file in ['base_mesh.nc', 'culled_mesh.nc', 'culled_graph.info', + 'initial_state.nc']: + self.add_output_file(file) + + def run(self): + """ + Run this step of the test case + """ + config = self.config + logger = self.logger + + section = config['ice_shelf_2d'] + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False, + nonperiodic_y=True) + write_netcdf(dsMesh, 'base_mesh.nc') + + dsMesh = cull(dsMesh, logger=logger) + dsMesh = convert(dsMesh, graphInfoFileName='culled_graph.info', + logger=logger) + write_netcdf(dsMesh, 'culled_mesh.nc') + + section = config['ice_shelf_2d'] + temperature = section.getfloat('temperature') + surface_salinity = section.getfloat('surface_salinity') + bottom_salinity = section.getfloat('bottom_salinity') + + interfaces = generate_grid(config=config) + + bottom_depth = interfaces[-1] + vert_levels = len(interfaces) - 1 + + # points 1 and 2 are where angles on ice shelf are located. + # point 3 is at the surface. 
+ # d variables are total water-column thickness below ice shelf + y1 = section.getfloat('y1') + y2 = section.getfloat('y2') + y3 = y2 + section.getfloat('edge_width') + d1 = section.getfloat('cavity_thickness') + d2 = d1 + section.getfloat('slope_height') + d3 = bottom_depth + + ds = dsMesh.copy() + + ds['refBottomDepth'] = ('nVertLevels', interfaces[1:]) + ds['refZMid'] = ('nVertLevels', + -0.5 * (interfaces[1:] + interfaces[0:-1])) + ds['vertCoordMovementWeights'] = xarray.ones_like(ds.refBottomDepth) + + yCell = ds.yCell + ds['bottomDepth'] = bottom_depth * xarray.ones_like(yCell) + ds['maxLevelCell'] = vert_levels * xarray.ones_like(yCell, dtype=int) + + column_thickness = xarray.where( + yCell < y1, d1, d1 + (d2 - d1) * (yCell - y1) / (y2 - y1)) + column_thickness = xarray.where( + yCell < y2, column_thickness, + d2 + (d3 - d2) * (yCell - y2) / (y3 - y2)) + column_thickness = xarray.where(yCell < y3, column_thickness, d3) + + ssh = -bottom_depth + column_thickness + + cellMask = xarray.ones_like(yCell) + cellMask, _ = xarray.broadcast(cellMask, ds.refBottomDepth) + cellMask = cellMask.transpose('nCells', 'nVertLevels') + + restingThickness, layerThickness, zMid = \ + compute_layer_thickness_and_zmid( + cellMask, ds.refBottomDepth, ds.bottomDepth, ds.maxLevelCell-1, + ssh=ssh) + + layerThickness = layerThickness.expand_dims(dim='Time', axis=0) + ssh = ssh.expand_dims(dim='Time', axis=0) + modify_mask = xarray.where(yCell < y3, 1, 0).expand_dims( + dim='Time', axis=0) + landIceFraction = modify_mask.astype(float) + landIceMask = modify_mask.copy() + + ref_density = constants['SHR_CONST_RHOSW'] + landIcePressure, landIceDraft = compute_land_ice_pressure_and_draft( + ssh=ssh, modify_mask=modify_mask, ref_density=ref_density) + + salinity = surface_salinity + ((bottom_salinity - surface_salinity) * + (zMid / (-bottom_depth))) + salinity, _ = xarray.broadcast(salinity, layerThickness) + salinity = salinity.transpose('Time', 'nCells', 'nVertLevels') + + 
normalVelocity = xarray.zeros_like(ds.xEdge) + normalVelocity, _ = xarray.broadcast(normalVelocity, ds.refBottomDepth) + normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels') + normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0) + + ds['temperature'] = temperature * xarray.ones_like(layerThickness) + ds['salinity'] = salinity + ds['normalVelocity'] = normalVelocity + ds['layerThickness'] = layerThickness + ds['ssh'] = ssh + ds['restingThickness'] = restingThickness + ds['fCell'] = xarray.zeros_like(ds.xCell) + ds['fEdge'] = xarray.zeros_like(ds.xEdge) + ds['fVertex'] = xarray.zeros_like(ds.xVertex) + ds['modifyLandIcePressureMask'] = modify_mask + ds['landIceFraction'] = landIceFraction + ds['landIceMask'] = landIceMask + ds['landIcePressure'] = landIcePressure + ds['landIceDraft'] = landIceDraft + + write_netcdf(ds, 'initial_state.nc') diff --git a/compass/ocean/tests/ice_shelf_2d/namelist.forward b/compass/ocean/tests/ice_shelf_2d/namelist.forward new file mode 100644 index 0000000000..9ce08ca5a5 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/namelist.forward @@ -0,0 +1,13 @@ +config_dt = '00:05:00' +config_btr_dt = '00:00:15' +config_run_duration = '0000_00:10:00' +config_use_mom_del2 = .true. +config_mom_del2 = 10.0 +config_implicit_bottom_drag_coeff = 1.0e-3 +config_eos_type = 'jm' +config_pressure_gradient_type = 'Jacobian_from_TS' +config_land_ice_flux_mode = 'standalone' +config_check_ssh_consistency = .false. +config_AM_globalStats_enable = .true. +config_AM_globalStats_compute_on_startup = .true. +config_AM_globalStats_write_on_startup = .true. 
diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/__init__.py b/compass/ocean/tests/ice_shelf_2d/restart_test/__init__.py new file mode 100644 index 0000000000..bf41aba2ae --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/__init__.py @@ -0,0 +1,103 @@ +from compass.testcase import TestCase +from compass.ocean.tests.ice_shelf_2d.initial_state import InitialState +from compass.ocean.tests.ice_shelf_2d.ssh_adjustment import SshAdjustment +from compass.ocean.tests.ice_shelf_2d.forward import Forward +from compass.ocean.tests import ice_shelf_2d +from compass.validate import compare_variables + + +class RestartTest(TestCase): + """ + A restart test case for the ice-shelf 2D test case test group, which makes + sure the model produces identical results with one longer run and two + shorter runs with a restart in between. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.ice_shelf_2d.IceShelf2d + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'restart_test' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + self.add_step( + InitialState(test_case=self, resolution=resolution)) + self.add_step( + SshAdjustment(test_case=self, cores=4, threads=1)) + + for part in ['full', 'restart']: + name = '{}_run'.format(part) + step = Forward(test_case=self, name=name, subdir=name, cores=4, + threads=1, resolution=resolution, with_frazil=True) + + step.add_namelist_file( + 'compass.ocean.tests.ice_shelf_2d.restart_test', + 'namelist.{}'.format(part)) + step.add_streams_file( + 'compass.ocean.tests.ice_shelf_2d.restart_test', + 'streams.{}'.format(part)) + self.add_step(step) + + def configure(self): + """ + Modify the 
configuration options for this test case. + """ + ice_shelf_2d.configure(self.name, self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + steps = self.steps_to_run + if 'full_run' in steps and 'restart_run' in steps: + variables = ['temperature', 'salinity', 'layerThickness', + 'normalVelocity'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/output.nc', + filename2='restart_run/output.nc') + + variables = ['ssh', 'landIcePressure', 'landIceDraft', + 'landIceFraction', + 'landIceMask', 'landIceFrictionVelocity', 'topDrag', + 'topDragMagnitude', 'landIceFreshwaterFlux', + 'landIceHeatFlux', 'heatFluxToLandIce', + 'landIceBoundaryLayerTemperature', + 'landIceBoundaryLayerSalinity', + 'landIceHeatTransferVelocity', + 'landIceSaltTransferVelocity', + 'landIceInterfaceTemperature', + 'landIceInterfaceSalinity', 'accumulatedLandIceMass', + 'accumulatedLandIceHeat'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/land_ice_fluxes.nc', + filename2='restart_run/land_ice_fluxes.nc') + + variables = ['accumulatedFrazilIceMass', + 'accumulatedFrazilIceSalinity', + 'seaIceEnergy', 'frazilLayerThicknessTendency', + 'frazilTemperatureTendency', 'frazilSalinityTendency', + 'frazilSurfacePressure', + 'accumulatedLandIceFrazilMass'] + compare_variables(variables, self.config, work_dir=self.work_dir, + filename1='full_run/frazil.nc', + filename2='restart_run/frazil.nc') diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.full b/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.full new file mode 100644 index 0000000000..d99b25960d --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.full @@ -0,0 +1,3 @@ +config_start_time = '0001-01-01_00:00:00' +config_run_duration = '0000_00:10:00' +config_write_output_on_startup = .false. 
diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.restart b/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.restart new file mode 100644 index 0000000000..5aaf89c647 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/namelist.restart @@ -0,0 +1,5 @@ +config_start_time = '0001-01-01_00:00:00' +config_write_output_on_startup = .false. +config_do_restart = .true. +config_run_duration = '0000_00:05:00' +config_start_time = '0001-01-01_00:05:00' diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/restart_test.cfg b/compass/ocean/tests/ice_shelf_2d/restart_test/restart_test.cfg new file mode 100644 index 0000000000..7af3394ae1 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/restart_test.cfg @@ -0,0 +1,6 @@ +# Options related to adjusting the sea-surface height or land-ice pressure +# below ice shelves so they are dynamically consistent with one another +[ssh_adjustment] + +# the number of iterations of ssh adjustment to perform +iterations = 2 diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/streams.full b/compass/ocean/tests/ice_shelf_2d/restart_test/streams.full new file mode 100644 index 0000000000..529c7382ac --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/streams.full @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/ice_shelf_2d/restart_test/streams.restart b/compass/ocean/tests/ice_shelf_2d/restart_test/streams.restart new file mode 100644 index 0000000000..99be073a8e --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/restart_test/streams.restart @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/ice_shelf_2d/ssh_adjustment.py b/compass/ocean/tests/ice_shelf_2d/ssh_adjustment.py new file mode 100644 index 0000000000..6c2097b937 --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/ssh_adjustment.py @@ -0,0 +1,73 @@ +from compass.step import Step +from compass.ocean.iceshelf import adjust_ssh + + +class SshAdjustment(Step): 
+ """ + A step for iteratively adjusting the pressure from the weight of the ice + shelf to match the sea-surface height as part of ice-shelf 2D test cases + """ + def __init__(self, test_case, cores=1, min_cores=None, threads=1): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + """ + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name='ssh_adjustment', + cores=cores, min_cores=min_cores, threads=threads) + + # generate the namelist, replacing a few default options + # start with the same namelist settings as the forward run + self.add_namelist_file('compass.ocean.tests.ice_shelf_2d', + 'namelist.forward') + + # we don't want the global stats AM for this run + self.add_namelist_options({'config_AM_globalStats_enable': '.false.'}) + + # we want a shorter run and no freshwater fluxes under the ice shelf from + # these namelist options + self.add_namelist_file('compass.ocean.namelists', 'namelist.ssh_adjust') + + self.add_streams_file('compass.ocean.streams', 'streams.ssh_adjust') + + self.add_input_file(filename='adjusting_init0.nc', + target='../initial_state/initial_state.nc') + + self.add_input_file(filename='graph.info', + target='../initial_state/culled_graph.info') + + self.add_output_file(filename='adjusted_init.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + config = 
self.config + iteration_count = config.getint('ssh_adjustment', 'iterations') + adjust_ssh(variable='landIcePressure', iteration_count=iteration_count, + step=self) diff --git a/compass/ocean/tests/ice_shelf_2d/streams.forward b/compass/ocean/tests/ice_shelf_2d/streams.forward new file mode 100644 index 0000000000..15f3cab2ca --- /dev/null +++ b/compass/ocean/tests/ice_shelf_2d/streams.forward @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/ziso/__init__.py b/compass/ocean/tests/ziso/__init__.py new file mode 100644 index 0000000000..c27a9865ed --- /dev/null +++ b/compass/ocean/tests/ziso/__init__.py @@ -0,0 +1,52 @@ +from compass.testgroup import TestGroup +from compass.ocean.tests.ziso.default import Default +from compass.ocean.tests.ziso.with_frazil import WithFrazil +from compass.config import add_config + + +class Ziso(TestGroup): + """ + A test group for Zonally Invariant Southern Ocean (ZISO) test cases + """ + def __init__(self, mpas_core): + """ + mpas_core : compass.MpasCore + the MPAS core that this test group belongs to + """ + super().__init__(mpas_core=mpas_core, name='ziso') + + for resolution in ['20km']: + self.add_test_case( + Default(test_group=self, resolution=resolution)) + self.add_test_case( + WithFrazil(test_group=self, resolution=resolution)) + + +def configure(name, resolution, config): + """ + Modify the configuration options for this test case + + Parameters + ---------- + name : str + the name of the test case + + resolution : str + The resolution of the test case + + config : configparser.ConfigParser + Configuration options for this test case + """ + res_params = {'20km': {'nx': 50, + 'ny': 112, + 'dc': 20e3}} + + if resolution not in res_params: + raise ValueError('Unsupported resolution {}. 
Supported values are: ' + '{}'.format(resolution, list(res_params))) + res_params = res_params[resolution] + for param in res_params: + config.set('ziso', param, '{}'.format(res_params[param])) + + add_config(config, 'compass.ocean.tests.ziso.{}'.format(name), + '{}.cfg'.format(name), exception=False) diff --git a/compass/ocean/tests/ziso/default/__init__.py b/compass/ocean/tests/ziso/default/__init__.py new file mode 100644 index 0000000000..8a5202cad6 --- /dev/null +++ b/compass/ocean/tests/ziso/default/__init__.py @@ -0,0 +1,96 @@ +from compass.testcase import TestCase +from compass.ocean.tests.ziso.initial_state import InitialState +from compass.ocean.tests.ziso.forward import Forward +from compass.ocean.tests import ziso +from compass.validate import compare_variables, compare_timers + + +class Default(TestCase): + """ + The default test case for the ZISO test group simply creates the mesh and + initial condition, then performs a short forward run with analysis members + but without frazil. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.ziso.Ziso + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'default' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + res_params = {'20km': {'cores': 4, 'min_cores': 2}} + + if resolution not in res_params: + raise ValueError( + 'Unsupported resolution {}. 
Supported values are: ' + '{}'.format(resolution, list(res_params))) + + res_params = res_params[resolution] + + self.add_step( + InitialState(test_case=self, resolution=resolution, + with_frazil=False)) + step = Forward(test_case=self, resolution=resolution, + cores=res_params['cores'], + min_cores=res_params['min_cores'], + with_analysis=True, with_frazil=False) + + if resolution == '20km': + # particles are on only for the 20km test case + step.add_namelist_file('compass.ocean.tests.ziso.default', + 'namelist.{}.forward'.format(resolution)) + self.add_step(step) + + def configure(self): + """ + Modify the configuration options for this test case. + """ + ziso.configure(self.name, self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + config = self.config + work_dir = self.work_dir + + steps = self.steps_to_run + if 'forward' in steps: + variables = ['temperature', 'layerThickness'] + compare_variables( + variables, config, work_dir, + filename1='forward/output/output.0001-01-01_00.00.00.nc') + + variables = [ + 'xParticle', 'yParticle', 'zParticle', 'zLevelParticle', + 'buoyancyParticle', 'indexToParticleID', 'currentCell', + 'transfered', 'numTimesReset'] + compare_variables(variables, config, work_dir, + filename1='forward/analysis_members/' + 'lagrPartTrack.0001-01-01_00.00.00.nc') + + timers = ['init_lagrPartTrack', 'compute_lagrPartTrack', + 'write_lagrPartTrack', 'restart_lagrPartTrack', + 'finalize_lagrPartTrack'] + compare_timers(timers, config, work_dir, rundir1='forward') diff --git a/compass/ocean/tests/ziso/default/default.cfg b/compass/ocean/tests/ziso/default/default.cfg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/compass/ocean/tests/ziso/default/namelist.20km.forward b/compass/ocean/tests/ziso/default/namelist.20km.forward new file mode 100644 index 0000000000..afef462271 --- /dev/null +++ 
b/compass/ocean/tests/ziso/default/namelist.20km.forward @@ -0,0 +1,5 @@ +config_AM_lagrPartTrack_enable = .true. +config_AM_lagrPartTrack_sample_temperature = .true. +config_AM_lagrPartTrack_sample_salinity = .true. +config_AM_lagrPartTrack_reset_criteria = 'none' +config_AM_lagrPartTrack_reset_global_timestamp = '0000_00:00:00' diff --git a/compass/ocean/tests/ziso/forward.py b/compass/ocean/tests/ziso/forward.py new file mode 100644 index 0000000000..0ef937a765 --- /dev/null +++ b/compass/ocean/tests/ziso/forward.py @@ -0,0 +1,115 @@ +from compass.step import Step +from compass.model import partition, run_model +from compass.ocean import particles + + +class Forward(Step): + """ + A step for performing forward MPAS-Ocean runs as part of ZISO test cases. + + Attributes + ---------- + resolution : str + The resolution of the test case + + with_analysis : bool, optional + whether analysis members are enabled as part of the run + + with_frazil : bool, optional + whether the run includes frazil formation + """ + def __init__(self, test_case, resolution, name='forward', subdir=None, + cores=1, min_cores=None, threads=1, with_analysis=False, + with_frazil=False): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. 
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + with_analysis : bool, optional + whether analysis members are enabled as part of the run + + with_frazil : bool, optional + whether the run includes frazil formation + """ + self.resolution = resolution + self.with_analysis = with_analysis + self.with_frazil = with_frazil + if min_cores is None: + min_cores = cores + super().__init__(test_case=test_case, name=name, subdir=subdir, + cores=cores, min_cores=min_cores, threads=threads) + + self.add_namelist_file('compass.ocean.tests.ziso', 'namelist.forward') + self.add_streams_file('compass.ocean.tests.ziso', 'streams.forward') + + if with_analysis: + self.add_namelist_file('compass.ocean.tests.ziso', + 'namelist.analysis') + self.add_streams_file('compass.ocean.tests.ziso', + 'streams.analysis') + + if with_frazil: + self.add_namelist_options( + {'config_use_frazil_ice_formation': '.true.'}) + self.add_streams_file('compass.ocean.streams', 'streams.frazil') + + self.add_namelist_file('compass.ocean.tests.ziso', + 'namelist.{}.forward'.format(resolution)) + self.add_streams_file('compass.ocean.tests.ziso', + 'streams.{}.forward'.format(resolution)) + + self.add_input_file(filename='init.nc', + target='../initial_state/ocean.nc') + self.add_input_file(filename='forcing.nc', + target='../initial_state/forcing.nc') + self.add_input_file(filename='graph.info', + target='../initial_state/culled_graph.info') + + self.add_output_file(filename='output/output.0001-01-01_00.00.00.nc') + + if with_analysis: + self.add_output_file( + filename='analysis_members/lagrPartTrack.0001-01-01_00.00.00.nc') + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies + """ + self.add_model_as_input() + + def run(self): + """ + Run this step of the test case + """ + cores = self.cores + partition(cores, self.config, self.logger) + 
particles.write(init_filename='init.nc', particle_filename='particles.nc', + graph_filename='graph.info.part.{}'.format(cores), + types='buoyancy') + run_model(self, partition_graph=False) diff --git a/compass/ocean/tests/ziso/initial_state.py b/compass/ocean/tests/ziso/initial_state.py new file mode 100644 index 0000000000..e802597e3c --- /dev/null +++ b/compass/ocean/tests/ziso/initial_state.py @@ -0,0 +1,271 @@ +import xarray +import numpy + +from mpas_tools.planar_hex import make_planar_hex_mesh +from mpas_tools.io import write_netcdf +from mpas_tools.mesh.conversion import convert, cull + +from compass.ocean.vertical.zstar import compute_layer_thickness_and_zmid +from compass.ocean.vertical import generate_grid +from compass.step import Step + + +class InitialState(Step): + """ + A step for creating a mesh and initial condition for ZISO test cases + + Attributes + ---------- + resolution : str + The resolution of the test case + + with_frazil : bool + Whether frazil formation is included in the simulation + """ + + def __init__(self, test_case, resolution, with_frazil): + """ + Create the step + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + resolution : str + The resolution of the test case + + with_frazil : bool + Whether frazil formation is included in the simulation + """ + super().__init__(test_case=test_case, name='initial_state') + self.resolution = resolution + self.with_frazil = with_frazil + + for file in ['base_mesh.nc', 'culled_mesh.nc', 'culled_graph.info', + 'ocean.nc', 'forcing.nc']: + self.add_output_file(file) + + def run(self): + """ + Run this step of the test case + """ + config = self.config + logger = self.logger + + section = config['ziso'] + nx = section.getint('nx') + ny = section.getint('ny') + dc = section.getfloat('dc') + + dsMesh = make_planar_hex_mesh(nx=nx, ny=ny, dc=dc, nonperiodic_x=False, + nonperiodic_y=True) + write_netcdf(dsMesh, 'base_mesh.nc') + + dsMesh = cull(dsMesh, 
logger=logger) + dsMesh = convert(dsMesh, graphInfoFileName='culled_graph.info', + logger=logger) + write_netcdf(dsMesh, 'culled_mesh.nc') + + ds = _write_initial_state(config, dsMesh, self.with_frazil) + + _write_forcing(config, ds.yCell, ds.zMid) + + +def _write_initial_state(config, dsMesh, with_frazil): + section = config['ziso'] + reference_coriolis = section.getfloat('reference_coriolis') + coriolis_gradient = section.getfloat('coriolis_gradient') + + ds = dsMesh.copy() + + interfaces = generate_grid(config=config) + bottom_depth = config.getfloat('vertical_grid', 'bottom_depth') + + ds['refBottomDepth'] = ('nVertLevels', interfaces[1:]) + ds['refZMid'] = ('nVertLevels', -0.5 * (interfaces[1:] + interfaces[0:-1])) + ds['vertCoordMovementWeights'] = xarray.ones_like(ds.refBottomDepth) + + xCell = ds.xCell + yCell = ds.yCell + + shelf_depth = section.getfloat('shelf_depth') + slope_center_position = section.getfloat('slope_center_position') + slope_half_width = section.getfloat('slope_half_width') + + bottomDepth = (shelf_depth + 0.5 * (bottom_depth - shelf_depth) * + (1.0 + numpy.tanh((yCell - slope_center_position) / + slope_half_width))) + + refTopDepth = xarray.DataArray(data=interfaces[0:-1], + dims=('nVertLevels',)) + + cellMask = (refTopDepth < bottomDepth).transpose('nCells', 'nVertLevels') + + maxLevelCell = cellMask.sum(dim='nVertLevels') - 1 + + # We want full cells, so deepen bottomDepth to be the bottom of the last + # valid layer + bottomDepth = ds.refBottomDepth.isel(nVertLevels=maxLevelCell) + + restingThickness, layerThickness, zMid = compute_layer_thickness_and_zmid( + cellMask, ds.refBottomDepth, bottomDepth, maxLevelCell) + + layerThickness = layerThickness.expand_dims(dim='Time', axis=0) + zMid = zMid.expand_dims(dim='Time', axis=0) + + initial_temp_t1 = section.getfloat('initial_temp_t1') + initial_temp_t2 = section.getfloat('initial_temp_t2') + initial_temp_h1 = section.getfloat('initial_temp_h1') + initial_temp_mt = 
section.getfloat('initial_temp_mt') + if with_frazil: + extent = section.getfloat('meridional_extent') + frazil_anomaly = section.getfloat('frazil_temperature_anomaly') + distanceX = extent/4.0 - xCell + distanceY = extent/2.0 - yCell + distance = numpy.sqrt(distanceY**2 + distanceX**2) + scaleFactor = numpy.exp(-distance/extent*20.0) + + mask = zMid > -50. + + frazil_temp = (frazil_anomaly + + initial_temp_t2 * numpy.tanh(zMid / initial_temp_h1) + + initial_temp_mt * zMid + + mask * 1.0*numpy.cos(zMid/50.0 * numpy.pi/2.0)) + + temperature = (initial_temp_t1 + + initial_temp_t2 * numpy.tanh(zMid / initial_temp_h1) + + initial_temp_mt * zMid) + + temperature = ((1.0-scaleFactor) * temperature + + scaleFactor * frazil_temp) + temperature = temperature.transpose('Time', 'nCells', 'nVertLevels') + else: + temperature = (initial_temp_t1 + + initial_temp_t2 * numpy.tanh(zMid / initial_temp_h1) + + initial_temp_mt * zMid) + + salinity = 34.0 * xarray.ones_like(temperature) + + normalVelocity = xarray.zeros_like(ds.xEdge) + normalVelocity = normalVelocity.broadcast_like(ds.refBottomDepth) + normalVelocity = normalVelocity.transpose('nEdges', 'nVertLevels') + normalVelocity = normalVelocity.expand_dims(dim='Time', axis=0) + + ds['temperature'] = temperature + ds['salinity'] = salinity + ds['normalVelocity'] = normalVelocity + ds['layerThickness'] = layerThickness + ds['restingThickness'] = layerThickness + ds['zMid'] = zMid + ds['bottomDepth'] = bottomDepth + # fortran 1-based indexing + ds['maxLevelCell'] = maxLevelCell+1 + ds['fCell'] = reference_coriolis + yCell * coriolis_gradient + ds['fEdge'] = reference_coriolis + ds.yEdge * coriolis_gradient + ds['fVertex'] = reference_coriolis + ds.yVertex * coriolis_gradient + + write_netcdf(ds, 'ocean.nc') + return ds + + +def _write_forcing(config, yCell, zMid): + section = config['ziso'] + + extent = section.getfloat('meridional_extent') + mean_restoring_temp = section.getfloat('mean_restoring_temp') + restoring_temp_dev_ta = 
section.getfloat('restoring_temp_dev_ta') + restoring_temp_dev_tb = section.getfloat('restoring_temp_dev_tb') + restoring_temp_piston_vel = section.getfloat('restoring_temp_piston_vel') + y_trans = section.getfloat('wind_transition_position') + wind_stress_max = section.getfloat('wind_stress_max') + front_width = section.getfloat('antarctic_shelf_front_width') + front_max = section.getfloat('wind_stress_shelf_front_max') + restoring_sponge_l = section.getfloat('restoring_sponge_l') + restoring_temp_ze = section.getfloat('restoring_temp_ze') + restoring_temp_tau = section.getfloat('restoring_temp_tau') + + # set wind stress + windStressZonal = xarray.where( + yCell >= y_trans, + wind_stress_max * numpy.sin(numpy.pi * (yCell - y_trans) / + (extent - y_trans))**2, + front_max * numpy.sin(numpy.pi * (y_trans - yCell) / + front_width)**2) + + windStressZonal = xarray.where(yCell >= y_trans - front_width, + windStressZonal, 0.0) + + windStressZonal = windStressZonal.expand_dims(dim='Time', axis=0) + + windStressMeridional = xarray.zeros_like(windStressZonal) + + arg = (yCell - 0.5 * extent) / (0.5 * extent) + + # surface restoring + temperatureSurfaceRestoringValue = \ + (mean_restoring_temp + restoring_temp_dev_ta * numpy.tanh(2.0*arg) + + restoring_temp_dev_tb * arg) + temperatureSurfaceRestoringValue = \ + temperatureSurfaceRestoringValue.expand_dims(dim='Time', axis=0) + + temperaturePistonVelocity = \ + restoring_temp_piston_vel * xarray.ones_like( + temperatureSurfaceRestoringValue) + + salinitySurfaceRestoringValue = \ + 34.0 * xarray.ones_like(temperatureSurfaceRestoringValue) + salinityPistonVelocity = xarray.zeros_like(temperaturePistonVelocity) + + # set restoring at northern boundary + mask = extent - yCell <= 1.5 * restoring_sponge_l + mask = mask.broadcast_like(zMid).transpose('Time', 'nCells', 'nVertLevels') + + # convert from days to inverse seconds + rate = 1.0 / (restoring_temp_tau*86400.0) + + temperatureInteriorRestoringValue = xarray.where( + mask, 
(temperatureSurfaceRestoringValue * + numpy.exp(zMid/restoring_temp_ze)), 0.) + + temperatureInteriorRestoringRate = xarray.where( + mask, numpy.exp(-(extent-yCell)/restoring_sponge_l) * rate, 0.) + + salinityInteriorRestoringValue = xarray.where( + mask, 34.0, 0) + + # set restoring at southern boundary + mask = yCell <= 2.0 * restoring_sponge_l + mask = mask.broadcast_like(zMid).transpose('Time', 'nCells', 'nVertLevels') + + temperatureInteriorRestoringValue = xarray.where( + mask, temperatureSurfaceRestoringValue, + temperatureInteriorRestoringValue) + + temperatureInteriorRestoringRate = xarray.where( + mask, numpy.exp(-yCell/restoring_sponge_l) * rate, + temperatureInteriorRestoringRate) + + salinityInteriorRestoringValue = xarray.where( + mask, 34.0, salinityInteriorRestoringValue) + + salinityInteriorRestoringRate = \ + xarray.zeros_like(temperatureInteriorRestoringRate) + + dsForcing = xarray.Dataset() + dsForcing['windStressZonal'] = windStressZonal + dsForcing['windStressMeridional'] = windStressMeridional + dsForcing['temperaturePistonVelocity'] = temperaturePistonVelocity + dsForcing['salinityPistonVelocity'] = salinityPistonVelocity + dsForcing['temperatureSurfaceRestoringValue'] = \ + temperatureSurfaceRestoringValue + dsForcing['salinitySurfaceRestoringValue'] = salinitySurfaceRestoringValue + dsForcing['temperatureInteriorRestoringRate'] = \ + temperatureInteriorRestoringRate + dsForcing['salinityInteriorRestoringRate'] = salinityInteriorRestoringRate + dsForcing['temperatureInteriorRestoringValue'] = \ + temperatureInteriorRestoringValue + dsForcing['salinityInteriorRestoringValue'] = \ + salinityInteriorRestoringValue + + write_netcdf(dsForcing, 'forcing.nc') diff --git a/compass/ocean/tests/ziso/namelist.10km.forward b/compass/ocean/tests/ziso/namelist.10km.forward new file mode 100644 index 0000000000..ab954c537c --- /dev/null +++ b/compass/ocean/tests/ziso/namelist.10km.forward @@ -0,0 +1,4 @@ +config_dt = '00:06:00' +config_btr_dt = '00:00:18' 
+config_run_duration = '0000_00:18:01' +config_mom_del4 = 6.25e9 diff --git a/compass/ocean/tests/ziso/namelist.20km.forward b/compass/ocean/tests/ziso/namelist.20km.forward new file mode 100644 index 0000000000..40c038b724 --- /dev/null +++ b/compass/ocean/tests/ziso/namelist.20km.forward @@ -0,0 +1,5 @@ +config_dt = '00:00:30' +config_btr_dt = '00:00:15' +config_run_duration = '0000_00:01:30' +config_write_output_on_startup = .false. +config_mom_del4 = 5.0e10 diff --git a/compass/ocean/tests/ziso/namelist.analysis b/compass/ocean/tests/ziso/namelist.analysis new file mode 100644 index 0000000000..58d6a9983c --- /dev/null +++ b/compass/ocean/tests/ziso/namelist.analysis @@ -0,0 +1,20 @@ +config_AM_globalStats_enable = .true. +config_AM_globalStats_compute_on_startup = .true. +config_AM_globalStats_write_on_startup = .true. +config_AM_zonalMean_enable = .true. +config_AM_zonalMean_num_bins = 100 +config_AM_okuboWeiss_enable = .true. +config_AM_okuboWeiss_eddy_min_cells = 100 +config_AM_highFrequencyOutput_enable = .true. +config_AM_eliassenPalm_enable = .true. +config_AM_eliassenPalm_compute_interval = '03_00:00:00' +config_AM_eliassenPalm_nBuoyancyLayers = 100 +config_AM_eliassenPalm_rhomin_buoycoor = 1028.0 +config_AM_eliassenPalm_rhomax_buoycoor = 1030.7 +config_AM_mixedLayerDepths_enable = .true. +config_AM_timeFilters_enable = .true. +config_AM_timeFilters_initialize_filters = .false. +config_AM_lagrPartTrack_enable = .true. +config_AM_lagrPartTrack_reset_criteria = 'global_time' +config_AM_lagrPartTrack_reset_global_timestamp = '0029_23:59:59' +config_AM_lagrPartTrack_region_stream = 'none' diff --git a/compass/ocean/tests/ziso/namelist.forward b/compass/ocean/tests/ziso/namelist.forward new file mode 100644 index 0000000000..934f599653 --- /dev/null +++ b/compass/ocean/tests/ziso/namelist.forward @@ -0,0 +1,19 @@ +config_use_mom_del4 = .true. +config_mom_del4_div_factor = 10.0 +config_use_cvmix = .true. +config_use_cvmix_background = .true. 
+config_use_cvmix_convection = .true. +config_cvmix_background_diffusion = 5.0e-6 +config_cvmix_convective_basedOnBVF = .true. +config_use_cvmix_shear = .true. +config_cvmix_shear_mixing_scheme = 'KPP' +config_use_cvmix_kpp = .true. +config_use_bulk_wind_stress = .true. +config_eos_linear_alpha = 0.255 +config_eos_linear_Tref = 19.0 +config_eos_linear_Sref = 34.0 +config_eos_linear_densityref = 1025.0 +config_implicit_bottom_drag_coeff = 3.0e-3 +config_use_activeTracers_surface_bulk_forcing = .false. +config_use_activeTracers_surface_restoring = .true. +config_use_activeTracers_interior_restoring = .true. diff --git a/compass/ocean/tests/ziso/streams.20km.forward b/compass/ocean/tests/ziso/streams.20km.forward new file mode 100644 index 0000000000..efe121943c --- /dev/null +++ b/compass/ocean/tests/ziso/streams.20km.forward @@ -0,0 +1,6 @@ + + + + + diff --git a/compass/ocean/tests/ziso/streams.analysis b/compass/ocean/tests/ziso/streams.analysis new file mode 100644 index 0000000000..815c3fddf7 --- /dev/null +++ b/compass/ocean/tests/ziso/streams.analysis @@ -0,0 +1,54 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/ziso/streams.forward b/compass/ocean/tests/ziso/streams.forward new file mode 100644 index 0000000000..6a6601f513 --- /dev/null +++ b/compass/ocean/tests/ziso/streams.forward @@ -0,0 +1,58 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/compass/ocean/tests/ziso/with_frazil/__init__.py b/compass/ocean/tests/ziso/with_frazil/__init__.py new file mode 100644 index 0000000000..ef58bb904e --- /dev/null +++ b/compass/ocean/tests/ziso/with_frazil/__init__.py @@ -0,0 +1,87 @@ +from compass.testcase import TestCase +from compass.ocean.tests.ziso.initial_state import InitialState +from compass.ocean.tests.ziso.forward import Forward +from compass.ocean.tests import ziso +from compass.validate import 
compare_variables + + +class WithFrazil(TestCase): + """ + The with frazil test case for the ZISO test group simply creates the mesh + and initial condition, then performs a short forward run including frazil + formation. + + Attributes + ---------- + resolution : str + The resolution of the test case + """ + + def __init__(self, test_group, resolution): + """ + Create the test case + + Parameters + ---------- + test_group : compass.ocean.tests.ziso.Ziso + The test group that this test case belongs to + + resolution : str + The resolution of the test case + """ + name = 'with_frazil' + self.resolution = resolution + subdir = '{}/{}'.format(resolution, name) + super().__init__(test_group=test_group, name=name, + subdir=subdir) + + res_params = {'20km': {'cores': 4, 'min_cores': 2}} + + if resolution not in res_params: + raise ValueError( + 'Unsupported resolution {}. Supported values are: ' + '{}'.format(resolution, list(res_params))) + + res_params = res_params[resolution] + + self.add_step( + InitialState(test_case=self, resolution=resolution, + with_frazil=True)) + self.add_step( + Forward(test_case=self, resolution=resolution, + cores=res_params['cores'], + min_cores=res_params['min_cores'], + with_analysis=False, with_frazil=True)) + + def configure(self): + """ + Modify the configuration options for this test case. 
+ """ + ziso.configure(self.name, self.resolution, self.config) + + def run(self): + """ + Run each step of the test case + """ + # run the steps + super().run() + + # perform validation + config = self.config + work_dir = self.work_dir + + steps = self.steps_to_run + if 'forward' in steps: + variables = ['temperature', 'layerThickness'] + compare_variables( + variables, config, work_dir, + filename1='forward/output/output.0001-01-01_00.00.00.nc') + + variables = ['accumulatedFrazilIceMass', + 'accumulatedFrazilIceSalinity', + 'seaIceEnergy', 'frazilLayerThicknessTendency', + 'frazilTemperatureTendency', 'frazilSalinityTendency', + 'frazilSurfacePressure', + 'accumulatedLandIceFrazilMass'] + compare_variables(variables, config, work_dir, + filename1='forward/frazil.nc') diff --git a/compass/ocean/tests/ziso/with_frazil/with_frazil.cfg b/compass/ocean/tests/ziso/with_frazil/with_frazil.cfg new file mode 100644 index 0000000000..10961aa5f0 --- /dev/null +++ b/compass/ocean/tests/ziso/with_frazil/with_frazil.cfg @@ -0,0 +1,15 @@ +# config options for Zonally periodic Idealized Southern Ocean (ZISO) +# testcases +[ziso] + +# Initial temperature profile constant +initial_temp_t1 = 0.0 + +# Initial temperature profile tanh coefficient +initial_temp_t2 = -1.0 + +# Initial temperature profile tanh length scale +initial_temp_h1 = 300.0 + +# Initial temperature profile linear coefficient +initial_temp_mt = 0.0 diff --git a/compass/ocean/tests/ziso/ziso.cfg b/compass/ocean/tests/ziso/ziso.cfg new file mode 100644 index 0000000000..b1d38f0d60 --- /dev/null +++ b/compass/ocean/tests/ziso/ziso.cfg @@ -0,0 +1,80 @@ +# Options related to the vertical grid +[vertical_grid] + +# the type of vertical grid +grid_type = 100layerE3SMv1 + +# Depth of the bottom of the ocean +bottom_depth = 2500.0 + + +# config options for Zonally periodic Idealized Southern Ocean (ZISO) +# testcases +[ziso] + +# meridional position where wind stress switches to easterly +wind_transition_position = 
800000.0 + +# meridional extent over which the easterly wind stress is applied" +antarctic_shelf_front_width = 600000 + +# Maximum zonal wind stress value in the shelf front region, following Stewart +# et al. (2013) +wind_stress_shelf_front_max = -0.05 + +# Meridional extent of the domain +meridional_extent = 2.0e6 + +# Shelf depth in the domain +shelf_depth = 500.0 + +# Shelf half width +slope_half_width = 1.0e5 + +# Slope center position +slope_center_position = 5.0e5 + +# Reference coriolis parameter +reference_coriolis = -1e-4 + +# Meridional gradient of coriolis parameter +coriolis_gradient = 1e-11 + +# Maximum zonal wind stress value +wind_stress_max = 0.2 + +# Mean restoring temperature +mean_restoring_temp = 3.0 + +# Tanh coefficient in restoring temperature +restoring_temp_dev_ta = 2.0 + +# Linear coefficient in restoring temperature +restoring_temp_dev_tb = 2.0 + +# Time scale for interior restoring of temperature +restoring_temp_tau = 30.0 + +# Restoring piston velocity for surface temperature +restoring_temp_piston_vel = 1.93e-5 + +# Vertical e-folding scale in restoring temperature for northern wall +restoring_temp_ze = 1250.0 + +# E-folding distance parameter for the sponge vertical temperature profile +restoring_sponge_l = 8.0e4 + +# Initial temperature profile constant +initial_temp_t1 = 6.0 + +# Initial temperature profile tanh coefficient +initial_temp_t2 = 3.6 + +# Initial temperature profile tanh length scale +initial_temp_h1 = 300.0 + +# Initial temperature profile linear coefficient +initial_temp_mt = 7.5e-5 + +# Temperature anomaly to produce frazil +frazil_temperature_anomaly = -3.0 diff --git a/compass/ocean/vertical/100layerE3SMv1.json b/compass/ocean/vertical/100layerE3SMv1.json new file mode 100644 index 0000000000..2d2b62a57c --- /dev/null +++ b/compass/ocean/vertical/100layerE3SMv1.json @@ -0,0 +1,101 @@ +[0.0000e0, + 0.1510e1, + 0.3135e1, + 0.4882e1, + 0.6761e1, + 0.8779e1, + 0.1095e2, + 0.1327e2, + 0.1577e2, + 0.1845e2, + 0.2132e2, 
+ 0.2440e2, + 0.2769e2, + 0.3122e2, + 0.3500e2, + 0.3904e2, + 0.4335e2, + 0.4797e2, + 0.5289e2, + 0.5815e2, + 0.6377e2, + 0.6975e2, + 0.7614e2, + 0.8294e2, + 0.9018e2, + 0.9790e2, + 0.1061e3, + 0.1148e3, + 0.1241e3, + 0.1340e3, + 0.1445e3, + 0.1556e3, + 0.1674e3, + 0.1799e3, + 0.1932e3, + 0.2072e3, + 0.2221e3, + 0.2379e3, + 0.2546e3, + 0.2722e3, + 0.2909e3, + 0.3106e3, + 0.3314e3, + 0.3534e3, + 0.3766e3, + 0.4011e3, + 0.4269e3, + 0.4541e3, + 0.4827e3, + 0.5128e3, + 0.5445e3, + 0.5779e3, + 0.6130e3, + 0.6498e3, + 0.6885e3, + 0.7291e3, + 0.7717e3, + 0.8164e3, + 0.8633e3, + 0.9124e3, + 0.9638e3, + 0.1018e4, + 0.1074e4, + 0.1133e4, + 0.1194e4, + 0.1259e4, + 0.1326e4, + 0.1396e4, + 0.1469e4, + 0.1546e4, + 0.1625e4, + 0.1708e4, + 0.1794e4, + 0.1884e4, + 0.1978e4, + 0.2075e4, + 0.2176e4, + 0.2281e4, + 0.2390e4, + 0.2503e4, + 0.2620e4, + 0.2742e4, + 0.2868e4, + 0.2998e4, + 0.3134e4, + 0.3274e4, + 0.3418e4, + 0.3568e4, + 0.3723e4, + 0.3882e4, + 0.4047e4, + 0.4218e4, + 0.4393e4, + 0.4574e4, + 0.4761e4, + 0.4953e4, + 0.5151e4, + 0.5354e4, + 0.5564e4, + 0.5779e4, + 0.6000e4] \ No newline at end of file diff --git a/compass/ocean/vertical/60layerPHC.json b/compass/ocean/vertical/60layerPHC.json new file mode 100644 index 0000000000..b2f57e9ea6 --- /dev/null +++ b/compass/ocean/vertical/60layerPHC.json @@ -0,0 +1,63 @@ +[ + 0.0, + 10.0, + 20.0, + 30.0, + 40.0, + 50.0, + 60.0, + 70.0, + 80.0, + 90.0, + 100.0, + 110.0, + 120.0, + 130.0, + 140.0, + 150.0, + 160.0, + 170.19677734375, + 180.76129150390625, + 191.82119750976562, + 203.49929809570312, + 215.92340087890625, + 229.23312377929688, + 243.58447265625, + 259.1557922363281, + 276.1524963378906, + 294.8147277832031, + 315.4236145019531, + 338.3122863769531, + 363.8746032714844, + 392.5804748535156, + 424.9887390136719, + 461.7665710449219, + 503.7067565917969, + 551.7491760253906, + 606.9965515136719, + 670.7285461425781, + 744.3980407714844, + 829.6069641113281, + 928.0434265136719, + 1041.3681945800781, + 1171.0402526855469, 
+ 1318.0935363769531, + 1482.9008483886719, + 1664.9919738769531, + 1863.0146179199219, + 2074.873809814453, + 2298.039276123047, + 2529.903594970703, + 2768.098846435547, + 3010.670196533203, + 3256.138885498047, + 3503.448516845703, + 3751.892791748047, + 4001.011505126953, + 4250.525604248047, + 4500.259552001953, + 4750.121307373047, + 5000.045684814453, + 5250.010955810547, + 5499.989044189453 +] \ No newline at end of file diff --git a/compass/ocean/vertical/__init__.py b/compass/ocean/vertical/__init__.py new file mode 100644 index 0000000000..5d97893a24 --- /dev/null +++ b/compass/ocean/vertical/__init__.py @@ -0,0 +1,249 @@ +import numpy +from importlib import resources +import json +from netCDF4 import Dataset +import numpy as np +from scipy.optimize import root_scalar + + +def generate_grid(config): + """ + Generate a vertical grid for a test case, using the config options in the + ``vertical_grid`` section + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options with parameters used to construct the vertical + grid + + Returns + ------- + interfaces : numpy.ndarray + A 1D array of positive depths for layer interfaces in meters + """ + section = config['vertical_grid'] + grid_type = section.get('grid_type') + if grid_type == 'uniform': + vert_levels = section.getint('vert_levels') + interfaces = _generate_uniform(vert_levels) + elif grid_type == 'tanh_dz': + vert_levels = section.getint('vert_levels') + min_layer_thickness = section.getfloat('min_layer_thickness') + max_layer_thickness = section.getfloat('max_layer_thickness') + bottom_depth = section.getfloat('bottom_depth') + interfaces = _create_tanh_dz_grid(vert_levels, bottom_depth, + min_layer_thickness, + max_layer_thickness) + + elif grid_type in ['60layerPHC', '100layerE3SMv1']: + interfaces = _read_json(grid_type) + else: + raise ValueError('Unexpected grid type: {}'.format(grid_type)) + + if config.has_option('vertical_grid', 'bottom_depth') and \ + grid_type != 
'tanh_dz': + bottom_depth = section.getfloat('bottom_depth') + # renormalize to the requested range + interfaces = (bottom_depth/interfaces[-1]) * interfaces + + return interfaces + + +def write_grid(interfaces, out_filename): + """ + write the vertical grid to a file + + Parameters + ---------- + interfaces : numpy.ndarray + A 1D array of positive depths for layer interfaces in meters + + out_filename : str + MPAS file name for output of vertical grid + """ + + nz = len(interfaces) - 1 + + # open a new netCDF file for writing. + ncfile = Dataset(out_filename, 'w') + # create the depth_t dimension. + ncfile.createDimension('nVertLevels', nz) + + refBottomDepth = ncfile.createVariable( + 'refBottomDepth', np.dtype('float64').char, ('nVertLevels',)) + refMidDepth = ncfile.createVariable( + 'refMidDepth', np.dtype('float64').char, ('nVertLevels',)) + refLayerThickness = ncfile.createVariable( + 'refLayerThickness', np.dtype('float64').char, ('nVertLevels',)) + + botDepth = interfaces[1:] + midDepth = 0.5 * (interfaces[0:-1] + interfaces[1:]) + + refBottomDepth[:] = botDepth + refMidDepth[:] = midDepth + refLayerThickness[:] = interfaces[1:] - interfaces[0:-1] + ncfile.close() + + +def _generate_uniform(vert_levels): + """ Generate uniform layer interfaces between 0 and 1 """ + interfaces = numpy.linspace(0., 1., vert_levels+1) + return interfaces + + +def _read_json(grid_type): + """ Read the grid interfaces from a json file """ + + filename = '{}.json'.format(grid_type) + with resources.open_text("compass.ocean.vertical", filename) as data_file: + data = json.load(data_file) + interfaces = numpy.array(data) + + return interfaces + + +def _create_tanh_dz_grid(num_vert_levels, bottom_depth, min_layer_thickness, + max_layer_thickness): + """ + Creates the vertical grid for MPAS-Ocean and writes it to a NetCDF file + + Parameters + ---------- + num_vert_levels : int + Number of vertical levels for the grid + + bottom_depth : float + bottom depth for the chosen vertical 
coordinate [m] + + min_layer_thickness : float + Target thickness of the first layer [m] + + max_layer_thickness : float + Target maximum thickness in column [m] + + Returns + ------- + interfaces : numpy.ndarray + A 1D array of positive depths for layer interfaces in meters + """ + + nz = num_vert_levels + dz1 = min_layer_thickness + dz2 = max_layer_thickness + + # the bracket here is large enough that it should hopefully encompass any + # reasonable value of delta, the characteristic length scale over which + # dz varies. The args are passed on to the match_bottom function below, + # and the root finder will determine a value of delta (sol.root) such that + # match_bottom is within a tolerance of zero, meaning the bottom of the + # coordinate computed by cumsum_z hits bottom_depth almost exactly + sol = root_scalar(_match_bottom, method='brentq', + bracket=[dz1, 10 * bottom_depth], + args=(nz, dz1, dz2, bottom_depth)) + + delta = sol.root + layerThickness, z = _cumsum_z(delta, nz, dz1, dz2) + interfaces = -z + + return interfaces + + +def _match_bottom(delta, nz, dz1, dz2, bottom_depth): + """ + Compute the difference between the bottom depth computed with the given + parameters and the target ``bottom_depth``, used in the root finding + algorithm to determine which value of ``delta`` to use. + + Parameters + ---------- + delta : float + The characteristic length scale over which dz varies (this parameter + will be optimized to hit a target depth in a target number of layers) + + nz : int + The number of layers + + dz1 : float + The layer thickness at the top of the ocean (z = 0) + + dz2 : float + The layer thickness at z --> -infinity + + bottom_depth: float + depth of the bottom of the ocean that should match the bottom layer + interface. Note: the bottom_depth is positive, whereas the layer + interfaces are negative. + + Returns + ------- + diff : float + The computed bottom depth minus the target ``bottom_depth``. 
``diff`` + should be zero when we have found the desired ``delta``. + """ + _, z = _cumsum_z(delta, nz, dz1, dz2) + diff = -bottom_depth - z[-1] + return diff + + +def _cumsum_z(delta, nz, dz1, dz2): + """ + Compute layer interface depths and layer thicknesses over ``nz`` layers + + Parameters + ---------- + delta : float + The characteristic length scale over which dz varies (this parameter + will be optimized to hit a target depth in a target number of layers) + + nz : int + The number of layers + + dz1 : float + The layer thickness at the top of the ocean (z = 0) + + dz2 : float + The layer thickness at z --> -infinity + + Returns + ------- + dz : numpy.ndarray + The layer thicknesses for each layer + + z : numpy.ndarray + The depth (positive up) of each layer interface (``nz + 1`` total + elements) + """ + dz = np.zeros(nz) + z = np.zeros(nz + 1) + for zindex in range(nz): + dz[zindex] = _dz_z(z[zindex], dz1, dz2, delta) + z[zindex + 1] = z[zindex] - dz[zindex] + return dz, z + + +def _dz_z(z, dz1, dz2, delta): + """ + layer thickness as a function of depth + + Parameters + ---------- + z : float + Depth coordinate (positive up) at which to find the layer thickness + + dz1 : float + The layer thickness at the top of the ocean (z = 0) + + dz2 : float + The layer thickness at z --> -infinity + + delta : float + The characteristic length scale over which dz varies (this parameter + will be optimized to hit a target depth in a target numer of layers) + + Returns + ------- + dz : float + The layer thickness + """ + return (dz2 - dz1) * np.tanh(-z * np.pi / delta) + dz1 diff --git a/compass/ocean/vertical/zstar.py b/compass/ocean/vertical/zstar.py new file mode 100644 index 0000000000..b4389cab0f --- /dev/null +++ b/compass/ocean/vertical/zstar.py @@ -0,0 +1,76 @@ +import xarray + + +def compute_layer_thickness_and_zmid(cellMask, refBottomDepth, bottomDepth, + maxLevelCell, ssh=None): + """ + Initialize the vertical coordinate to a z-star coordinate + + Parameters + 
---------- + cellMask : xarray.DataArray + A bool mask indicating where cells are valid (above the bathymetry) + + refBottomDepth : xarray.DataArray + The (positive down) depth of the bottom of each level in a 1D reference + depth coordinate used for MPAS's z-star coordinate + + bottomDepth : xarray.DataArray + The (positive down) depth of the bathymetry for each cell in the mesh + + maxLevelCell : xarray.DataArray + The zero-based index of the last valid level in each cell in the mesh + + ssh : xarray.DataArray, optional + The sea surface height for each cell in the mesh, assumed to be all + zeros if not supplied + + Returns + ------- + restingThickness : xarray.DataArray + A reference thickness of each layer (level) for all cells and levels in + the mesh if ``ssh`` were zero everywhere, the same as + ``layerThickness`` if ``ssh`` is not provided + + layerThickness : xarray.DataArray + The thickness of each layer (level) for all cells and levels in the + mesh + + zMid : xarray.DataArray + The vertical location of the middle of each level for all cells and + levels in the mesh + """ + + nVertLevels = cellMask.sizes['nVertLevels'] + + refLayerThickness = refBottomDepth.isel(nVertLevels=0) + + restingThicknesses = [cellMask.isel(nVertLevels=0) * refLayerThickness] + for levelIndex in range(1, nVertLevels): + refLayerThickness = (refBottomDepth.isel(nVertLevels=levelIndex) - + refBottomDepth.isel(nVertLevels=levelIndex-1)) + sliceThickness = \ + cellMask.isel(nVertLevels=levelIndex)*refLayerThickness + mask = levelIndex == maxLevelCell + partialThickness = (bottomDepth - + refBottomDepth.isel(nVertLevels=levelIndex-1)) + sliceThickness = xarray.where(mask, partialThickness, sliceThickness) + sliceThickness = sliceThickness.where( + cellMask.isel(nVertLevels=levelIndex)) + restingThicknesses.append(sliceThickness) + + restingThickness = xarray.concat(restingThicknesses, dim='nVertLevels') + restingThickness = restingThickness.transpose('nCells', 'nVertLevels') + + if 
ssh is not None: + layerStretch = (ssh + bottomDepth) / bottomDepth + layerThickness = restingThickness * layerStretch + else: + ssh = xarray.zeros_like(bottomDepth) + layerThickness = restingThickness + + zBot = ssh - layerThickness.cumsum(dim='nVertLevels') + + zMid = zBot + 0.5*layerThickness + + return restingThickness, layerThickness, zMid diff --git a/compass/parallel.py b/compass/parallel.py new file mode 100644 index 0000000000..f33b4022a7 --- /dev/null +++ b/compass/parallel.py @@ -0,0 +1,45 @@ +import os +import multiprocessing +import subprocess + + +def get_available_cores_and_nodes(config): + """ + Get the number of total cores and nodes available for running steps + + Parameters + ---------- + config : configparser.ConfigParser + Configuration options for the test case + + Returns + ------- + cores : int + The number of cores available for running steps + + nodes : int + The number of cores available for running steps + """ + + parallel_system = config.get('parallel', 'system') + if parallel_system == 'slurm': + job_id = os.environ['SLURM_JOB_ID'] + args = ['squeue', '--noheader', '-j', job_id, '-o', '%C'] + cores = _get_subprocess_int(args) + args = ['squeue', '--noheader', '-j', job_id, '-o', '%D'] + nodes = _get_subprocess_int(args) + elif parallel_system == 'single_node': + cores_per_node = config.getint('parallel', 'cores_per_node') + cores = min(multiprocessing.cpu_count(), cores_per_node) + nodes = 1 + else: + raise ValueError('Unexpected parallel system: {}'.format( + parallel_system)) + + return cores, nodes + + +def _get_subprocess_int(args): + value = subprocess.check_output(args) + value = int(value.decode('utf-8').strip('\n')) + return value diff --git a/compass/provenance.py b/compass/provenance.py new file mode 100644 index 0000000000..4955a15f09 --- /dev/null +++ b/compass/provenance.py @@ -0,0 +1,82 @@ +import os +import sys +import subprocess + + +def write(work_dir, test_cases): + """ + Write a file with provenance, such as the git 
version, conda packages, + command, and test cases, to the work directory + + Parameters + ---------- + work_dir : str + The path to the work directory where the test cases will be set up + + test_cases : dict + A dictionary describing all of the test cases and their steps + """ + try: + args = ['git', 'describe', '--tags', '--dirty', '--always'] + git_version = subprocess.check_output(args).decode('utf-8') + git_version = git_version.strip('\n') + except subprocess.CalledProcessError: + git_version = None + + try: + args = ['conda', 'list'] + conda_list = subprocess.check_output(args).decode('utf-8') + except subprocess.CalledProcessError: + conda_list = None + + calling_command = ' '.join(sys.argv) + + try: + os.makedirs(work_dir) + except OSError: + pass + + provenance_path = '{}/provenance'.format(work_dir) + if os.path.exists(provenance_path): + provenance_file = open(provenance_path, 'a') + provenance_file.write('\n') + else: + provenance_file = open(provenance_path, 'w') + + provenance_file.write('**************************************************' + '*********************\n') + if git_version is not None: + provenance_file.write('git_version: {}\n\n'.format(git_version)) + provenance_file.write('command: {}\n\n'.format(calling_command)) + provenance_file.write('test cases:\n') + + for path, test_case in test_cases.items(): + prefix = ' ' + lines = list() + to_print = {'path': test_case.path, + 'name': test_case.name, + 'MPAS core': test_case.mpas_core.name, + 'test group': test_case.test_group.name, + 'subdir': test_case.subdir} + for key in to_print: + key_string = '{}: '.format(key).ljust(15) + lines.append('{}{}{}'.format(prefix, key_string, to_print[key])) + lines.append('{}steps:'.format(prefix)) + for step in test_case.steps.values(): + if step.name == step.subdir: + lines.append('{} - {}'.format(prefix, step.name)) + else: + lines.append('{} - {}: {}'.format(prefix, step.name, + step.subdir)) + lines.append('') + print_string = '\n'.join(lines) + + 
provenance_file.write('{}\n'.format(print_string)) + + if conda_list is not None: + provenance_file.write('conda list:\n') + provenance_file.write('{}\n'.format(conda_list)) + + provenance_file.write('**************************************************' + '*********************\n') + provenance_file.close() diff --git a/compass/setup.py b/compass/setup.py new file mode 100644 index 0000000000..452bca5e6c --- /dev/null +++ b/compass/setup.py @@ -0,0 +1,273 @@ +import argparse +import sys +import configparser +import os +import pickle + +from compass.mpas_cores import get_mpas_cores +from compass.config import add_config, ensure_absolute_paths +from compass.io import symlink +from compass import provenance + + +def setup_cases(tests=None, numbers=None, config_file=None, machine=None, + work_dir=None, baseline_dir=None, mpas_model_path=None): + """ + Set up one or more test cases + + Parameters + ---------- + tests : list of str, optional + Relative paths for a test cases to set up + + numbers : list of int, optional + Case numbers to setup, as listed from ``compass list`` + + config_file : str, optional + Configuration file with custom options for setting up and running test + cases + + machine : str, optional + The name of one of the machines with defined config options, which can + be listed with ``compass list --machines`` + + work_dir : str, optional + A directory that will serve as the base for creating case directories + + baseline_dir : str, optional + Location of baseslines that can be compared to + + mpas_model_path : str, optional + The relative or absolute path to the root of a branch where the MPAS + model has been built + + Returns + ------- + test_cases : dict of compass.TestCase + A dictionary of test cases, with the relative path in the work + directory as keys + """ + + if config_file is None and machine is None: + raise ValueError('At least one of config_file and machine is needed.') + + if tests is None and numbers is None: + raise ValueError('At 
least one of tests or numbers is needed.') + + if work_dir is None: + work_dir = os.getcwd() + + mpas_cores = get_mpas_cores() + + all_test_cases = dict() + for mpas_core in mpas_cores: + for test_group in mpas_core.test_groups.values(): + for test_case in test_group.test_cases.values(): + all_test_cases[test_case.path] = test_case + + test_cases = dict() + if numbers is not None: + keys = list(all_test_cases) + for number in numbers: + if number >= len(keys): + raise ValueError('test number {} is out of range. There are ' + 'only {} tests.'.format(number, len(keys))) + path = keys[number] + test_cases[path] = all_test_cases[path] + + if tests is not None: + for path in tests: + if path not in all_test_cases: + raise ValueError('Test case with path {} is not in ' + 'test_cases'.format(path)) + test_cases[path] = all_test_cases[path] + + provenance.write(work_dir, test_cases) + + print('Setting up test cases:') + for path, test_case in test_cases.items(): + setup_case(path, test_case, config_file, machine, work_dir, + baseline_dir, mpas_model_path) + + return test_cases + + +def setup_case(path, test_case, config_file, machine, work_dir, baseline_dir, + mpas_model_path): + """ + Set up one or more test cases + + Parameters + ---------- + path : str + Relative path for a test cases to set up + + test_case : compass.TestCase + A test case to set up + + config_file : str + Configuration file with custom options for setting up and running test + cases + + machine : str + The name of one of the machines with defined config options, which can + be listed with ``compass list --machines`` + + work_dir : str + A directory that will serve as the base for creating case directories + + baseline_dir : str + Location of baseslines that can be compared to + + mpas_model_path : str + The relative or absolute path to the root of a branch where the MPAS + model has been built + """ + + print(' {}'.format(path)) + + config = configparser.ConfigParser( + 
interpolation=configparser.ExtendedInterpolation()) + + # start with default compass config options + add_config(config, 'compass', 'default.cfg') + + # add the machine config file + if machine is None: + machine = 'default' + add_config(config, 'compass.machines', '{}.cfg'.format(machine)) + + # add the config options for the MPAS core + mpas_core = test_case.mpas_core.name + add_config(config, 'compass.{}'.format(mpas_core), + '{}.cfg'.format(mpas_core)) + + # add the config options for the configuration (if defined) + test_group = test_case.test_group.name + add_config(config, 'compass.{}.tests.{}'.format(mpas_core, test_group), + '{}.cfg'.format(test_group), exception=False) + + test_case_dir = os.path.join(work_dir, path) + try: + os.makedirs(test_case_dir) + except OSError: + pass + test_case.work_dir = test_case_dir + test_case.base_work_dir = work_dir + + # add config options specific to the test case + test_case.config = config + test_case.configure() + + # add the custom config file last, so these options are the defaults + if config_file is not None: + config.read(config_file) + + # add the baseline directory for this test case + if baseline_dir is not None: + baseline_root = os.path.join(baseline_dir, path) + config.set('paths', 'baseline_dir', baseline_root) + + # set the mpas_model path from the command line if provided + if mpas_model_path is not None: + config.set('paths', 'mpas_model', mpas_model_path) + + # make sure all paths in the paths, namelists and streams sections are + # absolute paths + ensure_absolute_paths(config) + + # write out the config file + test_case_config = '{}.cfg'.format(test_case.name) + test_case.config_filename = test_case_config + with open(os.path.join(test_case_dir, test_case_config), 'w') as f: + config.write(f) + + # iterate over steps + for step in test_case.steps.values(): + # make the step directory if it doesn't exist + step_dir = os.path.join(work_dir, step.path) + try: + os.makedirs(step_dir) + except OSError: + 
pass + + symlink(os.path.join('..', test_case_config), + os.path.join(step_dir, test_case_config)) + + test_case_pickle = '{}.pickle'.format(test_case.name) + symlink(os.path.join('..', test_case_pickle), + os.path.join(step_dir, + 'test_case_{}'.format(test_case_pickle))) + + step.work_dir = step_dir + step.base_work_dir = work_dir + step.config_filename = test_case_config + step.config = config + + # set up the step + step.setup() + + # write a run script for each step + _write_run(step) + + # write a run script for each test case + _write_run(test_case) + + +def main(): + parser = argparse.ArgumentParser( + description='Set up one or more test cases', prog='compass setup') + parser.add_argument("-t", "--test", dest="test", + help="Relative path for a test case to set up", + metavar="PATH") + parser.add_argument("-n", "--case_number", nargs='+', dest="case_num", + type=int, + help="Case number(s) to setup, as listed from " + "'compass list'. Can be a space-separated" + "list of case numbers.", metavar="NUM") + parser.add_argument("-f", "--config_file", dest="config_file", + help="Configuration file for test case setup", + metavar="FILE") + parser.add_argument("-m", "--machine", dest="machine", + help="The name of the machine for loading machine-" + "related config options", metavar="MACH") + parser.add_argument("-w", "--work_dir", dest="work_dir", + help="If set, case directories are created in " + "work_dir rather than the current directory.", + metavar="PATH") + parser.add_argument("-b", "--baseline_dir", dest="baseline_dir", + help="Location of baselines that can be compared to", + metavar="PATH") + parser.add_argument("-p", "--mpas_model", dest="mpas_model", + help="The path to the build of the MPAS model for the " + "core.", + metavar="PATH") + + args = parser.parse_args(sys.argv[2:]) + if args.test is None: + tests = None + else: + tests = [args.test] + setup_cases(tests=tests, numbers=args.case_num, + config_file=args.config_file, machine=args.machine, + 
work_dir=args.work_dir, baseline_dir=args.baseline_dir, + mpas_model_path=args.mpas_model) + + +def _write_run(test): + """pickle the test/step info and write the run script""" + + # if compass/__init__.py exists, we're using a local version of the compass + # package and we'll want to link to that in the tests and steps + compass_path = os.path.join(os.getcwd(), 'compass') + if os.path.exists(os.path.join(compass_path, '__init__.py')): + symlink(compass_path, os.path.join(test.work_dir, 'compass')) + + # pickle the test or step dictionary for use at runtime + pickle_file = os.path.join(test.work_dir, + '{}.pickle'.format(test.name)) + with open(pickle_file, 'wb') as handle: + pickle.dump(test, handle, protocol=pickle.HIGHEST_PROTOCOL) + + # write a run script + test.generate() diff --git a/compass/step/__init__.py b/compass/step/__init__.py new file mode 100644 index 0000000000..8b1eca0f93 --- /dev/null +++ b/compass/step/__init__.py @@ -0,0 +1,550 @@ +import os +import stat +from jinja2 import Template +from importlib import resources +from lxml import etree + +from compass.io import download, symlink +import compass.namelist +import compass.streams + + +class Step: + """ + The base class for a step of a test cases, such as setting up a mesh, + creating an initial condition, or running the MPAS core forward in time. + The step is the smallest unit of work in compass that can be run on its + own by a user, though users will typically run full test cases or test + suites. 
+ + Attributes + ---------- + name : str + the name of the test case + + test_case : compass.TestCase + The test case this step belongs to + + test_group : compass.TestGroup + The test group the test case belongs to + + mpas_core : compass.MpasCore + The MPAS core the test group belongs to + + subdir : str + the subdirectory for the step + + path : str + the path within the base work directory of the step, made up of + ``mpas_core``, ``test_group``, the test case's ``subdir`` and the + step's ``subdir`` + + cores : int + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int + the number of cores the step requires. If the system has fewer + than this number of cores, the step will fail + + threads : int + the number of threads the step will use + + max_memory : int + the amount of memory that the step is allowed to use in MB. + This is currently just a placeholder for later use with task + parallelism + + max_disk : int + the amount of disk space that the step is allowed to use in MB. + This is currently just a placeholder for later use with task + parallelism + + input_data : list of dict + a list of dict used to define input files typically to be + downloaded to a database and/or symlinked in the work directory + + inputs : list of str + a list of absolute paths of input files produced from ``input_data`` as + part of setting up the step. These input files must all exist at run + time or the step will raise an exception + + outputs : list of str + a list of absolute paths of output files produced by this step and + available as inputs to other test cases and steps. 
These files must + exist after the test has run or an exception will be raised + + namelist_data : dict + a dictionary used internally to keep track of updates to the default + namelist options from calls to + :py:meth:`compass.Step.add_namelist_file` + and :py:meth:`compass.Step.add_namelist_options` + + streams_data : dict + a dictionary used internally to keep track of updates to the default + streams from calls to :py:meth:`compass.Step.add_streams_file` + + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults + for the machine, core and configuration + + config_filename : str + The local name of the config file that ``config`` has been written to + during setup and read from during run + + work_dir : str + The step's work directory, defined during setup as the combination + of ``base_work_dir`` and ``path`` + + base_work_dir : str + The base work directory + + logger : logging.Logger + A logger for output from the step + + log_filename : str + At run time, the name of a log file where output/errors from the step + are being logged, or ``None`` if output is to stdout/stderr + """ + + def __init__(self, test_case, name, subdir=None, cores=1, min_cores=1, + threads=1, max_memory=1000, max_disk=1000): + """ + Create a new test case + + Parameters + ---------- + test_case : compass.TestCase + The test case this step belongs to + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the step. The default is ``name`` + + cores : int, optional + the number of cores the step would ideally use. If fewer cores + are available on the system, the step will run on all available + cores as long as this is not below ``min_cores`` + + min_cores : int, optional + the number of cores the step requires. 
If the system has fewer + than this number of cores, the step will fail + + threads : int, optional + the number of threads the step will use + + max_memory : int, optional + the amount of memory that the step is allowed to use in MB. + This is currently just a placeholder for later use with task + parallelism + + max_disk : int, optional + the amount of disk space that the step is allowed to use in MB. + This is currently just a placeholder for later use with task + parallelism + """ + self.name = name + self.test_case = test_case + self.mpas_core = test_case.mpas_core + self.test_group = test_case.test_group + if subdir is not None: + self.subdir = subdir + else: + self.subdir = name + + self.cores = cores + self.min_cores = min_cores + self.threads = threads + self.max_memory = max_memory + self.max_disk = max_disk + + self.path = os.path.join(self.mpas_core.name, self.test_group.name, + test_case.subdir, self.subdir) + + # child steps (or test cases) will add to these + self.input_data = list() + self.inputs = list() + self.outputs = list() + self.namelist_data = dict() + self.streams_data = dict() + + # these will be set later during setup + self.config = None + self.config_filename = None + self.work_dir = None + self.base_work_dir = None + + # these will be set before running the step + self.logger = None + self.log_filename = None + + def setup(self): + """ + Set up the test case in the work directory, including downloading any + dependencies. The step should override this function to perform setup + operations such as generating namelist and streams files, adding inputs + and outputs. + """ + pass + + def run(self): + """ + Run the step. The step should override this function to perform the + main work. + """ + pass + + def add_input_file(self, filename=None, target=None, database=None, + url=None, work_dir_target=None): + """ + Add an input file to the step. 
The file can be local, a symlink to + a file that will be created in another step, a symlink to a file in one + of the databases for files cached after download, and/or come from a + specified URL. + + Parameters + ---------- + filename : str, optional + The relative path of the input file within the step's work + directory. The default is the file name (without the path) of + ``target``. + + target : str, optional + A file that will be the target of a symlink to ``filename``. If + ``database`` is not specified, this should be an absolute path or a + relative path from the step's work directory. If ``database`` is + specified, this is a relative path within the database and the name + of the remote file to download. + + database : str, optional + The name of a database for caching local files. This will be a + subdirectory of the local cache directory for this core. If + ``url`` is not provided, the URL for downloading the file will be + determined by combining the base URL of the data server, the + relative path for the core, ``database`` and ``target``. + + url : str, optional + The base URL for downloading ``target`` (if provided, or + ``filename`` if not). This option should be set if the file is not + in a database on the data server. The file's URL is determined by + combining ``url`` with the filename (without the directory) from + ``target`` (or ``filename`` if ``target`` is not provided). + ``database`` is not included in the file's URL even if it is + provided. + + work_dir_target : str, optional + Same as ``target`` but with a path relative to the base work + directory. This is useful if it is not easy to determine the + relative path between the step's work directory and the target. 
+ """ + if filename is None: + if target is None: + raise ValueError('At least one of local_name and target are ' + 'required.') + filename = os.path.basename(target) + + self.input_data.append(dict(filename=filename, target=target, + database=database, url=url, + work_dir_target=work_dir_target)) + + def add_output_file(self, filename): + """ + Add the output file to the step + + Parameters + ---------- + filename : str + The relative path of the output file within the step's work + directory + """ + self.outputs.append(filename) + + def add_model_as_input(self): + """ + make a link to the model executable and add it to the inputs + """ + model = self.config.get('executables', 'model') + model_basename = os.path.basename(model) + self.add_input_file(filename=model_basename, + target=os.path.abspath(model)) + + def add_namelist_file(self, package, namelist, out_name=None, + mode='forward'): + """ + Add a file with updates to namelist options to the step to be parsed + when generating a complete namelist file if and when the step gets set + up. + + Parameters + ---------- + package : Package + The package name or module object that contains ``namelist`` + + namelist : str + The name of the namelist replacements file to read from + + out_name : str, optional + The name of the namelist file to write out, ``namelist.`` by + default + + mode : {'init', 'forward'}, optional + The mode that the model will run in + """ + if out_name is None: + out_name = 'namelist.{}'.format(self.mpas_core.name) + + if out_name not in self.namelist_data: + self.namelist_data[out_name] = list() + + namelist_list = self.namelist_data[out_name] + + namelist_list.append(dict(package=package, namelist=namelist, + mode=mode)) + + def add_namelist_options(self, options, out_name=None, mode='forward'): + """ + Add the namelist replacements to be parsed when generating a namelist + file if and when the step gets set up. 
+ + Parameters + ---------- + options : dict + A dictionary of options and value to replace namelist options with + new values. + + out_name : str, optional + The name of the namelist file to write out, ``namelist.`` by + default + + mode : {'init', 'forward'}, optional + The mode that the model will run in + """ + if out_name is None: + out_name = 'namelist.{}'.format(self.mpas_core.name) + + if out_name not in self.namelist_data: + self.namelist_data[out_name] = list() + + namelist_list = self.namelist_data[out_name] + + namelist_list.append(dict(options=options, mode=mode)) + + def add_streams_file(self, package, streams, template_replacements=None, + out_name=None, mode='forward'): + """ + Add a streams file to the step to be parsed when generating a complete + streams file if and when the step gets set up. + + Parameters + ---------- + package : Package + The package name or module object that contains the streams file + + streams : str + The name of the streams file to read from + + template_replacements : dict, optional + A dictionary of replacements, in which case ``streams`` must be a + Jinja2 template to be rendered with these replacements + + out_name : str, optional + The name of the streams file to write out, ``streams.`` by + default + + mode : {'init', 'forward'}, optional + The mode that the model will run in + """ + if out_name is None: + out_name = 'streams.{}'.format(self.mpas_core.name) + + if out_name not in self.streams_data: + self.streams_data[out_name] = list() + + self.streams_data[out_name].append( + dict(package=package, streams=streams, + replacements=template_replacements, mode=mode)) + + def generate(self): + """ + Generate a ``run.py`` script for the step in the work directory. + This is the script that a user can call to run the step on its own. 
+ """ + + self._process_inputs_and_outputs() + self._generate_namelists() + self._generate_streams() + + template = Template( + resources.read_text('compass.step', 'step.template')) + test_case = {'name': self.test_case.name} + step = {'name': self.name, + 'config_filename': self.config_filename} + work_dir = self.work_dir + script = template.render(test_case=test_case, step=step) + + run_filename = os.path.join(work_dir, 'run.py') + with open(run_filename, 'w') as handle: + handle.write(script) + + # make sure it has execute permission + st = os.stat(run_filename) + os.chmod(run_filename, st.st_mode | stat.S_IEXEC) + + def _process_inputs_and_outputs(self): + """ + Process the inputs to and outputs from a step added with + :py:meth:`compass.Step.add_input_file` and + :py:meth:`compass.Step.add_output_file`. This includes downloading + files, making symlinks, and converting relative paths to absolute + paths. + """ + mpas_core = self.mpas_core.name + step_dir = self.work_dir + config = self.config + + inputs = [] + for entry in self.input_data: + filename = entry['filename'] + target = entry['target'] + database = entry['database'] + url = entry['url'] + work_dir_target = entry['work_dir_target'] + + if work_dir_target is not None: + target = os.path.join(self.base_work_dir, work_dir_target) + + download_target = None + download_path = None + + if database is not None: + # we're downloading a file to a cache of a database (if it's + # not already there. + if url is None: + base_url = config.get('download', 'server_base_url') + core_path = config.get('download', 'core_path') + url = '{}/{}/{}'.format(base_url, core_path, database) + + if target is None: + target = filename + + download_target = target + + database_root = config.get( + 'paths', '{}_database_root'.format(mpas_core)) + download_path = os.path.join(database_root, database) + elif url is not None: + if target is None: + download_target = filename + download_path = '.' 
+ else: + download_path, download_target = os.path.split(target) + + if url is not None: + download_target = download(download_target, url, config, + download_path) + if target is not None: + # this is the absolute path that we presumably want + target = download_target + + if target is not None: + symlink(target, os.path.join(step_dir, filename)) + inputs.append(target) + else: + inputs.append(filename) + + # convert inputs and outputs to absolute paths + self.inputs = [os.path.abspath(os.path.join(step_dir, filename)) for + filename in inputs] + + self.outputs = [os.path.abspath(os.path.join(step_dir, filename)) for + filename in self.outputs] + + def _generate_namelists(self): + """ + Writes out a namelist file in the work directory with new values given + by parsing the files and dictionaries in the step's ``namelist_data``. + """ + + step_work_dir = self.work_dir + config = self.config + + for out_name in self.namelist_data: + + replacements = dict() + + mode = None + + for entry in self.namelist_data[out_name]: + if mode is None: + mode = entry['mode'] + else: + assert mode == entry['mode'] + if 'options' in entry: + # this is a dictionary of replacement namelist options + options = entry['options'] + else: + options = compass.namelist.parse_replacements( + entry['package'], entry['namelist']) + replacements.update(options) + + defaults_filename = config.get('namelists', mode) + out_filename = '{}/{}'.format(step_work_dir, out_name) + + namelist = compass.namelist.ingest(defaults_filename) + + namelist = compass.namelist.replace(namelist, replacements) + + compass.namelist.write(namelist, out_filename) + + def _generate_streams(self): + """ + Writes out a streams file in the work directory with new values given + by parsing the files and dictionaries in the step's ``streams_data``. 
+ """ + + step_work_dir = self.work_dir + config = self.config + + for out_name in self.streams_data: + + # generate the streams file + tree = None + + mode = None + + for entry in self.streams_data[out_name]: + if mode is None: + mode = entry['mode'] + else: + assert mode == entry['mode'] + + tree = compass.streams.read( + package=entry['package'], + streams_filename=entry['streams'], + replacements=entry['replacements'], tree=tree) + + defaults_filename = config.get('streams', mode) + out_filename = '{}/{}'.format(step_work_dir, out_name) + + defaults_tree = etree.parse(defaults_filename) + + defaults = next(defaults_tree.iter('streams')) + streams = next(tree.iter('streams')) + + for stream in streams: + compass.streams.update_defaults(stream, defaults) + + # remove any streams that aren't requested + for default in defaults: + found = False + for stream in streams: + if stream.attrib['name'] == default.attrib['name']: + found = True + break + if not found: + defaults.remove(default) + + compass.streams.write(defaults_tree, out_filename) diff --git a/compass/step/step.template b/compass/step/step.template new file mode 100644 index 0000000000..f7d40a6de0 --- /dev/null +++ b/compass/step/step.template @@ -0,0 +1,31 @@ +#!/usr/bin/env python +import pickle +import configparser + +from mpas_tools.logging import LoggingContext + + +def main(): + with open('test_case_{{ test_case.name }}.pickle', 'rb') as handle: + test_case = pickle.load(handle) + test_case.steps_to_run = ['{{ step.name }}'] + test_case.new_step_log_file = False + + with open('{{ step.name }}.pickle', 'rb') as handle: + step = pickle.load(handle) + + config = configparser.ConfigParser( + interpolation=configparser.ExtendedInterpolation()) + config.read('{{ step.config_filename }}') + test_case.config = config + + # start logging to stdout/stderr + test_name = step.path.replace('/', '_') + with LoggingContext(name=test_name) as logger: + test_case.logger = logger + test_case.run() + + +if __name__ == 
'__main__': + main() + diff --git a/compass/streams.py b/compass/streams.py new file mode 100644 index 0000000000..cc83c514cf --- /dev/null +++ b/compass/streams.py @@ -0,0 +1,179 @@ +from lxml import etree +from copy import deepcopy +from importlib import resources +from jinja2 import Template + + +def read(package, streams_filename, tree=None, replacements=None): + """ + Parse the given streams file + + Parameters + ---------- + package : Package + The package name or module object that contains the streams file + + streams_filename : str + The name of the streams file to read from + + tree : lxml.etree, optional + An existing set of streams to add to or modify + + replacements : dict, optional + A dictionary of replacements, in which case ``streams_filename`` is + assumed to be a Jinja2 template to be rendered with these replacements + + Returns + ------- + tree : lxml.etree + A tree of XML data describing MPAS i/o streams with the content from + the given streams file + """ + if replacements is None: + text = resources.read_text(package, streams_filename) + else: + template = Template(resources.read_text(package, streams_filename)) + text = template.render(**replacements) + + new_tree = etree.fromstring(text) + + tree = _update_tree(tree, new_tree) + + return tree + + +def write(streams, out_filename): + """ write the streams XML data to the file """ + + with open(out_filename, 'w') as stream_file: + + stream_file.write('\n') + + # Write out all immutable streams first + for stream in streams.findall('immutable_stream'): + stream_name = stream.attrib['name'] + + stream_file.write('\n') + stream_file.write('\n') + + # Write out all immutable streams + for stream in streams.findall('stream'): + stream_name = stream.attrib['name'] + + stream_file.write('\n') + stream_file.write('\n\n') + + # Write out all contents of the stream + for tag in ['stream', 'var_struct', 'var_array', 'var']: + for child in stream.findall(tag): + child_name = child.attrib['name'] + if 
tag == 'stream' and child_name == stream_name: + # don't include the stream itself + continue + if 'packages' in child.attrib.keys(): + package_name = child.attrib['packages'] + entry = ' <{} name="{}" packages="{}"/>\n' \ + ''.format(tag, child_name, package_name) + else: + entry = ' <{} name="{}"/>\n'.format(tag, child_name) + stream_file.write(entry) + + stream_file.write('\n') + + stream_file.write('\n') + stream_file.write('\n') + + +def update_defaults(new_child, defaults): + """ + Update a stream or its children (sub-stream, var, etc.) starting from the + defaults or add it if it's new. + """ + if 'name' not in new_child.attrib: + return + + name = new_child.attrib['name'] + found = False + for child in defaults: + if child.attrib['name'] == name: + found = True + if child.tag != new_child.tag: + raise ValueError('Trying to update stream "{}" with ' + 'inconsistent tags {} vs. {}.'.format( + name, child.tag, new_child.tag)) + + # copy the attributes + for attr, value in new_child.attrib.items(): + child.attrib[attr] = value + + if len(new_child) > 0: + # we don't want default grandchildren + for grandchild in child: + child.remove(grandchild) + + # copy or add the grandchildren's contents + for new_grandchild in new_child: + update_defaults(new_grandchild, child) + + if not found: + # add a deep copy of the element + defaults.append(deepcopy(new_child)) + + +def _update_tree(tree, new_tree): + + if tree is None: + tree = new_tree + else: + streams = next(tree.iter('streams')) + new_streams = next(new_tree.iter('streams')) + + for new_stream in new_streams: + _update_element(new_stream, streams) + + return tree + + +def _update_element(new_child, elements): + """ + add the new child/grandchildren or add/update attributes if they exist + """ + if 'name' not in new_child.attrib: + return + + name = new_child.attrib['name'] + found = False + for child in elements: + if child.attrib['name'] == name: + found = True + if child.tag != new_child.tag: + raise 
ValueError('Trying to update stream "{}" with ' + 'inconsistent tags {} vs. {}.'.format( + name, child.tag, new_child.tag)) + + # copy the attributes + for attr, value in new_child.attrib.items(): + child.attrib[attr] = value + + # copy or add the grandchildren's contents + for new_grandchild in new_child: + _update_element(new_grandchild, child) + + if not found: + # add a deep copy of the element + elements.append(deepcopy(new_child)) diff --git a/compass/suite/__init__.py b/compass/suite/__init__.py new file mode 100644 index 0000000000..bc197603c5 --- /dev/null +++ b/compass/suite/__init__.py @@ -0,0 +1,291 @@ +import argparse +import sys +import os +from importlib import resources +import pickle +import configparser +import stat +from jinja2 import Template +import time +import numpy + +from mpas_tools.logging import LoggingContext + +from compass.setup import setup_cases +from compass.io import symlink +from compass.clean import clean_cases + + +def setup_suite(mpas_core, suite_name, config_file=None, machine=None, + work_dir=None, baseline_dir=None, mpas_model_path=None): + """ + Set up a test suite + + Parameters + ---------- + mpas_core : str + The MPAS core ('ocean', 'landice', etc.) of the test suite + + suite_name : str + The name of the test suite. 
A file ``.txt`` must exist + within the core's ``suites`` package that lists the paths of the tests + in the suite + + config_file : str, optional + Configuration file with custom options for setting up and running + test cases + + machine : str, optional + The name of one of the machines with defined config options, which can + be listed with ``compass list --machines`` + + work_dir : str, optional + A directory that will serve as the base for creating test case + directories + + baseline_dir : str, optional + Location of baselines that can be compared to + + mpas_model_path : str, optional + The relative or absolute path to the root of a branch where the MPAS + model has been built + """ + + if config_file is None and machine is None: + raise ValueError('At least one of config_file and machine is needed.') + + text = resources.read_text('compass.{}.suites'.format(mpas_core), + '{}.txt'.format(suite_name)) + tests = list() + for test in text.split('\n'): + test = test.strip() + if len(test) > 0 and test not in tests: + tests.append(test) + + if work_dir is None: + work_dir = os.getcwd() + work_dir = os.path.abspath(work_dir) + + test_cases = setup_cases(tests, config_file=config_file, machine=machine, + work_dir=work_dir, baseline_dir=baseline_dir, + mpas_model_path=mpas_model_path) + + # if compass/__init__.py exists, we're using a local version of the compass + # package and we'll want to link to that in the tests and steps + compass_path = os.path.join(os.getcwd(), 'compass') + if os.path.exists(os.path.join(compass_path, '__init__.py')): + symlink(compass_path, os.path.join(work_dir, 'compass')) + + test_suite = {'name': suite_name, + 'test_cases': test_cases, + 'work_dir': work_dir} + + # pickle the test or step dictionary for use at runtime + pickle_file = os.path.join(test_suite['work_dir'], + '{}.pickle'.format(suite_name)) + with open(pickle_file, 'wb') as handle: + pickle.dump(test_suite, handle, protocol=pickle.HIGHEST_PROTOCOL) + + template = 
Template(resources.read_text('compass.suite', 'suite.template')) + script = template.render(suite_name=suite_name) + + run_filename = os.path.join(work_dir, '{}.py'.format(suite_name)) + with open(run_filename, 'w') as handle: + handle.write(script) + + # make sure it has execute permission + st = os.stat(run_filename) + os.chmod(run_filename, st.st_mode | stat.S_IEXEC) + + max_cores, max_of_min_cores = _get_required_cores(test_cases) + + print('target cores: {}'.format(max_cores)) + print('minimum cores: {}'.format(max_of_min_cores)) + + +def clean_suite(mpas_core, suite_name, work_dir=None): + """ + Clean up a test suite by removing its test cases and run script + + Parameters + ---------- + mpas_core : str + The MPAS core ('ocean', 'landice', etc.) of the test suite + + suite_name : str + The name of the test suite. A file ``.txt`` must exist + within the core's ``suites`` package that lists the paths of the tests + in the suite + + work_dir : str, optional + A directory that will serve as the base for creating test case + directories + """ + + text = resources.read_text('compass.{}.suites'.format(mpas_core), + '{}.txt'.format(suite_name)) + tests = [test.strip() for test in text.split('\n') if + len(test.strip()) > 0] + + if work_dir is None: + work_dir = os.getcwd() + work_dir = os.path.abspath(work_dir) + + clean_cases(tests=tests, work_dir=work_dir) + + # delete the pickle file and run script + pickle_file = os.path.join(work_dir, '{}.pickle'.format(suite_name)) + run_filename = os.path.join(work_dir, '{}.py'.format(suite_name)) + + for filename in [pickle_file, run_filename]: + try: + os.remove(filename) + except OSError: + pass + + +def run_suite(suite_name): + """ + Run the given test suite + + Parameters + ---------- + suite_name : str + The name of the test suite + """ + with open('{}.pickle'.format(suite_name), 'rb') as handle: + test_suite = pickle.load(handle) + + # start logging to stdout/stderr + with LoggingContext(suite_name) as logger: + + 
os.environ['PYTHONUNBUFFERED'] = '1' + + try: + os.makedirs('case_outputs') + except OSError: + pass + + failures = 0 + cwd = os.getcwd() + suite_start = time.time() + test_times = dict() + success = dict() + for test_name in test_suite['test_cases']: + test_case = test_suite['test_cases'][test_name] + + logger.info('{}'.format(test_name)) + + test_name = test_case.path.replace('/', '_') + log_filename = '{}/case_outputs/{}.log'.format(cwd, test_name) + with LoggingContext(test_name, log_filename=log_filename) as \ + test_logger: + test_case.logger = test_logger + test_case.log_filename = log_filename + test_case.new_step_log_file = False + + os.chdir(test_case.work_dir) + + config = configparser.ConfigParser( + interpolation=configparser.ExtendedInterpolation()) + config.read(test_case.config_filename) + test_case.config = config + + test_start = time.time() + try: + test_case.run() + logger.info(' PASS') + success[test_name] = 'PASS' + except BaseException: + test_logger.exception('Exception raised') + logger.error( + ' FAIL see: case_outputs/{}.log'.format(test_name)) + success[test_name] = 'FAIL' + failures += 1 + test_times[test_name] = time.time() - test_start + + suite_time = time.time() - suite_start + + os.chdir(cwd) + + logger.info('Test Runtimes:') + for test_name, test_time in test_times.items(): + mins = int(numpy.floor(test_time / 60.0)) + secs = int(numpy.ceil(test_time - mins * 60)) + logger.info('{:02d}:{:02d} {} {}'.format( + mins, secs, success[test_name], test_name)) + mins = int(numpy.floor(suite_time / 60.0)) + secs = int(numpy.ceil(suite_time - mins * 60)) + logger.info('Total runtime {:02d}:{:02d}'.format(mins, secs)) + + if failures == 0: + logger.info('PASS: All passed successfully!') + else: + if failures == 1: + message = '1 test' + else: + message = '{} tests'.format(failures) + logger.error('FAIL: {} failed, see above.'.format(message)) + sys.exit(1) + + +def main(): + parser = argparse.ArgumentParser( + description='Set up a 
regression test suite', prog='compass suite') + parser.add_argument("-c", "--core", dest="core", + help="The MPAS core for the test suite", + metavar="CORE", required=True) + parser.add_argument("-t", "--test_suite", dest="test_suite", + help="Path to file containing a test suite to setup", + metavar="SUITE", required=True) + parser.add_argument("-f", "--config_file", dest="config_file", + help="Configuration file for test case setup", + metavar="FILE") + parser.add_argument("-s", "--setup", dest="setup", + help="Option to determine if regression suite should " + "be setup or not.", action="store_true") + parser.add_argument("--clean", dest="clean", + help="Option to determine if regression suite should " + "be cleaned or not.", action="store_true") + parser.add_argument("-m", "--machine", dest="machine", + help="The name of the machine for loading machine-" + "related config options", metavar="MACH") + parser.add_argument("-b", "--baseline_dir", dest="baseline_dir", + help="Location of baselines that can be compared to", + metavar="PATH") + parser.add_argument("-w", "--work_dir", dest="work_dir", + help="If set, script will setup the test suite in " + "work_dir rather in this script's location.", + metavar="PATH") + parser.add_argument("-p", "--mpas_model", dest="mpas_model", + help="The path to the build of the MPAS model for the " + "core.", + metavar="PATH") + args = parser.parse_args(sys.argv[2:]) + + if not args.clean and not args.setup: + raise ValueError('At least one of -s/--setup or --clean must be ' + 'specified') + + if args.clean: + clean_suite(mpas_core=args.core, suite_name=args.test_suite, + work_dir=args.work_dir) + + if args.setup: + setup_suite(mpas_core=args.core, suite_name=args.test_suite, + config_file=args.config_file, machine=args.machine, + work_dir=args.work_dir, baseline_dir=args.baseline_dir, + mpas_model_path=args.mpas_model) + + +def _get_required_cores(test_cases): + """ Get the maximum number of target cores and the max of min 
cores """ + + max_cores = 0 + max_of_min_cores = 0 + for test_case in test_cases.values(): + for step in test_case.steps.values(): + max_cores = max(max_cores, step.cores) + max_of_min_cores = max(max_of_min_cores, step.min_cores) + + return max_cores, max_of_min_cores diff --git a/compass/suite/suite.template b/compass/suite/suite.template new file mode 100644 index 0000000000..f7367a7f84 --- /dev/null +++ b/compass/suite/suite.template @@ -0,0 +1,7 @@ +#!/usr/bin/env python + +from compass.suite import run_suite + + +suite_name = '{{ suite_name }}' +run_suite(suite_name) diff --git a/compass/testcase/__init__.py b/compass/testcase/__init__.py new file mode 100644 index 0000000000..9ce6ba0448 --- /dev/null +++ b/compass/testcase/__init__.py @@ -0,0 +1,253 @@ +import os +import stat +from jinja2 import Template +from importlib import resources + +from mpas_tools.logging import LoggingContext +from compass.parallel import get_available_cores_and_nodes + + +class TestCase: + """ + The base class for test cases---such as a decomposition, threading or + restart test---that are made up of one or more steps + + Attributes + ---------- + name : str + the name of the test case + + test_group : compass.TestGroup + The test group the test case belongs to + + mpas_core : compass.MpasCore + The MPAS core the test group belongs to + + steps : dict + A dictionary of steps in the test case with step names as keys + + steps_to_run : list + A list of the steps to run when ``run()`` gets called. This list + includes all steps by default but can be replaced with a list of only + those tests that should run by default if some steps are optional and + should be run manually by the user. 
+ + subdir : str + the subdirectory for the test case + + path : str + the path within the base work directory of the test case, made up of + ``mpas_core``, ``test_group``, and the test case's ``subdir`` + + config : configparser.ConfigParser + Configuration options for this test case, a combination of the defaults + for the machine, core and configuration + + config_filename : str + The local name of the config file that ``config`` has been written to + during setup and read from during run + + work_dir : str + The test case's work directory, defined during setup as the combination + of ``base_work_dir`` and ``path`` + + base_work_dir : str + The base work directory + + logger : logging.Logger + A logger for output from the test case + + log_filename : str + At run time, the name of a log file where output/errors from the test + case are being logged, or ``None`` if output is to stdout/stderr + + new_step_log_file : bool + Whether to create a new log file for each step or to log output to a + common log file for the whole test case. The latter is used when + running the test case as part of a test suite + """ + + def __init__(self, test_group, name, subdir=None): + """ + Create a new test case + + Parameters + ---------- + test_group : compass.TestGroup + the test group that this test case belongs to + + name : str + the name of the test case + + subdir : str, optional + the subdirectory for the test case. 
The default is ``name`` + """ + self.name = name + self.mpas_core = test_group.mpas_core + self.test_group = test_group + if subdir is not None: + self.subdir = subdir + else: + self.subdir = name + + self.path = os.path.join(self.mpas_core.name, test_group.name, + self.subdir) + + # steps will be added by calling add_step() + self.steps = dict() + self.steps_to_run = list() + + # these will be set during setup + self.config = None + self.config_filename = None + self.work_dir = None + self.base_work_dir = None + + # these will be set when running the test case + self.new_step_log_file = True + self.logger = None + self.log_filename = None + + def configure(self): + """ + Modify the configuration options for this test case. Test cases should + override this method if they want to add config options specific to + the test case, e.g. from a config file stored in the test case's python + package + """ + pass + + def run(self): + """ + Run each step of the test case. Test cases can override this method + to perform additional operations in addition to running the test case's + steps + + """ + logger = self.logger + cwd = os.getcwd() + for step_name in self.steps_to_run: + step = self.steps[step_name] + step.config = self.config + new_log_file = self.new_step_log_file + if self.log_filename is not None: + step.log_filename = self.log_filename + do_local_logging = True + else: + # We only want to do local log output if the step output is + # being redirected to a file. 
Otherwise, we assume we're + # probably just running one step and the local logging is + # redundant and unnecessary + do_local_logging = new_log_file + + if do_local_logging: + logger.info(' * Running {}'.format(step_name)) + try: + self._run_step(step, new_log_file) + except BaseException: + if do_local_logging: + logger.info(' Failed') + raise + + if do_local_logging: + logger.info(' Complete') + + os.chdir(cwd) + + def add_step(self, step, run_by_default=True): + """ + Add a step to the test case + + Parameters + ---------- + step : compass.Step + The step to add + + run_by_default : bool, optional + Whether to add this step to the list of steps to run when the + ``run()`` method gets called. If ``run_by_default=False``, users + would need to run this step manually. + """ + self.steps[step.name] = step + if run_by_default: + self.steps_to_run.append(step.name) + + def generate(self): + """ + Generate a ``run.py`` script for the test case in the work directory. + This is the script that a user calls to run the test case. 
+ """ + + template = Template( + resources.read_text('compass.testcase', 'testcase.template')) + test_case = {'name': self.name, + 'config_filename': self.config_filename} + work_dir = self.work_dir + script = template.render(test_case=test_case) + + run_filename = os.path.join(work_dir, 'run.py') + with open(run_filename, 'w') as handle: + handle.write(script) + + # make sure it has execute permission + st = os.stat(run_filename) + os.chmod(run_filename, st.st_mode | stat.S_IEXEC) + + def _run_step(self, step, new_log_file): + """ + Run the requested step + + Parameters + ---------- + step : compass.Step + The step to run + + new_log_file : bool + Whether to log to a new log file + """ + logger = self.logger + config = self.config + cwd = os.getcwd() + available_cores, _ = get_available_cores_and_nodes(config) + step.cores = min(step.cores, available_cores) + if step.min_cores is not None: + if step.cores < step.min_cores: + raise ValueError( + 'Available cores for {} is below the minimum of {}' + ''.format(step.cores, step.min_cores)) + + missing_files = list() + for input_file in step.inputs: + if not os.path.exists(input_file): + missing_files.append(input_file) + + if len(missing_files) > 0: + raise OSError( + 'input file(s) missing in step {} of {}/{}/{}: {}'.format( + step.name, step.mpas_core.name, step.test_group.name, + step.test_case.subdir, missing_files)) + + test_name = step.path.replace('/', '_') + if new_log_file: + log_filename = '{}/{}.log'.format(cwd, step.name) + step.log_filename = log_filename + step_logger = None + else: + step_logger = logger + log_filename = None + with LoggingContext(name=test_name, logger=step_logger, + log_filename=log_filename) as step_logger: + step.logger = step_logger + os.chdir(step.work_dir) + step.run() + + missing_files = list() + for output_file in step.outputs: + if not os.path.exists(output_file): + missing_files.append(output_file) + + if len(missing_files) > 0: + raise OSError( + 'output file(s) missing in 
step {} of {}/{}/{}: {}'.format( + step.name, step.mpas_core.name, step.test_group.name, + step.test_case.subdir, missing_files)) diff --git a/compass/testcase/testcase.template b/compass/testcase/testcase.template new file mode 100644 index 0000000000..6c15343e6f --- /dev/null +++ b/compass/testcase/testcase.template @@ -0,0 +1,27 @@ +#!/usr/bin/env python +import pickle +import configparser + +from mpas_tools.logging import LoggingContext + + +def main(): + with open('{{ test_case.name }}.pickle', 'rb') as handle: + test_case = pickle.load(handle) + + config = configparser.ConfigParser( + interpolation=configparser.ExtendedInterpolation()) + config.read('{{ test_case.config_filename }}') + test_case.config = config + + # start logging to stdout/stderr + test_name = test_case.path.replace('/', '_') + test_case.new_step_log_file = True + with LoggingContext(name=test_name) as logger: + test_case.logger = logger + test_case.run() + + +if __name__ == '__main__': + main() + diff --git a/compass/testgroup.py b/compass/testgroup.py new file mode 100644 index 0000000000..26de33a730 --- /dev/null +++ b/compass/testgroup.py @@ -0,0 +1,47 @@ +class TestGroup: + """ + The base class for test groups, which are collections of test cases with + a common purpose (e.g. 
global ocean, baroclinic channel, Greenland, or + EISMINT2) + + Attributes + ---------- + name : str + the name of the test group + + mpas_core : compass.MpasCore + the MPAS core that this test group belongs to + + test_cases : dict + A dictionary of test cases in the test group with the names of the + test cases as keys + """ + + def __init__(self, mpas_core, name): + """ + Create a new test group + + Parameters + ---------- + mpas_core : compass.MpasCore + the MPAS core that this test group belongs to + + name : str + the name of the test group + """ + self.name = name + self.mpas_core = mpas_core + + # test cases will be added with calls to add_test_case() + self.test_cases = dict() + + def add_test_case(self, test_case): + """ + Add a test case to the test group + + Parameters + ---------- + test_case : compass.TestCase + The test case to add + """ + self.test_cases[test_case.subdir] = test_case diff --git a/compass/validate.py b/compass/validate.py new file mode 100644 index 0000000000..97f1fff814 --- /dev/null +++ b/compass/validate.py @@ -0,0 +1,324 @@ +import os +import numpy +import xarray +import re +import fnmatch + + +def compare_variables(variables, config, work_dir, filename1, filename2=None, + l1_norm=0.0, l2_norm=0.0, linf_norm=0.0, quiet=True): + """ + Compare variables between files in the current test case and/or with the + baseline results. + + Parameters + ---------- + variables : list + A list of variable names to compare + + config : configparser.ConfigParser + Configuration options for the test case + + work_dir : str + The work directory for the test case + + filename1 : str + The relative path to a file within the ``work_dir``. If ``filename2`` + is also given, comparison will be performed with ``variables`` in that + file. If a baseline directory was provided when setting up the + test case, the ``variables`` will be compared between this test case and + the same relative filename in the baseline version of the test case. 
+ + filename2 : str, optional + The relative path to another file within the ``work_dir`` if comparing + between files within the current test case. If a baseline directory + was provided, the ``variables`` from this file will also be compared + with those in the corresponding baseline file. + + l1_norm : float, optional + The maximum allowed L1 norm difference between the variables in + ``filename1`` and ``filename2``. + + l2_norm : float, optional + The maximum allowed L2 norm difference between the variables in + ``filename1`` and ``filename2``. + + linf_norm : float, optional + The maximum allowed L-Infinity norm difference between the variables in + ``filename1`` and ``filename2``. + + quiet : bool, optional + Whether to print + + Raises + ------ + ValueError + If one or more of the norms is outside the required bounds + + """ + + all_pass = True + if filename2 is not None: + result = _compare_variables( + variables, os.path.join(work_dir, filename1), + os.path.join(work_dir, filename2), l1_norm, l2_norm, linf_norm, + quiet) + all_pass = all_pass and result + + if config.has_option('paths', 'baseline_dir'): + baseline_root = config.get('paths', 'baseline_dir') + + result = _compare_variables( + variables, os.path.join(work_dir, filename1), + os.path.join(baseline_root, filename1), l1_norm=0.0, l2_norm=0.0, + linf_norm=0.0, quiet=quiet) + all_pass = all_pass and result + + if filename2 is not None: + result = _compare_variables( + variables, os.path.join(work_dir, filename2), + os.path.join(baseline_root, filename2), l1_norm=0.0, + l2_norm=0.0, linf_norm=0.0, quiet=quiet) + all_pass = all_pass and result + + if not all_pass: + raise ValueError('Comparison failed, see above.') + + +def compare_timers(timers, config, work_dir, rundir1, rundir2=None): + """ + Compare variables between files in the current test case and/or with the + baseline results. 
+ + Parameters + ---------- + timers : list + A list of timer names to compare + + config : configparser.ConfigParser + Configuration options for the test case + + work_dir : str + The work directory for the test case + + rundir1 : str + The relative path to a directory within the ``work_dir``. If + ``rundir2`` is also given, comparison will be performed with ``timers`` + in that file. If a baseline directory was provided when setting up the + test case, the ``timers`` will be compared between this test case and + the same relative directory under the baseline version of the test case. + + rundir2 : str, optional + The relative path to another file within the ``work_dir`` if comparing + between files within the current test case. If a baseline directory + was provided, the ``timers`` from this file will also be compared with + those in the corresponding baseline directory. + """ + + if rundir2 is not None: + _compute_timers(os.path.join(work_dir, rundir1), + os.path.join(work_dir, rundir2), timers) + + if config.has_option('paths', 'baseline_dir'): + baseline_root = config.get('paths', 'baseline_dir') + + _compute_timers(os.path.join(baseline_root, rundir1), + os.path.join(work_dir, rundir1), timers) + + if rundir2 is not None: + _compute_timers(os.path.join(baseline_root, rundir2), + os.path.join(work_dir, rundir2), timers) + + +def _compare_variables(variables, filename1, filename2, l1_norm, l2_norm, + linf_norm, quiet): + """ compare fields in the two files """ + + for filename in [filename1, filename2]: + if not os.path.exists(filename): + raise OSError('File {} does not exist.'.format(filename)) + + ds1 = xarray.open_dataset(filename1) + ds2 = xarray.open_dataset(filename2) + + all_pass = True + + for variable in variables: + for ds, filename in [(ds1, filename1), (ds2, filename2)]: + if variable not in ds: + raise ValueError('Variable {} not in {}.'.format( + variable, filename)) + + da1 = ds1[variable] + da2 = ds2[variable] + + if not numpy.all(da1.dims == 
da2.dims): + raise ValueError("Dimensions for variable {} don't match between " + "files {} and {}.".format( + variable, filename1, filename2)) + + for dim in da1.sizes: + if da1.sizes[dim] != da2.sizes[dim]: + raise ValueError("Field sizes for variable {} don't match " + "files {} and {}.".format( + variable, filename1, filename2)) + + if not quiet: + print(" Pass thresholds are:") + if l1_norm is not None: + print(" L1: {:16.14e}".format(l1_norm)) + if l2_norm is not None: + print(" L2: {:16.14e}".format(l2_norm)) + if linf_norm is not None: + print(" L_Infinity: {:16.14e}".format( + linf_norm)) + variable_pass = True + if 'Time' in da1.dims: + time_range = range(0, da1.sizes['Time']) + time_str = ', '.join(['{}'.format(j) for j in time_range]) + print('{} Time index: {}'.format(variable.ljust(20), time_str)) + for time_index in time_range: + slice1 = da1.isel(Time=time_index) + slice2 = da2.isel(Time=time_index) + result = _compute_norms(slice1, slice2, quiet, l1_norm, + l2_norm, linf_norm, + time_index=time_index) + variable_pass = variable_pass and result + + else: + print('{}'.format(variable)) + result = _compute_norms(da1, da2, quiet, l1_norm, l2_norm, + linf_norm) + variable_pass = variable_pass and result + + if variable_pass: + print(' PASS {}\n'.format(filename1)) + else: + print(' FAIL {}\n'.format(filename1)) + print(' {}\n'.format(filename2)) + all_pass = all_pass and variable_pass + + return all_pass + + +def _compute_norms(da1, da2, quiet, max_l1_norm, max_l2_norm, max_linf_norm, + time_index=None): + """ Compute norms between variables in two DataArrays """ + + da1 = _rename_duplicate_dims(da1) + da2 = _rename_duplicate_dims(da2) + + result = True + diff = numpy.abs(da1 - da2).values.ravel() + + l1_norm = numpy.linalg.norm(diff, ord=1) + l2_norm = numpy.linalg.norm(diff, ord=2) + linf_norm = numpy.linalg.norm(diff, ord=numpy.inf) + + if time_index is None: + diff_str = '' + else: + diff_str = '{:d}: '.format(time_index) + + if l1_norm is not None: 
+ if max_l1_norm < l1_norm: + result = False + diff_str = '{} l1: {:16.14e} '.format(diff_str, l1_norm) + + if l2_norm is not None: + if max_l2_norm < l2_norm: + result = False + diff_str = '{} l2: {:16.14e} '.format(diff_str, l2_norm) + + if linf_norm is not None: + if max_linf_norm < linf_norm: + result = False + diff_str = '{} linf: {:16.14e} '.format(diff_str, linf_norm) + + if not quiet or not result: + print(diff_str) + + return result + + +def _compute_timers(base_directory, comparison_directory, timers): + """ Find timers and compute speedup between two run directories """ + for timer in timers: + timer1_found, timer1 = _find_timer_value(timer, base_directory) + timer2_found, timer2 = _find_timer_value(timer, comparison_directory) + + if timer1_found and timer2_found: + if timer2 > 0.: + speedup = timer1 / timer2 + else: + speedup = 1.0 + + percent = (timer2 - timer1) / timer1 + + print("Comparing timer {}:".format(timer)) + print(" Base: {}".format(timer1)) + print(" Compare: {}".format(timer2)) + print(" Percent Change: {}%".format(percent * 100)) + print(" Speedup: {}".format(speedup)) + + +def _find_timer_value(timer_name, directory): + """ Find a timer in the given directory """ + # Build a regular expression for any two characters with a space between + # them. 
+ regex = re.compile(r'(\S) (\S)') + + sub_timer_name = timer_name.replace(' ', '_') + + timer = 0.0 + timer_found = False + for file in os.listdir(directory): + if not timer_found: + # Compare files written using built in MPAS timers + if fnmatch.fnmatch(file, "log.*.out"): + timer_line_size = 6 + name_index = 1 + total_index = 2 + # Compare files written using GPTL timers + elif fnmatch.fnmatch(file, "timing.*"): + timer_line_size = 6 + name_index = 0 + total_index = 3 + else: + continue + + with open(os.path.join(directory, file), "r") as stats_file: + for block in iter(lambda: stats_file.readline(), ""): + new_block = regex.sub(r"\1_\2", block[2:]) + new_block_arr = new_block.split() + if len(new_block_arr) >= timer_line_size: + if sub_timer_name.find(new_block_arr[name_index]) >= 0: + try: + timer = \ + timer + float(new_block_arr[total_index]) + timer_found = True + except ValueError: + pass + + return timer_found, timer + + +def _rename_duplicate_dims(da): + dims = list(da.dims) + new_dims = list(dims) + duplicates = False + for index, dim in enumerate(dims): + if dim in dims[index+1:]: + duplicates = True + suffix = 2 + for other_index, other in enumerate(dims[index+1:]): + if other == dim: + new_dims[other_index + index + 1] = \ + '{}_{}'.format(dim, suffix) + suffix += 1 + + if not duplicates: + return da + + da = xarray.DataArray(data=da.values, dims=new_dims) + return da diff --git a/deploy/create_compass_env.py b/deploy/create_compass_env.py index 892a80ef0d..958f15dc27 100755 --- a/deploy/create_compass_env.py +++ b/deploy/create_compass_env.py @@ -15,41 +15,35 @@ def get_envs(): # to use. 
envs = [{'suffix': '_nompi', - 'version': '0.1.12', + 'version': '1.0', 'python': '3.8', 'mpi': 'nompi'}, {'suffix': '', - 'version': '0.1.12', + 'version': '1.0', 'python': '3.8', 'mpi': 'mpich'}] # whether to delete and rebuild each environment if it already exists - force_recreate = False + force_recreate = True # whether these are to be test environments - is_test = False + is_test = True return envs, force_recreate, is_test def get_host_info(): hostname = socket.gethostname() + system_mpich_version = None if hostname.startswith('cori') or hostname.startswith('dtn'): base_path = "/global/cfs/cdirs/e3sm/software/anaconda_envs/base" activ_path = "/global/cfs/cdirs/e3sm/software/anaconda_envs" group = "e3sm" - elif hostname.startswith('acme1') or hostname.startswith('aims4'): - base_path = "/usr/local/e3sm_unified/envs/base" - activ_path = "/usr/local/e3sm_unified/envs" - group = "climate" - elif hostname.startswith('blueslogin'): + elif hostname.startswith('blueslogin') or hostname.startswith('chrysalis'): base_path = "/lcrc/soft/climate/e3sm-unified/base" activ_path = "/lcrc/soft/climate/e3sm-unified" group = "cels" - elif hostname.startswith('rhea'): - base_path = "/ccs/proj/cli900/sw/rhea/e3sm-unified/base" - activ_path = "/ccs/proj/cli900/sw/rhea/e3sm-unified" - group = "cli900" + system_mpich_version = "3.3.*" elif hostname.startswith('cooley'): base_path = "/lus/theta-fs0/projects/ccsm/acme/tools/e3sm-unified/base" activ_path = "/lus/theta-fs0/projects/ccsm/acme/tools/e3sm-unified" @@ -58,7 +52,7 @@ def get_host_info(): base_path = "/share/apps/E3SM/conda_envs/base" activ_path = "/share/apps/E3SM/conda_envs" group = "users" - elif hostname.startswith('gr-fe') or hostname.startswith('wf-fe'): + elif hostname.startswith('gr-fe') or hostname.startswith('ba-fe'): base_path = "/usr/projects/climate/SHARED_CLIMATE/anaconda_envs/base" activ_path = "/usr/projects/climate/SHARED_CLIMATE/anaconda_envs" group = "climate" @@ -71,7 +65,7 @@ def get_host_info(): "Unknown 
host name {}. Add env_path and group for " "this machine to the script.".format(hostname)) - return base_path, activ_path, group + return base_path, activ_path, group, system_mpich_version def check_env(base_path, env_name, env): @@ -80,13 +74,17 @@ def check_env(base_path, env_name, env): activate = 'source {}/etc/profile.d/conda.sh; conda activate {}'.format( base_path, env_name) - imports = ['geometric_features', 'mpas_tools', 'jigsawpy'] + imports = ['geometric_features', 'mpas_tools', 'jigsawpy', 'compass'] for import_name in imports: command = '{}; python -c "import {}"'.format(activate, import_name) test_command(command, os.environ, import_name) commands = [['gpmetis', '--help'], - ['ffmpeg', '--help']] + ['ffmpeg', '--help'], + ['compass', 'list'], + ['compass', 'setup', '--help'], + ['compass', 'suite', '--help'], + ['compass', 'clean', '--help']] for command in commands: package = command[0] @@ -107,7 +105,7 @@ def test_command(command, env, package): def main(): envs, force_recreate, is_test = get_envs() - base_path, activ_path, group = get_host_info() + base_path, activ_path, group, system_mpich_version = get_host_info() if not os.path.exists(base_path): miniconda = 'Miniconda3-latest-Linux-x86_64.sh' @@ -141,10 +139,18 @@ def main(): else: mpi_prefix = 'mpi_{}'.format(mpi) - channels = '--override-channels -c conda-forge -c defaults -c e3sm' + channels = '--override-channels -c conda-forge -c defaults' + if is_test: + channels = '{} -c e3sm/label/test'.format(channels) + else: + channels = '{} -c e3sm'.format(channels) packages = 'python={} "compass={}={}_*"'.format( python, version, mpi_prefix) + if mpi == 'mpich' and system_mpich_version is not None: + packages = '{} "mpich={}=external*"'.format( + packages, system_mpich_version) + if is_test: env_name = 'test_compass_{}{}'.format(version, suffix) else: diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000000..86d4c2dd38 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ 
+generated diff --git a/docs/Makefile b/docs/Makefile index 19e1d4f711..487a471ce2 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -4,7 +4,7 @@ # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build -SPHINXPROJ = mpas_model +SPHINXPROJ = compass SOURCEDIR = . BUILDDIR = _build diff --git a/docs/conf.py b/docs/conf.py index a2b66a2bec..881a0c659d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# COMPASS documentation build configuration file, created by +# compass documentation build configuration file, created by # sphinx-quickstart on Sat Mar 25 14:39:11 2017. # # This file is execfile()d with the current directory set to its @@ -13,6 +13,7 @@ # serve to show the default. import os +import compass # -- General configuration ------------------------------------------------ @@ -51,11 +52,11 @@ master_doc = 'index' # General information about the project. -project = u'COMPASS' -copyright = u'Copyright (c) 2013-2020, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047;' \ +project = u'compass' +copyright = u'Copyright (c) 2013-2021, Los Alamos National Security, LLC (LANS) (Ocean: LA-CC-13-047;' \ u'Land Ice: LA-CC-13-117) and the University Corporation for Atmospheric Research (UCAR).' author = u'Xylar Asay-Davis, Matt Hoffman, Doug Jacobsen, Mark Petersen, ' \ - u'Philip Wolfram Tong Zhang' + u'Philip Wolfram, Luke Van Roekel, Tong Zhang' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -65,9 +66,9 @@ release = version else: # The short X.Y.Z version. - version = '1.0' + version = compass.__version__ # The full version, including alpha/beta/rc tags. - release = '1.0' + release = compass.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
@@ -113,7 +114,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = ['_static'] # -- Options for HTMLHelp output ------------------------------------------ @@ -146,7 +147,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'compass.tex', u'COMPASS Documentation', + (master_doc, 'compass.tex', u'compass Documentation', author, 'manual'), ] @@ -156,7 +157,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'compass', u'COMPASS Documentation', + (master_doc, 'compass', u'compass Documentation', [author], 1) ] @@ -167,8 +168,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'compass', u'COMPASS Documentation', - author, 'COMPASS', 'One line description of project.', + (master_doc, 'compass', u'compass Documentation', + author, 'compass', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/developers_guide/api.rst b/docs/developers_guide/api.rst new file mode 100644 index 0000000000..aa6d251c78 --- /dev/null +++ b/docs/developers_guide/api.rst @@ -0,0 +1,233 @@ +.. _dev_api: + +############# +API reference +############# + +This page provides an auto-generated summary of the ``compass`` API. For more +details and examples, refer to the relevant sections in the main part of the +documentation. + +MPAS Cores +========== + +.. toctree:: + :titlesonly: + :maxdepth: 1 + + landice/api + ocean/api + + +compass framework +================= + +Command-line interface +^^^^^^^^^^^^^^^^^^^^^^ + +.. currentmodule:: compass + +.. 
autosummary:: + :toctree: generated/ + + __main__.main + + +list +~~~~ + +.. currentmodule:: compass.list + +.. autosummary:: + :toctree: generated/ + + list_cases + +setup +~~~~~ + +.. currentmodule:: compass.setup + +.. autosummary:: + :toctree: generated/ + + setup_cases + setup_case + +clean +~~~~~ + +.. currentmodule:: compass.clean + +.. autosummary:: + :toctree: generated/ + + clean_cases + +suite +~~~~~ + +.. currentmodule:: compass.suite + +.. autosummary:: + :toctree: generated/ + + setup_suite + clean_suite + +run +~~~ + +.. currentmodule:: compass.run + +.. autosummary:: + :toctree: generated/ + + run_suite + run_test_case + run_step + + +Base Classes +^^^^^^^^^^^^ + +mpas_core +~~~~~~~~~ + +.. currentmodule:: compass + +.. autosummary:: + :toctree: generated/ + + MpasCore + MpasCore.add_test_group + +testgroup +~~~~~~~~~ + +.. currentmodule:: compass + +.. autosummary:: + :toctree: generated/ + + TestGroup + TestGroup.add_test_case + +testcase +^^^^^^^^ + +.. currentmodule:: compass + +.. autosummary:: + :toctree: generated/ + + TestCase + TestCase.configure + TestCase.run + TestCase.add_step + +step +^^^^ + +.. currentmodule:: compass + +.. autosummary:: + :toctree: generated/ + + Step + Step.setup + Step.run + Step.add_input_file + Step.add_output_file + Step.add_model_as_input + Step.add_namelist_file + Step.add_namelist_options + Step.add_streams_file + +config +^^^^^^ + +.. currentmodule:: compass.config + +.. autosummary:: + :toctree: generated/ + + duplicate_config + add_config + ensure_absolute_paths + get_source_file + +io +^^ + +.. currentmodule:: compass.io + +.. autosummary:: + :toctree: generated/ + + download + symlink + +model +^^^^^ + +.. currentmodule:: compass.model + +.. autosummary:: + :toctree: generated/ + + run_model + partition + update_namelist_pio + make_graph_file + +mpas_cores +^^^^^^^^^^ + +.. currentmodule:: compass.mpas_cores + +.. autosummary:: + :toctree: generated/ + + get_mpas_cores + +namelist +^^^^^^^^ + +.. 
currentmodule:: compass.namelist + +.. autosummary:: + :toctree: generated/ + + update + +parallel +^^^^^^^^ + +.. currentmodule:: compass.parallel + +.. autosummary:: + :toctree: generated/ + + get_available_cores_and_nodes + +provenance +^^^^^^^^^^ + +.. currentmodule:: compass.provenance + +.. autosummary:: + :toctree: generated/ + + write + +validate +^^^^^^^^ + +.. currentmodule:: compass.validate + +.. autosummary:: + :toctree: generated/ + + compare_variables + compare_timers diff --git a/docs/developers_guide/building_docs.rst b/docs/developers_guide/building_docs.rst index fce20fc5d6..dc81fcbffe 100644 --- a/docs/developers_guide/building_docs.rst +++ b/docs/developers_guide/building_docs.rst @@ -4,20 +4,54 @@ Building the Documentation ************************** -To make a local test build of the documentation, you need to set up a conda -environment with some required packages: +To make a local test build of the documentation, you need to make a local +build of the ``compass`` conda package and include it in a conda environment +with some other required packages. + +If you haven't installed +`Miniconda3 `_, do so. Then, +`add conda-forge `_ and install conda-build: .. code-block:: bash - $ conda create -y -n test_compass_docs python=3.8 sphinx mock sphinx_rtd_theme - $ conda activate test_compass_docs + miniconda=${HOME}/miniconda3 + source ${miniconda}/etc/profile.d/conda.sh + conda activate base + conda install conda-build + +If you installed Miniconda3 somewhere other than the default location, change +``$miniconda`` both above and in the script below. -Then, to build the documentation, run: +Then, run the following script to build the docs: .. 
code-block:: bash - $ export DOCS_VERSION="test" - $ cd docs - $ make html + #!/bin/bash + + miniconda=${HOME}/miniconda3 + + source ${miniconda}/etc/profile.d/conda.sh + + # exit if a subprocess ends in errors + set -e + + py=3.8 + mpi=mpich + + env=test_compass_mpi_${mpi} + rm -rf ${miniconda}/conda-bld + conda build -m ci/mpi_${mpi}.yaml recipe + + conda create --yes --quiet --name ${env} -c ${miniconda}/conda-bld/ \ + python=$py compass sphinx mock sphinx_rtd_theme + + conda activate $env + + version=$(python -c "import compass; print(compass.__version__)") + echo "version: $version" + export DOCS_VERSION="test" + cd docs || exit 1 + rm -rf developers_guide/generated/ developers_guide/*/generated/ _build/ + make html -Then, you can view the documentation by opening ``_build/html/index.html``. +Finally, you can view the documentation by opening ``_build/html/index.html``. diff --git a/docs/developers_guide/command_line.rst b/docs/developers_guide/command_line.rst new file mode 100644 index 0000000000..85875348f4 --- /dev/null +++ b/docs/developers_guide/command_line.rst @@ -0,0 +1,223 @@ +.. _dev_command_line: + +Command-line interface +====================== + +The command-line interface for ``compass`` acts essentially like 5 independent +scripts: ``compass list``, ``compass setup``, ``compass clean`` and +``compass suite``, and ``compass run``. These are the primary user interface +to the package, as described below. + +When the ``compass`` package is installed into your conda environment, you can +run these commands as above. If you are developing ``compass`` from a local +branch off of https://github.com/MPAS-Dev/compass, you will need to use a +conda environment appropriate for development (see :ref:`dev_conda_env`) and +you will have to tell python to use the local ``compass``, for example: + +.. 
code-block:: bash + + python -m compass list + +The `-m flag `_ tells +python to first look for the ``compass`` package in the local ``compass`` +directory before looking in the conda environment. + +.. _dev_compass_list: + +compass list +------------ + +The ``compass list`` command is used to list test cases, test suites, and +supported machines. The command-line options are: + +.. code-block:: none + + [python -m] compass list [-h] [-t TEST] [-n NUMBER] [--machines] [--suites] [-v] + +By default, all test cases are listed: + +.. code-block:: none + + $ compass list + Testcases: + 0: examples/example_compact/1km/test1 + 1: examples/example_compact/1km/test2 + ... + +The number of each test case is displayed, followed by the relative path that +will be used for the test case in the work directory. + +The ``-h`` or ``--help`` options will display the help message describing the +command-line options. + +The ``-t`` or ``--test_expr`` flag can be used to supply a substring or regular +expression that can be used to list a subset of the tests. Think of this as +as search expression within the default list of test-case relative paths. + +The flags ``-n`` or ``--number`` are used to list the name (relative path) of +a single test case with the given number. + +Instead of listing test cases, you can list all the supported machines that can +be passed to the ``compass setup`` and ``compass suite`` by using the +``--machines`` flag. + +Similarly, you can list all the available test suites for all :ref:`dev_cores` +by using the ``--suites`` flag. The result are the flags that would be passed +to ``compass suite`` as part of setting up this test suite. + +The ``-v`` or ``--verbose`` flag lists more detail about each test case, +including its description, short name, core, configuration, subdirectory within +the configuration and the names of its steps: + +.. 
code-block:: none + + $ compass list -n 0 -v + path: examples/example_compact/1km/test1 + description: Tempate 1km test1 + name: test1 + core: examples + configuration: example_compact + subdir: 1km/test1 + steps: + - step1 + - step2 + +.. _dev_compass_setup: + +compass setup +------------- + +The ``compass setup`` command is used to set up one or more test cases. The +command-line options are: + +.. code-block:: none + + [python -m] compass setup [-h] [-t PATH] [-n NUM [NUM ...]] [-f FILE] [-m MACH] + [-w PATH] [-b PATH] [-p PATH] + +The ``-h`` or ``--help`` options will display the help message describing the +command-line options. + +The test cases to set up can be specified either by relative path or by number. +The ``-t`` or ``--test`` flag is used to pass the relative path of the test +case within the resulting work directory. This is the path given by +:ref:`dev_compass_list`. Only one test case at a time can be supplied to +``compass setup`` this way. + +Alternatively, you can supply the test numbers of any number of test cases to +the ``-n`` or ``--case_number`` flag. Multiple test numbers are separated by +spaces (not commas like in :ref:`legacy_compass`). These are the test numbers +given by :ref:`dev_compass_list`. + +``compass setup`` requires a few basic pieces of information to be able to set +up a test case. These include places to download and cache some data files +used in the test cases and the location where you built the MPAS model. There +are a few ways to supply these. The ``-m`` or ``--machine`` option is used +to tell ``compass setup`` which supported machine you're running on (leave this +off if you're working on an "unknown" machine). See :ref:`dev_compass_list` +above for how to list the supported machines. + +You can supply the path to the MPAS model you built with the ``-p`` or +``--mpas_model`` flag. This can be a relative or absolute path. 
The default +depends on the core for the test case and is the relative path +``MPAS-Model/<core>/develop`` to the +`git submodule `_ for the +source code for that core. + +You can also supply a config file with config options pointing to the +directories for cached data files, the location of the MPAS model, and much more +(see :ref:`config_files` and :ref:`setup_overview`). Point to your config file +using the ``-f`` or ``--config_file`` flag. + +The ``-w`` or ``--work_dir`` flags point to a relative or absolute path that +is the base path where the test case(s) should be set up. The default is the +current directory. It is recommended that you supply a work directory in +another location such as a temp or scratch directory to avoid confusing the +compass code with test case setups and output within the branch. + +To compare test cases with a previous run of the same test cases, use the +``-b`` or ``--baseline_dir`` flag to point to the work directory of the +previous run. Many test cases validate variables to make sure they are +identical between runs, compare timers to see how much performance has changed, +or both. See :ref:`dev_validation`. + + +.. _dev_compass_clean: + +compass clean +------------- + +The ``compass clean`` command is used to clean up one or more test cases, +removing the contents of their directories so there are no old files left +behind before a fresh call to :ref:`dev_compass_setup`. The command-line +options are: + +.. code-block:: none + + [python -m] compass clean [-h] [-t PATH] [-n NUM [NUM ...]] [-w PATH] + +The ``-h`` or ``--help`` options will display the help message describing the +command-line options. + +As with :ref:`dev_compass_setup`, the test cases to be cleaned up can be specified +either by relative path or by number. The meanings of the ``-t`` or ``--test``, +``-n`` or ``--case_number``, and ``-w`` or ``--work_dir`` flags are the same +as in :ref:`dev_compass_setup`. + +.. 
_dev_compass_suite: + +compass suite +------------- + +The ``compass suite`` command is used to set up a test suite. The command-line +options are: + +.. code-block:: none + + [python -m] compass suite [-h] -c CORE -t SUITE [-f FILE] [-s] [--clean] [-v] + [-m MACH] [-b PATH] [-w PATH] [-p PATH] + +The ``-h`` or ``--help`` options will display the help message describing the +command-line options. + +The required arguments are ``-c`` or ``--core``, one of the :ref:`dev_cores`, +where the test suite and its test cases reside; and ``-t`` or ``--test_suite``, +the name of the test suite. These are the options listed when you run +``compass list --suites``. + +You must also specify whether you would like to set up the test suite +(``-s`` or ``--setup``), clean it up (``--clean``) or both. If you choose to +clean up, the contents of each test case will be removed one by one before +(optionally) setting up each test case again. Provenance for the test suite +such as previous output and the ``provenance`` file are retained and new +output is appended. Manually delete the entire work directory if you would +like to start completely fresh. + +As in :ref:`dev_compass_setup`, you can supply one or more of: a supported +machine with ``-m`` or ``--machine``; a path where you built the MPAS model via +``-p`` or ``--mpas_model``; and a config file containing config options to +override the defaults with ``-f`` or ``--config_file``. As with +:ref:`dev_compass_setup`, you may optionally supply a work directory with +``-w`` or ``--work_dir`` and/or a baseline directory for comparison with +``-b`` or ``--baseline_dir``. If supplied, each test case in the suite that +includes :ref:`dev_validation` will be validated against the previous run in +the baseline. + +.. _dev_compass_run: + +compass run +----------- + +The ``compass run`` command is used to run a test suite, test case or step +that has been set up in the current directory: + +.. 
code-block:: none + + [python -m] compass run [-h] [suite] + +Whereas other ``compass`` commands are typically run in the local clone of the +compass repo, ``compass run`` needs to be run in the appropriate work +directory. If you are running a test suite, you need to provide the name of the +test suite because more than one suite can be set up in the same work +directory. If you are in the work directory for a test case or step, you do +not need to provide any arguments. diff --git a/docs/developers_guide/config.rst b/docs/developers_guide/config.rst deleted file mode 100644 index ab4e624f21..0000000000 --- a/docs/developers_guide/config.rst +++ /dev/null @@ -1,303 +0,0 @@ -.. _compass_config: - -config -====== - -A config file is used to setup a case directory. -This file contains information describing how to configure a case -directory, including files that the case depends on, executables that are -required for the case, namelists and streams files the case requires, and run -scripts which can be used to automate running a case. - -How to use pre-defined paths ----------------------------- - -This testing infrastructure has several predefined paths available as -attributes to several XML tags. Attributes that can use these will have the -line "Can use pre-defined paths" in their description. - -In order to help you make use of these pre-defined paths, this section will -describe what they are, and how to use them. - -To begin, there are two standard paths. These are referred to as ```` -and ````. - - - ```` is the location where the test cases are setup to run. - - ```` is the location where the testing infrastructure scripts live. 
- -Additionally, there are 4 sub-paths: - - - ```` - This is the core directory that contains the test case - - ```` - This is the configuration directory that contains the test case - - ```` - This is the resolution directory that contains the test case - - ```` - This is the test directory that contains the test case - - ```` - This is the case directory that is generated from an XML config file - -Now, all attributes that can use pre-defined paths can build a path using the -following syntax:: - - {base}_{sub} - -Where ``{base}`` can be either ``work`` or ``script``, and ``{sub}`` can be any of -``core_dir``, ``configuration_dir``, ``resolution_dir``, ``test_dir``, and ``case_dir``. - -Note however, ``case_dir`` isn't valid when {base} is ``script`` as a case -directory isn't typically generated in the script path if it's different from -the work path. - -As an example: - - - ``script_test_dir`` would point to the location that the XML files exist to - setup a testcase - - ``work_test_dir`` would point to the location that the testcase will be setup, - and will not include the case directory created from an XML file. - - -Description of XML file ------------------------ - -Below, you will see text describing the various XML tags available in a config -file. Each will describe the tag itself, any attributes the tag can have, and -what children can be placed below the tag. - -```` - This is the overarching parent tag of a config file that describes the setup for a case. - - - Attributes: - * ``case``: The name of the case directory that will be created from this - config tag. - - - Children: - * ```` - - * ```` - - * ```` - - * ```` - - * ```` - - * ```` - -```` - This tag defines the need for ensuring a required file is available, and the -appropriate ways of acquiring the file. - - - Attributes: - * ``hash``: (Optional) The expected hash of the mesh file. The acquired - mesh file will be validated using this. 
If this attribute is omitted, - the resulting file will not be validated. - - * ``dest_path``: The path the resulting file should be placed in. Should be - the name of a path defined in the config file, or optionally 'case' - which is expanded to be the case directory generated from the XML - file containing the get_file tag. Can additionally take the values of - pre-defined paths - - * ``file_name``: The name of the file that will be downloaded and placed in dest_path. - - - Children: - * ```` - -```` - This tag defined the different methods of acquiring a required file. - - - Attributes: - * ``protocol``: A description of how the mesh should be retrieved. - Currently supports ``wget``. - - * ``url``: Only used if ``protocol == wget``. The url (pre-filename) portion of - the ``wget`` command. - -```` - This tag defined the need to link an executable defined in a -configuration file (e.g. general.config) into a case directory. - - - Attributes: - * ``source``: The name of the executable, defined in the configuration file - (e.g. ``general.config``). This name is a short name, and will be - expanded to executables.source - - * ``dest``: The name of the link that will be generated from the executable. - -```` - This tag defined the need to link a file into a case directory. - - - Attributes: - * ``source_path``: The path variable from a configure file to find the - source file in. If it is empty, source is assumed to - have the full path to the file. Additionally, it can - take the values of: - - - Can use pre-defined paths - - * ``source``: The source to generate a symlink from. Relative to the case - directory that will be generated from the parent ```` tag. - - * ``dest``: The name of the resulting symlink. - -```` - This tag defines a namelist that should be generated from a template. - - - Attributes: - * ``name``: The name of the namelist file that will be generated from the - template namelist pointed to by its mode attribute. 
- - * ``mode``: The name of the mode to use from the template input files - Each core can define these arbitrarily - - - Children: - * ``