From 556d92f6ad968ed9e77eb386a0f4be52e3538169 Mon Sep 17 00:00:00 2001
From: Lina Muryanto <35277663+muryanto1@users.noreply.github.com>
Date: Wed, 8 Aug 2018 09:49:13 -0700
Subject: [PATCH] Revisit run tests (#262)

* migrate run_tests.py to use TestRunnerBase
* remove accidentally added tests/coverage.json
* revisit run_tests.py
* add cacert.pem in run_tests.py when running cdms test within the lab
* add cacert.pem in run_tests.py when running cdms test within the lab - rerun tests
* add cacert.pem in run_tests.py when running cdms test within the lab - remove install from -c cdat/label/unstable
* put back -c cdat/label/unstable
---
 .circleci/config.yml     |  17 +-
 run_tests.py             | 362 +++++++++------------------------------
 tests/cdms_runtests.json |   8 +
 3 files changed, 98 insertions(+), 289 deletions(-)
 create mode 100644 tests/cdms_runtests.json
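Notes (placed after the diffstat, where `git am` stops reading): run_tests.py
no longer drives nosetests by hand. It now defines CDMSTestRunner, a subclass
of cdat_info.TestRunnerBase from the new testsrunner dependency; the runner
collects tests with _get_tests(), performs the ESGF certificate/cookie setup
that previously lived in .circleci/config.yml, runs the suite with
_do_run_tests(), and handles the HTML/packaging steps on request. The
--subdir flag (default true, see tests/cdms_runtests.json below) makes the
suite run in a scratch directory; here is a minimal runnable sketch of just
that behavior, a reading aid rather than part of the patch, with the
test-run step stubbed out:

    # Reading aid, not part of the patch: what --subdir amounts to.
    import os
    import tempfile

    workdir = os.getcwd()
    tmpdir = tempfile.mkdtemp()   # scratch directory for test output
    os.chdir(tmpdir)              # keep test droppings out of the source tree
    try:
        pass                      # ... TestRunnerBase._do_run_tests(workdir, test_names) ...
    finally:
        os.chdir(workdir)         # the runner chdirs back when done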
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6e1432ed..4b1a83a6 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -22,8 +22,8 @@ aliases:
       conda config --set always_yes yes --set changeps1 no
       conda update -y -q conda
       conda config --set anaconda_upload no
-      conda create -n py3 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge -c cdat libcf distarray cdtime libcdms cdat_info numpy esmf esmpy libdrs_f pyopenssl nose requests flake8 myproxyclient "python>3"
-      conda create -n py2 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge -c cdat libcf distarray cdtime libcdms cdat_info numpy esmf esmpy libdrs_f pyopenssl nose requests flake8 "python<3"
+      conda create -n py3 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge -c cdat libcf distarray cdtime libcdms cdat_info testsrunner numpy esmf esmpy libdrs_f pyopenssl nose requests flake8 myproxyclient "python>3"
+      conda create -n py2 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge -c cdat libcf distarray cdtime libcdms cdat_info testsrunner numpy esmf esmpy libdrs_f pyopenssl nose requests flake8 myproxyclient "python<3"
       if [ $(uname) == "Linux" ]; then
         conda install -n py3 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge gcc_linux-64
         conda install -n py2 -c cdat/label/unstable -c cdat/label/nightly -c conda-forge gcc_linux-64
@@ -38,17 +38,10 @@ aliases:
       export PATH=$HOME/project/$WORKDIR/miniconda/bin:$PATH
       export UVCDAT_ANONYMOUS_LOG=False
       source activate py3
-      mkdir $HOME/.esg
-      echo "Get ESGF certificates"
-      echo ${ESGF_PWD} | myproxyclient logon -s esgf-node.llnl.gov -p 7512 -t 12 -S -b -l ${ESGF_USER} -o $HOME/.esg/esgf.cert
-      echo "Create .dods_cookies"
-      curl -L -v -c $HOME/.esg/.dods_cookies --cert $HOME/.esg/esgf.cert --key $HOME/.esg/esgf.cert "https://aims3.llnl.gov/thredds/dodsC/cmip5_css02_data/cmip5/output1/CMCC/CMCC-CM/decadal2005/mon/atmos/Amon/r1i1p1/cct/1/cct_Amon_CMCC-CM_decadal2005_r1i1p1_202601-203512.nc.dds"
       if [ $(uname) == "Linux" ];then
-        cp tests/dodsrccircleciLinux $HOME/.dodsrc
         export LDSHARED="$CC -shared -pthread"
         LDSHARED="$CC -shared -pthread" python setup.py install
       else
-        cp tests/dodsrccircleciDarwin $HOME/.dodsrc
         python setup.py install
       fi
       source activate py2
@@ -67,11 +60,11 @@ aliases:
       export UVCDAT_ANONYMOUS_LOG=False
       set -e
       source activate py2
-      python run_tests.py -v2
+      python run_tests.py --subdir -v2
       PY2_RESULT=$?
-      echo "*** py2 test result: "${PY2_RESULT}
+      echo "**** py2 test result: "${PY2_RESULT}
       source activate py3
-      python run_tests.py -v2
+      python run_tests.py --subdir -v2
       PY3_RESULT=$?
       echo "*** py3 test result: "${PY3_RESULT}
       echo $PY2_RESULT > $HOME/project/$WORKDIR/py2_result.txt
diff --git a/run_tests.py b/run_tests.py
index 287a7814..ce3e7df9 100644
--- a/run_tests.py
+++ b/run_tests.py
@@ -1,291 +1,99 @@
-#!/usr/bin/env python
-from __future__ import print_function
-import shutil
-import glob
-import sys
 import os
+import sys
+import shutil
-import argparse
-import multiprocessing
-import subprocess
-import codecs
-import time
-import webbrowser
-import shlex
 import cdat_info
-import numpy.distutils
-import distutils
-
-root = os.getcwd()
-cpus = multiprocessing.cpu_count()
-
-parser = argparse.ArgumentParser(description="Run VCS tests",
-                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-parser.add_argument("-H", "--html", action="store_true",
-                    help="create and show html result page")
-parser.add_argument("-p", "--package", action="store_true",
-                    help="package test results")
-
-parser.add_argument(
-    "-c",
-    "--coverage",
-    action="store_true",
-    help="run coverage (not implemented)")
-parser.add_argument(
-    "-v",
-    "--verbosity",
-    default=1,
-    choices=[
-        0,
-        1,
-        2],
-    type=int,
-    help="verbosity output level")
-parser.add_argument(
-    "-n",
-    "--cpus",
-    default=cpus,
-    type=int,
-    help="number of cpus to use")
-parser.add_argument(
-    "-f",
-    "--failed-only",
-    action="store_true",
-    default=False,
-    help="runs only tests that failed last time and are in the list you provide")
-parser.add_argument("-s","--subdir",action="store_true",help="run in a subdirectory")
-
-parser.add_argument("tests", nargs="*", help="tests to run")
-
-args = parser.parse_args()
-
-
-def abspath(path, name, prefix):
-    full_path = os.path.abspath(os.path.join(os.getcwd(), "..", path))
-    if not os.path.exists(name):
-        os.makedirs(name)
-    new = os.path.join(nm, prefix + "_" + os.path.basename(full_path))
-    try:
-        shutil.copy(full_path, new)
-    except:
-        pass
-    return new
-
-
-def findDiffFiles(log):
-    i = -1
-    file1 = ""
-    file2 = ""
-    diff = ""
-    N = len(log)
-    while log[i].find("Source file") == -1 and i > -N:
-        i -= 1
-    if i > -N:
-        file1 = log[i - 1].split()[-1]
-        for j in range(i, N):
-            if log[j].find("New best!") > -1:
-                if log[j].find("Comparing") > -1:
-                    file2 = log[j].split()[2]
-                else:
-                    k = j - 1
-                    while log[k].find("Comparing") == -1 and k > -N:
-                        k -= 1
-                    try:
-                        file2 = log[k].split()[2]
-                    except:
-                        file2 = log[k].split()[1][:-1]+log[j].split()[0]
-                        print(("+++++++++++++++++++++++++",file2))
-            if log[j].find("Saving image diff") > -1:
-                diff = log[j].split()[-1]
-                # break
-    return file1, file2, diff
-
-
-def run_command(command, join_stderr=True):
-    if isinstance(command, str):
-        command = shlex.split(command)
-    if args.verbosity > 0:
-        print("Executing %s in %s" % (" ".join(command), os.getcwd()))
-    if join_stderr:
-        stderr = subprocess.STDOUT
-    else:
-        stderr = subprocess.PIPE
-    P = subprocess.Popen(
-        command,
-        stdout=subprocess.PIPE,
-        stderr=stderr,
-        bufsize=0,
-        cwd=os.getcwd())
-    out = []
-    while P.poll() is None:
-        read = P.stdout.readline().rstrip()
-        out.append(read)
-        if args.verbosity > 1 and len(read) != 0:
-            print(read)
-    return P, out
-
-
-def run_nose(test_name):
-    opts = []
-    if args.coverage:
-        opts += ["--with-coverage"]
["nosetests", ] + opts + ["-s", test_name] - start = time.time() - P, out = run_command(command) - end = time.time() - return {test_name: {"result": P.poll(), "log": out, "times": { - "start": start, "end": end}}} - - -sys.path.append( - os.path.join( - os.path.dirname( - os.path.abspath(__file__)), - "tests")) - - -if len(args.tests) == 0: - names = glob.glob("tests/test_*.py") -else: - names = set(args.tests) - - -if args.failed_only and os.path.exists(os.path.join("tests",".last_failure")): - if not os.path.exists("tests"): - os.makedirs("tests") - f = open(os.path.join("tests",".last_failure")) - failed = set(eval(f.read().strip())) - f.close() - new_names = [] - for fnm in failed: - if fnm in names: - new_names.append(fnm) - names = new_names +import socket + +from testsrunner.Util import run_command +import tempfile + +SUCCESS = 0 + +class CDMSTestRunner(cdat_info.TestRunnerBase): + + def __setup_cdms(self): + home = os.environ["HOME"] + esg_dir = "{h}/.esg".format(h=home) + if os.path.isdir(esg_dir): + shutil.rmtree(esg_dir) + os.mkdir(esg_dir) + + # check if we are running tests from within the lab. + hostname = socket.gethostname() + cacert_pem = "" + if hostname.endswith('.llnl.gov'): + cmd = "curl https://access.llnl.gov/cspca.cer -o {h}/cspca.cer".format(h=home) + ret_code, out = run_command(cmd) + if ret_code != SUCCESS: + return ret_code + + python_ver = "python{a}.{i}".format(a=sys.version_info.major, + i=sys.version_info.minor) + coverage_opts = "" + dest = os.path.join(sys.prefix, 'lib', python_ver, 'site-packages', 'certifi', 'cacert.pem') + cmd = "cat {h}/cspca.cer >> {dest}".format(h=home, dest=dest) + cacert_pem = "--cacert {cacert}".format(cacert=dest) + + esgf_pwd = os.environ["ESGF_PWD"] + esgf_user = os.environ["ESGF_USER"] + cmd = "echo {p} | myproxyclient logon -s esgf-node.llnl.gov -p 7512 -t 12 -S -b -l {u} -o {h}/.esg/esgf.cert".format(p=esgf_pwd, u=esgf_user, h=home) + os.system(cmd) + + cookies = "-c {h}/.esg/.dods_cookies".format(h=home) + cert_opt = "--cert {h}/.esg/esgf.cert".format(h=home) + key_opt = "--key {h}/.esg/esgf.cert".format(h=home) + dds = "https://aims3.llnl.gov/thredds/dodsC/cmip5_css02_data/cmip5/output1/CMCC/CMCC-CM/decadal2005/mon/atmos/Amon/r1i1p1/cct/1/cct_Amon_CMCC-CM_decadal2005_r1i1p1_202601-203512.nc.dds" + cmd = "curl -L -v {cacert} {cookies} {cert} {key} \"{dds}\"".format(cacert=cacert_pem, + cookies=cookies, + cert=cert_opt, + key=key_opt, + dds=dds) + print("CMD: {cmd}".format(cmd=cmd)) + os.system(cmd) + + if sys.platform == 'darwin': + cmd = "cp tests/dodsrccircleciDarwin {h}/.dodsrc".format(h=home) + else: + cmd = "cp tests/dodsrccircleciLinux {h}/.dodsrc".format(h=home) + ret_code, out = run_command(cmd) + return ret_code -if args.subdir: - import tempfile - tmpdir = tempfile.mkdtemp() - os.chdir(tmpdir) - names = [ os.path.join(root,t) for t in names] - print("RUNNNIG FROM:",tmpdir) + def run(self, workdir, tests=None): -if len(names)==0: - print("No tests to run") - sys.exit(0) + os.chdir(workdir) + test_names = super(CDMSTestRunner, self)._get_tests(workdir, self.args.tests) -if args.verbosity > 1: - print(("Names:", names)) + ret_code = self.__setup_cdms() + if ret_code != SUCCESS: + return(ret_code) -# Make sure we have sample data -cdat_info.download_sample_data_files(os.path.join(distutils.sysconfig.get_python_lib(),"share","cdms2","test_data_files.txt"),cdat_info.get_sampledata_path()) + if self.args.checkout_baseline: + ret_code = super(CDMSTestRunner, self)._get_baseline(workdir) + if ret_code != SUCCESS: + 
+                return(ret_code)
-
-p = multiprocessing.Pool(args.cpus)
-outs = p.map(run_nose, names)
-results = {}
-failed = []
-for d in outs:
-    results.update(d)
-    nm = list(d.keys())[0]
-    if d[nm]["result"] != 0:
-        failed.append(nm)
-if args.subdir:
-    f = open(os.path.join(root,"tests",".last_failure"),"w")
-else:
-    f = open(os.path.join("tests",".last_failure"),"w")
-f.write(repr(failed))
-f.close()
+
+        if self.args.subdir:
+            tmpdir = tempfile.mkdtemp()
+            os.chdir(tmpdir)
+        ret_code = super(CDMSTestRunner, self)._do_run_tests(workdir, test_names)
+        os.chdir(workdir)
-
-if args.verbosity > 0:
-    print("Ran %i tests, %i failed (%.2f%% success)" %\
-        (len(outs), len(failed), 100. - float(len(failed)) / len(outs) * 100.))
-    if len(failed) > 0:
-        print("Failed tests:")
-        for f in failed:
-            print("\t", f)
-if args.html or args.package:
-    if not os.path.exists("tests_html"):
-        os.makedirs("tests_html")
-    os.chdir("tests_html")
+
+        if self.args.html or self.args.package:
+            super(CDMSTestRunner, self)._generate_html(workdir)
-
-    js = ""
+
+        if self.args.package:
+            super(CDMSTestRunner, self)._package_results(workdir)
-
-    fi = open("index.html", "w")
-    print("<!DOCTYPE html>", file=fi)
-    print("""<html><head><title>VCS Test Results %s</title>
-<link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.13/css/jquery.dataTables.min.css">
-<script type="text/javascript" src="https://code.jquery.com/jquery-1.12.4.js"></script>
-<script type="text/javascript" src="https://cdn.datatables.net/1.10.13/js/jquery.dataTables.min.js"></script>
-<script>$(document).ready(function() { $('#table_id').DataTable(); });</script>
-</head>
-""" % time.asctime(), file=fi)
-    print("<body><h1>VCS Test results: %s</h1>" % time.asctime(), file=fi)
-    print("<table id='table_id' class='display'>", file=fi)
-    print("<thead><tr><th>Test</th><th>Result</th><th>Start Time</th><th>End Time</th><th>Time</th></tr></thead>", file=fi)
-    print("<tfoot><tr><th>Test</th><th>Result</th><th>Start Time</th><th>End Time</th><th>Time</th></tr></tfoot>", file=fi)
+
+        return ret_code
-
-    for t in sorted(results.keys()):
-        result = results[t]
-        nm = t.split("/")[-1][:-3]
-        print("<tr><td>%s</td>" % nm, end=' ', file=fi)
-        fe = codecs.open("%s.html" % nm, "w", encoding="utf-8")
-        print("<!DOCTYPE html>", file=fe)
-        print("<html><head><title>%s</title>" % nm, file=fe)
-        if result["result"] == 0:
-            print("<td><a href='%s.html'>OK</a></td>" % nm, end=' ', file=fi)
-            print("</head><body>", file=fe)
-            print("<a href='index.html'>Back To Results List</a>", file=fe)
-        else:
-            print("<td><a href='%s.html'>Fail</a></td>" % nm, end=' ', file=fi)
-            print("<script>%s</script></head><body>" % js, file=fe)
-            print("<a href='index.html'>Back To Results List</a>", file=fe)
-            print("<h1>Failed test: %s on %s</h1>" % (nm, time.asctime()), file=fe)
-            file1, file2, diff = findDiffFiles(result["log"])
-            if file1 != "":
-                print('<img src="%s"><img src="%s">' % (
-                    abspath(file2, nm, "test"), abspath(file1, nm, "source")), file=fe)
-                print("<a href='index.html'>Back To Results List</a>", file=fe)
-                print("<a href='%s'>diff file</a>" % abspath(
-                    diff, nm, "diff"), file=fe)
-                print("<a href='index.html'>Back To Results List</a>", file=fe)
-        print('<h1>Log</h1><pre>%s</pre>' % "\n".join(result[
-            "log"]), file=fe)
-        print("<a href='index.html'>Back To Results List</a>", file=fe)
-        print("</body></html>", file=fe)
-        fe.close()
-        t = result["times"]
-        print("<td>%s</td><td>%s</td><td>%s</td></tr>" % (
-            time.ctime(t["start"]), time.ctime(t["end"]), t["end"] - t["start"]), file=fi)
-
-    print("</table></body></html>", file=fi)
-    fi.close()
-    if args.html:
-        webbrowser.open("file://%s/index.html" % os.getcwd())
-else:
-    if len(failed) == 0 and args.subdir:
-        print("Removing temp run dir: %s" % tmpdir)
-        os.chdir(root)
-        shutil.rmtree(tmpdir)
+
+test_suite_name = 'cdms'
-
-if args.package:
-    import tarfile
-    os.chdir(tmpdir)
-    tnm = "results_%s_%s_%s.tar.bz2" % (os.uname()[0],os.uname()[1],time.strftime("%Y-%m-%d_%H:%M"))
-    t = tarfile.open(tnm, "w:bz2")
-    print("PATH TARRING FROM: %s" % os.getcwd())
-    t.add("tests_html")
-    t.add("tests_html")
-    t.close()
-    if args.verbosity > 0:
-        print("Packaged Result Info in:", tnm)
+
+workdir = os.getcwd()
+runner = CDMSTestRunner(test_suite_name, options=["--subdir"],
+                        options_files=["tests/cdms_runtests.json"],
+                        get_sample_data=True,
+                        test_data_files_info="share/test_data_files.txt")
+ret_code = runner.run(workdir)
-
-if args.subdir and len(failed)!=0:
-    print("Not removing temp directory: %s" % tmpdir)
-os.chdir(root)
-sys.exit(len(failed))
+
+sys.exit(ret_code)
diff --git a/tests/cdms_runtests.json b/tests/cdms_runtests.json
new file mode 100644
index 00000000..2870a7a5
--- /dev/null
+++ b/tests/cdms_runtests.json
@@ -0,0 +1,8 @@
+{
+    "--subdir": {
+        "action": "store_true",
+        "default": true,
+        "help": "specifies if tests should be run in a subdir",
+        "type": null
+    }
+}