diff --git a/.github/workflows/python-app.yaml b/.github/workflows/python-app.yaml new file mode 100644 index 00000000..30effcdd --- /dev/null +++ b/.github/workflows/python-app.yaml @@ -0,0 +1,57 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Python application + +on: + push: + branches: [ "ci" ] + pull_request: + branches: [ "ci" ] + +permissions: + contents: read + +jobs: + build: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - name: Set up Python 2.7.18 + uses: actions/setup-python@v3 + with: + python-version: "2.7.18" + - name: Install OneFLUX + run: | + python -m pip install --upgrade pip + pip install setuptools==44.1.1 wheel pytest + # pip install flake8 pytest + # if [ -f frozen-requirement.txt ]; then pip install -r frozen-requirements.txt; fi + make + - name: Get data + run: | + mkdir datadir + # get necessary data + wget ftp://ftp.fluxdata.org/.ameriflux_downloads/.test/US-ARc_sample_input.zip + wget ftp://ftp.fluxdata.org/.ameriflux_downloads/.test/US-ARc_sample_output.zip + unzip US-ARc_sample_input.zip -d ./datadir/test_input/ + unzip US-ARc_sample_output.zip -d ./datadir/test_output/ + - name: Run OneFLUX partitioning_nt + run: | + # copy necessary output data to force partitioning_nt to run + cp -r ./datadir/test_output/US-ARc_sample_output/02_qc_auto/ ./datadir/test_input/US-ARc_sample_input/ + cp -r ./datadir/test_output/US-ARc_sample_output/07_meteo_proc/ ./datadir/test_input/US-ARc_sample_input/ + cp -r ./datadir/test_output/US-ARc_sample_output/08_nee_proc/ ./datadir/test_input/US-ARc_sample_input/ + python runoneflux.py partition_nt ./datadir/test_input/ US-ARc US-ARc_sample_input 2005 2006 -l fluxnet_pipeline_US-ARc.log --recint hh + - name: Run pytest + run: | + pytest oneflux/tools/partition_nt.py + # - name: Lint with flake8 + # run: | 
+ # # stop the build if there are Python syntax errors or undefined names + # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + # - name: Test with pytest + # run: | + # pytest diff --git a/Makefile b/Makefile index 58665241..4d258567 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ buildpy: @echo "Installing Python dependencies..." ifeq ($(PYPACKAGE),pip) @echo "Using pip to install dependencies..." - pip install -r requirements.txt + pip install -r frozen-requirements.txt else ifeq ($(PYPACKAGE),conda) @echo "Using conda to install dependencies..." diff --git a/frozen-requirements.txt b/frozen-requirements.txt new file mode 100644 index 00000000..d92e10cb --- /dev/null +++ b/frozen-requirements.txt @@ -0,0 +1,14 @@ +backports.functools-lru-cache==1.6.4 +cycler==0.10.0 +kiwisolver==1.1.0 +matplotlib==2.2.5 +pandas==0.20.1 +patsy==0.5.2 +pyparsing==2.4.7 +python-dateutil==2.8.2 +pytz==2022.4 +scipy==1.2.3 +six==1.16.0 +statsmodels==0.8.0 +subprocess32==3.5.4 +setuptools==44.1.1 diff --git a/oneflux/tools/partition_nt.py b/oneflux/tools/partition_nt.py index 92034b8d..60240df2 100644 --- a/oneflux/tools/partition_nt.py +++ b/oneflux/tools/partition_nt.py @@ -12,6 +12,7 @@ ''' import sys import os +import glob import logging import time import numpy @@ -19,6 +20,7 @@ import socket import numpy import calendar +import pytest from datetime import datetime from io import StringIO @@ -133,6 +135,56 @@ def run_partition_nt(datadir, siteid, sitedir, years_to_compare, remove_previous_run(datadir=datadir, siteid=siteid, sitedir=sitedir, python=py_remove_old, prod_to_compare=prod_to_compare, perc_to_compare=perc_to_compare, years_to_compare=years_to_compare) run_python(datadir=datadir, siteid=siteid, sitedir=sitedir, prod_to_compare=prod_to_compare, perc_to_compare=perc_to_compare, 
years_to_compare=years_to_compare)
+@pytest.fixture
+def get_data():
+    pass
+
+def equal_csv(csv_1, csv_2):
+    """Return True when every line of csv_2 is also present in csv_1 (order-insensitive)."""
+    with open(csv_1, 'r') as t1, open(csv_2, 'r') as t2:
+        fileone = t1.readlines()
+        filetwo = t2.readlines()
+    # BUG FIX: the original version fell off the end and returned None (falsy)
+    return all(line in fileone for line in filetwo)
+
+# TODO: deal with fixtures for running nt_test
+# step 10
+def test_run_partition_nt():
+    datadir = "./datadir/test_input"
+    data_output = "./datadir/test_output"
+    siteid = "US-ARc"
+    sitedir = "US-ARc_sample_input"
+    years = [2005] # years = [2005, 2006]
+    PROD_TO_COMPARE = ['c', 'y']
+    # PERC_TO_COMPARE = ['1.25', '3.75',]
+    PERC_TO_COMPARE = ['1.25',]
+    remove_previous_run(datadir=datadir, siteid=siteid, sitedir=sitedir, python=True,
+                        prod_to_compare=PROD_TO_COMPARE, perc_to_compare=PERC_TO_COMPARE,
+                        years_to_compare=years)
+
+    run_python(datadir=datadir, siteid=siteid, sitedir=sitedir, prod_to_compare=PROD_TO_COMPARE,
+               perc_to_compare=PERC_TO_COMPARE, years_to_compare=years)
+
+    # now do simple check of output
+    rootdir = os.path.join(datadir, sitedir, "10_nee_partition_nt")
+    nee_y_files = glob.glob(os.path.join(rootdir, "nee_y_1.25_US-ARc_2005*"))
+    ref_output = os.path.join(data_output, sitedir, "10_nee_partition_nt")
+    ref_y_files = glob.glob(os.path.join(ref_output, "nee_y_1.25_US-ARc_2005*"))
+
+    # BUG FIX: pytest ignores return values, so `return False` never failed the test;
+    # assert instead, and fail loudly if no output files were produced at all
+    assert nee_y_files, "no nee_y output files found in %s" % rootdir
+    for f, b in zip(sorted(nee_y_files), sorted(ref_y_files)):
+        assert equal_csv(f, b), "output %s does not match reference %s" % (f, b)
+
+    # glob the files with this root
+    # for file in glob.glob(nee_y_files):
+    #     print(file)
+    #     log.info(file)
+    #     if not equal_csv(file, )
+    # with open('saved/nee_y_1.25_US-ARc_2005.csv', 'r') as t1, open(nee_y, 'r') as t2:
+
+
 
 if __name__ == '__main__':
     raise ONEFluxError('Not executable')
diff --git a/oneflux_steps/Makefile b/oneflux_steps/Makefile
index d56115bf..a5e22cc9 100644
--- a/oneflux_steps/Makefile
+++ b/oneflux_steps/Makefile
@@ -31,7 +31,7 @@ CC := gcc -O3
 MKDIR = mkdir -p
 
 # copy file command (verbose, keep file metadata)
-COPY
= cp -av +COPY = cp -v SRCDIR := $(shell pwd)/ TGTDIR := ${HOME}/bin/oneflux/ diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..f5276a23 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +log_cli = 1 +log_cli_level = INFO +log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) +log_cli_date_format=%Y-%m-%d %H:%M:%S