Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Rcal-298: Set Up Testing Infrastructure #14

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions .github/workflows/cancel_workflows.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
name: Cancel duplicate workflows

on:
  workflow_run:
    workflows: ["CI"]
    types:
      - requested

# Note: This has to be in workflow_run so it works for PRs from forks. And only cancel
# pull_request triggers, not push, i.e. merges into main.
jobs:
  cancel:
    runs-on: ubuntu-latest
    # Only cancel runs that were triggered by a pull_request event.
    if: ${{ github.event.workflow_run.event == 'pull_request' }}
    steps:
      - name: Cancel previous runs
        # NOTE(review): the version tag was garbled during extraction
        # ("[email protected]"); confirm the intended pinned tag of
        # styfle/cancel-workflow-action before merging.
        uses: styfle/[email protected]
        with:
          # workflow.id identifies the workflow whose older runs are cancelled
          # (the "CI" workflow that triggered this workflow_run event).
          workflow_id: ${{ github.event.workflow.id }}
232 changes: 232 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,232 @@
name: CI

on:
  push:
    branches:
      - main
      - '*.x'
    tags:
      - '*'
  pull_request:
    branches:
      - main
  schedule:
    # Weekly Sunday 9AM build (cron day-of-week 0 is Sunday)
    # * is a special character in YAML so you have to quote this string
    - cron: '0 9 * * 0'

env:
  CRDS_SERVER_URL: "https://roman-crds-test.stsci.edu"
  CRDS_PATH: "/tmp/crds_cache"
  # Quoted so generic YAML tooling keeps them as strings (env vars are
  # strings regardless).
  CRDS_CLIENT_RETRY_COUNT: "3"
  CRDS_CLIENT_RETRY_DELAY_SECONDS: "20"

jobs:
  style:
    name: Code style checks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: '3.9'
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: style-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install pyproject-flake8
      - run: pip freeze
      - run: flake8
  audit:
    name: Bandit security audit
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: '3.9'
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: audit-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install bandit
      - run: pip freeze
      # Scan the package, skipping test directories.
      - run: bandit romanisim -r -x tests,regtest
  dependencies:
    name: verify dependencies are correct
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: '3.9'
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: dependencies-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install .
      - run: pip freeze
      - run: verify_install_requires
  test:
    name: test
    needs: [ style, audit, dependencies ]
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, macos-latest ]
        # All versions quoted: an unquoted 3.10 would parse as the float 3.1.
        python: [ '3.8', '3.9', '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install ".[test]" pytest-xdist
      - run: pip freeze
      - run: pytest -n auto
  test_alldeps:
    name: test optional dependencies
    needs: [ test ]
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, macos-latest ]
        python: [ '3.8', '3.9', '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-alldeps-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install ".[test,all]" pytest-xdist
      - run: pip freeze
      - run: pytest -n auto
  test_devdeps:
    name: test developer versions
    needs: [ test ]
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, macos-latest ]
        python: [ '3.8', '3.9', '3.10' ]
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-devdeps-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      # Install in-development versions of key upstream dependencies.
      - run: pip install git+https://github.com/asdf-format/asdf git+https://github.com/spacetelescope/stpipe git+https://github.com/spacetelescope/stdatamodels --pre astropy numpy
      - run: pip install ".[test]" pytest-xdist
      - run: pip freeze
      - run: pytest -n auto
  test_pyargs:
    name: test --pyargs
    needs: [ test ]
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, macos-latest ]
        python: [ '3.8', '3.9', '3.10' ]
        exclude:
          - os: macos-latest
            python: '3.10'
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-pyargs-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install ".[test]" pytest-xdist
      - run: pip freeze
      # Run tests against the installed package rather than the source tree.
      - run: pytest -n auto ./docs --pyargs romanisim
  test_older_numpy:
    name: test Numpy ${{ matrix.numpy }} (Python ${{ matrix.python }})
    needs: [ test ]
    runs-on: ${{ matrix.os }}
    continue-on-error: true
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest ]
        python: [ '3.8', '3.9', '3.10' ]
        numpy: [ '1.20.*', '1.21.*', '1.22.*' ]
        exclude:
          # numpy 1.20 has no Python 3.10 wheels.
          - python: '3.10'
            numpy: '1.20.*'
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: ${{ matrix.python }}
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-numpy${{ matrix.numpy }}-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install -e ".[test]" pytest-xdist
      # Downgrade numpy after installing so the pinned version wins.
      - run: pip install numpy==${{ matrix.numpy }}
      - run: pip freeze
      - run: pytest -n auto
  test_with_coverage:
    name: test with coverage
    needs: [ test, test_alldeps, test_devdeps, test_pyargs, test_older_numpy ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: '3.9'
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: test-coverage-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      - run: pip install ".[test]" pytest-xdist pytest-cov
      - run: pip freeze
      - run: pytest -n auto --cov-report xml --cov-report term-missing --cov .
      - uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml
          flags: unit
          fail_ci_if_error: true
  build_docs:
    name: build HTML docs
    needs: [ test, test_alldeps, test_devdeps, test_pyargs, test_older_numpy ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        id: python
        with:
          python-version: '3.9'
      - uses: actions/cache@v3
        with:
          path: ${{ env.pythonLocation }}
          key: build-docs-${{ runner.os }}-${{ env.pythonLocation }}-${{ hashFiles('**/pyproject.toml', '**/setup.*') }}
      # graphviz/LaTeX are needed for diagram and math rendering in the docs.
      - run: sudo apt-get install graphviz texlive-latex-extra dvipng
      - run: pip install ".[docs]"
      - run: pip freeze
      # -W turns Sphinx warnings into errors.
      - run: sphinx-build -W docs docs/_build
4 changes: 4 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Pytest configuration: registers the project's custom marker so runs with
# --strict-markers (and plain runs) do not warn about an unknown marker.
[pytest]
markers =
    soctests: run only the SOC tests in the suite.

File renamed without changes.
16 changes: 16 additions & 0 deletions regtest/test_l1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
"""
Regression tests for Level 1 generation.
"""

import pytest


@pytest.mark.bigdata
def test_dummy_reg():
assert 1 > 0


@pytest.mark.bigdata
@pytest.mark.soctests
def test_dummy_soctest_reg():
assert 1 > 0
6 changes: 3 additions & 3 deletions romanisim/apt.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@
fraction of what an APT file seems able to do.
"""

import xml
from xml.etree import ElementTree
import defusedxml.ElementTree
from astropy import coordinates
from astropy import units as u
import dataclasses
Expand Down Expand Up @@ -48,7 +47,8 @@ def read_apt(filename):
"""
# I don't know anything about reading XML.
# In general it's very flexible and can do anything.
tree = ElementTree.parse(filename)
# tree = ElementTree.parse(filename)
PaulHuwe marked this conversation as resolved.
Show resolved Hide resolved
tree = defusedxml.ElementTree.parse(filename)
targs = tree.find(XMLNS + 'Targets')
target_elements = targs.findall(XMLNS + 'FixedTarget')
target_dict = dict()
Expand Down
10 changes: 5 additions & 5 deletions romanisim/bandpass.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
# the galsim bandpass names to the Roman bandpass names and vice versa.
# it would be nice to be agnostic about which one we use.
galsim_bandpasses = ['Z087', 'Y106', 'J129', 'H158', 'F184', 'W149']
galsim2roman_bandpass = {x: 'F'+x[1:] for x in galsim_bandpasses}
galsim2roman_bandpass = {x: 'F' + x[1:] for x in galsim_bandpasses}
roman2galsim_bandpass = {v: k for k, v in galsim2roman_bandpass.items()}

# provide some no-ops if we are given a key in the right bandpass
Expand Down Expand Up @@ -84,13 +84,13 @@ def compute_abflux(effarea=None):
out = dict()
for bandpass in filter_names:
integrand = abfv * constants.c / (
effarea['Wave']*u.micron)**2 # f_lambda
effarea['Wave'] * u.micron)**2 # f_lambda
integrand /= constants.h * constants.c / (
effarea['Wave']*u.micron) # hc/lambda
integrand *= effarea[bandpass]*u.m**2 # effective area in filter
effarea['Wave'] * u.micron) # hc/lambda
integrand *= effarea[bandpass] * u.m**2 # effective area in filter
# integrate.simpson looks like it loses units. So convert to something
# we know about.
integrand = integrand.to(1/(u.s*u.micron)).value
integrand = integrand.to(1 / (u.s * u.micron)).value
zpflux = integrate.simpson(integrand, effarea['Wave'])
# effarea['Wave'] is in microns, so we're left with a number of counts
# per second
Expand Down
16 changes: 8 additions & 8 deletions romanisim/catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,8 @@ def make_dummy_catalog(coord, radius=0.1, rng=None, seed=42, nobj=1000,
elif p < 0.9: # 10% of targets; stars
mu_x = 1.e5
sigma_x = 2.e5
mu = np.log(mu_x**2 / (mu_x**2+sigma_x**2)**0.5)
sigma = (np.log(1 + sigma_x**2/mu_x**2))**0.5
mu = np.log(mu_x**2 / (mu_x**2 + sigma_x**2)**0.5)
sigma = (np.log(1 + sigma_x**2 / mu_x**2))**0.5
gd = galsim.GaussianDeviate(rng, mean=mu, sigma=sigma)
flux = np.exp(gd()) / roman.exptime
if chromatic:
Expand Down Expand Up @@ -132,7 +132,7 @@ def make_dummy_table_catalog(coord, radius=0.1, rng=None, nobj=1000,
# at least not crazy for a dummy catalog
faintmag = 26 - 3 # get some brighter sources!
hlr_at_faintmag = 0.6 # arcsec
mag = faintmag - np.random.exponential(size=nobj, scale=5/3/np.log(10))
mag = faintmag - np.random.exponential(size=nobj, scale=5 / 3 / np.log(10))
# okay, now we need to mark some star/galaxy decisions.
sersic_index = np.random.uniform(low=1, high=4.0, size=nobj)
star = np.random.uniform(size=nobj) < 0.1
Expand All @@ -148,10 +148,10 @@ def make_dummy_table_catalog(coord, radius=0.1, rng=None, nobj=1000,
ba = np.clip(ba, 0.2, 1)
ba[star] = 1
# ugh. Half light radii should correlate with magnitude, with some scatter.
hlr = 10**((faintmag - mag)/5) * hlr_at_faintmag
hlr = 10**((faintmag - mag) / 5) * hlr_at_faintmag
# hlr is hlr_at_faintmag for faintmag sources
# and let's put some log normal distribution on top of this
hlr *= np.clip(np.exp(np.random.randn(nobj)*0.5), 0.1, 10)
hlr *= np.clip(np.exp(np.random.randn(nobj) * 0.5), 0.1, 10)
# let's not make anything too too small.
hlr[hlr < 0.01] = 0.01
hlr[star] = 0
Expand All @@ -168,7 +168,7 @@ def make_dummy_table_catalog(coord, radius=0.1, rng=None, nobj=1000,
mag_thisband = mag + np.random.randn(nobj)
# sigma of one mag isn't nuts. But this will be totally uncorrelated
# in different bands, so we'll get some weird colored objects
out[bandpass] = 10.**(-mag_thisband/2.5)
out[bandpass] = 10.**(-mag_thisband / 2.5)
# maggies! what units should I actually pick here?
return out

Expand Down Expand Up @@ -209,7 +209,7 @@ def table_to_catalog(table, bandpasses):

out = list()
for i in range(len(table)):
pos = coordinates.SkyCoord(table['ra'][i]*u.deg, table['dec'][i]*u.deg,
pos = coordinates.SkyCoord(table['ra'][i] * u.deg, table['dec'][i] * u.deg,
frame='icrs')
pos = util.celestialcoord(pos)
fluxes = {bp: table[bp][i] for bp in bandpasses}
Expand All @@ -218,7 +218,7 @@ def table_to_catalog(table, bandpasses):
elif table['type'][i] == 'SER':
obj = galsim.Sersic(table['n'][i], table['half_light_radius'][i])
obj = obj.shear(
q=table['ba'][i], beta=(table['pa'][i]+np.pi/2)*galsim.radians)
q=table['ba'][i], beta=(table['pa'][i] + np.pi / 2) * galsim.radians)
else:
raise ValueError('Catalog types must be either PSF or SER.')
out.append(CatalogObject(pos, obj, fluxes))
Expand Down
Loading