Commit
quick test with ROsteen & BKuhn on WR96 (#83)
* quick test with ROsteen & BKuhn on WR96

* Bump the actions group across 1 directory with 2 updates

Bumps the actions group with 2 updates in the /.github/workflows directory: [codecov/codecov-action](https://github.com/codecov/codecov-action) and [OpenAstronomy/github-actions-workflows](https://github.com/openastronomy/github-actions-workflows).


Updates `codecov/codecov-action` from 4.6.0 to 5.1.2
- [Release notes](https://github.com/codecov/codecov-action/releases)
- [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md)
- [Commits](codecov/codecov-action@b9fd7d1...1e68e06)

Updates `OpenAstronomy/github-actions-workflows` from 1.13.0 to 1.15.0
- [Release notes](https://github.com/openastronomy/github-actions-workflows/releases)
- [Commits](OpenAstronomy/github-actions-workflows@9244411...9f1f432)

---
updated-dependencies:
- dependency-name: codecov/codecov-action
  dependency-type: direct:production
  update-type: version-update:semver-major
  dependency-group: actions
- dependency-name: OpenAstronomy/github-actions-workflows
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: actions
...

Signed-off-by: dependabot[bot] <[email protected]>

* fix codestyle updates

* quick test with ROsteen & BKuhn on WR96

* fix codestyle updates

* Remove 3.10 CI test

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Ricky O'Steen <[email protected]>
3 people authored Jan 23, 2025
1 parent 665f346 commit 18a6397
Showing 13 changed files with 236 additions and 161 deletions.
6 changes: 0 additions & 6 deletions .github/workflows/ci_tests.yml
@@ -30,12 +30,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
python: '3.10'
tox_env: 'py310-test'
allow_failure: false
prefix: ''

- os: ubuntu-latest
python: '3.11'
tox_env: 'py311-test-cov'
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -31,7 +31,7 @@ classifiers = [
'Topic :: Scientific/Engineering :: Astronomy',
]
dynamic = ['version']
requires-python = '>=3.10'
requires-python = '>=3.11'
dependencies = [
'astropy>=5.0.4',
'astroquery',
175 changes: 98 additions & 77 deletions slitlessutils/core/modules/extract/single/single.py
@@ -48,10 +48,10 @@ class Single(Module):
to set this. Default is None.
outpath : str, optional
A path where output products will be written. If set to None or invalid,
then CWD will be used. If set to valid str, then path will be created
(if possible), then used. If still not valid, then will use CWD.
Default is None.
A path where output products will be written. If set to None or
invalid, then CWD will be used. If set to valid str, then path
will be created (if possible), then used. If still not valid, then
will use CWD. Default is None.
writecsv : bool, optional
Flag to write light-weight boolean files. Default is True.
@@ -275,7 +275,7 @@ def _combine(self, results, data, sources, **kwargs):
func = np.array(res['func'])
wave = np.array(res['wave'])
cont = np.array(res['cont'])
file = np.array(res['file'])
nimg = np.array(res['file'])

# find the spectral bins that these elements belong to
lamb = pars.indices(wave)
@@ -290,7 +290,7 @@ def _combine(self, results, data, sources, **kwargs):
cont = cont[g] # contamination model
wave = wave[g] # wavelengths in A
lamb = lamb[g] # wavelength indices
file = file[g]
nimg = nimg[g]

# get reverse elements
ri = indices.reverse(lamb)
@@ -390,7 +390,7 @@ def apply_bitmask(dqa, *args, bitmask=None):
out = tuple(a[g] for a in args)
else:
LOGGER.warning(f'Bitmask ({bitmask}) removes all pixels')
out = tuple([] for a in args)
out = tuple(np.empty(0, dtype=a.dtype) for a in args)
if len(args) == 1:
out = out[0]
return out
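
The hunk above returns dtype-matched empty arrays instead of bare Python lists when the bitmask rejects every pixel, so downstream indexing and concatenation keep the correct types. A minimal sketch of the difference, using hypothetical arrays rather than anything from the repository:

import numpy as np

xx = np.array([3, 4, 5], dtype=int)   # pixel x-coordinates
vv = np.array([0.2, 0.5, 0.3])        # weights

# old behavior: an empty list silently becomes a float64 array
old = np.asarray([])                  # dtype float64, even when standing in for xx

# new behavior: empty arrays that preserve each input's dtype
new_xx = np.empty(0, dtype=xx.dtype)  # shape (0,), dtype int
new_vv = np.empty(0, dtype=vv.dtype)  # shape (0,), dtype float64

# so later concatenations keep integer pixel indices valid for fancy indexing
assert np.concatenate([new_xx, xx]).dtype == xx.dtype
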
@@ -446,7 +446,11 @@ def extract(self, data, sources, **kwargs):

# sort out optional inputs
cartesian = kwargs.get('cartesian', True)
profile = kwargs.get('profile', 'uniform').lower()
profile = kwargs.get('profile', 'uniform')
if isinstance(profile, str):
profile = profile.lower()
else:
profile = 'none'

# padding for the contamination models
padx = (5, 5)
@@ -484,7 +488,6 @@ def extract(self, data, sources, **kwargs):
time = phdr['EXPTIME']
sci /= time
unc /= time

# initialize the contamination modeling
self.contamination.make_model(sources, h5)

@@ -537,7 +540,7 @@ def extract(self, data, sources, **kwargs):

if cartesian:
# for making the residuals
# mod = np.zeros_like(sci)
mod = np.zeros_like(sci)
for x in range(x0, x1 + 1, 1):
g = np.where(x == xg)[0]
xx = xg[g]
@@ -549,75 +552,93 @@ def extract(self, data, sources, **kwargs):
# apply the bitmask
xx, yy, vv, ww, dw = self.apply_bitmask(
dqa[yy, xx], xx, yy, vv, ww, dw, bitmask=bitmask)

ww, _y = indices.decimate(ww * vv, yy)
vv, yy = indices.decimate(vv, yy)
ww /= vv
xx = np.full_like(yy, x, dtype=int)

# the average wavelength in this column
wave = np.average(ww, weights=vv)

# compute the calibrations
disp = detdata.config[ordname].dispersion(
*source.xyc, wavelength=ww)
sens = detdata.config[ordname].sensitivity(ww)
flat = flatfield(xx, yy, ww)
area = detdata.relative_pixelarea(xx, yy)
den = flat * area * sens * fluxscale * disp

# apply calibrations
ss = sci[yy, xx] / den
uu = unc[yy, xx] / den

# set the cross dispersion profile
if profile == 'uniform':
# simple summing over pixels
flam = np.sum(ss)
func = np.sqrt(np.sum(uu**2))
else:
# weighting by profile (a la Horne)
if profile == 'forward':
prof = vv.copy()
elif profile == 'data':
prof = np.maximum(sci[yy, x], 0.)
if len(xx) > 0:
ww, _y = indices.decimate(ww * vv, yy)
vv, yy = indices.decimate(vv, yy)
ww /= vv
xx = np.full_like(yy, x, dtype=int)

# the average wavelength in this column
wave = np.average(ww, weights=vv)

# compute the calibrations
disp = detdata.config[ordname].dispersion(
*source.xyc, wavelength=ww)
sens = detdata.config[ordname].sensitivity(ww)
flat = flatfield(xx, yy, ww)
area = detdata.relative_pixelarea(xx, yy)
den = flat * area * sens * fluxscale * disp

# apply calibrations, but guard against
# divide by zero errors
bad = np.where(den == 0)[0]
den[bad] = 1.
ss = sci[yy, xx] / den
uu = unc[yy, xx] / den
ss[bad] = np.nan
uu[bad] = np.nan

# set the cross dispersion profile
if profile is None or profile == 'none':
# simple summing over pixels
flam = np.nansum(ss)
func = np.sqrt(np.nansum(uu**2))

# set the profile to a dummy value
prof = 1.0
else:
msg = f'Profile setting ({profile}) is invalid'
LOGGER.error(msg)
raise RuntimeError(msg)

# normalize the profile
prof /= np.sum(prof)

wht = prof / uu**2
norm = np.sum(prof * wht)
flam = np.sum(ss * wht) / norm
func = np.sqrt(np.sum(prof) / norm)

# this can happen if sens==0
if np.isnan(flam):
flam = 0.0

# update the model
# mod[yy, x] = flam*den*prof

# compute contamination
if self.contamination:
cc = chdu.data[yy - yoff, xx - xoff] / den
if profile == 'uniform':
cont = np.sum(cc)
# if using a profile
if profile == 'forward':
prof = vv.copy()
elif profile == 'data':
prof = np.maximum(sci[yy, x], 0.)
elif profile == 'uniform':
prof = np.ones_like(ss, dtype=float)
else:
msg = f'Profile setting ({profile}) is invalid'
LOGGER.error(msg)
raise RuntimeError(msg)

# normalize the profile
profnorm = np.nansum(prof)
prof /= profnorm

wht = prof / uu**2
norm = np.nansum(prof * wht)

# take out some protection from
# divide by zero
if norm == 0:
flam = np.nan
func = np.nan
else:
flam = np.nansum(ss * wht) / norm
func = np.sqrt(profnorm / norm)

# update the model
mod[yy, x] = flam * den * prof

# compute contamination
if self.contamination:
cc = chdu.data[yy - yoff, xx - xoff] / den
if profile == 'uniform':
cont = np.sum(cc)
else:
cont = np.sum(cc * wht) / norm
else:
cont = np.sum(cc * wht) / norm
else:
cont = 0.0

# save the results
results[segid]['flam'].append(flam)
results[segid]['func'].append(func)
results[segid]['wave'].append(wave)
results[segid]['dwav'].append(0.0)
results[segid]['cont'].append(cont)

cont = 0.0

# save the results
results[segid]['flam'].append(flam)
results[segid]['func'].append(func)
results[segid]['wave'].append(wave)
results[segid]['dwav'].append(0.0)
results[segid]['cont'].append(cont)
with open(f'{data.dataset}_{segid}.sed', 'w') as fp:
for args in zip(results[segid]['wave'], results[segid]['flam']):
print(*args, file=fp)

# fits.writeto(f'{data.dataset}_res.fits', (sci-mod)/unc, overwrite=True)
else:

di = 0.5
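
The profile-weighted branch in the extract() hunk above follows the optimal-extraction weighting of Horne (1986), as the "(a la Horne)" comment in the earlier version of this block notes. In standard notation (a sketch for reference, not copied from the code), with a normalized cross-dispersion profile P_i, calibrated pixel values S_i, and uncertainties sigma_i, the extracted flux and its variance are

\hat{f}_\lambda = \frac{\sum_i P_i S_i / \sigma_i^2}{\sum_i P_i^2 / \sigma_i^2},
\qquad
\mathrm{Var}\big(\hat{f}_\lambda\big) = \frac{\sum_i P_i}{\sum_i P_i^2 / \sigma_i^2}.

The flux estimator is what the lines wht = prof / uu**2, norm = np.nansum(prof * wht), and flam = np.nansum(ss * wht) / norm compute, and the new norm == 0 guard covers columns where every weight drops out (for example when the sensitivity or flat field is zero).
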
2 changes: 1 addition & 1 deletion slitlessutils/core/modules/module.py
@@ -50,7 +50,7 @@ class Module:
"""

def __init__(self, func, path='tables', ncpu=None, postfunc=None,
def __init__(self, func, path='su_tables', ncpu=None, postfunc=None,
multiprocess=True, **kwargs):

self.multiprocess = multiprocess
5 changes: 3 additions & 2 deletions slitlessutils/core/modules/tabulate/tabulate.py
@@ -37,9 +37,10 @@ class Tabulate(Module):
"""

# should be +0.5 for uvis
# define the pixel footprint
DX = np.array([0, 0, 1, 1], dtype=float) - 0.5
DY = np.array([0, 1, 1, 0], dtype=float) - 0.5
DX = np.array([0, 0, 1, 1], dtype=float) + 0.5
DY = np.array([0, 1, 1, 0], dtype=float) + 0.5

DESCRIPTION = 'Tabulating'

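
The DX/DY arrays give the four corner offsets of a pixel footprint, and the sign flip from -0.5 to +0.5 changes which corner convention is assumed (the existing "should be +0.5 for uvis" comment points at the UVIS case). An illustrative sketch of how such offsets are typically used to build a pixel's corner polygon; the helper below is hypothetical and not part of the package:

import numpy as np

DX = np.array([0, 0, 1, 1], dtype=float) + 0.5
DY = np.array([0, 1, 1, 0], dtype=float) + 0.5

def pixel_corners(x, y):
    # corner coordinates of pixel (x, y) under this offset convention
    return x + DX, y + DY

# e.g. pixel (10, 20) -> x corners [10.5, 10.5, 11.5, 11.5], y corners [20.5, 21.5, 21.5, 20.5]
print(pixel_corners(10, 20))
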
