Skip to content

Commit

Permalink
Merge pull request #5 from emolter/dev
Browse files Browse the repository at this point in the history
Dev
  • Loading branch information
emolter authored Nov 15, 2023
2 parents 1b8e220 + 996b0bd commit f660e50
Show file tree
Hide file tree
Showing 12 changed files with 8,087 additions and 5,848 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"]
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]

steps:
- uses: actions/checkout@v3
Expand Down
8 changes: 7 additions & 1 deletion nirc2_reduce/data/header_kw_dicts/nirc2.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
---
object: OBJECT
instrument: INSTRUME
observer: OBSERVER
date: DATE-OBS
time: EXPSTART
filter: FILTER
wl: EFFWAVE
subc: NAXIS1
Expand All @@ -15,6 +17,10 @@ lamps:
kw: FLSPECTR
lampon: "on"
lampoff: "off"
shutter:
kw: SHRNAME
shutopen: "open"
shutclose: "closed"
warpx_file: nirc2_distort_X_post20150413_v1.fits
warpy_file: nirc2_distort_Y_post20150413_v1.fits
dfits_kws: ["OBJECT", "DATE-OBS", "FILTER", "EFFWAVE", "NAXIS1", "ITIME", "COADDS", "AXESTAT", "FLSPECTR"]
dfits_kws: ["OBJECT", "DATE-OBS", "FILTER", "EFFWAVE", "NAXIS1", "ITIME", "COADDS", "AXESTAT", "FLSPECTR", "SHRNAME", "EXPSTART", "OBSERVER"]
26 changes: 26 additions & 0 deletions nirc2_reduce/data/header_kw_dicts/nirc2_newrtc.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
---
# Header-keyword dictionary for NIRC2 data, new-RTC variant (filename suggests
# the post-RTC-upgrade real-time controller -- TODO confirm). Maps the
# pipeline's logical field names (left) to FITS header keywords (right).
# NOTE(review): appears identical to nirc2.yaml except the observation-time
# keyword is DATE-BEG here instead of EXPSTART -- verify against real headers.
object: OBJECT
instrument: INSTRUME
observer: OBSERVER
date: DATE-OBS
# observation start time; new-RTC headers carry DATE-BEG
time: DATE-BEG
filter: FILTER
# effective wavelength of the selected filter
wl: EFFWAVE
# subarray/image size taken from the first axis length
subc: NAXIS1
itime: ITIME
coadds: COADDS
# telescope-state test: AXESTAT distinguishes dome-flat position
# ("not controlling") from on-sky tracking ("tracking")
isdome:
  kw: AXESTAT
  yesdome: not controlling
  nodome: tracking
# flat-field lamp state, read from FLSPECTR; values quoted so YAML does not
# coerce "on"/"off" to booleans
lamps:
  kw: FLSPECTR
  lampon: "on"
  lampoff: "off"
# instrument shutter state, read from SHRNAME
shutter:
  kw: SHRNAME
  shutopen: "open"
  shutclose: "closed"
# distortion-solution FITS files -- presumably consumed by the dewarping
# step; confirm against the reduction code
warpx_file: nirc2_distort_X_post20150413_v1.fits
warpy_file: nirc2_distort_Y_post20150413_v1.fits
# raw keywords listed when summarizing files (dfits-style listing -- TODO confirm)
dfits_kws: ["OBJECT", "DATE-OBS", "FILTER", "EFFWAVE", "NAXIS1", "ITIME", "COADDS", "AXESTAT", "FLSPECTR", "SHRNAME", "DATE-BEG", "OBSERVER"]
8 changes: 7 additions & 1 deletion nirc2_reduce/data/header_kw_dicts/osiris.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
---
object: OBJECT
instrument: INSTRUME
observer:
date: DATE-OBS
time:
filter: IFILTER
wl: EFFWAVE
subc: DETENDX
Expand All @@ -15,6 +17,10 @@ lamps:
kw: FLSPECTR
lampon: "on"
lampoff: "off"
shutter:
kw:
shutopen: ""
shutclose: ""
warpx_file: nirc2_distort_X_post20150413_v1.fits
warpy_file: nirc2_distort_Y_post20150413_v1.fits
dfits_kws: ["OBJECT", "INSTRUME", "DATE-OBS", "IFILTER", "EFFWAVE", "NAXIS1", "TRUITIME", "COADDS", "AXESTAT", "FLSPECTR"]
dfits_kws: ["OBJECT", "INSTRUME", "DATE-OBS", "IFILTER", "EFFWAVE", "NAXIS1", "TRUITIME", "COADDS", "AXESTAT", "FLSPECTR", ]
21 changes: 13 additions & 8 deletions nirc2_reduce/flats.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,15 @@ def __init__(self, fnames_off, fnames_on):
self.frames_off = np.asarray([Image(f).data for f in fnames_off])
self.frames_on = np.asarray([Image(f).data for f in fnames_on])

off = np.median(self.frames_off, axis=0)
on = np.median(self.frames_on, axis=0)
flat = on - off
self.flat = flat / np.median(flat)
off = np.nanmedian(self.frames_off, axis=0)
on = np.nanmedian(self.frames_on, axis=0)
if (np.nanmean(on) - np.nanmean(off))/np.nanmean(on) < 0.01:
warnings.warn(f'brightness difference between domeflaton and domeflatoff is near zero for flat including fname {fnames_on[0]}. setting to off. if this is a thermal filter (lp, ms) then this is probably ok', stacklevel=2)
flat = off
else:
flat = on - off

self.flat = flat / np.nanmedian(flat)

def write(self, outfile):
"""
Expand Down Expand Up @@ -76,10 +81,10 @@ def make_badpx_map(self, outfile, tol=0.07, blocksize=6):
warnings.filterwarnings('ignore', category=RuntimeWarning)
med = np.median(flatblock)

# if not within tolerance, set to NaN
mapblock[np.where(flatblock / med > 1 + tol)] = 0
mapblock[np.where(flatblock / med < 1 - tol)] = 0
badpx_map[i : i + blocksize, j : j + blocksize] = mapblock
# if not within tolerance, set to NaN
mapblock[np.where(flatblock / med > 1 + tol)] = 0
mapblock[np.where(flatblock / med < 1 - tol)] = 0
badpx_map[i : i + blocksize, j : j + blocksize] = mapblock
self.badpx_map = badpx_map
# change some header info and write to .fits
hdulist_out = self.dummy_fits.hdulist
Expand Down
103 changes: 59 additions & 44 deletions nirc2_reduce/multi_reduce.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ def process_flats(
"""
date_kw = self.header_kw_dict['date']
filter_kw = self.header_kw_dict['filter']
subc_kw = self.header_kw_dict['subc']
wl_kw = self.header_kw_dict['wl']
standard_filts = np.array(list(standard_wleff.keys()))
standard_wls = np.array([float(standard_wleff[key]) for key in standard_filts])
Expand All @@ -84,34 +85,37 @@ def process_flats(

counter = 0
for i, tab in enumerate(tabs_by_filt):
flatoff, flaton = sort_rawfiles.get_flats(self.tab, self.instrument)
date = self.tab[date_kw].data[0]

# if there are new flats in that filter, make master flat and badpx map
# and put them into flatdir
if (len(flatoff) > 0) and (len(flaton) > 0):
counter += 1

wl_eff = float(tab[wl_kw].data[0])
standard_idx, _ = find_nearest(standard_wls, wl_eff)
nearest_stand_filt = standard_filts[standard_idx]
short_name = nearest_stand_filt
badpx_outpath = os.path.join(
flatdir, f"{date}_badpx_map_{short_name}.fits"
)
flat_outpath = os.path.join(
flatdir, f"{date}_flat_master_{short_name}.fits"
)
flatobj = flats.Flats(flatoff, flaton)
flatobj.write(flat_outpath)
flatobj.make_badpx_map(badpx_outpath, **badpx_kwargs)
print(f"wrote files {flat_outpath} and {badpx_outpath}")

# sort by subarray
subcs, tabs_by_subc = sort_rawfiles.split_by_kw(tab, subc_kw)
for j, tab in enumerate(tabs_by_subc):

flatoff, flaton = sort_rawfiles.get_flats(tab, self.instrument)
# if there are new flats in that filter, make master flat and badpx map
# and put them into flatdir
if (len(flatoff) > 0) and (len(flaton) > 0):
counter += 1
date = tab[date_kw].data[0]
wl_eff = float(tab[wl_kw].data[0])
standard_idx, _ = find_nearest(standard_wls, wl_eff)
nearest_stand_filt = standard_filts[standard_idx]
short_name = nearest_stand_filt
badpx_outpath = os.path.join(
flatdir, f"{date}_badpx_map_{subcs[j]}_{short_name}.fits"
)
flat_outpath = os.path.join(
flatdir, f"{date}_flat_master_{subcs[j]}_{short_name}.fits"
)
flatobj = flats.Flats(flatoff, flaton)
flatobj.write(flat_outpath)
flatobj.make_badpx_map(badpx_outpath, **badpx_kwargs)
print(f"wrote files {flat_outpath} and {badpx_outpath}")
if counter == 0:
warnings.warn("No flats processed! (none found by sort_rawfiles.get_flats)")

return

def _find_flats(self, flatdir, date, filt):
def _find_flats(self, flatdir, date, filt, subc):
"""
find nearest-in-time flat and badpx map
searches flatdir for nearest-in-time flats
Expand All @@ -125,6 +129,7 @@ def _find_flats(self, flatdir, date, filt):
directory to search for flats
date : str, required.
filt : str, required.
subc : str, required.
Returns
-------
Expand All @@ -135,15 +140,15 @@ def _find_flats(self, flatdir, date, filt):
"""

# find all the flats and bad pixel maps
all_flats = glob.glob(f"{flatdir}/*flat*{filt}*.fits")
all_badpx = glob.glob(f"{flatdir}/*badpx*{filt}*.fits")
all_flats = glob.glob(f"{flatdir}/*flat*{subc}*{filt}*.fits")
all_badpx = glob.glob(f"{flatdir}/*badpx*{subc}*{filt}*.fits")
if len(all_flats) < 1:
raise FileNotFoundError(
f"No flats in directory {flatdir} match wildcard *flat*{filt}*.fits"
f"No flats in directory {flatdir} match wildcard *flat*{subc}*{filt}*.fits"
)
if len(all_badpx) < 1:
raise FileNotFoundError(
f"No badpx maps in directory {flatdir} match wildcard *badpx*{filt}*.fits"
f"No badpx maps in directory {flatdir} match wildcard *badpx*{subc}*{filt}*.fits"
)
flat_dates = [f.split("/")[-1].split("_")[0] for f in all_flats]
badpx_dates = [f.split("/")[-1].split("_")[0] for f in all_badpx]
Expand Down Expand Up @@ -182,7 +187,8 @@ def run(
self,
outdir,
flatdir,
filts_want=None):
filts_want=None,
show=False):
"""
Parameters
----------
Expand All @@ -193,17 +199,22 @@ def run(
filts_want : list-like or None, optional. default None.
list of filter names to run
if None, runs all filters found in rawdir
show : bool, optional. default False
show final quicklook images while running?
Writes
------
"""
object_kw = self.header_kw_dict['object']
date_kw = self.header_kw_dict['date']
time_kw = self.header_kw_dict['time']
wl_kw = self.header_kw_dict['wl']
subc_kw = self.header_kw_dict['subc']
filter_kw = self.header_kw_dict['filter']
flatposkw = self.header_kw_dict['isdome']['kw']
flatposarg = self.header_kw_dict['isdome']['yesdome']
trackingarg = self.header_kw_dict['isdome']['nodome']

if not os.path.exists(outdir):
os.mkdir(outdir)
Expand All @@ -212,11 +223,9 @@ def run(
standard_filts = np.array(list(standard_wleff.keys()))
standard_wls = np.array([float(standard_wleff[key]) for key in standard_filts])

# ignore all files with dome in flat position
# select only files where scope is tracking, i.e., the science frames
isinflatpos, flatpostabs = sort_rawfiles.split_by_kw(self.tab, flatposkw)
tab = flatpostabs[
np.argwhere([not (s.startswith(flatposarg)) for s in isinflatpos])[0, 0]
]
tab = flatpostabs[np.argwhere(isinflatpos == trackingarg)[0,0]]
date = tab[date_kw].data[0]

# split by object
Expand All @@ -229,40 +238,46 @@ def run(
filts, filt_tabs = sort_rawfiles.split_by_kw(targ_tab, filter_kw)

# loop over all filters
failures = 0
for i, filt_name in enumerate(filts):
filt_str = filt_name.replace(" ", "")
filt_str = filt_str.replace("+", "")
filt_str = filt_str.replace("clear", "")
targ_str = targ.split(" ")[0]
if (filts_want is not None) and (filt_name not in filts_want):
continue

print(f"Starting filter {filt_name} ({i+1} of {len(filts)})")
failures = 0
filt_tab = filt_tabs[i]
fnames = np.sort(filt_tab["FILENAME"].data)
fnames_i = np.argsort(filt_tab["FILENAME"].data)
fnames = filt_tab["FILENAME"].data[fnames_i]
time_strs = filt_tab[time_kw].data[fnames_i]
time_strs = [s[:5].replace(":", "")+'UT' for s in time_strs]

# check right number of frames for bxy3
if len(fnames) < 3:
warnings.warn(
f"Fewer than 3 frames found for object {targ}, filter {filt_name}; skipping filter {filt_name}!"
f"Fewer than 3 frames found for object {targ}, filter {filt_name}; skipping filter {filt_name}!", stacklevel=2
)
failures += 1
continue
if len(fnames) > 3:
warnings.warn(
f"More than 3 frames found for object {targ}, filter {filt_name}; using last three!"
f"More than 3 frames found for object {targ}, filter {filt_name}; using last three!", stacklevel=2
)
fnames = fnames[-3:]

# find corresponding wideband flatfield and badpx map
wl_eff = float(tab[wl_kw].data[0])
standard_idx, _ = find_nearest(standard_wls, wl_eff)
flat_filt = standard_filts[standard_idx]
subc = filt_tab[subc_kw].data[0]
try:
flat_fname, badpx_fname = self._find_flats(flatdir, date, flat_filt)
flat_fname, badpx_fname = self._find_flats(flatdir, date, flat_filt, subc)
print(f'Applying flat {flat_fname}, badpx {badpx_fname}')
except (ValueError, FileNotFoundError):
warnings.warn(
f"Could not find flat for filter {flat_filt} as requested by {date} {targ} {filt_name}; skipping filter {filt_name}!"
f"Could not find flat for filter {flat_filt} as requested by {date} {targ} {filt_name} {subc}; skipping filter {filt_name}!", stacklevel=2
)
failures += 1
continue
Expand All @@ -281,6 +296,7 @@ def run(
obs.dewarp()
obs.rotate()
obs.per_second()
obs.trim()
obs.write_frames(
[
os.path.join(
Expand All @@ -294,18 +310,17 @@ def run(
),
],
)
obs.trim()
obs.stack()
obs.crop_final(50)
obs.plot_final(
show=False,
png_file=os.path.join(outdir, f"{date}_{targ_str}_{filt_str}.png"),
)
obs.write_final(
os.path.join(
outdir, f"{date}_{targ_str}_stacked_nophot_{filt_str}.fits"
)
)
obs.com_crop_final(100)
obs.plot_final(
show=show,
png_file=os.path.join(outdir, f"{targ_str}_{filt_str}_{time_strs[0]}.png"),
)
print(f"Object {targ_str} finished with {failures} failed filters")

return
Expand Down
Loading

0 comments on commit f660e50

Please sign in to comment.