From be0f1f2b8b11d5e559a0698cf544d81ea71c5f19 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Mon, 23 Sep 2024 14:04:46 -0400
Subject: [PATCH 01/13] Remove use of aiprops in reporting

---
 mica/report/report.py              | 17 ------------
 mica/report/templates/aiprops.html | 42 ------------------------------
 mica/report/templates/vv.html      |  1 -
 3 files changed, 60 deletions(-)
 delete mode 100644 mica/report/templates/aiprops.html

diff --git a/mica/report/report.py b/mica/report/report.py
index 426898b2..3a459c4d 100644
--- a/mica/report/report.py
+++ b/mica/report/report.py
@@ -521,14 +521,6 @@ def star_info(id):
     }


-def get_aiprops(obsid):
-    ACA_DB = Sqsh(dbi="sybase", server="sybase", database="aca", user="aca_read")
-    aiprops = ACA_DB.fetchall(
-        "select * from aiprops where obsid = {} order by tstart".format(obsid)
-    )
-    return aiprops
-
-
 def main(obsid):
     report_root = REPORT_ROOT
     strobs = "%05d" % obsid
@@ -747,15 +739,6 @@ def main(obsid):
         f.close()
         interval["loglink"] = newlogname

-    aiprops = get_aiprops(obsid)
-    aiprops_template = jinja_env.get_template("aiprops.html")
-    aiprops_page = aiprops_template.render(obsid=obsid, aiprops=aiprops)
-    aiprops_page_file = os.path.join(outdir, "aiprops.html")
-    logger.debug("AIPROPS report to {}".format(aiprops_page_file))
-    f = open(aiprops_page_file, "w")
-    f.write(aiprops_page)
-    f.close()
-
     props_template = jinja_env.get_template("props.html")
     props_page = props_template.render(obsid=obsid, vv=vv)
     props_page_file = os.path.join(outdir, "props.html")
diff --git a/mica/report/templates/aiprops.html b/mica/report/templates/aiprops.html
deleted file mode 100644
index 5b394105..00000000
--- a/mica/report/templates/aiprops.html
+++ /dev/null
@@ -1,42 +0,0 @@
-<html>
-<head>
-<title>V&V obsid {{ obsid }}</title>
-</head>
-<body>
-<h2>V&V obsid {{ obsid }}</h2>
-<h3>AIPROPS</h3>
-<table border=1>
-<tr>
-{% for field in ['datestart', 'datestop', 'obsid', 'pcad_mode',
-'aspect_mode', 'sim_mode'] %}
-<th>{{ field }}</th>
-{% endfor %}
-</tr>
-{% for row in aiprops %}
-<tr>
-{% for field in ['datestart', 'datestop', 'obsid', 'pcad_mode',
-'aspect_mode', 'sim_mode'] %}
-<td>{{ row[field] }}</td>
-{% endfor %}
-</tr>
-{% endfor %}
-</table>
-</body>
-</html>
diff --git a/mica/report/templates/vv.html b/mica/report/templates/vv.html
index ab9d2741..b7ac4825 100644
--- a/mica/report/templates/vv.html
+++ b/mica/report/templates/vv.html
@@ -93,7 +93,6 @@
 
 Aspect Intervals
 
-AIPROPS and Guide/Fid Props
 
From a4acabea8ed235a59c54917622b5c728b3c066cd Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Fri, 27 Sep 2024 17:10:32 -0400
Subject: [PATCH 02/13] Remove slew_err_arcsec from manvr printing in report

---
 mica/report/templates/report.html | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mica/report/templates/report.html b/mica/report/templates/report.html
index e32964a4..64ccb8ee 100644
--- a/mica/report/templates/report.html
+++ b/mica/report/templates/report.html
@@ -167,8 +167,8 @@
 
 Starcheck
 
 manvr['target_Q2'], manvr['target_Q3'], manvr['target_Q4']) }}
-{{ " MANVR: Angle= %.2f deg Duration= %d sec Slew err= %.1f arcsec"|format(
-manvr['angle_deg'], manvr['duration_sec'], manvr['slew_err_arcsec']) -}}
+{{ " MANVR: Angle= %.2f deg Duration= %d sec"|format(
+manvr['angle_deg'], manvr['duration_sec']) -}}
 {% endfor %}

From 29c0d071926721baeb9f129833217a6f65d4a991 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Thu, 3 Oct 2024 11:21:44 -0400
Subject: [PATCH 03/13] Use 'with' on Sqsh sqlsao calls instead of broken conn.close

---
 mica/vv/core.py | 32 +++++++++++++++-----------------
 1 file changed, 15 insertions(+), 17 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index d93b51c1..b0d2a269 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -194,16 +194,15 @@ def _aiid_info(self, save_cols=save_asol_header):
     # this should probably be handled in mica.archive.asp_l1
     @staticmethod
     def _asp1_lookup(obsid, obi, revision):
-        apstat = Sqsh(dbi="sybase", server="sqlsao", database="axafapstat")
-        # take these from the first aspect solution file header
-        aspect_1 = apstat.fetchall(
-            """SELECT * FROM aspect_1
-               WHERE obsid = {obsid}
-               AND obi = {obi}
-               AND revision = {revision}
-            """.format(obsid=obsid, obi=obi, revision=revision)
-        )
-        apstat.conn.close()
+        with Sqsh(dbi="sybase", server="sqlsao", database="axafapstat") as apstat:
+            # take these from the first aspect solution file header
+            aspect_1 = apstat.fetchall(
+                """SELECT * FROM aspect_1
+                   WHERE obsid = {obsid}
+                   AND obi = {obi}
+                   AND revision = {revision}
+                """.format(obsid=obsid, obi=obi, revision=revision)
+            )
         if len(aspect_1) > 1:
             raise ValueError("More than one entry found for obsid/obi/rev in aspect_1")
         if len(aspect_1) == 0:
@@ -932,13 +931,12 @@ def _get_prop(self, propname, propstring):
     def _read_ocat_stars(self):
         obsid = int(self.asol_header["OBS_ID"])
         obi = int(self.asol_header["OBI_NUM"])
-        ocat_db = Sqsh(dbi="sybase", server="sqlsao", database="axafocat")
-        stars = ocat_db.fetchall(
-            "select * from stars where "
-            "obsid = {} and obi = {} "
-            "and type != 0".format(obsid, obi)
-        )
-        ocat_db.conn.close()
+        with Sqsh(dbi="sybase", server="sqlsao", database="axafocat") as ocat_db:
+            stars = ocat_db.fetchall(
+                "select * from stars where "
+                "obsid = {} and obi = {} "
+                "and type != 0".format(obsid, obi)
+            )
         if len(np.unique(stars["obi"])) > 1:
             raise ValueError(
                 "Multi-obi observation. OCAT stars unhelpful to identify missing slot"
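For reference, the change in this patch is just the standard context-manager form of the same
queries. A minimal standalone sketch of the pattern (assuming `Sqsh` is importable from
`ska_dbi.sqsh` as hinted below and supports the context-manager protocol, which this patch
relies on; `fetch_aspect_1` is a hypothetical helper, not part of the patch):

    from ska_dbi.sqsh import Sqsh  # assumed import path; mica.vv.core already has Sqsh in scope

    def fetch_aspect_1(obsid, obi, revision):
        # Leaving the "with" block closes the connection even if fetchall() raises,
        # which the old explicit apstat.conn.close() call did not guarantee.
        with Sqsh(dbi="sybase", server="sqlsao", database="axafapstat") as apstat:
            return apstat.fetchall(
                "SELECT * FROM aspect_1 WHERE obsid = {} AND obi = {} "
                "AND revision = {}".format(obsid, obi, revision)
            )
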
From 3e2472e28bf50eec8a2512e291c4a099b1f6070c Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Thu, 3 Oct 2024 11:40:02 -0400
Subject: [PATCH 04/13] Change some dict checks to gets

---
 mica/vv/core.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index b0d2a269..ffcf9882 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -268,7 +268,7 @@ def _save_info_json(self, file=None):
         logger.info("Saved JSON to {}".format(file))

     def slots_to_db(self):
-        if self.info()["aspect_1_id"] is None:
+        if self.info().get("aspect_1_id") is None:
             logger.warning(
                 "Database save not implemented for obsids without aspect_1_ids"
             )
@@ -300,7 +300,7 @@ def _get_ccd_temp(tstart, tstop):

     def slots_to_table(self):
         save = self.info()
-        if save["aspect_1_id"] is None:
+        if save.get("aspect_1_id") is None:
             logger.warning("Table save not implemented for obsids without aspect_1_ids")
             return
         mean_aacccdpt = self._get_ccd_temp(save["tstart"], save["tstop"])

From ddc66a5871036f5227be44ddf258e4c7d0b3dd51 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Fri, 4 Oct 2024 12:16:20 -0400
Subject: [PATCH 05/13] Fix bug that tbl.cols not accessible out of context scope

---
 mica/stats/update_acq_stats.py   | 2 +-
 mica/stats/update_guide_stats.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/mica/stats/update_acq_stats.py b/mica/stats/update_acq_stats.py
index 32ca5f76..2e68be65 100755
--- a/mica/stats/update_acq_stats.py
+++ b/mica/stats/update_acq_stats.py
@@ -481,7 +481,7 @@ def _get_obsids_to_update(check_missing=False):
     try:
         with tables.open_file(table_file, "r") as h5:
             tbl = h5.get_node("/", "data")
-        last_tstart = tbl.cols.guide_tstart[tbl.colindexes["guide_tstart"][-1]]
+            last_tstart = tbl.cols.guide_tstart[tbl.colindexes["guide_tstart"][-1]]
     except Exception:
         last_tstart = "2002:012:12:00:00"
     kadi_obsids = events.obsids.filter(start=last_tstart)
diff --git a/mica/stats/update_guide_stats.py b/mica/stats/update_guide_stats.py
index 6fdffeaa..be082620 100755
--- a/mica/stats/update_guide_stats.py
+++ b/mica/stats/update_guide_stats.py
@@ -411,7 +411,7 @@ def _get_obsids_to_update(check_missing=False, table_file=None, start=None, stop
     try:
         with tables.open_file(table_file, "r") as h5:
             tbl = h5.get_node("/", "data")
-        last_tstart = tbl.cols.kalman_tstart[tbl.colindexes["kalman_tstart"][-1]]
+            last_tstart = tbl.cols.kalman_tstart[tbl.colindexes["kalman_tstart"][-1]]
     except Exception:
         last_tstart = start if start is not None else "2002:012:12:00:00"
     kadi_obsids = events.obsids.filter(start=last_tstart, stop=stop)

From ea27feae6766183826d0c3e5c9a5ef9601958e9e Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Fri, 4 Oct 2024 12:18:33 -0400
Subject: [PATCH 06/13] Ruff format fix

---
 mica/stats/update_guide_stats.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/mica/stats/update_guide_stats.py b/mica/stats/update_guide_stats.py
index be082620..2252ecd3 100755
--- a/mica/stats/update_guide_stats.py
+++ b/mica/stats/update_guide_stats.py
@@ -411,7 +411,9 @@ def _get_obsids_to_update(check_missing=False, table_file=None, start=None, stop
     try:
         with tables.open_file(table_file, "r") as h5:
             tbl = h5.get_node("/", "data")
-            last_tstart = tbl.cols.kalman_tstart[tbl.colindexes["kalman_tstart"][-1]]
+            last_tstart = tbl.cols.kalman_tstart[
+                tbl.colindexes["kalman_tstart"][-1]
+            ]
     except Exception:
         last_tstart = start if start is not None else "2002:012:12:00:00"
     kadi_obsids = events.obsids.filter(start=last_tstart, stop=stop)

From 23d867192968388ee0c82d2dc1cc8a02e3b3259c Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Sun, 6 Oct 2024 10:16:32 -0400
Subject: [PATCH 07/13] Handle missing omitted stars in 1.8

---
 mica/vv/core.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index ffcf9882..7f67303c 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -1087,10 +1087,17 @@ def _read_in_data(self):
         if ocat_info["type"] == 1:
             import agasc
+            from agasc.agasc import IdNotFound

-            star_info = agasc.get_star(
-                ocat_info["id"], use_supplement=False, agasc_file="miniagasc_*"
-            )
+            try:
+                star_info = agasc.get_star(
+                    ocat_info["id"], use_supplement=False, agasc_file="miniagasc_*"
+                )
+            except IdNotFound:
+                agasc_file = agasc.get_agasc_filename("miniagasc_*", version="1p7")
+                star_info = agasc.get_star(
+                    ocat_info["id"], use_supplement=False, agasc_file=agasc_file
+                )
             mock_prop = dict(
                 cel_loc_flag=0,
                 id_status="OMITTED",

From d9e51682ddf8d1214164dd302e1f82deb25074fc Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Sun, 6 Oct 2024 10:53:20 -0400
Subject: [PATCH 08/13] Eh, just use full agasc

---
 mica/vv/core.py | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index 7f67303c..5b20ad25 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -1087,17 +1087,10 @@ def _read_in_data(self):
         if ocat_info["type"] == 1:
             import agasc
-            from agasc.agasc import IdNotFound

-            try:
-                star_info = agasc.get_star(
-                    ocat_info["id"], use_supplement=False, agasc_file="miniagasc_*"
-                )
-            except IdNotFound:
-                agasc_file = agasc.get_agasc_filename("miniagasc_*", version="1p7")
-                star_info = agasc.get_star(
-                    ocat_info["id"], use_supplement=False, agasc_file=agasc_file
-                )
+            star_info = agasc.get_star(
+                ocat_info["id"], use_supplement=False, agasc_file="agasc*"
+            )
             mock_prop = dict(
                 cel_loc_flag=0,
                 id_status="OMITTED",

From 44a8278a372c263f5f1286fd3c561266455214c9 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Sun, 6 Oct 2024 17:09:13 -0400
Subject: [PATCH 09/13] Add COLOR1 to dashboard agasc columns for proseco filter

---
 mica/centroid_dashboard.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mica/centroid_dashboard.py b/mica/centroid_dashboard.py
index e433acdf..f3774e4d 100755
--- a/mica/centroid_dashboard.py
+++ b/mica/centroid_dashboard.py
@@ -689,6 +689,7 @@ def plot_crs_visualization(
         "MAG_ACA",
         "MAG_ACA_ERR",
         "CLASS",
+        "COLOR1",
         "ASPQ1",
         "ASPQ2",
         "ASPQ3",

From 52f59a140c9d9a851da7c0bd116128637d8e6491 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Wed, 9 Oct 2024 13:57:51 -0400
Subject: [PATCH 10/13] Use a dictionary instead of a table to avoid truncating text

---
 mica/report/report.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/mica/report/report.py b/mica/report/report.py
index 3a459c4d..2dc43d5c 100644
--- a/mica/report/report.py
+++ b/mica/report/report.py
@@ -307,6 +307,9 @@ def official_vv_notes(obsid, summary):

     all_vv["aspect_review"] = None

+    # Convert all_vv to a list of dictionaries
+    all_vv = [dict(row) for row in all_vv]
+
     for report in all_vv:
         aspect_rev = vv_db.fetchone(
             f"select * from vvreview where vvid = {report['vvid']}"
         )

From 6f54ac3a111961792230af1734c8a88af1537063 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Wed, 9 Oct 2024 15:18:09 -0400
Subject: [PATCH 11/13] Open up filename globs and regex for CP data

---
 mica/vv/core.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index 5b20ad25..64956ef5 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -377,7 +377,7 @@ def _aiid_from_asol(self, asol_file, obsdir):
             self.obspar["tstop"] <= header["TSTART"]
         ):
             return None
-        aiid_match = re.search(r"(pcadf\d+[^_]*)_", asol_file)
+        aiid_match = re.search(r"(pcad\w\d+[^_]*)_", asol_file)
         if aiid_match:
             return dict(id=aiid_match.group(1), dir=obsdir)

@@ -946,7 +946,7 @@ def _read_ocat_stars(self):
     def _identify_missing_slot(self, slot):
         datadir = self.aspdir
         adat_files = glob(
-            os.path.join(datadir, "pcadf*N???_adat{}1.fits*".format(slot))
+            os.path.join(datadir, "pcad*N???_adat{}1.fits*".format(slot))
         )
         if not len(adat_files):
             return None

From 55508a65cd334eba7dc8998d6b01a99e87319ad2 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Wed, 9 Oct 2024 15:21:13 -0400
Subject: [PATCH 12/13] Ruff

---
 mica/vv/core.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/mica/vv/core.py b/mica/vv/core.py
index 64956ef5..923d2136 100644
--- a/mica/vv/core.py
+++ b/mica/vv/core.py
@@ -945,9 +945,7 @@ def _read_ocat_stars(self):

     def _identify_missing_slot(self, slot):
         datadir = self.aspdir
-        adat_files = glob(
-            os.path.join(datadir, "pcad*N???_adat{}1.fits*".format(slot))
-        )
+        adat_files = glob(os.path.join(datadir, "pcad*N???_adat{}1.fits*".format(slot)))
         if not len(adat_files):
             return None
         hdulist = pyfits.open(adat_files[0])

From fd55941b1e7eab68e153af617f61dec53d7c9090 Mon Sep 17 00:00:00 2001
From: Jean Connelly
Date: Tue, 29 Oct 2024 13:27:43 -0400
Subject: [PATCH 13/13] Add a get_aca_images method for mica l0 (#301)

* Add a get_aca_images method for mica l0

* Handle instances when slot_data is empty / has no rows

* Test more about empty table

* Improve docstring

* Implement full compliance with maude_decom API

* Remove pyrightconfig.json

* Ruff fixes

* Fix docstring formatting

* Exclude new IMG_VCDUCTR and IMG_TIME from col comparison

* Remove IMG_TIME as it was cut

* Add IMG_VCDUCTR column to match maude

---------

Co-authored-by: Tom Aldcroft
---
 mica/archive/aca_l0.py            | 161 +++++++++++++++++++++++++++++-
 mica/archive/tests/test_aca_l0.py |  71 ++++++++++++-
 2 files changed, 228 insertions(+), 4 deletions(-)

diff --git a/mica/archive/aca_l0.py b/mica/archive/aca_l0.py
index 71b05015..4b74f6e4 100644
--- a/mica/archive/aca_l0.py
+++ b/mica/archive/aca_l0.py
@@ -17,9 +17,10 @@
 import Ska.File
 import ska_dbi
 import tables
-from astropy.table import Table
+from astropy.table import Table, vstack
 from Chandra.Time import DateTime
 from chandra_aca.aca_image import ACAImage
+from cxotime import CxoTimeLike
 from numpy import ma

 from mica.common import MICA_ARCHIVE, MissingDataError
@@ -61,6 +62,8 @@
     ("MNF", ">i4"),
     ("END_INTEG_TIME", ">f8"),
     ("INTEG", ">f4"),
+    ("PIXTLM", "<U4"),
+    ("BGDTYP", "<U4"),

...

+def _extract_bits_as_uint8(arr: np.ndarray, bit_start: int, bit_stop: int) -> np.ndarray:
+    """Extract UINT8's from a 2-D array of uint8 bit values into a new array.
+
+    Input array is Nx8 where N is the number of rows and 8 is the number of bits.
+    It must be in little-endian order with the most significant bit in the rightmost
+    position of the array.
+    """
+    # This returns a shape of (N, 1) so we need to squeeze it.
+    out = np.packbits(arr[:, bit_start:bit_stop], axis=1, bitorder="little")
+    return out.squeeze()
+
+
+def get_aca_images(
+    start: CxoTimeLike,
+    stop: CxoTimeLike,
+):
+    """
+    Get ACA images from mica L0 archive in same format as maude_decom.get_aca_images.
+
+    This gets a table of images from the mica L0 file archive that includes the same
+    output columns as ``chandra_aca.maude_decom.get_aca_images()``. The images are 8x8
+    and are centered in the 8x8 image, with masks indicating empty pixels (for 4x4 or
+    6x6 data).
+
+    The units of the image data are DN, so you need to multiply by 5.0 / INTEG to get
+    e-/sec.
+
+    This function returns additional columns that are not in the maude images:
+    - 'HD3TLM*': uint8, raw 8-bit header-3 telemetry values
+    - 'IMGSIZE': int32, 4, 6, or 8
+
+    Parameters
+    ----------
+    start : CxoTimeLike
+        Start time of the time range.
+    stop : CxoTimeLike
+        Stop time of the time range.
+
+    Returns
+    -------
+    astropy.table.Table
+        ACA images and accompanying data.
+    """
+    slot_data_list = []
+    for slot in range(8):
+        slot_data_raw = get_slot_data(
+            start,
+            stop,
+            slot=slot,
+            centered_8x8=True,
+        )
+        # Convert from numpy structured array to astropy table, keeping only good data
+        ok = slot_data_raw["QUALITY"] == 0
+        slot_data = Table(slot_data_raw[ok])
+
+        # Add slot number if there are any rows (if statement not needed after
+        # https://github.com/astropy/astropy/pull/17102).
+        # if len(slot_data) > 0:
+        #     slot_data["IMGNUM"] = slot
+
+        for name in ["IMGFID", "IMGFUNC", "IMGNUM"]:
+            slot_data[name] = slot_data[f"{name}1"]
+            for ii in (1, 2, 3, 4):
+                name_ii = f"{name}{ii}"
+                if name_ii in slot_data.colnames:
+                    del slot_data[name_ii]
+
+        slot_data_list.append(slot_data)
+
+    # Combine all slot data into one output table and sort by time and slot
+    out = vstack(slot_data_list)
+    out.sort(keys=["TIME", "IMGNUM"])
+
+    # Rename and rejigger columns to match chandra_aca.maude_decom.get_aca_images
+    is_6x6 = out["IMGSIZE"] == 6
+    is_4x4 = out["IMGSIZE"] == 4
+    for rc in ["ROW", "COL"]:
+        # IMGROW/COL0 are row/col of lower/left of 4x4, 6x6, or 8x8 image
+        # Make these new columns:
+        #   IMGROW/COL_A1: row/col of pixel A1 of 4x4, 6x6, or 8x8 image
+        #   IMGROW/COL0_8x8: row/col of lower/left of 8x8 image with smaller img centered
+        out[f"IMG{rc}_A1"] = out[f"IMG{rc}0"]
+        out[f"IMG{rc}_A1"][is_6x6] += 1
+        out[f"IMG{rc}0_8X8"] = out[f"IMG{rc}0"]
+        out[f"IMG{rc}0_8X8"][is_6x6] -= 1
+        out[f"IMG{rc}0_8X8"][is_4x4] -= 2
+
+    out["IMGTYPE"] = np.full(shape=len(out), fill_value=4)  # 8x8
+    out["IMGTYPE"][is_6x6] = 1  # 6x6
+    out["IMGTYPE"][is_4x4] = 0  # 4x4
+
+    out.rename_column("IMGRAW", "IMG")
+    out.rename_column("BGDTYP", "AABGDTYP")
+    out.rename_column("PIXTLM", "AAPIXTLM")
+    del out["FILENAME"]
+    del out["QUALITY"]
+
+    # maude_decom.get_aca_images() provides both VCDUCTR (VCDU for each sub-image) and
+    # IMG_VCDUCTR (VCDU of first sub-image). For combined images, these are identical.
+    out["VCDUCTR"] = out["MJF"] * 128 + out["MNF"]
+    out["IMG_VCDUCTR"] = out["VCDUCTR"]
+
+    # Split uint8 GLBSTAT and IMGSTAT into individual bits. The stat_bit_names are
+    # in MSB order so we need to reverse them below.
+    for stat_name, stat_bit_names in (
+        ("GLBSTAT", GLBSTAT_NAMES),
+        ("IMGSTAT", IMGSTAT_NAMES),
+    ):
+        for bit, name in zip(range(8), reversed(stat_bit_names)):
+            out[name] = (out[stat_name] & (1 << bit)) > 0
+
+    # Extract fields from the mica L0 8-bit COMMCNT value which is really three MSIDs.
+    # We need to use .data attribute of MaskedColumn to get a numpy masked array. The
+    # unpackbits function fails on MaskedColumn because it looks for a _mask attribute.
+    bits = np.unpackbits(out["COMMCNT"].data).reshape(-1, 8)
+    bits_rev = bits[:, ::-1]  # MSB is on the right (index=7)
+    out["COMMCNT"] = _extract_bits_as_uint8(bits_rev, 2, 8)
+    out["COMMCNT_CHECKSUM_FAIL"] = bits_rev[:, 0] > 0
+    out["COMMCNT_SYNTAX_ERROR"] = bits_rev[:, 1] > 0
+
+    # Extract fields from the mica L0 8-bit COMMPROG value which is really two MSIDs
+    bits = np.unpackbits(out["COMMPROG"].data).reshape(-1, 8)
+    bits_rev = bits[:, ::-1]
+    out["COMMPROG"] = _extract_bits_as_uint8(bits_rev, 2, 8)
+    out["COMMPROG_REPEAT"] = _extract_bits_as_uint8(bits_rev, 0, 2)
+
+    return out
+
+
 def get_slot_data(
     start,
     stop,
@@ -218,6 +373,10 @@ def get_slot_data(
                 continue
             if fname in chunk.dtype.names:
                 all_rows[fname][idx0:idx1] = chunk[fname]
+            elif fname == "PIXTLM":
+                all_rows[fname][idx0:idx1] = "ORIG"
+            elif fname == "BGDTYP":
+                all_rows[fname][idx0:idx1] = "FLAT"
         if "IMGSIZE" in columns:
             all_rows["IMGSIZE"][idx0:idx1] = f_imgsize
         if "FILENAME" in columns:
diff --git a/mica/archive/tests/test_aca_l0.py b/mica/archive/tests/test_aca_l0.py
index 0f6f5186..731a894b 100644
--- a/mica/archive/tests/test_aca_l0.py
+++ b/mica/archive/tests/test_aca_l0.py
@@ -3,10 +3,13 @@
 import os

+import astropy.units as u
 import numpy as np
 import pytest
 from astropy.table import Table
-from Ska.Numpy import interpolate
+from chandra_aca import maude_decom
+from cxotime import CxoTime
+from ska_numpy import interpolate

 from mica.archive import aca_l0, asp_l1

@@ -32,12 +35,73 @@ def test_l0_images_meta():
     }


+@pytest.mark.skipif(not has_l0_2012_archive, reason="Test requires 2012 L0 archive")
+def test_get_aca_images():
+    """
+    Confirm mica and maude image sources agree for a small time range.
+    """
+    # Includes 4x4, 6x6, 8x8 images, commanding, NPNT, NMAN, and fids
+    start = CxoTime("2012:270:02:25:00")
+    stop = CxoTime("2012:270:02:46:00")
+    imgs_maude = maude_decom.get_aca_images(start, stop)
+    imgs_maude.sort(keys=["TIME", "IMGNUM"])
+    imgs_mica = aca_l0.get_aca_images(start, stop)
+
+    assert len(imgs_maude) == len(imgs_mica)
+
+    # Test data set includes commanding, searches, and all data types
+    assert set(imgs_mica["IMGSIZE"]) == {4, 6, 8}
+    assert set(imgs_mica["COMMPROG"]) == {0, 1, 3, 5, 7, 9, 11, 13, 15, 19, 21, 22}
+    assert set(imgs_mica["COMMCNT"]) == {0, 24, 13, 25}
+    assert set(imgs_mica["COMMPROG_REPEAT"]) == {0, 1}
+
+    for colname in imgs_maude.colnames:
+        if imgs_maude[colname].dtype.kind == "f":
+            assert np.allclose(
+                imgs_mica[colname], imgs_maude[colname], rtol=0, atol=1e-3
+            )
+        else:
+            assert np.all(imgs_mica[colname] == imgs_maude[colname])
+
+    assert set(imgs_mica.colnames) - set(imgs_maude.colnames) == {
+        "HD3TLM63",
+        "HD3TLM77",
+        "HD3TLM66",
+        "HD3TLM74",
+        "HD3TLM64",
+        "HD3TLM76",
+        "IMGSIZE",
+        "HD3TLM65",
+        "HD3TLM72",
+        "HD3TLM73",
+        "HD3TLM75",
+        "HD3TLM62",
+        "HD3TLM67",
+    }
+
+
+@pytest.mark.skipif(not has_l0_2012_archive, reason="Test requires 2012 L0 archive")
+def test_get_aca_images_empty():
+    """
+    Confirm that get_aca_images returns a zero-length table when no images are found
+    """
+    start = CxoTime("2012:270")
+    imgs_mica = aca_l0.get_aca_images(start, start)
+    imgs_maude = maude_decom.get_aca_images(start, start + 10 * u.s)
+
+    # zero-length table with the required columns to match maude_decom
+    assert len(imgs_mica) == 0
+    assert type(imgs_mica) is Table
+    assert set(imgs_maude.colnames).issubset(imgs_mica.colnames)
+
+
 has_l0_2007_archive = os.path.exists(os.path.join(aca_l0.CONFIG["data_root"], "2007"))
 has_asp_l1 = os.path.exists(os.path.join(asp_l1.CONFIG["data_root"]))


 @pytest.mark.skipif(
-    "not has_l0_2007_archive or not has_asp_l1", reason="Test requires 2007 L0 archive"
+    "not has_l0_2007_archive or not has_asp_l1",
+    reason="Test requires 2007 L0 archive and L1 aspect archive",
 )
 def test_get_l0_images():
     """
@@ -87,7 +151,8 @@


 @pytest.mark.skipif(
-    "not has_l0_2007_archive or not has_asp_l1", reason="Test requires 2007 L0 archive"
+    "not has_l0_2007_archive or not has_asp_l1",
+    reason="Test requires 2007 L0 archive and L1 aspect archive",
 )
 def test_get_slot_data_8x8():
     """
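As a usage sketch for the new interface (not part of the patch): pull images over a time
range and convert the image data from DN to e-/sec with the 5.0 / INTEG scaling noted in
the docstring. The time range is illustrative and matches test_get_aca_images above;
running this requires a local mica L0 archive covering that range.

    from mica.archive import aca_l0

    # Any CxoTime-compatible start/stop works; this range matches test_get_aca_images.
    imgs = aca_l0.get_aca_images("2012:270:02:25:00", "2012:270:02:46:00")

    # Each row carries an 8x8 masked image in DN plus metadata columns.
    row = imgs[0]
    img_e_per_sec = row["IMG"] * 5.0 / row["INTEG"]  # DN -> e-/sec per the docstring
    print(row["TIME"], row["IMGNUM"], row["IMGSIZE"], img_e_per_sec.sum())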