Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 33 additions & 12 deletions stixcore/io/FlareListManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from astropy.time import Time

from stixcore.config.config import CONFIG
from stixcore.io.product_processors.fits.processors import CreateUtcColumn
from stixcore.products.level3.flarelist import FlarelistSC, FlarelistSDC
from stixcore.products.product import Product
from stixcore.util.logging import get_logger
Expand Down Expand Up @@ -188,12 +189,17 @@ def get_data(self, *, start, end, fido_client):
data["flare_id"] = Column(
mt["flare_id"].astype(int), description=f"unique flare id for flarelist {self.flarelistname}"
)
data["start_UTC"] = Column(0, description="start time of flare")
data["start_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["start_UTC"]]
CreateUtcColumn(
data,
[Time(d, format="isot", scale="utc") for d in mt["start_UTC"]],
"start_UTC",
description="start time of flare",
)
Comment on lines +192 to +197
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

data.add_column(Column([Time(d, format="isot", scale="utc") for d in mt["start_UTC"]], name="start_UTC",description="start time of flare")

not sure this function is needed, to be honest — does it really do much?


data["duration"] = Column(mt["duration"].astype(float) * u.s, description="duration of flare")
data["end_UTC"] = Column(0, description="end time of flare")
data["end_UTC"] = CreateUtcColumn(description="end time of flare")
data["end_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["end_UTC"]]
data["peak_UTC"] = Column(0, description="flare peak time")
data["peak_UTC"] = CreateUtcColumn(description="flare peak time")
data["peak_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["peak_UTC"]]
data["att_in"] = Column(mt["att_in"].astype(bool), description="was attenuator in during flare")
data["bkg_baseline"] = Column(mt["LC0_BKG"] * u.ct, description="background baseline at 4-10 keV")
Expand Down Expand Up @@ -277,7 +283,7 @@ def get_data(self, *, start, end, fido_client):

data.add_index("flare_id")

# add energy axis for the lightcurve peek time data for each flare
# add energy axis for the lightcurve peak time data for each flare
# the energy bins are taken from the daily ql-lightcurve products
as the definition of the lc energy channels will change only very seldom
# the ql-lightcurve products assume a constant definition for an entire day.
Expand Down Expand Up @@ -439,13 +445,28 @@ def get_data(self, *, start, end, fido_client):
data["flare_id"] = Column(
mt["flare_id"].astype(int), description=f"unique flare id for flarelist {self.flarelistname}"
)
data["start_UTC"] = Column(0, description="start time of flare")
data["start_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["start_UTC"]]

CreateUtcColumn(
data,
[Time(d, format="isot", scale="utc") for d in mt["start_UTC"]],
"start_UTC",
description="start time of flare",
)
data["duration"] = Column(mt["duration"].astype(float) * u.s, description="duration of flare")
data["end_UTC"] = Column(0, description="end time of flare")
data["end_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["end_UTC"]]
data["peak_UTC"] = Column(0, description="flare peak time")
data["peak_UTC"] = [Time(d, format="isot", scale="utc") for d in mt["peak_UTC"]]

CreateUtcColumn(
data,
[Time(d, format="isot", scale="utc") for d in mt["end_UTC"]],
"end_UTC",
description="end time of flare",
)
CreateUtcColumn(
data,
[Time(d, format="isot", scale="utc") for d in mt["peak_UTC"]],
"peak_UTC",
description="flare peak time",
)

data["att_in"] = Column(mt["att_in"].astype(bool), description="was attenuator in during flare")
data["bkg_baseline"] = Column(mt["LC0_BKG"] * u.ct, description="background baseline at 4-10 keV")
data["GOES_class"] = Column(
Expand Down Expand Up @@ -528,7 +549,7 @@ def get_data(self, *, start, end, fido_client):

data.add_index("flare_id")

# add energy axis for the lightcurve peek time data for each flare
# add energy axis for the lightcurve peak time data for each flare
# the energy bins are taken from the daily ql-lightcurve products
as the definition of the lc energy channels will change only very seldom
# the ql-lightcurve products assume a constant definition for an entire day.
Expand Down
28 changes: 24 additions & 4 deletions stixcore/io/product_processors/fits/processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,24 @@ def set_bscale_unsigned(table_hdu):
return table_hdu


def CreateUtcColumn(table, data, colname, description="UTC Time"):
    """
    Set a UTC time column on a table, attaching a description for FITS export.

    Mutates *table* in place: assigns *data* under *colname* and sets the
    column's ``info.description`` so the description survives into the FITS
    header (TCOMM). Note this function returns ``None``; it does not build
    and return a standalone column object.

    Parameters
    ----------
    table : `astropy.table.Table`
        Table to add the column to (modified in place).
    data : sequence
        Column values, typically a list of `astropy.time.Time` in UTC scale.
    colname : `str`
        Name of the column to create or overwrite.
    description : `str`, optional
        Description for the column (default ``"UTC Time"``).
    """
    table[colname] = data
    table[colname].info.description = description


def add_default_tuint(table_hdu):
"""
Add a default empty string tunit if not already defined
Expand Down Expand Up @@ -805,10 +823,11 @@ def generate_primary_header(self, filename, product, *, version=0):
if default[0] not in soop_key_names:
soop_headers += tuple([default])

scet_range = product.scet_timerange
time_headers = (
# Name, Value, Comment
("OBT_BEG", product.scet_timerange.start.as_float().value, "Start of acquisition time in OBT"),
("OBT_END", product.scet_timerange.end.as_float().value, "End of acquisition time in OBT"),
("OBT_BEG", scet_range.start.as_float().value, "Start of acquisition time in OBT"),
("OBT_END", scet_range.end.as_float().value, "End of acquisition time in OBT"),
("TIMESYS", "UTC", "System used for time keywords"),
("LEVEL", "L1", "Processing level of the data"),
("DATE-OBS", product.utc_timerange.start.fits, "Start of acquisition time in UTC"),
Expand Down Expand Up @@ -1126,7 +1145,8 @@ def write_fits(self, prod, *, version=0):
elif fitspath_complete.exists():
logger.warning("Complete Fits file %s exists will be overridden", fitspath.name)

data = prod.data
data = prod.data.copy()
prod.on_serialize(data)

primary_header, header_override = self.generate_primary_header(filename, prod, version=version)

Expand All @@ -1139,7 +1159,7 @@ def write_fits(self, prod, *, version=0):
# Add comment and history
[primary_hdu.header.add_comment(com) for com in prod.comment]
[primary_hdu.header.add_history(com) for com in prod.history]
primary_hdu.header.update({"HISTORY": "Processed by STIXCore ANC"})
primary_hdu.header.update({"HISTORY": "Processed by STIXCore L3"})

if hasattr(prod, "maps") and len(prod.maps) > 0:
# fig = plt.figure(figsize=(12, 6))
Expand Down
15 changes: 8 additions & 7 deletions stixcore/processing/FLtoFL.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
)
from stixcore.products.level3.flarelist import (
FlareList,
FlarePeekPreviewMixin,
FlarePeakPreviewMixin,
FlarePositionMixin,
FlareSOOPMixin,
)
Expand Down Expand Up @@ -107,7 +107,7 @@ def test_for_processing(
"""
try:
c_header = fits.getheader(candidate)
f_data_end = datetime.fromisoformat(c_header["DATE-END"])
# f_data_end = datetime.fromisoformat(c_header["DATE-END"])
f_create_date = datetime.fromisoformat(c_header["DATE"])

cfn = get_complete_file_name_and_path(candidate)
Expand All @@ -128,8 +128,9 @@ def test_for_processing(
# safety margin of 1day until we process higher products with position and pointing
# only use flown spice kernels not predicted once as pointing information
# can be "very off"
if f_data_end > (Spice.instance.get_mk_date(meta_kernel_type="flown") - timedelta(hours=24)):
return TestForProcessingResult.NotSuitable
# TODO redo
# if f_data_end > (Spice.instance.get_mk_date(meta_kernel_type="flown") - timedelta(hours=24)):
# return TestForProcessingResult.NotSuitable

# safety margin of x until we start with processing the list files
if f_create_date >= (datetime.now() - self.cadence):
Expand Down Expand Up @@ -187,9 +188,9 @@ def process_fits_files(
if issubclass(out_product, FlareSOOPMixin) and not issubclass(in_product, FlareSOOPMixin):
out_product.add_soop(data)

# add peek preview images if not already present
if issubclass(out_product, FlarePeekPreviewMixin) and not issubclass(in_product, FlarePeekPreviewMixin):
out_product.add_peek_preview(data, energy, file_path.name, fido_client, img_processor, month=month)
# add peak preview images if not already present
if issubclass(out_product, FlarePeakPreviewMixin) and not issubclass(in_product, FlarePeakPreviewMixin):
out_product.add_peak_preview(data, energy, file_path.name, fido_client, img_processor, month=month)

out_prod = out_product(control=control, data=data, month=month, energy=energy)
out_prod.parent = file_path.name
Expand Down
2 changes: 1 addition & 1 deletion stixcore/processing/FlareListL3.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
class FlareListL3(SingleProductProcessingStepMixin):
"""Processing step from a FlareListManager to monthly solo_L3_stix-flarelist-*.fits file."""

STARTDATE = date(2024, 1, 1)
STARTDATE = date(2025, 7, 1)

def __init__(self, flm: FlareListManager, output_dir: Path):
"""Crates a new Processor.
Expand Down
106 changes: 50 additions & 56 deletions stixcore/processing/pipeline_daily.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,34 +9,27 @@

from stixcore.config.config import CONFIG
from stixcore.ephemeris.manager import Spice, SpiceKernelManager
from stixcore.io.FlareListManager import SDCFlareListManager
from stixcore.io.ProcessingHistoryStorage import ProcessingHistoryStorage
from stixcore.io.product_processors.fits.processors import ( # FitsANCProcessor,; FitsL3Processor,
FitsANCProcessor,
FitsL2Processor,
FitsL3Processor,
)
from stixcore.io.product_processors.plots.processors import PlotProcessor
from stixcore.io.RidLutManager import RidLutManager
from stixcore.processing.AspectANC import AspectANC
from stixcore.processing.FlareListL3 import FlareListL3
from stixcore.processing.FLtoFL import FLtoFL
from stixcore.processing.LL import LL03QL
from stixcore.processing.pipeline import PipelineStatus
from stixcore.processing.SingleStep import SingleProcessingStepResult
from stixcore.products.level1.quicklookL1 import LightCurve
from stixcore.products.level3.flarelist import FlarelistSDC, FlarelistSDCLoc
from stixcore.products.lowlatency.quicklookLL import LightCurveL3
from stixcore.soop.manager import SOOPManager
from stixcore.util.logging import STX_LOGGER_DATE_FORMAT, STX_LOGGER_FORMAT, get_logger

# from stixpy.net.client import STIXClient
# from stixcore.io.FlareListManager import SCFlareListManager, SDCFlareListManager
# from stixcore.processing.FlareListL3 import FlareListL3
# from stixcore.processing.FLtoFL import FLtoFL
# from stixcore.products.level3.flarelist import (
# FlarelistSC,
# FlarelistSCLoc,
# FlarelistSCLocImg,
# FlarelistSDC,
# FlarelistSDCLoc,
# FlarelistSDCLocImg,
# )

logger = get_logger(__name__)


Expand Down Expand Up @@ -215,8 +208,8 @@ def run_daily_pipeline(args):
# SCFlareListManager.instance = SCFlareListManager(flare_lut_file, fido_client, update=True)

# TODO reactivate once flarelist processing is finalized
# flare_lut_file = Path(CONFIG.get("Pipeline", "flareid_sdc_lut_file"))
# SDCFlareListManager.instance = SDCFlareListManager(flare_lut_file, update=False)
flare_lut_file = Path(CONFIG.get("Pipeline", "flareid_sdc_lut_file"))
SDCFlareListManager.instance = SDCFlareListManager(flare_lut_file, update=False)

RidLutManager.instance = RidLutManager(Path(CONFIG.get("Publish", "rid_lut_file")), update=False)

Expand Down Expand Up @@ -249,19 +242,19 @@ def run_daily_pipeline(args):
aspect_anc_processor = AspectANC(fits_in_dir, fits_out_dir)

# TODO reactivate once flarelist processing is finalized
# flarelist_sdc = FlareListL3(SDCFlareListManager.instance, fits_out_dir)
flarelist_sdc = FlareListL3(SDCFlareListManager.instance, fits_out_dir)
# flarelist_sc = FlareListL3(SCFlareListManager.instance, fits_out_dir)
# fl_to_fl = FLtoFL(
# fits_in_dir,
# fits_out_dir,
# products_in_out=[
# (FlarelistSDC, FlarelistSDCLoc),
# (FlarelistSDCLoc, FlarelistSDCLocImg),
# (FlarelistSC, FlarelistSCLoc),
# (FlarelistSCLoc, FlarelistSCLocImg),
# ],
# cadence=timedelta(seconds=1),
# )
fl_to_fl = FLtoFL(
fits_in_dir,
fits_out_dir,
products_in_out=[
(FlarelistSDC, FlarelistSDCLoc),
# (FlarelistSDCLoc, FlarelistSDCLocImg),
# (FlarelistSC, FlarelistSCLoc),
# (FlarelistSCLoc, FlarelistSCLocImg),
],
cadence=timedelta(seconds=1),
)

ll03ql = LL03QL(
fits_in_dir, fits_out_dir, in_product=LightCurve, out_product=LightCurveL3, cadence=timedelta(seconds=1)
Expand All @@ -270,24 +263,25 @@ def run_daily_pipeline(args):
plot_writer = PlotProcessor(fits_out_dir)
l2_fits_writer = FitsL2Processor(fits_out_dir)
# TODO reactivate once flarelist processing is finalized
# l3_fits_writer = FitsL3Processor(fits_out_dir)
# anc_fits_writer = FitsANCProcessor(fits_out_dir)
l3_fits_writer = FitsL3Processor(fits_out_dir)
anc_fits_writer = FitsANCProcessor(fits_out_dir)

hk_in_files = aspect_anc_processor.get_processing_files(phs)
# hk_in_files = aspect_anc_processor.get_processing_files(phs)
hk_in_files = []

ll_candidates = ll03ql.get_processing_files(phs)
# ll_candidates = []
# ll_candidates = ll03ql.get_processing_files(phs)
ll_candidates = []

# TODO reactivate once flarelist processing is finalized
# fl_sdc_months = flarelist_sdc.find_processing_months(phs)
# fl_sdc_months = []
fl_sdc_months = flarelist_sdc.find_processing_months(phs)
fl_sdc_months = []

# TODO reactivate once flarelist processing is finalized
# fl_sc_months = flarelist_sc.find_processing_months(phs)
# fl_sc_months = []

# TODO reactivate once flarelist processing is finalized
# fl_to_fl_files = fl_to_fl.get_processing_files(phs)
fl_to_fl_files = fl_to_fl.get_processing_files(phs)
# fl_to_fl_files = []

# all processing files should be terminated before the next step as the different
Expand All @@ -306,16 +300,16 @@ def run_daily_pipeline(args):
)
)
# TODO reactivate once flarelist processing is finalized
# jobs.append(
# executor.submit(
# flarelist_sdc.process_fits_files,
# fl_sdc_months,
# soopmanager=SOOPManager.instance,
# spice_kernel_path=Spice.instance.meta_kernel_path,
# processor=l3_fits_writer,
# config=CONFIG,
# )
# )
jobs.append(
executor.submit(
flarelist_sdc.process_fits_files,
fl_sdc_months,
soopmanager=SOOPManager.instance,
spice_kernel_path=Spice.instance.meta_kernel_path,
processor=l3_fits_writer,
config=CONFIG,
)
)

# jobs.append(
# executor.submit(
Expand All @@ -330,17 +324,17 @@ def run_daily_pipeline(args):

# # TODO a owen processing step for each flarelist file?
# # for fl_to_fl_file in fl_to_fl_files:
# jobs.append(
# executor.submit(
# fl_to_fl.process_fits_files,
# fl_to_fl_files,
# soopmanager=SOOPManager.instance,
# spice_kernel_path=Spice.instance.meta_kernel_path,
# fl_processor=anc_fits_writer,
# img_processor=l3_fits_writer,
# config=CONFIG,
# )
# )
jobs.append(
executor.submit(
fl_to_fl.process_fits_files,
fl_to_fl_files,
soopmanager=SOOPManager.instance,
spice_kernel_path=Spice.instance.meta_kernel_path,
fl_processor=anc_fits_writer,
img_processor=l3_fits_writer,
config=CONFIG,
)
)

jobs.append(
executor.submit(
Expand Down
1 change: 1 addition & 0 deletions stixcore/products/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@
from stixcore.products.level2.housekeepingL2 import *
from stixcore.products.level2.quicklookL2 import *
from stixcore.products.level2.scienceL2 import *
from stixcore.products.level3.flarelist import *
from stixcore.products.levelb.binary import LevelB
from stixcore.products.lowlatency.quicklookLL import *
Loading
Loading