Skip to content

Commit

Permalink
Merge branch 'main' into patch-1
Browse files Browse the repository at this point in the history
  • Loading branch information
s-goldman authored Nov 20, 2024
2 parents 04cac9f + c287ef0 commit 963553f
Show file tree
Hide file tree
Showing 9 changed files with 272 additions and 193 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci_cron.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,5 +28,5 @@ jobs:
- linux: py311-xdist
- macos: py311-xdist
- linux: py312-xdist
- linux: py3-devdeps-xdist
- macos: py3-devdeps-xdist
- linux: py312-devdeps-xdist
- macos: py312-devdeps-xdist
4 changes: 4 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ number of the code change for that issue. These PRs can be viewed at:

3.7.2 (unreleased)
==================
- Force an exit with a return code, KEYWORD_UPDATE_PROBLEM, in try/exception block
when invoking refine_product_headers in hapsequencer.py and hapmultisequencer.py.
If the FITS header keywords are not properly updated, this can cause errors during
CAOM ingest. [#1911]

- Introduce warnings for fits extensions with science data of all zeros, and ensure
data with zeros in all science extensions are not processed. [#998]
Expand Down
16 changes: 15 additions & 1 deletion doc/ADRs.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,18 @@ The second column of a poller file is now reserved (for WFPC2) for passing the a

## Consequences

Caution must be taken is using variations of the poller file while processing WFPC2 data.
Caution must be taken when using variations of the poller file while processing WFPC2 data.

# The Use of the TEAL Interface 11/14/23

## Context

The code can be run using the interactive GUI TEAL; however, the GUI is hard to maintain. TEAL is also used in the code to load the JSON parameter files.

## Decision

In order to make the code more easily maintainable, we will no longer support the use of the GUI for running tasks. However, TEAL will still be used in the background to load the parameter files, as there is no current alternative for parsing that data.

## Consequences

TEAL will need to be included in drizzlepac until a replacement for parsing the json files can be found.
403 changes: 225 additions & 178 deletions doc/source/mast_data_products/hap-parameters.rst

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions drizzlepac/hapmultisequencer.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
import numpy as np
import drizzlepac

from drizzlepac.haputils import analyze
from drizzlepac.haputils import cell_utils
from drizzlepac.haputils import config_utils
from drizzlepac.haputils import poller_utils
Expand Down Expand Up @@ -245,6 +246,9 @@ def create_drizzle_products(total_obj_list, custom_limits=None):
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_tb, file=sys.stdout)
logging.exception("message")
# When there is not enough disk space, there can be a problem updating the
# header keywords. This can cause problems for CAOM.
sys.exit(analyze.Ret_code.KEYWORD_UPDATE_PROBLEM.value)

# Remove rules files copied to the current working directory
for rules_filename in list(rules_files.values()):
Expand Down
7 changes: 6 additions & 1 deletion drizzlepac/hapsequencer.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@
from drizzlepac.haputils import diagnostic_utils
from drizzlepac.haputils import hla_flag_filter
from drizzlepac.haputils import poller_utils
from drizzlepac.haputils import analyze
from drizzlepac.haputils import product
from drizzlepac.haputils import processing_utils as proc_utils
from drizzlepac.haputils import svm_quality_analysis as svm_qa
Expand Down Expand Up @@ -456,6 +457,10 @@ def create_drizzle_products(total_obj_list):
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_tb, file=sys.stdout)
logging.exception("message")
# When there is not enough disk space, there can be a problem updating the
# header keywords. This can cause problems for CAOM.
sys.exit(analyze.Ret_code.KEYWORD_UPDATE_PROBLEM.value)

# Remove rules files copied to the current working directory
for rules_filename in list(rules_files.values()):
log.info("Removed rules file {}".format(rules_filename))
Expand Down Expand Up @@ -579,7 +584,7 @@ def run_hap_processing(input_filename, diagnostic_mode=False, input_custom_pars_
log.warning("")
log.warning("There are no viable direct images in any Total Data Product for this visit. No processing can be done.")
log.warning("No SVM processing is done for the Grism/Prism data - no SVM output products are generated.")
sys.exit(0)
sys.exit(analyze.Ret_code.NO_VIABLE_DATA.value)

# Update all of the product objects with their associated configuration information.
for total_item in total_obj_list:
Expand Down
1 change: 1 addition & 0 deletions drizzlepac/haputils/analyze.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ class Ret_code(Enum):
Define return status codes for Operations
"""
OK = 0
KEYWORD_UPDATE_PROBLEM = 15
SBCHRC_DATA = 55
NO_VIABLE_DATA = 65

Expand Down
3 changes: 1 addition & 2 deletions drizzlepac/haputils/catalog_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,14 +275,13 @@ def compute_background(self, box_size, win_size,
# this for SBC.
if (self.imghdu[0].header['DETECTOR'].upper() == "SBC"):
num_of_zeros = np.count_nonzero(imgdata[self.footprint_mask] == 0)
num_of_nonzeros = num_of_illuminated_pixels - num_of_zeros

# If there are too many background zeros in the image
# (> number_of_zeros_in_background_threshold), set the background median to
# zero and the background rms to the real rms of the non-zero values in the image.
if num_of_zeros / float(num_of_illuminated_pixels) * 100.0 > zero_percent:
self.bkg_median = 0.0
self.bkg_rms_median = stats.tstd(num_of_nonzeros, limits=[0, None], inclusive=[False, True])
self.bkg_rms_median = stats.tstd(imgdata[self.footprint_mask], limits=[0, None], inclusive=[False, True])
self.bkg_background_ra = np.full_like(imgdata, 0.0)
self.bkg_rms_ra = np.full_like(imgdata, self.bkg_rms_median)
self.bkg_type = 'zero_background'
Expand Down
23 changes: 14 additions & 9 deletions drizzlepac/staticMask.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
log = logutil.create_logger(__name__, level=logutil.logging.NOTSET)


#this is called by the user
# this is called by the user
def createMask(input=None, static_sig=4.0, group=None, editpars=False, configObj=None, **inputDict):
""" The user can input a list of images if they like to create static masks
as well as optional values for static_sig and inputDict.
Expand All @@ -53,7 +53,7 @@ def createMask(input=None, static_sig=4.0, group=None, editpars=False, configObj
if not editpars:
run(configObj)

#this is called by the TEAL interface
# this is called by the TEAL interface
def run(configObj):

#now we really just need the imageObject list created for the dataset
Expand All @@ -63,7 +63,7 @@ def run(configObj):
createStaticMask(imageObjList,configObj)


#this is the workhorse function called by MultiDrizzle
# this is the workhorse function called by MultiDrizzle
def createStaticMask(imageObjectList=[],configObj=None,procSteps=None):
if procSteps is not None:
procSteps.addStep('Static Mask')
Expand Down Expand Up @@ -167,7 +167,7 @@ def addMember(self, imagePtr=None):
if chips is None:
chips = imagePtr.getExtensions()

#for chip in range(1,numchips+1,1):
# for chip in range(1,numchips+1,1):
for chip in chips:
chipid=imagePtr.scienceExt + ','+ str(chip)
chipimage=imagePtr.getData(chipid)
Expand All @@ -186,8 +186,13 @@ def addMember(self, imagePtr=None):
maskname = self.masknames[s]
break
imagePtr[chipid].outputNames['staticMask'] = maskname

stats = ImageStats(chipimage,nclip=3,fields='mode')
stats = ImageStats(
chipimage,
nclip=3,
fields="mode",
lower=np.nanmin(chipimage),
upper=np.nanmax(chipimage),
)
mode = stats.mode
rms = stats.stddev
nbins = len(stats.histogram)
Expand Down Expand Up @@ -259,10 +264,10 @@ def saveToFile(self,imageObjectList):
virtual = imageObjectList[0].inmemory

for key in self.masklist.keys():
#check to see if the file already exists on disk
# check to see if the file already exists on disk
filename = self.masknames[key]
#create a new fits image with the mask array and a standard header
#open a new header and data unit
# create a new fits image with the mask array and a standard header
# open a new header and data unit
newHDU = fits.PrimaryHDU()
newHDU.data = self.masklist[key]

Expand Down

0 comments on commit 963553f

Please sign in to comment.