Skip to content

Commit

Permalink
Add setting of number_extended_events (#178)
Browse files Browse the repository at this point in the history
* Add setting of number_extended_events

This was not being set when in single processing mode.

* Update CHANGES.rst

* Update CHANGES.rst

* Update test_jump.py

---------

Co-authored-by: Howard Bushouse <bushouse@stsci.edu>
  • Loading branch information
mwregan2 and hbushouse committed Jul 7, 2023
1 parent e0c6f69 commit bebfad8
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 6 deletions.
13 changes: 12 additions & 1 deletion CHANGES.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
1.4.1 (2023-06-29)
1.4.2 (unreleased)
==================

Bug Fixes
Expand All @@ -7,6 +7,17 @@ Bug Fixes
jump
~~~~

- Added setting of number_extended_events for non-multiprocessing
mode. This is the value that is put into the header keyword EXTNCRS. [#178]

1.4.1 (2023-06-29)
==================

Bug Fixes
---------

jump
~~~~

- Added statement to prevent the number of cores used in multiprocessing from
being larger than the number of rows. This was causing some CI tests to fail. [#176]

Expand Down
6 changes: 5 additions & 1 deletion src/stcal/jump/jump.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,13 +262,15 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
only_use_ints=only_use_ints)
# This is the flag that controls the flagging of snowballs.
if expand_large_events:
flag_large_events(gdq, jump_flag, sat_flag, min_sat_area=min_sat_area,
total_snowballs = flag_large_events(gdq, jump_flag, sat_flag, min_sat_area=min_sat_area,
min_jump_area=min_jump_area,
expand_factor=expand_factor,
sat_required_snowball=sat_required_snowball,
min_sat_radius_extend=min_sat_radius_extend,
edge_size=edge_size, sat_expand=sat_expand,
max_extended_radius=max_extended_radius)
log.info('Total snowballs = %i' % total_snowballs)
number_extended_events = total_snowballs
if find_showers:
gdq, num_showers = find_faint_extended(data, gdq, readnoise_2d,
frames_per_group, minimum_sigclip_groups,
Expand All @@ -280,6 +282,8 @@ def detect_jumps(frames_per_group, data, gdq, pdq, err,
ellipse_expand=extend_ellipse_expand_ratio,
num_grps_masked=grps_masked_after_shower,
max_extended_radius=max_extended_radius)
log.info('Total showers= %i' % num_showers)
number_extended_events = num_showers
else:
yinc = int(n_rows / n_slices)
slices = []
Expand Down
14 changes: 10 additions & 4 deletions tests/test_jump.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ def test_find_faint_extended():
ellipse_expand=1.1, num_grps_masked=3)
# Check that all the expected samples in group 2 are flagged as jump and
# that they are not flagged outside
assert (num_showers == 3)
assert (np.all(gdq[0, 1, 22, 14:23] == 0))
assert (np.all(gdq[0, 1, 21, 16:20] == DQFLAGS['JUMP_DET']))
assert (np.all(gdq[0, 1, 20, 15:22] == DQFLAGS['JUMP_DET']))
Expand Down Expand Up @@ -210,6 +211,7 @@ def test_find_faint_extended_sigclip():
ellipse_expand=1.1, num_grps_masked=3)
# Check that all the expected samples in group 2 are flagged as jump and
# that they are not flagged outside
assert(num_showers == 0)
assert (np.all(gdq[0, 1, 22, 14:23] == 0))
assert (np.all(gdq[0, 1, 21, 16:20] == 0))
assert (np.all(gdq[0, 1, 20, 15:22] == 0))
Expand Down Expand Up @@ -265,10 +267,14 @@ def test_inputjumpall():
@pytest.mark.skip("Used for local testing")
def test_inputjump_sat_star():
testcube = fits.getdata('data/input_gdq_flarge.fits')
flag_large_events(testcube, DQFLAGS['JUMP_DET'], DQFLAGS['SATURATED'], min_sat_area=1,
min_jump_area=6,
expand_factor=2.0, use_ellipses=False,
sat_required_snowball=True, min_sat_radius_extend=2.5, sat_expand=2)
num_extended_events = flag_large_events(testcube, DQFLAGS['JUMP_DET'], DQFLAGS['SATURATED'],
min_sat_area=1,
min_jump_area=6,
expand_factor=2.0,
sat_required_snowball=True,
min_sat_radius_extend=2.5,
sat_expand=2)
assert(num_extended_events == 312)
fits.writeto("outgdq2.fits", testcube, overwrite=True)


Expand Down

0 comments on commit bebfad8

Please sign in to comment.