Coverage for python/lsst/pipe/tasks/finalizeCharacterization.py: 15% (255 statements)

#
# LSST Data Management System
# Copyright 2008-2022 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
"""Task to run a finalized image characterization, using additional data.
"""
import numpy as np
import esutil
import pandas as pd

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.daf.base as dafBase
import lsst.afw.table as afwTable
import lsst.meas.algorithms as measAlg
import lsst.meas.extensions.piff.piffPsfDeterminer  # noqa: F401
from lsst.meas.algorithms import MeasureApCorrTask
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask
from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry

from .reserveIsolatedStars import ReserveIsolatedStarsTask

__all__ = ['FinalizeCharacterizationConnections',
           'FinalizeCharacterizationConfig',
           'FinalizeCharacterizationTask']


class FinalizeCharacterizationConnections(pipeBase.PipelineTaskConnections,
                                          dimensions=('instrument', 'visit',),
                                          defaultTemplates={}):
    src_schema = pipeBase.connectionTypes.InitInput(
        doc='Input schema used for src catalogs.',
        name='src_schema',
        storageClass='SourceCatalog',
    )
    srcs = pipeBase.connectionTypes.Input(
        doc='Source catalogs for the visit',
        name='src',
        storageClass='SourceCatalog',
        dimensions=('instrument', 'visit', 'detector'),
        deferLoad=True,
        multiple=True,
    )
    calexps = pipeBase.connectionTypes.Input(
        doc='Calexps for the visit',
        name='calexp',
        storageClass='ExposureF',
        dimensions=('instrument', 'visit', 'detector'),
        deferLoad=True,
        multiple=True,
    )
    isolated_star_cats = pipeBase.connectionTypes.Input(
        doc=('Catalog of isolated stars with average positions, number of associated '
             'sources, and indexes to the isolated_star_sources catalogs.'),
        name='isolated_star_cat',
        storageClass='DataFrame',
        dimensions=('instrument', 'tract', 'skymap'),
        deferLoad=True,
        multiple=True,
    )
    isolated_star_sources = pipeBase.connectionTypes.Input(
        doc=('Catalog of isolated star sources with sourceIds, and indexes to the '
             'isolated_star_cats catalogs.'),
        name='isolated_star_sources',
        storageClass='DataFrame',
        dimensions=('instrument', 'tract', 'skymap'),
        deferLoad=True,
        multiple=True,
    )
    finalized_psf_ap_corr_cat = pipeBase.connectionTypes.Output(
        doc=('Per-visit finalized psf models and aperture corrections. This '
             'catalog uses the detector id for the catalog id and is sorted '
             'for fast lookups of a detector.'),
        name='finalized_psf_ap_corr_catalog',
        storageClass='ExposureCatalog',
        dimensions=('instrument', 'visit'),
    )
    finalized_src_table = pipeBase.connectionTypes.Output(
        doc=('Per-visit catalog of measurements for psf/flag/etc.'),
        name='finalized_src_table',
        storageClass='DataFrame',
        dimensions=('instrument', 'visit'),
    )


class FinalizeCharacterizationConfig(pipeBase.PipelineTaskConfig,
                                     pipelineConnections=FinalizeCharacterizationConnections):
    """Configuration for FinalizeCharacterizationTask."""
    source_selector = sourceSelectorRegistry.makeField(
        doc="How to select sources",
        default="science"
    )
    id_column = pexConfig.Field(
        doc='Name of column in isolated_star_sources with source id.',
        dtype=str,
        default='sourceId',
    )
    reserve_selection = pexConfig.ConfigurableField(
        target=ReserveIsolatedStarsTask,
        doc='Task to select reserved stars',
    )
    make_psf_candidates = pexConfig.ConfigurableField(
        target=measAlg.MakePsfCandidatesTask,
        doc='Task to make psf candidates from selected stars.',
    )
    psf_determiner = measAlg.psfDeterminerRegistry.makeField(
        'PSF Determination algorithm',
        default='piff'
    )
    measurement = pexConfig.ConfigurableField(
        target=SingleFrameMeasurementTask,
        doc='Measure sources for aperture corrections'
    )
    measure_ap_corr = pexConfig.ConfigurableField(
        target=MeasureApCorrTask,
        doc="Subtask to measure aperture corrections"
    )
    apply_ap_corr = pexConfig.ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )

    def setDefaults(self):
        super().setDefaults()

        source_selector = self.source_selector['science']
        source_selector.setDefaults()

        # We use the source selector only to select out flagged objects
        # and signal-to-noise. Isolated, unresolved sources are handled
        # by the isolated star catalog.

        source_selector.doFlags = True
        source_selector.doSignalToNoise = True
        source_selector.doFluxLimit = False
        source_selector.doUnresolved = False
        source_selector.doIsolated = False

        source_selector.signalToNoise.minimum = 20.0
        source_selector.signalToNoise.maximum = 1000.0

        source_selector.signalToNoise.fluxField = 'base_GaussianFlux_instFlux'
        source_selector.signalToNoise.errField = 'base_GaussianFlux_instFluxErr'

        source_selector.flags.bad = ['base_PixelFlags_flag_edge',
                                     'base_PixelFlags_flag_interpolatedCenter',
                                     'base_PixelFlags_flag_saturatedCenter',
                                     'base_PixelFlags_flag_crCenter',
                                     'base_PixelFlags_flag_bad',
                                     'base_PixelFlags_flag_interpolated',
                                     'base_PixelFlags_flag_saturated',
                                     'slot_Centroid_flag',
                                     'base_GaussianFlux_flag']

        # Configure aperture correction to select only high s/n sources (that
        # were used in the psf modeling) to avoid background problems when
        # computing the aperture correction map.
        self.measure_ap_corr.sourceSelector = 'science'

        ap_selector = self.measure_ap_corr.sourceSelector['science']
        ap_selector.doFluxLimit = False
        ap_selector.doFlags = True
        ap_selector.doUnresolved = False
        ap_selector.doSignalToNoise = True
        ap_selector.doIsolated = False
        ap_selector.flags.good = ['calib_psf_used']
        ap_selector.flags.bad = []
        ap_selector.signalToNoise.minimum = 200.0
        ap_selector.signalToNoise.maximum = None
        ap_selector.signalToNoise.fluxField = 'base_PsfFlux_instFlux'
        ap_selector.signalToNoise.errField = 'base_PsfFlux_instFluxErr'

        # Importing these measurement extensions registers their plugins
        # so they can be enabled by name below.
        import lsst.meas.modelfit  # noqa: F401
        import lsst.meas.extensions.photometryKron  # noqa: F401
        import lsst.meas.extensions.convolved  # noqa: F401
        import lsst.meas.extensions.gaap  # noqa: F401
        import lsst.meas.extensions.shapeHSM  # noqa: F401

        # Set up measurement defaults
        self.measurement.plugins.names = [
            'base_PsfFlux',
            'base_GaussianFlux',
            'modelfit_DoubleShapeletPsfApprox',
            'modelfit_CModel',
            'ext_photometryKron_KronFlux',
            'ext_convolved_ConvolvedFlux',
            'ext_gaap_GaapFlux',
            'ext_shapeHSM_HsmShapeRegauss',
            'ext_shapeHSM_HsmSourceMoments',
            'ext_shapeHSM_HsmPsfMoments',
            'ext_shapeHSM_HsmSourceMomentsRound',
        ]
        self.measurement.slots.modelFlux = 'modelfit_CModel'
        self.measurement.plugins['ext_convolved_ConvolvedFlux'].seeing.append(8.0)
        self.measurement.plugins['ext_gaap_GaapFlux'].sigmas = [
            0.5,
            0.7,
            1.0,
            1.5,
            2.5,
            3.0
        ]
        self.measurement.plugins['ext_gaap_GaapFlux'].doPsfPhotometry = True
        self.measurement.slots.shape = 'ext_shapeHSM_HsmSourceMoments'
        self.measurement.slots.psfShape = 'ext_shapeHSM_HsmPsfMoments'
        self.measurement.plugins['ext_shapeHSM_HsmShapeRegauss'].deblendNChild = ""
        # Turn off slot setting for measurement for centroid and shape
        # (for which we use the input src catalog measurements)
        self.measurement.slots.centroid = None
        self.measurement.slots.apFlux = None
        self.measurement.slots.calibFlux = None

        # The convolved and GAaP plugins produce many result fields; allow
        # the aperture correction measurement to fail for any of them.
        names = self.measurement.plugins['ext_convolved_ConvolvedFlux'].getAllResultNames()
        self.measure_ap_corr.allowFailure += names
        names = self.measurement.plugins["ext_gaap_GaapFlux"].getAllGaapResultNames()
        self.measure_ap_corr.allowFailure += names


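# A minimal config-override sketch for a pipeline or pipetask config file
# (values are illustrative assumptions, not recommended settings; only
# fields defined above are used):
#
#     config.psf_determiner = 'piff'
#     config.source_selector['science'].signalToNoise.minimum = 50.0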
class FinalizeCharacterizationTask(pipeBase.PipelineTask):
    """Run final characterization on exposures."""
    ConfigClass = FinalizeCharacterizationConfig
    _DefaultName = 'finalize_characterization'

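    # A minimal standalone-usage sketch (normally this task is run by the
    # pipeline middleware via runQuantum; the dicts below are assumptions
    # standing in for butler deferred-load handles keyed as in runQuantum):
    #
    #     task = FinalizeCharacterizationTask(
    #         initInputs={'src_schema': src_schema_catalog})
    #     struct = task.run(visit, band,
    #                       isolated_star_cat_dict, isolated_star_source_dict,
    #                       src_dict, calexp_dict)
    #     psf_ap_corr_cat = struct.psf_ap_corr_cat
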
    def __init__(self, initInputs=None, **kwargs):
        super().__init__(initInputs=initInputs, **kwargs)

        self.schema_mapper, self.schema = self._make_output_schema_mapper(
            initInputs['src_schema'].schema
        )

        self.makeSubtask('reserve_selection')
        self.makeSubtask('source_selector')
        self.makeSubtask('make_psf_candidates')
        self.makeSubtask('psf_determiner')
        self.makeSubtask('measurement', schema=self.schema)
        self.makeSubtask('measure_ap_corr', schema=self.schema)
        self.makeSubtask('apply_ap_corr', schema=self.schema)

        # Only log warning and fatal errors from the source_selector
        self.source_selector.log.setLevel(self.source_selector.log.WARN)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        input_handle_dict = butlerQC.get(inputRefs)

        band = butlerQC.quantum.dataId['band']
        visit = butlerQC.quantum.dataId['visit']

        src_dict_temp = {handle.dataId['detector']: handle
                         for handle in input_handle_dict['srcs']}
        calexp_dict_temp = {handle.dataId['detector']: handle
                            for handle in input_handle_dict['calexps']}
        isolated_star_cat_dict_temp = {handle.dataId['tract']: handle
                                       for handle in input_handle_dict['isolated_star_cats']}
        isolated_star_source_dict_temp = {handle.dataId['tract']: handle
                                          for handle in input_handle_dict['isolated_star_sources']}
        # TODO: Sort until DM-31701 is done and we have deterministic
        # dataset ordering.
        src_dict = {detector: src_dict_temp[detector] for
                    detector in sorted(src_dict_temp.keys())}
        calexp_dict = {detector: calexp_dict_temp[detector] for
                       detector in sorted(calexp_dict_temp.keys())}
        isolated_star_cat_dict = {tract: isolated_star_cat_dict_temp[tract] for
                                  tract in sorted(isolated_star_cat_dict_temp.keys())}
        isolated_star_source_dict = {tract: isolated_star_source_dict_temp[tract] for
                                     tract in sorted(isolated_star_source_dict_temp.keys())}

        struct = self.run(visit,
                          band,
                          isolated_star_cat_dict,
                          isolated_star_source_dict,
                          src_dict,
                          calexp_dict)

        butlerQC.put(struct.psf_ap_corr_cat,
                     outputRefs.finalized_psf_ap_corr_cat)
        butlerQC.put(pd.DataFrame(struct.output_table),
                     outputRefs.finalized_src_table)

    def run(self, visit, band, isolated_star_cat_dict, isolated_star_source_dict, src_dict, calexp_dict):
        """
        Run the FinalizeCharacterizationTask.

        Parameters
        ----------
        visit : `int`
            Visit number. Used in the output catalogs.
        band : `str`
            Band name. Used to select reserved stars.
        isolated_star_cat_dict : `dict`
            Per-tract dict of isolated star catalog handles.
        isolated_star_source_dict : `dict`
            Per-tract dict of isolated star source catalog handles.
        src_dict : `dict`
            Per-detector dict of src catalog handles.
        calexp_dict : `dict`
            Per-detector dict of calibrated exposure handles.

        Returns
        -------
        struct : `lsst.pipe.base.Struct`
            Struct with outputs for persistence.
        """
        # We do not need the isolated star table in this task.
        # However, it is used in tests to confirm consistency of indexes.
        _, isolated_source_table = self.concat_isolated_star_cats(
            band,
            isolated_star_cat_dict,
            isolated_star_source_dict
        )

        exposure_cat_schema = afwTable.ExposureTable.makeMinimalSchema()
        exposure_cat_schema.addField('visit', type='L', doc='Visit number')

        metadata = dafBase.PropertyList()
        metadata.add("COMMENT", "Catalog id is detector id, sorted.")
        metadata.add("COMMENT", "Only detectors with data have entries.")

        psf_ap_corr_cat = afwTable.ExposureCatalog(exposure_cat_schema)
        psf_ap_corr_cat.setMetadata(metadata)

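        # Sketch of how downstream code might look up a per-detector record
        # in this sorted output catalog (illustrative only; ``detector_id``
        # is a placeholder):
        #
        #     record = psf_ap_corr_cat.find(detector_id)
        #     psf, ap_corr_map = record.getPsf(), record.getApCorrMap()
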
        measured_src_tables = []

        for detector in src_dict:
            src = src_dict[detector].get()
            exposure = calexp_dict[detector].get()

            psf, ap_corr_map, measured_src = self.compute_psf_and_ap_corr_map(
                visit,
                detector,
                exposure,
                src,
                isolated_source_table
            )

            # And now we package it together...
            record = psf_ap_corr_cat.addNew()
            record['id'] = int(detector)
            record['visit'] = visit
            if psf is not None:
                record.setPsf(psf)
            if ap_corr_map is not None:
                record.setApCorrMap(ap_corr_map)

            measured_src['visit'][:] = visit
            measured_src['detector'][:] = detector

            measured_src_tables.append(measured_src.asAstropy().as_array())

        measured_src_table = np.concatenate(measured_src_tables)

        return pipeBase.Struct(psf_ap_corr_cat=psf_ap_corr_cat,
                               output_table=measured_src_table)

    def _make_output_schema_mapper(self, input_schema):
        """Make the schema mapper from the input schema to the output schema.

        Parameters
        ----------
        input_schema : `lsst.afw.table.Schema`
            Input schema.

        Returns
        -------
        mapper : `lsst.afw.table.SchemaMapper`
            Schema mapper
        output_schema : `lsst.afw.table.Schema`
            Output schema (with alias map)
        """
        mapper = afwTable.SchemaMapper(input_schema)
        mapper.addMinimalSchema(afwTable.SourceTable.makeMinimalSchema())
        mapper.addMapping(input_schema['slot_Centroid_x'].asKey())
        mapper.addMapping(input_schema['slot_Centroid_y'].asKey())

        # The aperture fields may be used by the psf determiner.
        aper_fields = input_schema.extract('base_CircularApertureFlux_*')
        for field, item in aper_fields.items():
            mapper.addMapping(item.key)

        # The following two may be redundant, but then the mapping is a no-op.
        apflux_fields = input_schema.extract('slot_ApFlux_*')
        for field, item in apflux_fields.items():
            mapper.addMapping(item.key)

        calibflux_fields = input_schema.extract('slot_CalibFlux_*')
        for field, item in calibflux_fields.items():
            mapper.addMapping(item.key)

        mapper.addMapping(
            input_schema[self.config.source_selector.active.signalToNoise.fluxField].asKey(),
            'calib_psf_selection_flux')
        mapper.addMapping(
            input_schema[self.config.source_selector.active.signalToNoise.errField].asKey(),
            'calib_psf_selection_flux_err')

        output_schema = mapper.getOutputSchema()

        output_schema.addField(
            'calib_psf_candidate',
            type='Flag',
            doc=('set if the source was a candidate for PSF determination, '
                 'as determined from FinalizeCharacterizationTask.'),
        )
        output_schema.addField(
            'calib_psf_reserved',
            type='Flag',
            doc=('set if source was reserved from PSF determination by '
                 'FinalizeCharacterizationTask.'),
        )
        output_schema.addField(
            'calib_psf_used',
            type='Flag',
            doc=('set if source was used in the PSF determination by '
                 'FinalizeCharacterizationTask.'),
        )
        output_schema.addField(
            'visit',
            type=np.int64,
            doc='Visit number for the sources.',
        )
        output_schema.addField(
            'detector',
            type=np.int32,
            doc='Detector number for the sources.',
        )

        alias_map = input_schema.getAliasMap()
        alias_map_output = afwTable.AliasMap()
        alias_map_output.set('slot_Centroid', alias_map.get('slot_Centroid'))
        alias_map_output.set('slot_ApFlux', alias_map.get('slot_ApFlux'))
        alias_map_output.set('slot_CalibFlux', alias_map.get('slot_CalibFlux'))

        output_schema.setAliasMap(alias_map_output)

        return mapper, output_schema

    def _make_selection_schema_mapper(self, input_schema):
        """Make the schema mapper from the input schema to the selection schema.

        Parameters
        ----------
        input_schema : `lsst.afw.table.Schema`
            Input schema.

        Returns
        -------
        mapper : `lsst.afw.table.SchemaMapper`
            Schema mapper
        selection_schema : `lsst.afw.table.Schema`
            Selection schema (with alias map)
        """
        mapper = afwTable.SchemaMapper(input_schema)
        mapper.addMinimalSchema(input_schema)

        selection_schema = mapper.getOutputSchema()

        selection_schema.setAliasMap(input_schema.getAliasMap())

        return mapper, selection_schema

    def concat_isolated_star_cats(self, band, isolated_star_cat_dict, isolated_star_source_dict):
        """
        Concatenate isolated star catalogs and make reserve selection.

        Parameters
        ----------
        band : `str`
            Band name. Used to select reserved stars.
        isolated_star_cat_dict : `dict`
            Per-tract dict of isolated star catalog handles.
        isolated_star_source_dict : `dict`
            Per-tract dict of isolated star source catalog handles.

        Returns
        -------
        isolated_table : `np.ndarray` (N,)
            Table of isolated stars, with indexes to isolated sources.
        isolated_source_table : `np.ndarray` (M,)
            Table of isolated sources, with indexes to isolated stars.
        """
        isolated_tables = []
        isolated_sources = []
        merge_cat_counter = 0
        merge_source_counter = 0

        for tract in isolated_star_cat_dict:
            df_cat = isolated_star_cat_dict[tract].get()
            table_cat = df_cat.to_records()

            df_source = isolated_star_source_dict[tract].get(
                parameters={'columns': [self.config.id_column,
                                        'obj_index']}
            )
            table_source = df_source.to_records()

            # Cut isolated star table to those observed in this band, and adjust indexes.
            (use_band,) = (table_cat[f'nsource_{band}'] > 0).nonzero()

            if len(use_band) == 0:
                # There are no sources in this band in this tract.
                self.log.info("No sources found in %s band in tract %d.", band, tract)
                continue

            # With the following matching:
            #   table_source[b] <-> table_cat[use_band[a]]
            obj_index = table_source['obj_index'][:]
            a, b = esutil.numpy_util.match(use_band, obj_index)

            # Update indexes and cut to band-selected stars/sources
            table_source['obj_index'][b] = a
            _, index_new = np.unique(a, return_index=True)
            table_cat[f'source_cat_index_{band}'][use_band] = index_new

            # After the following cuts, the catalogs have the following properties:
            # - table_cat only contains isolated stars that have at least one source
            #   in ``band``.
            # - table_source only contains ``band`` sources.
            # - The slice [table_cat["source_cat_index_{band}"]:
            #              table_cat["source_cat_index_{band}"] + table_cat["nsource_{band}"]]
            #   applied to table_source will give all the sources associated with the star.
            # - For each source, table_source["obj_index"] points to the index of the associated
            #   isolated star.
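            # For example (a sketch; ``star_index`` is a placeholder for a
            # row index into table_cat after these cuts):
            #
            #     start = table_cat[f'source_cat_index_{band}'][star_index]
            #     stop = start + table_cat[f'nsource_{band}'][star_index]
            #     star_sources = table_source[start:stop]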
            table_source = table_source[b]
            table_cat = table_cat[use_band]

            # Add reserved flag column to tables
            table_cat = np.lib.recfunctions.append_fields(
                table_cat,
                'reserved',
                np.zeros(table_cat.size, dtype=bool),
                usemask=False
            )
            table_source = np.lib.recfunctions.append_fields(
                table_source,
                'reserved',
                np.zeros(table_source.size, dtype=bool),
                usemask=False
            )

            # Get reserve star flags
            table_cat['reserved'][:] = self.reserve_selection.run(
                len(table_cat),
                extra=f'{band}_{tract}',
            )
            table_source['reserved'][:] = table_cat['reserved'][table_source['obj_index']]

            # Offset indexes to account for tract merging
            table_cat[f'source_cat_index_{band}'] += merge_source_counter
            table_source['obj_index'] += merge_cat_counter

            isolated_tables.append(table_cat)
            isolated_sources.append(table_source)

            merge_cat_counter += len(table_cat)
            merge_source_counter += len(table_source)

        isolated_table = np.concatenate(isolated_tables)
        isolated_source_table = np.concatenate(isolated_sources)

        return isolated_table, isolated_source_table

    def compute_psf_and_ap_corr_map(self, visit, detector, exposure, src, isolated_source_table):
        """Compute psf model and aperture correction map for a single exposure.

        Parameters
        ----------
        visit : `int`
            Visit number (for logging).
        detector : `int`
            Detector number (for logging).
        exposure : `lsst.afw.image.ExposureF`
            Exposure to model; the fitted psf is also set on this exposure.
        src : `lsst.afw.table.SourceCatalog`
            Input source catalog for the detector.
        isolated_source_table : `np.ndarray`
            Table of isolated sources (with reserved flags) for this visit.

        Returns
        -------
        psf : `lsst.meas.algorithms.ImagePsf`
            PSF Model
        ap_corr_map : `lsst.afw.image.ApCorrMap`
            Aperture correction map.
        measured_src : `lsst.afw.table.SourceCatalog`
            Updated source catalog with measurements, flags and aperture corrections.
        """
        # Apply source selector (s/n, flags, etc.)
        good_src = self.source_selector.selectSources(src)

        # Cut down input src to the selected sources
        # We use a separate schema/mapper here than for the output/measurement catalog because of
        # clashes between fields that were previously run and those that need to be rerun with
        # the new psf model. This may be slightly inefficient but keeps input
        # and output values cleanly separated.
        selection_mapper, selection_schema = self._make_selection_schema_mapper(src.schema)

        selected_src = afwTable.SourceCatalog(selection_schema)
        selected_src.reserve(good_src.selected.sum())
        selected_src.extend(src[good_src.selected], mapper=selection_mapper)

        # The calib flags have been copied from the input table,
        # and we reset them here just to ensure they aren't propagated.
        selected_src['calib_psf_candidate'] = np.zeros(len(selected_src), dtype=bool)
        selected_src['calib_psf_used'] = np.zeros(len(selected_src), dtype=bool)
        selected_src['calib_psf_reserved'] = np.zeros(len(selected_src), dtype=bool)

        # Find the isolated sources and set flags
        matched_src, matched_iso = esutil.numpy_util.match(
            selected_src['id'],
            isolated_source_table[self.config.id_column]
        )

        matched_arr = np.zeros(len(selected_src), dtype=bool)
        matched_arr[matched_src] = True
        selected_src['calib_psf_candidate'] = matched_arr

        reserved_arr = np.zeros(len(selected_src), dtype=bool)
        reserved_arr[matched_src] = isolated_source_table['reserved'][matched_iso]
        selected_src['calib_psf_reserved'] = reserved_arr

        selected_src = selected_src[selected_src['calib_psf_candidate']].copy(deep=True)

        # Make the measured source catalog as well, based on the selected catalog.
        measured_src = afwTable.SourceCatalog(self.schema)
        measured_src.reserve(len(selected_src))
        measured_src.extend(selected_src, mapper=self.schema_mapper)

        # We need to copy over the calib_psf flags because they were not in the mapper
        measured_src['calib_psf_candidate'] = selected_src['calib_psf_candidate']
        measured_src['calib_psf_reserved'] = selected_src['calib_psf_reserved']

        # Select the psf candidates from the selection catalog
        try:
            psf_selection_result = self.make_psf_candidates.run(selected_src, exposure=exposure)
        except Exception as e:
            self.log.warning('Failed to make psf candidates for visit %d, detector %d: %s',
                             visit, detector, e)
            return None, None, measured_src

        psf_cand_cat = psf_selection_result.goodStarCat

        # Make list of psf candidates to send to the determiner
        # (omitting those marked as reserved)
        psf_determiner_list = [cand for cand, use
                               in zip(psf_selection_result.psfCandidates,
                                      ~psf_cand_cat['calib_psf_reserved']) if use]
        flag_key = psf_cand_cat.schema['calib_psf_used'].asKey()
        try:
            psf, cell_set = self.psf_determiner.determinePsf(exposure,
                                                             psf_determiner_list,
                                                             self.metadata,
                                                             flagKey=flag_key)
        except Exception as e:
            self.log.warning('Failed to determine psf for visit %d, detector %d: %s',
                             visit, detector, e)
            return None, None, measured_src

        # Set the psf in the exposure for measurement/aperture corrections.
        exposure.setPsf(psf)

        # At this point, we need to transfer the psf used flag from the selection
        # catalog to the measurement catalog.
        matched_selected, matched_measured = esutil.numpy_util.match(
            selected_src['id'],
            measured_src['id']
        )
        measured_used = np.zeros(len(measured_src), dtype=bool)
        measured_used[matched_measured] = selected_src['calib_psf_used'][matched_selected]
        measured_src['calib_psf_used'] = measured_used

        # Next, we do the measurement on all the psf candidate, used, and reserved stars.
        try:
            self.measurement.run(measCat=measured_src, exposure=exposure)
        except Exception as e:
            self.log.warning('Failed to make measurements for visit %d, detector %d: %s',
                             visit, detector, e)
            return psf, None, measured_src

        # And finally the ap corr map.
        try:
            ap_corr_map = self.measure_ap_corr.run(exposure=exposure,
                                                   catalog=measured_src).apCorrMap
        except Exception as e:
            self.log.warning('Failed to compute aperture corrections for visit %d, detector %d: %s',
                             visit, detector, e)
            return psf, None, measured_src

        self.apply_ap_corr.run(catalog=measured_src, apCorrMap=ap_corr_map)

        return psf, ap_corr_map, measured_src