Coverage for python / lsst / pipe / tasks / isolatedStarAssociation.py: 14%

242 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-30 09:11 +0000

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

# Public API of this module.
__all__ = [
    "IsolatedStarAssociationConnections",
    "IsolatedStarAssociationConfig",
    "IsolatedStarAssociationTask",
]

25 

26import astropy.table 

27import esutil 

28import hpgeom as hpg 

29import numpy as np 

30from smatch.matcher import Matcher 

31 

32import lsst.pex.config as pexConfig 

33import lsst.pipe.base as pipeBase 

34from lsst.skymap import BaseSkyMap 

35from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry 

36 

37from .healSparseMapping import _is_power_of_two 

38 

39 

class IsolatedStarAssociationConnections(pipeBase.PipelineTaskConnections,
                                         dimensions=('instrument', 'tract', 'skymap',),
                                         defaultTemplates={}):
    """Connections for IsolatedStarAssociationTask.

    One quantum runs per (instrument, tract, skymap) and reads all
    overlapping per-visit source tables, writing two per-tract catalogs.
    """

    # Per-visit source tables.  deferLoad=True means runQuantum receives
    # DeferredDatasetHandles so only selected columns are read later.
    source_table_visit = pipeBase.connectionTypes.Input(
        doc='Source table in parquet format, per visit',
        name='preSourceTable_visit',
        storageClass='ArrowAstropy',
        dimensions=('instrument', 'visit'),
        deferLoad=True,
        multiple=True,
    )
    skymap = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for warped exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass='SkyMap',
        dimensions=('skymap',),
    )
    # Flat catalog of the individual per-visit sources that were matched
    # into isolated stars, sorted so each star's sources are contiguous.
    isolated_star_sources = pipeBase.connectionTypes.Output(
        doc='Catalog of individual sources for the isolated stars',
        name='isolated_star_presources',
        storageClass='ArrowAstropy',
        dimensions=('instrument', 'tract', 'skymap'),
    )
    # One row per isolated star, with indexes into isolated_star_sources.
    isolated_star_cat = pipeBase.connectionTypes.Output(
        doc='Catalog of isolated star positions',
        name='isolated_star_presource_associations',
        storageClass='ArrowAstropy',
        dimensions=('instrument', 'tract', 'skymap'),
    )

69 

70 

class IsolatedStarAssociationConfig(pipeBase.PipelineTaskConfig,
                                    pipelineConnections=IsolatedStarAssociationConnections):
    """Configuration for IsolatedStarAssociationTask."""

    inst_flux_field = pexConfig.Field(
        doc=('Full name of instFlux field to use for s/n selection and persistence. '
             'The associated flag will be implicity included in bad_flags. '
             'Note that this is expected to end in ``instFlux``.'),
        dtype=str,
        default='normCompTophatFlux_instFlux',
    )
    match_radius = pexConfig.Field(
        doc='Match radius (arcseconds)',
        dtype=float,
        default=1.0,
    )
    nside_split = pexConfig.Field(
        doc="HealPix nside to use for splitting tract for reduced memory. Must be power of 2.",
        dtype=int,
        default=32,
        check=_is_power_of_two,
    )
    isolation_radius = pexConfig.Field(
        doc=('Isolation radius (arcseconds). Any stars with average centroids '
             'within this radius of another star will be rejected from the final '
             'catalog. This radius should be at least 2x match_radius.'),
        dtype=float,
        default=2.0,
    )
    # The first band listed here that has data defines a star's "primary"
    # position; later bands only add stars not already matched.
    band_order = pexConfig.ListField(
        doc=(('Ordered list of bands to use for matching/storage. '
              'Any bands not listed will not be matched.')),
        dtype=str,
        default=['i', 'z', 'r', 'g', 'y', 'u'],
    )
    id_column = pexConfig.Field(
        doc='Name of column with source id.',
        dtype=str,
        default='sourceId',
    )
    ra_column = pexConfig.Field(
        doc='Name of column with right ascension.',
        dtype=str,
        default='ra',
    )
    dec_column = pexConfig.Field(
        doc='Name of column with declination.',
        dtype=str,
        default='dec',
    )
    physical_filter_column = pexConfig.Field(
        doc='Name of column with physical filter name',
        dtype=str,
        default='physical_filter',
    )
    band_column = pexConfig.Field(
        doc='Name of column with band name',
        dtype=str,
        default='band',
    )
    extra_columns = pexConfig.ListField(
        doc='Extra names of columns to read and persist (beyond instFlux and error).',
        dtype=str,
        default=['x',
                 'y',
                 'xErr',
                 'yErr',
                 'apFlux_12_0_instFlux',
                 'apFlux_12_0_instFluxErr',
                 'apFlux_12_0_flag',
                 'apFlux_17_0_instFlux',
                 'apFlux_17_0_instFluxErr',
                 'apFlux_17_0_flag',
                 'localBackground_instFlux',
                 'localBackground_flag',
                 'ixx',
                 'iyy',
                 'ixy',]
    )
    source_selector = sourceSelectorRegistry.makeField(
        doc='How to select sources. Under normal usage this should not be changed.',
        default='science'
    )

    def setDefaults(self):
        """Set the defaults for the 'science' source selector.

        Selects bright, unresolved, unflagged, isolated primary sources
        with finite ra/dec, using the configured instFlux field for the
        signal-to-noise cut.
        """
        super().setDefaults()

        source_selector = self.source_selector['science']
        source_selector.setDefaults()

        source_selector.doFlags = True
        source_selector.doUnresolved = True
        source_selector.doSignalToNoise = True
        source_selector.doIsolated = True
        source_selector.doRequireFiniteRaDec = True
        source_selector.doRequirePrimary = True

        source_selector.signalToNoise.minimum = 8.0
        source_selector.signalToNoise.maximum = 1000.0

        # The flag associated with inst_flux_field is derived from the
        # field name and appended to the bad-flag list below; this is the
        # "implicitly included" flag mentioned in the inst_flux_field doc.
        flux_flag_name = self.inst_flux_field.replace("instFlux", "flag")

        source_selector.flags.bad = ['pixelFlags_edge',
                                     'pixelFlags_interpolatedCenter',
                                     'pixelFlags_saturatedCenter',
                                     'pixelFlags_crCenter',
                                     'pixelFlags_bad',
                                     'pixelFlags_interpolated',
                                     'pixelFlags_saturated',
                                     'centroid_flag',
                                     flux_flag_name]

        source_selector.signalToNoise.fluxField = self.inst_flux_field
        source_selector.signalToNoise.errField = self.inst_flux_field + 'Err'

        source_selector.isolated.parentName = 'parentSourceId'
        source_selector.isolated.nChildName = 'deblend_nChild'

        source_selector.unresolved.name = 'sizeExtendedness'

        source_selector.requireFiniteRaDec.raColName = self.ra_column
        source_selector.requireFiniteRaDec.decColName = self.dec_column

194 

class IsolatedStarAssociationTask(pipeBase.PipelineTask):
    """Associate sources into isolated star catalogs.

    Per-visit source tables are selected, matched into unique star
    positions band-by-band, stars with close neighbors are removed,
    and the individual sources are re-matched and sorted so that each
    star's sources (overall and per band) are contiguous in the output.
    """
    ConfigClass = IsolatedStarAssociationConfig
    _DefaultName = 'isolatedStarAssociation'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.makeSubtask('source_selector')
        # Only log warning and fatal errors from the source_selector
        self.source_selector.log.setLevel(self.source_selector.log.WARN)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        input_handle_dict = butlerQC.get(inputRefs)

        tract = butlerQC.quantum.dataId['tract']

        source_table_handles = input_handle_dict['source_table_visit']

        self.log.info('Running with %d source_table_visit datasets',
                      len(source_table_handles))

        source_table_handle_dict_temp = {source_table_handle.dataId['visit']: source_table_handle for
                                         source_table_handle in source_table_handles}

        # Warn (but proceed) if the input contains bands that were not
        # configured for matching; those sources are simply ignored.
        bands = {source_table_handle.dataId['band'] for source_table_handle in source_table_handles}
        for band in bands:
            if band not in self.config.band_order:
                self.log.warning('Input data has data from band %s but that band is not '
                                 'configured for matching', band)

        # TODO: Sort by visit until DM-31701 is done and we have deterministic
        # dataset ordering.
        source_table_handle_dict = {visit: source_table_handle_dict_temp[visit] for
                                    visit in sorted(source_table_handle_dict_temp.keys())}

        struct = self.run(input_handle_dict['skymap'], tract, source_table_handle_dict)

        butlerQC.put(astropy.table.Table(struct.star_source_cat),
                     outputRefs.isolated_star_sources)
        butlerQC.put(astropy.table.Table(struct.star_cat),
                     outputRefs.isolated_star_cat)

    def run(self, skymap, tract, source_table_handle_dict):
        """Run the isolated star association task.

        Parameters
        ----------
        skymap : `lsst.skymap.SkyMap`
            Skymap object.
        tract : `int`
            Tract number.
        source_table_handle_dict : `dict`
            Dictionary of source_table handles.  Key is visit, value is
            a `lsst.daf.butler.DeferredDatasetHandle`.

        Returns
        -------
        struct : `lsst.pipe.base.struct`
            Struct with outputs for persistence.
        """
        star_source_cat = self._make_all_star_sources(skymap[tract], source_table_handle_dict)

        primary_bands = self.config.band_order

        # Do primary matching
        primary_star_cat = self._match_primary_stars(primary_bands, star_source_cat)

        # Several stages below can empty the catalog; each returns early
        # with correctly-typed zero-length arrays so downstream persistence
        # still works.
        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Remove neighbors
        primary_star_cat = self._remove_neighbors(primary_star_cat)

        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Crop to inner tract region
        inner_tract_ids = skymap.findTractIdArray(primary_star_cat[self.config.ra_column],
                                                  primary_star_cat[self.config.dec_column],
                                                  degrees=True)
        use = (inner_tract_ids == tract)
        self.log.info('Total of %d isolated stars in inner tract.', use.sum())

        primary_star_cat = primary_star_cat[use]

        if len(primary_star_cat) == 0:
            return pipeBase.Struct(star_source_cat=np.zeros(0, star_source_cat.dtype),
                                   star_cat=np.zeros(0, primary_star_cat.dtype))

        # Set the unique ids.
        primary_star_cat['isolated_star_id'] = self._compute_unique_ids(skymap,
                                                                        tract,
                                                                        len(primary_star_cat))

        # Match to sources.
        star_source_cat, primary_star_cat = self._match_sources(primary_bands,
                                                                star_source_cat,
                                                                primary_star_cat)

        return pipeBase.Struct(star_source_cat=star_source_cat,
                               star_cat=primary_star_cat)

    def _make_all_star_sources(self, tract_info, source_table_handle_dict):
        """Make a catalog of all the star sources.

        Parameters
        ----------
        tract_info : `lsst.skymap.TractInfo`
            Information about the tract.
        source_table_handle_dict : `dict`
            Dictionary of source_table handles.  Key is visit, value is
            a `lsst.daf.butler.DeferredDatasetHandle`.

        Returns
        -------
        star_source_cat : `np.ndarray`
            Catalog of star sources.
        """
        # Internally, we use a numpy recarray, they are by far the fastest
        # option in testing for relatively narrow tables.
        # (have not tested wide tables)
        all_columns, persist_columns = self._get_source_table_visit_column_names()
        poly = tract_info.outer_sky_polygon

        tables = []
        for visit in source_table_handle_dict:
            source_table_ref = source_table_handle_dict[visit]
            # Only read the columns needed for selection + persistence.
            tbl = source_table_ref.get(parameters={'columns': all_columns})

            goodSrc = self.source_selector.selectSources(tbl)

            table = np.asarray(tbl[persist_columns][goodSrc.selected])

            # Append columns that include the row in the source table
            # and the matched object index (to be filled later).
            table = np.lib.recfunctions.append_fields(table,
                                                      ['source_row',
                                                       'obj_index'],
                                                      [np.where(goodSrc.selected)[0],
                                                       np.zeros(goodSrc.selected.sum(), dtype=np.int32)],
                                                      dtypes=['i4', 'i4'],
                                                      usemask=False)

            # We cut to the outer tract polygon to ensure consistent matching
            # from tract to tract.
            tract_use = poly.contains(np.deg2rad(table[self.config.ra_column]),
                                      np.deg2rad(table[self.config.dec_column]))

            tables.append(table[tract_use])

        # Combine tables
        star_source_cat = np.concatenate(tables)

        return star_source_cat

    def _get_source_table_visit_column_names(self):
        """Get the list of sourceTable_visit columns from the config.

        Returns
        -------
        all_columns : `list` [`str`]
            All columns to read
        persist_columns : `list` [`str`]
            Columns to persist (excluding selection columns)
        """
        columns = [self.config.id_column,
                   'visit', 'detector',
                   self.config.ra_column, self.config.dec_column,
                   self.config.physical_filter_column, self.config.band_column,
                   self.config.inst_flux_field, self.config.inst_flux_field + 'Err']
        columns.extend(self.config.extra_columns)

        # Selection-only columns are read but not persisted; which ones
        # are needed depends on the source_selector configuration.
        all_columns = columns.copy()
        if self.source_selector.config.doFlags:
            all_columns.extend(self.source_selector.config.flags.bad)
        if self.source_selector.config.doUnresolved:
            all_columns.append(self.source_selector.config.unresolved.name)
        if self.source_selector.config.doIsolated:
            all_columns.append(self.source_selector.config.isolated.parentName)
            all_columns.append(self.source_selector.config.isolated.nChildName)
        if self.source_selector.config.doRequirePrimary:
            all_columns.append(self.source_selector.config.requirePrimary.primaryColName)

        return all_columns, columns

    def _match_primary_stars(self, primary_bands, star_source_cat):
        """Match primary stars.

        Parameters
        ----------
        primary_bands : `list` [`str`]
            Ordered list of primary bands.
        star_source_cat : `np.ndarray`
            Catalog of star sources.

        Returns
        -------
        primary_star_cat : `np.ndarray`
            Catalog of primary star positions
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        dtype = self._get_primary_dtype(primary_bands)

        # Figure out nside for computing boundary pixels.
        # We want to be 10x the match radius to be conservative.
        nside_split = self.config.nside_split
        nside_boundary = nside_split
        res = hpg.nside_to_resolution(nside_boundary)*3600.
        while res > 10.*self.config.match_radius:
            nside_boundary *= 2
            res = hpg.nside_to_resolution(nside_boundary)*3600.

        # Cancel out last jump.
        nside_boundary //= 2

        # Compute healpix nest bit shift between resolutions.
        bit_shift = 2*int(np.round(np.log2(nside_boundary / nside_split)))

        primary_star_cat = None
        for primary_band in primary_bands:
            use = (star_source_cat['band'] == primary_band)

            ra = star_source_cat[ra_col][use]
            dec = star_source_cat[dec_col][use]

            hpix_split = np.unique(hpg.angle_to_pixel(nside_split, ra, dec))
            hpix_highres = hpg.angle_to_pixel(nside_boundary, ra, dec)

            band_cats = []
            for pix in hpix_split:
                self.log.info("Matching primary stars in %s band in pixel %d", primary_band, pix)
                # We take all the high resolution pixels in this pixel, and
                # add in all the boundary pixels to ensure objects do not get split.
                pixels_highres = np.left_shift(pix, bit_shift) + np.arange(2**bit_shift)
                pixels_highres = np.unique(hpg.neighbors(nside_boundary, pixels_highres).ravel())
                a, b = esutil.numpy_util.match(pixels_highres, hpix_highres)

                _ra = ra[b]
                _dec = dec[b]

                with Matcher(_ra, _dec) as matcher:
                    idx = matcher.query_groups(self.config.match_radius/3600., min_match=1)

                count = len(idx)

                if count == 0:
                    continue

                band_cat = np.zeros(count, dtype=dtype)
                band_cat['primary_band'] = primary_band

                # If the tract cross ra=0 (that is, it has both low ra and high ra)
                # then we need to remap all ra values from [0, 360) to [-180, 180)
                # before doing any position averaging.
                remapped = False
                if _ra.min() < 60.0 and _ra.max() > 300.0:
                    _ra_temp = (_ra + 180.0) % 360. - 180.
                    remapped = True
                else:
                    _ra_temp = _ra

                # Compute mean position for each primary star
                for i, row in enumerate(idx):
                    row = np.array(row)
                    band_cat[ra_col][i] = np.mean(_ra_temp[row])
                    band_cat[dec_col][i] = np.mean(_dec[row])

                if remapped:
                    # Remap ra back to [0, 360)
                    band_cat[ra_col] %= 360.0

                # And cut down to objects that are not in the boundary region
                # after association.
                inpix = (hpg.angle_to_pixel(nside_split, band_cat[ra_col], band_cat[dec_col]) == pix)
                if inpix.sum() > 0:
                    band_cats.append(band_cat[inpix])

            if len(band_cats) == 0:
                self.log.info('Found 0 primary stars in %s band.', primary_band)
                continue

            band_cat = np.concatenate(band_cats)

            # Match to previous band catalog(s), and remove duplicates.
            if primary_star_cat is None or len(primary_star_cat) == 0:
                primary_star_cat = band_cat
            else:
                with Matcher(band_cat[ra_col], band_cat[dec_col]) as matcher:
                    idx = matcher.query_radius(primary_star_cat[ra_col],
                                               primary_star_cat[dec_col],
                                               self.config.match_radius/3600.)
                # Any object with a match should be removed.
                match_indices = np.array([i for i in range(len(idx)) if len(idx[i]) > 0])
                if len(match_indices) > 0:
                    band_cat = np.delete(band_cat, match_indices)

                primary_star_cat = np.append(primary_star_cat, band_cat)
            self.log.info('Found %d primary stars in %s band.', len(band_cat), primary_band)

        # If everything was cut, we still want the correct datatype.
        if primary_star_cat is None:
            primary_star_cat = np.zeros(0, dtype=dtype)

        return primary_star_cat

    def _remove_neighbors(self, primary_star_cat):
        """Remove neighbors from the primary star catalog.

        Parameters
        ----------
        primary_star_cat : `np.ndarray`
            Primary star catalog.

        Returns
        -------
        primary_star_cat_cut : `np.ndarray`
            Primary star cat with neighbors removed.
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        with Matcher(primary_star_cat[ra_col], primary_star_cat[dec_col]) as matcher:
            # By setting min_match=2 objects that only match to themselves
            # will not be recorded.
            try:
                # New smatch API
                idx = matcher.query_groups(self.config.isolation_radius/3600., min_match=2)
            except AttributeError:
                # Old smatch API
                idx = matcher.query_self(self.config.isolation_radius/3600., min_match=2)

        # np.concatenate raises ValueError on an empty sequence of groups,
        # which here means no object had a close neighbor.
        try:
            neighbor_indices = np.concatenate(idx)
        except ValueError:
            neighbor_indices = np.zeros(0, dtype=int)

        if len(neighbor_indices) > 0:
            neighbored = np.unique(neighbor_indices)
            self.log.info('Cutting %d objects with close neighbors.', len(neighbored))
            primary_star_cat = np.delete(primary_star_cat, neighbored)

        return primary_star_cat

    def _match_sources(self, bands, star_source_cat, primary_star_cat):
        """Match individual sources to primary stars.

        Parameters
        ----------
        bands : `list` [`str`]
            List of bands.
        star_source_cat : `np.ndarray`
            Array of star sources.
        primary_star_cat : `np.ndarray`
            Array of primary stars.

        Returns
        -------
        star_source_cat_sorted : `np.ndarray`
            Sorted and cropped array of star sources.
        primary_star_cat : `np.ndarray`
            Catalog of isolated stars, with indexes to star_source_cat_cut.
        """
        ra_col = self.config.ra_column
        dec_col = self.config.dec_column

        # We match sources per-band because it allows us to have sorted
        # sources for easy retrieval of per-band matches.
        n_source_per_band_per_obj = np.zeros((len(bands),
                                              len(primary_star_cat)),
                                             dtype=np.int32)
        band_uses = []
        idxs = []
        with Matcher(primary_star_cat[ra_col], primary_star_cat[dec_col]) as matcher:
            for b, band in enumerate(bands):
                band_use, = np.where(star_source_cat['band'] == band)

                idx = matcher.query_radius(star_source_cat[ra_col][band_use],
                                           star_source_cat[dec_col][band_use],
                                           self.config.match_radius/3600.)
                n_source_per_band_per_obj[b, :] = np.array([len(row) for row in idx])
                idxs.append(idx)
                band_uses.append(band_use)

        n_source_per_obj = np.sum(n_source_per_band_per_obj, axis=0)

        # source_cat_index is the running start offset of each object's
        # sources in the final sorted source catalog.
        primary_star_cat['nsource'] = n_source_per_obj
        primary_star_cat['source_cat_index'][1:] = np.cumsum(n_source_per_obj)[:-1]

        n_tot_source = primary_star_cat['source_cat_index'][-1] + primary_star_cat['nsource'][-1]

        # Temporary arrays until we crop/sort the source catalog
        source_index = np.zeros(n_tot_source, dtype=np.int32)
        obj_index = np.zeros(n_tot_source, dtype=np.int32)

        ctr = 0
        for i in range(len(primary_star_cat)):
            obj_index[ctr: ctr + n_source_per_obj[i]] = i
            for b in range(len(bands)):
                source_index[ctr: ctr + n_source_per_band_per_obj[b, i]] = band_uses[b][idxs[b][i]]
                ctr += n_source_per_band_per_obj[b, i]

        source_cat_index_band_offset = np.cumsum(n_source_per_band_per_obj, axis=0)

        for b, band in enumerate(bands):
            primary_star_cat[f'nsource_{band}'] = n_source_per_band_per_obj[b, :]
            if b == 0:
                # The first band listed is the same as the overall star
                primary_star_cat[f'source_cat_index_{band}'] = primary_star_cat['source_cat_index']
            else:
                # Other band indices are offset from the previous band
                primary_star_cat[f'source_cat_index_{band}'] = (primary_star_cat['source_cat_index']
                                                                + source_cat_index_band_offset[b - 1, :])

        star_source_cat = star_source_cat[source_index]
        star_source_cat['obj_index'] = obj_index

        return star_source_cat, primary_star_cat

    def _compute_unique_ids(self, skymap, tract, nstar):
        """Compute unique star ids.

        This is a simple hash of the tract and star to provide an
        id that is unique for a given processing.

        Parameters
        ----------
        skymap : `lsst.skymap.Skymap`
            Skymap object.
        tract : `int`
            Tract id number.
        nstar : `int`
            Number of stars.

        Returns
        -------
        ids : `np.ndarray`
            Array of unique star ids.
        """
        # The end of the id will be big enough to hold the tract number
        mult = 10**(int(np.log10(len(skymap))) + 1)

        return (np.arange(nstar) + 1)*mult + tract

    def _get_primary_dtype(self, primary_bands):
        """Get the numpy datatype for the primary star catalog.

        Parameters
        ----------
        primary_bands : `list` [`str`]
            List of primary bands.

        Returns
        -------
        dtype : `numpy.dtype`
            Datatype of the primary catalog.
        """
        # Size the unicode band-name column to the longest band name.
        max_len = max([len(primary_band) for primary_band in primary_bands])

        dtype = [('isolated_star_id', 'i8'),
                 (self.config.ra_column, 'f8'),
                 (self.config.dec_column, 'f8'),
                 ('primary_band', f'U{max_len}'),
                 ('source_cat_index', 'i4'),
                 ('nsource', 'i4')]

        for band in primary_bands:
            dtype.append((f'source_cat_index_{band}', 'i4'))
            dtype.append((f'nsource_{band}', 'i4'))

        return dtype