Coverage for tests/test_association_task.py : 10%

# This file is part of ap_association.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import numpy as np
import pandas as pd
import unittest

from lsst.afw.cameraGeom.testUtils import DetectorWrapper
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.daf.base as dafBase
import lsst.geom as geom
import lsst.sphgeom as sphgeom
import lsst.utils.tests

from lsst.ap.association import \
    AssociationTask, \
    make_dia_source_schema, \
    make_dia_object_schema


def create_test_points(point_locs_deg,
                       wcs=None,
                       start_id=0,
                       schema=None,
                       scatter_arcsec=1.0,
                       indexer_ids=None,
                       associated_ids=None):
    """Create dummy DIASources or DIAObjects for use in our tests.

    Parameters
    ----------
    point_locs_deg : array-like (N, 2) of `float`
        Positions of the test points to create in RA, Dec.
    wcs : `lsst.afw.geom.SkyWcs`
        Wcs to convert RA/Dec to x/y if provided.
    start_id : `int`
        Unique id of the first object to create. The remaining sources are
        incremented by one from the first id.
    schema : `lsst.afw.table.Schema`
        Schema of the objects to create. Defaults to the DIASource schema.
    scatter_arcsec : `float`
        Scatter to add to the position of each DIASource.
    indexer_ids : `list` of `int`
        Id numbers of the pixelization indexer to store. Must be the same
        length as the first dimension of point_locs_deg.
    associated_ids : `list` of `int`
        Id numbers of associated DIAObjects to store. Must be the same
        length as the first dimension of point_locs_deg.

    Returns
    -------
    test_points : `lsst.afw.table.SourceCatalog`
        Catalog of points to test.
    """
    if schema is None:
        schema = make_dia_source_schema()
    sources = afwTable.SourceCatalog(schema)

    for src_idx, (ra, dec,) in enumerate(point_locs_deg):
        src = sources.addNew()
        src['id'] = src_idx + start_id
        coord = geom.SpherePoint(ra, dec, geom.degrees)
        if scatter_arcsec > 0.0:
            coord = coord.offset(
                np.random.rand() * 360 * geom.degrees,
                np.random.rand() * scatter_arcsec * geom.arcseconds)
        if indexer_ids is not None:
            src['pixelId'] = indexer_ids[src_idx]
        if associated_ids is not None:
            src['diaObjectId'] = associated_ids[src_idx]
        src.setCoord(coord)

        if wcs is not None:
            xyCentroid = wcs.skyToPixel(coord)
            src.set("x", xyCentroid.getX())
            src.set("y", xyCentroid.getY())

    return sources
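
# Illustrative usage of create_test_points (a sketch for reference only; it is
# not executed by the tests): build three scatter-free test points whose ids
# start at 100. ``example_points`` is a hypothetical name; the call mirrors how
# the tests below construct their DIASource catalogs.
#
#     example_points = create_test_points(
#         point_locs_deg=[[45.00, 45.00], [45.01, 45.01], [45.02, 45.02]],
#         start_id=100,
#         scatter_arcsec=-1)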


def create_test_points_pandas(point_locs_deg,
                              wcs=None,
                              start_id=0,
                              schema=None,
                              scatter_arcsec=1.0,
                              indexer_ids=None,
                              associated_ids=None):
    """Create dummy DIASources or DIAObjects for use in our tests.

    Parameters
    ----------
    point_locs_deg : array-like (N, 2) of `float`
        Positions of the test points to create in RA, Dec.
    wcs : `lsst.afw.geom.SkyWcs`
        Wcs to convert RA/Dec to x/y if provided.
    start_id : `int`
        Unique id of the first object to create. The remaining sources are
        incremented by one from the first id.
    schema : `lsst.afw.table.Schema`
        Schema of the objects to create. Defaults to the DIASource schema.
    scatter_arcsec : `float`
        Scatter to add to the position of each DIASource.
    indexer_ids : `list` of `int`
        Id numbers of the pixelization indexer to store. Must be the same
        length as the first dimension of point_locs_deg.
    associated_ids : `list` of `int`
        Id numbers of associated DIAObjects to store. Must be the same
        length as the first dimension of point_locs_deg.

    Returns
    -------
    test_points : `pandas.DataFrame`
        Catalog of points to test.
    """
    if schema is None:
        schema = make_dia_source_schema()
    sources = afwTable.SourceCatalog(schema)

    for src_idx, (ra, dec,) in enumerate(point_locs_deg):
        src = sources.addNew()
        src['id'] = src_idx + start_id
        coord = geom.SpherePoint(ra, dec, geom.degrees)
        if scatter_arcsec > 0.0:
            coord = coord.offset(
                np.random.rand() * 360 * geom.degrees,
                np.random.rand() * scatter_arcsec * geom.arcseconds)
        if indexer_ids is not None:
            src['pixelId'] = indexer_ids[src_idx]
        if associated_ids is not None:
            src['diaObjectId'] = associated_ids[src_idx]
        src.setCoord(coord)

        if wcs is not None:
            xyCentroid = wcs.skyToPixel(coord)
            src.set("x", xyCentroid.getX())
            src.set("y", xyCentroid.getY())

    sources = sources.asAstropy().to_pandas()

    return sources
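
# Note: create_test_points_pandas builds the same catalog as
# create_test_points; the only difference is the final
# ``asAstropy().to_pandas()`` conversion, so it returns a `pandas.DataFrame`
# instead of an `lsst.afw.table.SourceCatalog`.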


class TestAssociationTask(unittest.TestCase):

    def setUp(self):
        """Set up a test exposure with WCS and visit information, and an HTM
        pixelization of its footprint.
        """
        self.filter_names = ["u", "g", "r", "i", "z"]
        self.dia_object_schema = make_dia_object_schema()

        # metadata taken from CFHT data
        # v695856-e0/v695856-e0-c000-a00.sci_img.fits
        self.metadata = dafBase.PropertySet()

        self.metadata.set("SIMPLE", "T")
        self.metadata.set("BITPIX", -32)
        self.metadata.set("NAXIS", 2)
        self.metadata.set("NAXIS1", 1024)
        self.metadata.set("NAXIS2", 1153)
        self.metadata.set("RADECSYS", 'FK5')
        self.metadata.set("EQUINOX", 2000.)

        self.metadata.setDouble("CRVAL1", 215.604025685476)
        self.metadata.setDouble("CRVAL2", 53.1595451514076)
        self.metadata.setDouble("CRPIX1", 1109.99981456774)
        self.metadata.setDouble("CRPIX2", 560.018167811613)
        self.metadata.set("CTYPE1", 'RA---SIN')
        self.metadata.set("CTYPE2", 'DEC--SIN')

        self.metadata.setDouble("CD1_1", 5.10808596133527E-05)
        self.metadata.setDouble("CD1_2", 1.85579539217196E-07)
        self.metadata.setDouble("CD2_2", -5.10281493481982E-05)
        self.metadata.setDouble("CD2_1", -8.27440751733828E-07)

        self.wcs = afwGeom.makeSkyWcs(self.metadata)
        self.exposure = afwImage.makeExposure(
            afwImage.makeMaskedImageFromArrays(np.ones((1024, 1153))),
            self.wcs)
        detector = DetectorWrapper(id=23, bbox=self.exposure.getBBox()).detector
        visit = afwImage.VisitInfo(
            exposureId=1234,
            exposureTime=200.,
            date=dafBase.DateTime("2014-05-13T17:00:00.000000000",
                                  dafBase.DateTime.Timescale.TAI))
        self.exposure.setDetector(detector)
        self.exposure.getInfo().setVisitInfo(visit)
        self.exposure.setFilterLabel(afwImage.FilterLabel(band='g'))
        self.flux0 = 10000
        self.flux0_err = 100
        self.exposure.setPhotoCalib(
            afwImage.PhotoCalib(self.flux0, self.flux0_err))

        bbox = geom.Box2D(self.exposure.getBBox())
        wcs = self.exposure.getWcs()

        self.pixelator = sphgeom.HtmPixelization(20)
        region = sphgeom.ConvexPolygon([wcs.pixelToSky(pp).getVector()
                                        for pp in bbox.getCorners()])

        indices = self.pixelator.envelope(region, 64)
        # Index types must be cast to int to work with dax_apdb.
        self.index_ranges = indices.ranges()
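        # ``envelope(region, 64)`` returns a `sphgeom.RangeSet` of level-20
        # HTM indices covering the exposure footprint (64 is a hint on the
        # maximum number of ranges), and ``ranges()`` flattens that set into
        # (begin, end) integer index pairs.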

    def tearDown(self):
        """Clean up the objects created in setUp.
        """
        del self.metadata
        del self.wcs
        del self.exposure

    def test_run(self):
        """Test the run method with a database that already exists and
        contains DIAObjects and DIASources.
        """
        dia_objects = self._run_association_and_retrieve_objects(True)
        not_updated_idx = 0
        updated_idx_start = 1
        new_idx_start = 5
        total_expected_dia_objects = 10
        self.assertEqual(len(dia_objects), total_expected_dia_objects)

        # Test that the DIAObjects have been properly associated.
        for obj_idx, (df_idx, dia_object) in enumerate(dia_objects.iterrows()):
            if df_idx == not_updated_idx:
                # Test the DIAObject we expect not to be associated with any
                # new DIASources.
                self.assertEqual(dia_object['gPSFluxNdata'], 1)
                self.assertEqual(dia_object['rPSFluxNdata'], 1)
                self.assertEqual(dia_object['nDiaSources'], 2)
                self.assertEqual(df_idx, obj_idx)
            elif updated_idx_start <= df_idx < new_idx_start:
                # Test that associating to the existing DIAObjects went
                # as planned and that the IDs of the newly associated
                # DIASources are correct.
                self.assertEqual(dia_object['gPSFluxNdata'], 2)
                self.assertEqual(dia_object['rPSFluxNdata'], 1)
                self.assertEqual(dia_object['nDiaSources'], 3)
                self.assertEqual(df_idx, obj_idx)
            else:
                self.assertEqual(dia_object['gPSFluxNdata'], 1)
                self.assertEqual(dia_object['nDiaSources'], 1)
                self.assertEqual(df_idx, obj_idx + 4 + 5)

    def test_run_no_existing_objects(self):
        """Test the run method with a completely empty database.
        """
        dia_objects = self._run_association_and_retrieve_objects(False)
        total_expected_dia_objects = 9
        self.assertEqual(len(dia_objects),
                         total_expected_dia_objects)
        for obj_idx, (df_idx, output_dia_object) in enumerate(dia_objects.iterrows()):
            self.assertEqual(output_dia_object['gPSFluxNdata'], 1)
            self.assertEqual(df_idx, obj_idx + 10)

    def test_run_dup_diaSources(self):
        """Test that running association on duplicate DIASources raises the
        expected error.
        """
        with self.assertRaises(RuntimeError):
            self._run_association_and_retrieve_objects(create_objects=True,
                                                       dupDiaSources=True,
                                                       dupDiaObjects=False)

    def test_run_dup_diaObjects(self):
        """Test that running association on duplicate DIAObjects raises the
        expected error.
        """
        with self.assertRaises(RuntimeError):
            self._run_association_and_retrieve_objects(create_objects=True,
                                                       dupDiaSources=False,
                                                       dupDiaObjects=True)

    def _run_association_and_retrieve_objects(self,
                                              create_objects=False,
                                              dupDiaSources=False,
                                              dupDiaObjects=False):
        """Convenience method for testing the AssociationTask run method.

        Parameters
        ----------
        create_objects : `bool`
            Boolean specifying if seed DIAObjects and DIASources should be
            inserted into the database before association.
        dupDiaSources : `bool`
            Add duplicate diaSources into processing to force an error. Must
            be used with ``create_objects`` equal to True.
        dupDiaObjects : `bool`
            Add duplicate diaObjects into processing to force an error. Must
            be used with ``create_objects`` equal to True.

        Returns
        -------
        dia_objects : `pandas.DataFrame`
            Final set of DIAObjects to be tested.
        """
        if create_objects:
            diaObjects, diaSourceHistory = \
                self._create_dia_objects_and_sources()
        else:
            diaObjects = pd.DataFrame(columns=["diaObjectId"])
            diaSourceHistory = pd.DataFrame(columns=["diaObjectId",
                                                     "filterName",
                                                     "diaSourceId"])
        diaObjects.set_index("diaObjectId",
                             inplace=True,
                             drop=False)
        diaSourceHistory.set_index(["diaObjectId",
                                    "filterName",
                                    "diaSourceId"],
                                   inplace=True,
                                   drop=False)

        source_centers = [
            [self.wcs.pixelToSky(idx, idx).getRa().asDegrees(),
             self.wcs.pixelToSky(idx, idx).getDec().asDegrees()]
            for idx in np.linspace(1, 1000, 10)[1:]]
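        # The first grid point is skipped so that, when create_objects=True,
        # one of the five seed DIAObjects receives no new DIASource: four of
        # the nine new sources fall on existing DIAObjects and the remaining
        # five create new ones, matching the expectations in test_run.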
        dia_sources = create_test_points(
            point_locs_deg=source_centers,
            start_id=10,
            scatter_arcsec=-1)
        for dia_source in dia_sources:
            self._set_source_values(
                dia_source=dia_source,
                flux=10000,
                fluxErr=100,
                filterName=self.exposure.getFilterLabel().bandLabel,
                ccdVisitId=self.exposure.getInfo().getVisitInfo().getExposureId(),
                midPointTai=self.exposure.getInfo().getVisitInfo().getDate().get(
                    system=dafBase.DateTime.MJD))

        assoc_task = AssociationTask()

        diaSources = dia_sources.asAstropy().to_pandas()
        diaSources.rename(columns={"coord_ra": "ra",
                                   "coord_dec": "decl",
                                   "id": "diaSourceId",
                                   "parent": "parentDiaSourceId"},
                          inplace=True)
        diaSources["ra"] = np.degrees(diaSources["ra"])
        diaSources["decl"] = np.degrees(diaSources["decl"])
        if dupDiaSources:
            diaSources = diaSources.append(diaSourceHistory.iloc[[0, -1]],
                                           ignore_index=True)

        if len(diaObjects) == 0:
            diaSourceHistory = pd.DataFrame(columns=["diaObjectId",
                                                     "filterName",
                                                     "diaSourceId"])
            diaSourceHistory.set_index(
                ["diaObjectId", "filterName", "diaSourceId"],
                drop=False,
                inplace=True)
        if dupDiaObjects:
            diaObjects = diaObjects.append(diaObjects.iloc[[0, -1]],
                                           ignore_index=True)

        results = assoc_task.run(diaSources,
                                 diaObjects,
                                 diaSourceHistory)
        return results.diaObjects

    def _set_source_values(self, dia_source, flux, fluxErr, filterName,
                           ccdVisitId, midPointTai):
        """Set fluxes and visit info for DiaSources.

        Parameters
        ----------
        dia_source : `lsst.afw.table.SourceRecord`
            SourceRecord object to edit.
        flux : `float`
            Flux of the DiaSource.
        fluxErr : `float`
            Flux error of the DiaSource.
        filterName : `str`
            Name of the filter for the flux.
        ccdVisitId : `int`
            Integer id of this ccd/visit.
        midPointTai : `float`
            Time of the observation.
        """
        dia_source['ccdVisitId'] = ccdVisitId
        dia_source["midPointTai"] = midPointTai
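        # The fluxes below are stored as the ratio flux / flux0; the errors
        # are the first-order propagated uncertainty of that ratio, combining
        # fluxErr with the zero-point uncertainty flux0_err:
        #     sqrt((fluxErr / flux0)**2 + (flux * flux0_err / flux0**2)**2)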
        dia_source["psFlux"] = flux / self.flux0
        dia_source["psFluxErr"] = np.sqrt(
            (fluxErr / self.flux0) ** 2
            + (flux * self.flux0_err / self.flux0 ** 2) ** 2)
        dia_source["apFlux"] = flux / self.flux0
        dia_source["apFluxErr"] = np.sqrt(
            (fluxErr / self.flux0) ** 2
            + (flux * self.flux0_err / self.flux0 ** 2) ** 2)
        dia_source["totFlux"] = flux / self.flux0
        dia_source["totFluxErr"] = np.sqrt(
            (fluxErr / self.flux0) ** 2
            + (flux * self.flux0_err / self.flux0 ** 2) ** 2)
        dia_source["filterName"] = filterName
        dia_source["x"] = 0.
        dia_source["y"] = 0.

    def _create_dia_objects_and_sources(self):
        """Create a set of seed DIAObjects and DIASources for an
        association run.

        Returns
        -------
        dia_objects : `pandas.DataFrame`
            Seed DIAObjects.
        dia_sources : `pandas.DataFrame`
            Seed DIASources, two per DIAObject.
        """
        # This creates 5 DIAObjects with 2 DIASources associated to each.
        # The DIASources are "observed" in g and r.

        # Create DIAObjects and give them fluxes.
        n_objects = 5
        object_centers = np.array([
            [self.wcs.pixelToSky(idx, idx).getRa().asDegrees(),
             self.wcs.pixelToSky(idx, idx).getDec().asDegrees()]
            for idx in np.linspace(1, 1000, 10)])
        dia_objects = create_test_points(
            point_locs_deg=object_centers[:n_objects],
            start_id=0,
            schema=self.dia_object_schema,
            scatter_arcsec=-1,)
        # Set the DIAObject fluxes and number of associated sources.
        for dia_object in dia_objects:
            dia_object["nDiaSources"] = 2
            for filter_name in self.filter_names:
                sphPoint = geom.SpherePoint(dia_object.getCoord())
                htmIndex = self.pixelator.index(sphPoint.getVector())
                dia_object["pixelId"] = htmIndex
                dia_object['%sPSFluxMean' % filter_name] = 1
                dia_object['%sPSFluxMeanErr' % filter_name] = 1
                dia_object['%sPSFluxSigma' % filter_name] = 1
                dia_object['%sPSFluxNdata' % filter_name] = 1
        dia_objects = dia_objects.asAstropy().to_pandas()
        dia_objects.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaObjectId"},
                           inplace=True)
        dia_objects["ra"] = np.degrees(dia_objects["ra"])
        dia_objects["decl"] = np.degrees(dia_objects["decl"])

        dateTime = dafBase.DateTime("2014-05-13T16:00:00.000000000",
                                    dafBase.DateTime.Timescale.TAI)

        # Create DIASources and update their ccdVisitIds and fluxes.
        dia_sources = create_test_points(
            point_locs_deg=np.concatenate(
                [object_centers[:n_objects], object_centers[:n_objects]]),
            start_id=0,
            scatter_arcsec=-1,
            associated_ids=[0, 1, 2, 3, 4,
                            0, 1, 2, 3, 4])
        for src_idx, dia_source in enumerate(dia_sources):
            if src_idx < n_objects:
                self._set_source_values(
                    dia_source=dia_source,
                    flux=10000,
                    fluxErr=100,
                    filterName='g',
                    ccdVisitId=1232,
                    midPointTai=dateTime.get(system=dafBase.DateTime.MJD))
            else:
                self._set_source_values(
                    dia_source=dia_source,
                    flux=10000,
                    fluxErr=100,
                    filterName='r',
                    ccdVisitId=1233,
                    midPointTai=dateTime.get(system=dafBase.DateTime.MJD))
        dia_sources = dia_sources.asAstropy().to_pandas()
        dia_sources.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaSourceId",
                                    "parent": "parentDiaSourceId"},
                           inplace=True)
        dia_sources["ra"] = np.degrees(dia_sources["ra"])
        dia_sources["decl"] = np.degrees(dia_sources["decl"])
        return dia_objects, dia_sources

    def test_associate_sources(self):
        """Test the associate_sources method of AssociationTask.
        """
        n_objects = 5
        dia_objects = create_test_points_pandas(
            point_locs_deg=[[0.04 * obj_idx, 0.04 * obj_idx]
                            for obj_idx in range(n_objects)],
            start_id=0,
            schema=self.dia_object_schema,
            scatter_arcsec=-1,)
        dia_objects.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaObjectId"},
                           inplace=True)

        n_sources = 5
        dia_sources = create_test_points_pandas(
            point_locs_deg=[
                [0.04 * (src_idx + 1),
                 0.04 * (src_idx + 1)]
                for src_idx in range(n_sources)],
            start_id=n_objects,
            scatter_arcsec=0.1)
        dia_sources.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaSourceId"},
                           inplace=True)

        assoc_task = AssociationTask()
        assoc_result = assoc_task.associate_sources(
            dia_objects, dia_sources)
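
        # Each DIASource sits one grid step from the DIAObject with the same
        # index, so sources 5-8 associate to objects 1-4. The final source has
        # no object within tolerance and is expected to seed a new DIAObject
        # whose id equals its diaSourceId (9), as the expected ids below show.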
        for test_obj_id, expected_obj_id in zip(
                assoc_result.associated_dia_object_ids,
                [1, 2, 3, 4, 9]):
            self.assertEqual(test_obj_id, expected_obj_id)

    def test_score_and_match(self):
        """Test association between a set of DIASources and an existing
        set of DIAObjects.

        This also tests that a DIASource that can't be associated within
        tolerance is appended to the set of DIAObjects as a new DIAObject.
        """
        assoc_task = AssociationTask()
        # Create a set of DIAObjects that contain only one DIASource.
        n_objects = 5
        dia_objects = create_test_points_pandas(
            point_locs_deg=[[0.04 * obj_idx, 0.04 * obj_idx]
                            for obj_idx in range(n_objects)],
            start_id=0,
            schema=self.dia_object_schema,
            scatter_arcsec=-1,)
        dia_objects.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaObjectId"},
                           inplace=True)

        n_sources = 5
        dia_sources = create_test_points_pandas(
            point_locs_deg=[
                [0.04 * (src_idx + 1),
                 0.04 * (src_idx + 1)]
                for src_idx in range(n_sources)],
            start_id=n_objects,
            scatter_arcsec=-1)
        dia_sources.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaSourceId"},
                           inplace=True)

        score_struct = assoc_task.score(dia_objects,
                                        dia_sources,
                                        1.0 * geom.arcseconds)
        self.assertFalse(np.isfinite(score_struct.scores[-1]))
        for src_idx in range(4):
            # Our scores should be extremely close to 0 but not exactly so
            # due to machine noise.
            self.assertAlmostEqual(score_struct.scores[src_idx], 0.0,
                                   places=16)

        # After matching, each DIAObject should contain 2 DIASources, except
        # the last DIAObject in this collection, which should be newly
        # created during the matching step and contain only one DIASource.
        match_result = assoc_task.match(dia_objects, dia_sources, score_struct)
        updated_ids = match_result.associated_dia_object_ids
        self.assertEqual(len(updated_ids), 5)
        self.assertEqual(match_result.n_updated_dia_objects, 4)
        self.assertEqual(match_result.n_new_dia_objects, 1)
        self.assertEqual(match_result.n_unassociated_dia_objects, 1)
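
        # The DIAObject at (0.0, 0.0) has no DIASource within the 1 arcsecond
        # tolerance, which accounts for the single unassociated DIAObject
        # counted above.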

        # Test updating all DiaObjects.
        n_objects = 4
        dia_objects = create_test_points_pandas(
            point_locs_deg=[[0.04 * obj_idx, 0.04 * obj_idx]
                            for obj_idx in range(n_objects)],
            start_id=0,
            schema=self.dia_object_schema,
            scatter_arcsec=-1,)
        dia_objects.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaObjectId"},
                           inplace=True)

        n_sources = 4
        dia_sources = create_test_points_pandas(
            point_locs_deg=[
                [0.04 * src_idx,
                 0.04 * src_idx]
                for src_idx in range(n_sources)],
            start_id=n_objects,
            scatter_arcsec=-1)

        dia_sources.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaSourceId"},
                           inplace=True)
        score_struct = assoc_task.score(dia_objects[1:],
                                        dia_sources[:-1],
                                        1.0 * geom.arcseconds)
        match_result = assoc_task.match(dia_objects, dia_sources, score_struct)
        updated_ids = match_result.associated_dia_object_ids
        self.assertEqual(len(updated_ids), 4)

    def test_remove_nan_dia_sources(self):
        """Test that DIASources with NaN coordinates are dropped.
        """
        n_sources = 6
        dia_sources = create_test_points_pandas(
            point_locs_deg=[
                [0.04 * (src_idx + 1),
                 0.04 * (src_idx + 1)]
                for src_idx in range(n_sources)],
            start_id=0,
            scatter_arcsec=-1)
        dia_sources.rename(columns={"coord_ra": "ra",
                                    "coord_dec": "decl",
                                    "id": "diaSourceId"},
                           inplace=True)

        dia_sources.loc[2, "ra"] = np.nan
        dia_sources.loc[3, "decl"] = np.nan
        dia_sources.loc[4, "ra"] = np.nan
        dia_sources.loc[4, "decl"] = np.nan
        assoc_task = AssociationTask()
        out_dia_sources = assoc_task.check_dia_source_radec(dia_sources)
        self.assertEqual(len(out_dia_sources), n_sources - 3)


class MemoryTester(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()