"""
Matches fakes based on position stored in the calibrated exposure image header.
"""
from __future__ import division
from __future__ import print_function

from builtins import map
from builtins import zip

import re
import argparse
import collections
from distutils.version import StrictVersion

import numpy as np

import astropy.table

import lsst.afw.geom
import lsst.afw.geom.ellipses
import lsst.afw.table
import lsst.pex.exceptions
import lsst.daf.persistence as dafPersist
from lsst.afw.table import SourceCatalog, SchemaMapper

NO_FOOTPRINT = lsst.afw.table.SOURCE_IO_NO_FOOTPRINTS
30 """Combine the meas and forced_src catalogs.""" 31 if len(meas) != len(force):
32 raise Exception(
"# Meas and Forced_src catalogs should have " +
35 mapper.addMinimalSchema(meas.schema)
36 newSchema = mapper.getOutputSchema()
38 newSchema.addField(
'force_deblend_nChild', type=np.int32)
39 newSchema.addField(
'force_base_ClassificationExtendedness_value', type=float)
40 newSchema.addField(
'force_ext_photometryKron_KronFlux_flux', type=float)
41 newSchema.addField(
'force_ext_photometryKron_KronFlux_fluxSigma', type=float)
42 newSchema.addField(
'force_base_PsfFlux_flux', type=float)
43 newSchema.addField(
'force_base_PsfFlux_fluxSigma', type=float)
44 newSchema.addField(
'force_ext_photometryKron_KronFlux_apCorr', type=float)
45 newSchema.addField(
'force_ext_photometryKron_KronFlux_apCorrSigma', type=float)
46 newSchema.addField(
'force_base_PsfFlux_apCorr', type=float)
47 newSchema.addField(
'force_base_PsfFlux_apCorrSigma', type=float)
48 newSchema.addField(
'force_modelfit_CModel_flux', type=float)
49 newSchema.addField(
'force_modelfit_CModel_fluxSigma', type=float)
50 newSchema.addField(
'force_modelfit_CModel_fracDev', type=float)
51 newSchema.addField(
'force_modelfit_CModel_exp_flux', type=float)
52 newSchema.addField(
'force_modelfit_CModel_exp_fluxSigma', type=float)
53 newSchema.addField(
'force_modelfit_CModel_dev_flux', type=float)
54 newSchema.addField(
'force_modelfit_CModel_dev_fluxSigma', type=float)
55 newSchema.addField(
'force_modelfit_CModel_apCorr', type=float)
56 newSchema.addField(
'force_modelfit_CModel_apCorrSigma', type=float)
57 newSchema.addField(
'force_modelfit_CModel_exp_apCorr', type=float)
58 newSchema.addField(
'force_modelfit_CModel_exp_apCorrSigma', type=float)
59 newSchema.addField(
'force_modelfit_CModel_dev_apCorr', type=float)
60 newSchema.addField(
'force_modelfit_CModel_dev_apCorrSigma', type=float)
62 newCols = [
'deblend_nChild',
'base_ClassificationExtendedness_value',
63 'ext_photometryKron_KronFlux_flux',
'ext_photometryKron_KronFlux_fluxSigma',
64 'base_PsfFlux_flux',
'base_PsfFlux_fluxSigma',
65 'ext_photometryKron_KronFlux_apCorr',
'ext_photometryKron_KronFlux_apCorrSigma',
66 'base_PsfFlux_apCorr',
'base_PsfFlux_apCorrSigma',
67 'modelfit_CModel_flux',
'modelfit_CModel_fluxSigma',
68 'modelfit_CModel_exp_apCorr',
'modelfit_CModel_exp_apCorrSigma',
69 'modelfit_CModel_exp_flux',
'modelfit_CModel_exp_flux',
70 'modelfit_CModel_exp_apCorr',
'modelfit_CModel_exp_apCorrSigma',
71 'modelfit_CModel_dev_flux',
'modelfit_CModel_dev_fluxSigma',
72 'modelfit_CModel_dev_apCorr',
'modelfit_CModel_dev_apCorrSigma',
73 'modelfit_CModel_fracDev']
74 measAlias = meas.schema.getAliasMap()
75 newAlias = newSchema.getAliasMap()
76 for aliasKey
in measAlias.keys():
77 newAlias.set(aliasKey, measAlias[aliasKey])
78 combSrc = SourceCatalog(newSchema)
79 combSrc.extend(meas, mapper=mapper)
82 combSrc[
'force_' + key][:] = force[key][:]

def getMag(flux, fluxerr, zeropoint):
    """Return the magnitude and error."""
    mag = -2.5 * np.log10(flux)
    magerr = (2.5 / np.log(10.0) * fluxerr / flux)
    return (mag.T + zeropoint).T, magerr
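
# Hypothetical sketch (not part of the original pipeline): the flux values and
# the 27.0 mag zeropoint below are made up, and only illustrate the
# mag = -2.5*log10(flux) + zeropoint conversion done by getMag.
def _exampleGetMag():
    flux = np.array([1000.0, 2000.0])
    fluxerr = np.array([10.0, 40.0])
    mag, magerr = getMag(flux, fluxerr, 27.0)
    # mag is roughly [19.5, 18.75]; magerr is ~1.0857 * fluxerr / flux
    return mag, magerr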
95 """Return the Re, b/a and PA for a given quadrupole moment.""" 99 b_a = (e.getB() / e.getA())
100 except ZeroDivisionError:
102 pa = (e.getTheta() * 180.0 / np.pi)
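
# Hypothetical sketch (not part of the original pipeline): an axis-aligned
# quadrupole with Ixx=4, Iyy=1, Ixy=0 corresponds to a=2 pixels, b/a=0.5 and
# PA=0 degrees when passed through getEllipse.
def _exampleGetEllipse():
    quad = lsst.afw.geom.ellipses.Quadrupole(4.0, 1.0, 0.0)
    return getEllipse(quad)  # -> (2.0, 0.5, 0.0)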

def matchToFakeCatalog(sources, fakeCatalog):
    """
    Match to the fake catalog and append those columns to the source table.

    This assumes the sources are an astropy table,
    or it will throw a TypeError.
    """
    if not isinstance(sources, astropy.table.Table):
        raise TypeError("Expect an astropy table for sources;"
                        " use getAstroTable to convert")

    fakes = astropy.table.Table().read(fakeCatalog)
    fakes.rename_column('ID', 'fakeId')
    return astropy.table.join(sources, fakes, keys='fakeId', join_type='left')
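
# Hypothetical sketch (not part of the original pipeline): the join performed
# by matchToFakeCatalog is a plain astropy left join on 'fakeId'; the tiny
# in-memory tables below are made up for illustration.
def _exampleFakeIdJoin():
    src = astropy.table.Table({'fakeId': [1, 2, 3],
                               'mag': [20.1, 21.3, 22.0]})
    fakes = astropy.table.Table({'fakeId': [1, 2],
                                 'mag_input': [20.0, 21.5]})
    # The row with fakeId=3 survives with a masked 'mag_input' entry.
    return astropy.table.join(src, fakes, keys='fakeId', join_type='left')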

def getFakeMatchesHeader(cal_md, sources, tol=1.0):
    """
    Return the fake matches based on the information in the header.

    Returns a tuple with:
        the positions in pixels of the fake sources added to the chip
        the matches in a dictionary of the form
            {fakeid:[ind_of_match_in_sources,...],...}

    Matches are searched within a tolerance of `tol` pixels
    (1 pixel by default) in each direction.
    """
    fakeXY = collections.defaultdict(tuple)
    fakename = re.compile('FAKE([0-9]+)')
    for card in cal_md.names():
        m = fakename.match(card)
        if m is not None:
            x, y = list(map(float, (cal_md.get(card)).split(',')))
            fakeXY[int(m.group(1))] = (x, y)

    srcX, srcY = sources.getX(), sources.getY()
    srcIndex = collections.defaultdict(list)
    for fid, fcoord in fakeXY.items():
        distX = srcX - fcoord[0]
        distY = srcY - fcoord[1]
        matched = (np.abs(distX) < tol) & (np.abs(distY) < tol)
        srcIndex[fid] = np.where(matched)[0]

    return fakeXY, srcIndex
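
# Hypothetical sketch (not part of the original pipeline): the fake positions
# are stored in header cards named 'FAKE<id>' whose value is an 'x,y' string;
# the card name and value below are made up for illustration.
def _exampleParseFakeCard():
    card, value = 'FAKE12', '1023.5,2047.1'
    m = re.compile('FAKE([0-9]+)').match(card)
    x, y = list(map(float, value.split(',')))
    return int(m.group(1)), (x, y)  # -> (12, (1023.5, 2047.1))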

def getFakeMatchesRaDec(sources, radecCatFile, bbox, wcs, tol=1.0,
                        reffMatch=False, pix=0.168, minRad=None,
                        raCol='RA', decCol='Dec'):
    """
    Return the fake matches based on an RA/Dec match.

    Parameters:
        sources: source table object
        radecCatFile: filename for FITS file of fake object table,
                      with RA/Dec columns and an 'ID' column
        bbox: bounding box of the exposure (ccd or patch) within which
              fake sources are considered
        wcs: Wcs of source image
        tol: tolerance within which to match sources, given in PIXELS

    Returns:
        fakeXY: set of pixel positions of fake sources
        srcIndex: matches to fake sources in a dictionary of the form
                  {fakeid:[ind_of_match_in_sources,...],...}
        srcClose: index of the closest source for each fake id

    Raises:
        IOError: couldn't open radecCatFile
    """
    fakeXY = collections.defaultdict(tuple)
    fakeCat = astropy.table.Table().read(radecCatFile, format='fits')

    for fakeSrc in fakeCat:
        # NOTE: the sky-coordinate construction below is an assumed
        # reconstruction; the original statement was lost, but it must turn
        # (RA, Dec) in degrees into the object expected by wcs.skyToPixel.
        fakePix = lsst.afw.geom.SpherePoint(fakeSrc[raCol] * lsst.afw.geom.degrees,
                                            fakeSrc[decCol] * lsst.afw.geom.degrees)
        fakeCoord = wcs.skyToPixel(fakePix)

        if bbox.contains(fakeCoord):
            if reffMatch:
                fakeXY[int(fakeSrc['ID'])] = (fakeCoord.getX(),
                                              fakeCoord.getY(),
                                              (fakeSrc['reff'] / pix) * tol)
            else:
                fakeXY[int(fakeSrc['ID'])] = (fakeCoord.getX(),
                                              fakeCoord.getY(),
                                              tol)

    srcX, srcY = sources.getX(), sources.getY()
    srcIndex = collections.defaultdict(list)
    srcClose = collections.defaultdict(list)
    for fid, fcoord in fakeXY.items():
        distX = (srcX - fcoord[0])
        distY = (srcY - fcoord[1])
        distR = np.sqrt((np.abs(distX) ** 2.0) + (np.abs(distY) ** 2.0))
        closest = np.nanargmin(distR)
        radMatch = fcoord[2]
        if minRad is not None:
            if radMatch < minRad:
                radMatch = minRad
        if reffMatch:
            matched = (distR <= radMatch)
        else:
            matched = (np.abs(distX) <= radMatch) & (np.abs(distY) <= radMatch)

        srcIndex[fid] = np.where(matched)[0]
        srcClose[fid] = closest

    return fakeXY, srcIndex, srcClose
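
# Hypothetical sketch (not part of the original pipeline): with reffMatch the
# match is a circle of radius radMatch around the fake position, otherwise a
# box of half-width radMatch; the source positions below are made up.
def _exampleMatchCriterion():
    srcX = np.array([100.0, 101.9, 110.0])
    srcY = np.array([200.0, 202.3, 220.0])
    fakeX, fakeY, radMatch = 100.5, 200.5, 2.0
    distX, distY = srcX - fakeX, srcY - fakeY
    distR = np.sqrt(distX ** 2.0 + distY ** 2.0)
    circle = np.where(distR <= radMatch)[0]
    box = np.where((np.abs(distX) <= radMatch) &
                   (np.abs(distY) <= radMatch))[0]
    return circle, box  # -> (array([0]), array([0, 1]))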

def getFakeSources(butler, dataId, tol=1.0,
                   extraCols=('zeropoint', 'visit', 'ccd'),
                   includeMissing=False, footprints=False, radecMatch=None,
                   multiband=False, reffMatch=False, pix=0.168, minRad=None,
                   raCol='RA', decCol='Dec'):
    """
    Get list of sources which agree in pixel position with fake ones with tol.

    This returns a sourceCatalog of all the matched fake objects.
    Note that there will be duplicates in this list, since deblend_nChild is
    not checked and this is only a tolerance match, which can include extra
    sources.

    The outputs can include extraCols as long as they are one of:
        zeropoint, visit, ccd, thetaNorth, pixelScale

    If includeMissing is True, then the pipeline looks at the fake sources
    added in the header and includes an entry in the table for sources without
    any measurements; specifically, the 'id' column will be 0.

    radecMatch is the fakes table. If it is not None (default), an RA/Dec
    match against the input catalog is done instead of looking in the header
    for where the fakes were added.
    """
    coaddData = "deepCoadd_calexp"
    coaddMeta = "deepCoadd_calexp_md"

    availExtras = {'zeropoint': {'type': float, 'doc': 'zeropoint'},
                   'visit': {'type': int, 'doc': 'visit id'},
                   'ccd': {'type': int, 'doc': 'ccd id'},
                   'thetaNorth': {'type': lsst.afw.geom.Angle,
                                  'doc': 'angle to north'},
                   'pixelScale': {'type': float,
                                  'doc': 'pixelscale in arcsec/pixel'}}

    if not np.in1d(extraCols, list(availExtras.keys())).all():
        print("extraCols must be in ", availExtras)

    try:
        if 'filter' not in dataId:
            sources = butler.get('src', dataId,
                                 flags=lsst.afw.table.SOURCE_IO_NO_FOOTPRINTS,
                                 immediate=True)
            cal = butler.get('calexp', dataId, immediate=True)
            cal_md = butler.get('calexp_md', dataId, immediate=True)
        else:
            meas = butler.get('deepCoadd_meas', dataId,
                              flags=NO_FOOTPRINT, immediate=True)
            force = butler.get('deepCoadd_forced_src', dataId,
                               flags=NO_FOOTPRINT, immediate=True)
            sources = combineWithForce(meas, force)
            cal = butler.get(coaddData, dataId, immediate=True)
            cal_md = butler.get(coaddMeta, dataId, immediate=True)
    except RuntimeError as e:
        print("skipping", dataId)
        return None

    if ('pixelScale' in extraCols) or ('thetaNorth' in extraCols):
        wcs = cal.getWcs()
        availExtras['pixelScale']['value'] = wcs.getPixelScale().asArcseconds()
        # Find the angle to north: take the middle pixel of the image,
        # convert it to a sky coordinate, linearize the sky-to-pixel
        # transform there, and measure the rotation of that matrix.
        xMid = cal.getWidth() // 2
        yMid = cal.getHeight() // 2
        midPoint = lsst.afw.geom.Point2D(xMid, yMid)
        midCoord = wcs.pixelToSky(midPoint)
        northSkyToPixelMatrix = wcs.linearizeSkyToPixel(midCoord,
                                                        lsst.afw.geom.degrees)
        northSkyToPixelMatrix = northSkyToPixelMatrix.getLinear()
        # NOTE: the argument of the linearized transform below is an assumed
        # reconstruction; the original expression was truncated.
        availExtras['thetaNorth']['value'] = lsst.afw.geom.Angle(
            np.arctan2(*tuple(northSkyToPixelMatrix(
                lsst.afw.geom.Point2D(1.0, 0.0)))))

    if 'visit' in extraCols:
        availExtras['visit']['value'] = dataId['visit']
    if 'ccd' in extraCols:
        availExtras['ccd']['value'] = dataId['ccd']
    if 'zeropoint' in extraCols:
        zeropoint = 2.5 * np.log10(cal_md.get('FLUXMAG0'))
        availExtras['zeropoint']['value'] = zeropoint

    if radecMatch is None:
        fakeXY, srcIndex = getFakeMatchesHeader(cal_md, sources, tol=tol)
        srcClose = None
    else:
        if minRad is not None:
            print("# The min matching radius is %4.1f pixel" % minRad)
        bbox = lsst.afw.geom.Box2D(cal.getBBox())
        fakeXY, srcIndex, srcClose = getFakeMatchesRaDec(sources, radecMatch,
                                                         bbox, cal.getWcs(),
                                                         tol=tol,
                                                         reffMatch=reffMatch,
                                                         pix=pix,
                                                         minRad=minRad,
                                                         raCol=raCol,
                                                         decCol=decCol)

    mapper = SchemaMapper(sources.schema)
    mapper.addMinimalSchema(sources.schema)
    newSchema = mapper.getOutputSchema()
    newSchema.addField('fakeId', type=np.int32,
                       doc='id of fake source matched to position')
    newSchema.addField('nMatched', type=np.int32,
                       doc='Number of matched objects')
    newSchema.addField('nPrimary', type=np.int32,
                       doc='Number of unique matched objects')
    newSchema.addField('nNoChild', type=np.int32,
                       doc='Number of matched objects with nchild==0')
    newSchema.addField('rMatched', type=float,
                       doc='Radius used for matching objects, in pixel')
    newSchema.addField('fakeOffX', type=float,
                       doc='offset from input fake position in X (pixels)')
    newSchema.addField('fakeOffY', type=float,
                       doc='offset from input fake position in Y (pixels)')
    newSchema.addField('fakeOffR', type=float,
                       doc='offset from input fake position in radius')
    newSchema.addField('fakeClosest', type="Flag",
                       doc='Is this match the closest one?')

    for extraName in set(extraCols).intersection(availExtras):
        newSchema.addField(extraName, type=availExtras[extraName]['type'],
                           doc=availExtras[extraName]['doc'])

    srcList = SourceCatalog(newSchema)
    srcList.reserve(sum([len(s) for s in srcIndex.values()]) +
                    (0 if not includeMissing
                     else list(srcIndex.values()).count([])))

    centroidKey = sources.getCentroidKey()
    isPrimary = sources.schema.find('detect_isPrimary').getKey()
    nChild = sources.schema.find('force_deblend_nChild').getKey()
    for ident, sindlist in srcIndex.items():
        rMatched = fakeXY[ident][2]
        if minRad is not None:
            if rMatched < minRad:
                rMatched = minRad
        nMatched = len(sindlist)
        nPrimary = np.sum([sources[int(obj)].get(isPrimary)
                           for obj in sindlist])
        nNoChild = np.sum([(sources[int(obj)].get(nChild) == 0)
                           for obj in sindlist])
        if includeMissing and (nMatched == 0):
            newRec = srcList.addNew()
            newRec.set('fakeId', ident)
            newRec.set('id', 0)
            newRec.set('nMatched', 0)
            newRec.set('rMatched', rMatched)
        for ss in sindlist:
            newRec = srcList.addNew()
            newRec.assign(sources[int(ss)], mapper)
            newRec.set('fakeId', ident)
            newRec.set('nMatched', nMatched)
            newRec.set('nPrimary', nPrimary)
            newRec.set('nNoChild', nNoChild)
            newRec.set('rMatched', rMatched)
            offsetX = (sources[int(ss)].get(centroidKey).getX() -
                       fakeXY[ident][0])
            newRec.set('fakeOffX', offsetX)
            offsetY = (sources[int(ss)].get(centroidKey).getY() -
                       fakeXY[ident][1])
            newRec.set('fakeOffY', offsetY)
            newRec.set('fakeOffR', np.sqrt(offsetX ** 2.0 + offsetY ** 2.0))
            if srcClose is not None and int(ss) == int(srcClose[ident]):
                newRec.set('fakeClosest', True)
            else:
                newRec.set('fakeClosest', False)

    if includeMissing:
        srcList = srcList.copy(deep=True)

    for extraName in set(extraCols).intersection(availExtras):
        tempCol = srcList.get(extraName)
        tempCol.fill(availExtras[extraName]['value'])

    return srcList
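
# Hypothetical usage sketch (the repository path, dataId values and fake
# catalog filename are made up): match the fakes in a single visit/ccd against
# its src catalog, carrying the zeropoint, visit and ccd into the output.
def _exampleGetFakeSources():
    butler = dafPersist.Butler('/path/to/rerun')
    return getFakeSources(butler, {'visit': 1228, 'ccd': 49},
                          tol=2.0, includeMissing=True,
                          extraCols=('zeropoint', 'visit', 'ccd'),
                          radecMatch='fakeCatalog.fits')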

def getAstroTable(src, mags=True):
    """
    Return an astropy table with all the src entries.

    If the entries are complex objects, they are broken down:
        ellipse entries are broken into
            ellipse_a = semi-major axis
            ellipse_q = axis ratio (always < 1)
            ellipse_theta = rotation of semi-major axis
                            from chip x-axis in degrees
    If mags is True, magnitudes are returned for all the flux columns.
    """
    tab = astropy.table.Table()
    for name in src.schema.getNames():
        # A lookup by key is much faster than a lookup by name.
        nameKey = src.schema.find(name).getKey()
        try:
            tab.add_column(astropy.table.Column(name=name,
                                                data=src.get(nameKey)))
        except lsst.pex.exceptions.LsstException:
            quadrupole = lsst.afw.geom.ellipses.ellipsesLib.Quadrupole
            if type(src[0].get(nameKey)) is quadrupole:
                # Shape measurements
                reff, q, theta = list(zip(*[getEllipse(s.get(nameKey))
                                            for s in src]))
                tab.add_column(astropy.table.Column(name=name + '_a',
                                                    data=reff))
                tab.add_column(astropy.table.Column(name=name + '_q',
                                                    data=q))
                tab.add_column(astropy.table.Column(name=name + '_theta',
                                                    data=theta))
            elif hasattr(src[0].get(nameKey), 'getRa'):
                # Coordinate measurements
                x, y = list(zip(*[(s.get(nameKey).getRa().asDegrees(),
                                   s.get(nameKey).getDec().asDegrees())
                                  for s in src]))
                tab.add_column(astropy.table.Column(name=name + '_ra', data=x))
                tab.add_column(astropy.table.Column(name=name + '_dec', data=y))
            else:
                keyData = np.array([s.get(nameKey) for s in src])
                tab.add_column(astropy.table.Column(name=name,
                                                    data=keyData))

        if isinstance(src[0].get(nameKey), lsst.afw.geom.Angle):
            # Report angles in degrees instead of afw Angle objects.
            tab.remove_column(name)
            newCol = astropy.table.Column(data=[s.get(nameKey).asDegrees()
                                                for s in src],
                                          dtype=float, name=name)
            tab.add_column(newCol)

    if mags:
        # Compute magnitudes for every flux (and aperture-correction) column.
        for col in tab.colnames:
            colMatch = (re.match(r'^flux\.[a-z]+$', col) or
                        re.match(r'^flux\.[a-z]+.apcorr$', col) or
                        re.match(r'^force.flux\.[a-z]+$', col) or
                        re.match(r'^force.flux\.[a-z]+.apcorr$', col) or
                        re.match('^force.cmodel.+flux$', col) or
                        re.match('^force.cmodel.+flux.apcorr$', col) or
                        re.match('^cmodel.+flux$', col) or
                        re.match('^cmodel.+flux.apcorr$', col))
            if colMatch:
                zp = (tab['zeropoint'] if not re.search('apcorr', col)
                      else 0.0)
                mag, magerr = getMag(tab[col], tab[col + '.err'], zp)
                tab.add_column(astropy.table.Column(name=re.sub('flux', 'mag',
                                                                col),
                                                    data=mag))
                tab.add_column(astropy.table.Column(name=re.sub('flux', 'mag',
                                                                col + '.err'),
                                                    data=magerr))

    return tab

def returnMatchSingle(butler, slist, visit, ccd,
                      filt=None, tol=1.0, pix=0.168,
                      fakeCat=None, pixMatch=False, multiband=False,
                      reffMatch=False, includeMissing=True, minRad=None,
                      raCol='RA', decCol='Dec'):
    """Return matched catalog for each CCD or Patch."""
    if filt is None:
        print('Doing ccd %d' % int(ccd))
        mlis = getFakeSources(butler,
                              {'visit': visit, 'ccd': int(ccd)},
                              includeMissing=includeMissing,
                              extraCols=('visit', 'ccd',
                                         'zeropoint', 'pixelScale',
                                         'thetaNorth'),
                              radecMatch=fakeCat if not pixMatch else None,
                              tol=tol, reffMatch=reffMatch, pix=pix,
                              minRad=minRad, raCol=raCol, decCol=decCol)
    else:
        print('Doing patch %s' % ccd)
        mlis = getFakeSources(butler,
                              {'tract': visit, 'patch': ccd,
                               'filter': filt},
                              includeMissing=includeMissing,
                              extraCols=('thetaNorth', 'pixelScale',
                                         'zeropoint'),
                              radecMatch=fakeCat if not pixMatch else None,
                              tol=tol, multiband=multiband,
                              reffMatch=reffMatch, pix=pix,
                              minRad=minRad, raCol=raCol, decCol=decCol)

    if mlis is None:
        print('    No match returns!')
        return slist

    if slist is None:
        slist = mlis.copy(True)
    else:
        slist.extend(mlis, True)

    return slist

def returnMatchTable(rootDir, visit, ccdList, outfile=None, fakeCat=None,
                     overwrite=False, filt=None, tol=1.0, pixMatch=False,
                     multiband=False, reffMatch=False, pix=0.168,
                     multijobs=1, includeMissing=True, minRad=None,
                     raCol='RA', decCol='Dec'):
    """
    Driver (main function) for returning matches to fakes.

    INPUT: rootDir = rerun directory
           visit = visit id (int) (or tract)
           ccdList = list of ccds to look at (or patches)
           outfile = output file for the matched table,
                     None means no output written
           fakeCat = fake catalog to match to,
                     None means the fake sources are just
                     extracted from the header of the CCDs based on
                     position but no matching is done
           overwrite = whether to overwrite the existing output file
           pixMatch = do pixel matching instead of ra/dec matching
                      even if there is a catalog supplied
           multiband = whether to match to forced photometry catalogs
                       from the multiband process
           reffMatch = whether to match fake sources within a pixel radius
                       or within tol x Reff (only for RA/Dec match)
    OUTPUT: returns an astropy.table.Table with all the entries
            from the source catalog for objects which match in pixel
            position to the fake sources
    """
    butler = dafPersist.Butler(rootDir)
    slist = None

    if multijobs > 1:
        try:
            from joblib import Parallel, delayed
            mlist = Parallel(n_jobs=multijobs)(delayed(returnMatchSingle)(
                butler, None, visit, ccd,
                filt=filt, fakeCat=fakeCat, pixMatch=pixMatch,
                includeMissing=includeMissing, multiband=multiband,
                reffMatch=reffMatch, tol=tol, pix=pix, minRad=minRad,
                decCol=decCol, raCol=raCol) for ccd in ccdList)
            for m in mlist:
                if m is None:
                    continue
                if slist is None:
                    slist = m.copy(True)
                else:
                    slist.extend(m, True)
        except ImportError:
            print("# Can not import joblib, stop multiprocessing!")
            for ccd in ccdList:
                slist = returnMatchSingle(butler, slist, visit, ccd,
                                          filt=filt, fakeCat=fakeCat,
                                          includeMissing=includeMissing,
                                          pixMatch=pixMatch,
                                          multiband=multiband,
                                          reffMatch=reffMatch, pix=pix,
                                          tol=tol, minRad=minRad,
                                          raCol=raCol, decCol=decCol)
    else:
        for ccd in ccdList:
            slist = returnMatchSingle(butler, slist, visit, ccd,
                                      filt=filt, fakeCat=fakeCat,
                                      includeMissing=includeMissing,
                                      pixMatch=pixMatch, multiband=multiband,
                                      reffMatch=reffMatch, pix=pix,
                                      tol=tol, minRad=minRad,
                                      raCol=raCol, decCol=decCol)

    if slist is None:
        print("Returns no match....!")
        return None

    astroTable = getAstroTable(slist, mags=True)

    if fakeCat is not None:
        astroTable = matchToFakeCatalog(astroTable, fakeCat)

    if outfile is not None:
        try:
            astroTable.write(outfile + '.fits', format='fits',
                             overwrite=overwrite)
        except IOError:
            print("Try setting the option -w to overwrite the file.")
            raise

    return astroTable

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('rootDir', help='root dir of data repo')
    parser.add_argument('visit',
                        help='id of visit (or tract, if filter is specified)',
                        type=int)
    parser.add_argument('-f', '--filter', dest='filt',
                        help='name of filter, if none assume single visit',
                        default=None)
    parser.add_argument('--ccd', nargs='+', help='id of ccd(s) or patches')
    parser.add_argument('-o', help='outputfilename', default=None,
                        dest='outfile')
    parser.add_argument('-c', help='fake catalog', default=None,
                        dest='fakeCat')
    parser.add_argument('-w', '--overwrite', help='overwrite output file',
                        dest='ow', default=False, action='store_true')
    parser.add_argument('-m', '--multiband',
                        help='Match multiband measurements',
                        dest='multiband', default=False, action='store_true')
    parser.add_argument('-r', '--reffMatch',
                        help='Match the fake sources using tol x Reff',
                        dest='reffMatch', default=False, action='store_true')
    parser.add_argument('--min', '--minRad',
                        help='Minimum matching radius in unit of pixel',
                        dest='minRad', type=float, default=None)
    parser.add_argument('-t', '--tolerance', type=float, dest='tol',
                        help='matching radius in PIXELS (default=1.0)',
                        default=1.0)
    parser.add_argument('-j', '--multijobs', type=int,
                        help='Number of jobs run at the same time',
                        dest='multijobs', default=1)
    parser.add_argument('--ra', '--raCol', dest='raCol',
                        help='Name of the column for RA',
                        default='RA')
    parser.add_argument('--dec', '--decCol', dest='decCol',
                        help='Name of the column for Dec',
                        default='Dec')
    args = parser.parse_args()

    returnMatchTable(args.rootDir, args.visit, args.ccd, args.outfile,
                     args.fakeCat, overwrite=args.ow, filt=args.filt,
                     tol=args.tol, multiband=args.multiband,
                     reffMatch=args.reffMatch,
                     multijobs=args.multijobs,
                     minRad=args.minRad,
                     raCol=args.raCol, decCol=args.decCol)