import numpy as np

from lsst.utils import continueClass, TemplateMeta

from ._table import BaseRecord, BaseCatalog
from ._schema import Key
35 """Extract a dictionary of {<name>: <field-value>} in which the field names
36 match the given shell-style glob pattern(s).
38 Any number of glob patterns may be passed; the result will be the union of all
39 the result of each glob considered separately.
44 The result of a call to self.schema.extract(); this will be used
45 instead of doing any new matching, and allows the pattern matching
46 to be reused to extract values from multiple records. This
47 keyword is incompatible with any position arguments and the regex,
48 sub, and ordered keyword arguments.
50 If `True`, fields with named subfields (e.g. points) will be split
51 into separate items in the dict; instead of {"point":
52 lsst.geom.Point2I(2,3)}, for instance, you'd get {"point.x":
53 2, "point.y": 3}. Default is `False`.
54 regex : `str` or `re` pattern object
55 A regular expression to be used in addition to any glob patterns
56 passed as positional arguments. Note that this will be compared
57 with re.match, not re.search.
59 A replacement string (see `re.MatchObject.expand`) used to set the
60 dictionary keys of any fields matched by regex.
62 If `True`, a `collections.OrderedDict` will be returned instead of
63 a standard dict, with the order corresponding to the definition
64 order of the `Schema`. Default is `False`.
66 d = kwargs.pop(
"items",
None)
67 split = kwargs.pop(
"split",
False)
69 d = self.schema.
extract(*patterns, **kwargs).copy()
71 kwargsStr =
", ".join(kwargs.keys())
72 raise ValueError(f
"Unrecognized keyword arguments for extract: {kwargsStr}")
74 for name, schemaItem
in list(d.items()):
76 if split
and key.HAS_NAMED_SUBFIELDS:
77 for subname, subkey
in zip(key.subfields, key.subkeys):
78 d[f
"{name}.{subname}"] = self.get(subkey)
81 d[name] = self.get(schemaItem.key)
def __repr__(self):
    # Type header on the first line, full record contents below.
    return f"{type(self)}\n{self}"
def getColumnView(self):
    """Return a column view of the catalog, caching it on the instance.

    The cached view is stored in ``self._columns`` so repeated attribute
    access does not rebuild it.
    """
    self._columns = self._getColumnView()
    return self._columns
def __getColumns(self):
    """Lazily build and cache the column view (backs the ``columns`` property)."""
    # Rebuild only when the cache is absent or has been invalidated (set to
    # None by the mutating methods).
    if not hasattr(self, "_columns") or self._columns is None:
        self._columns = self._getColumnView()
    return self._columns


columns = property(__getColumns, doc="a column view of the catalog")
101 """Return the record at index key if key is an integer,
102 return a column if `key` is a string field name or Key,
103 or return a subset of the catalog if key is a slice
104 or boolean NumPy array.
106 if type(key)
is slice:
107 (start, stop, step) = (key.start, key.stop, key.step)
114 return self.subset(start, stop, step)
115 elif isinstance(key, np.ndarray):
116 if key.dtype == bool:
117 return self.subset(key)
118 raise RuntimeError(f
"Unsupported array type for indexing non-contiguous Catalog: {key.dtype}")
119 elif isinstance(key, Key)
or isinstance(key, str):
120 if not self.isContiguous():
121 if isinstance(key, str):
122 key = self.schema[key].asKey()
123 array = self._getitem_(key)
129 array.flags.writeable =
False
133 return self._getitem_(key)
136 """If ``key`` is an integer, set ``catalog[key]`` to
137 ``value``. Otherwise select column ``key`` and set it to
141 if isinstance(key, Key)
or isinstance(key, str):
144 return self.set(key, value)
def __delitem__(self, key):
    # Structural change: invalidate the cached column view.
    self._columns = None
    if isinstance(key, slice):
        # NOTE(review): only the isinstance check survives in the garbled
        # source; the two delete calls are reconstructed — verify against
        # upstream lsst.afw.table.
        self._delslice_(key)
    else:
        self._delitem_(key)
def insert(self, key, value):
    """Insert ``value`` at position ``key``, invalidating the cached column view."""
    self._columns = None
    self._insert(key, value)
def addNew(self):
    """Append a new default-constructed record and return it.

    Adding a record invalidates the cached column view.
    """
    self._columns = None
    return self._addNew()
def cast(self, type_, deep=False):
    """Return a copy of the catalog with the given type.

    Parameters
    ----------
    type_ :
        Type of catalog to return.
    deep : `bool`, optional
        If `True`, clone the table and deep copy all records.

    Returns
    -------
    copy :
        Copy of catalog with the requested type.
    """
    if deep:
        # Clone the table and pre-size it so the per-record deep copies done
        # by extend() below don't trigger reallocations.
        table = self.table.clone()
        table.preallocate(len(self))
    else:
        table = self.table
    copy = type_(table)
    copy.extend(self, deep=deep)
    return copy
def copy(self, deep=False):
    """
    Copy a catalog (default is not a deep copy).
    """
    return self.cast(type(self), deep)
def extend(self, iterable, deep=False, mapper=None):
    """Append all records in the given iterable to the catalog.

    Parameters
    ----------
    iterable :
        Any Python iterable containing records.
    deep : `bool`, optional
        If `True`, the records will be deep-copied; ignored if
        mapper is not `None` (that always implies `True`).
    mapper : `lsst.afw.table.schemaMapper.SchemaMapper`, optional
        Used to translate records.
    """
    # Structural change: invalidate the cached column view.
    self._columns = None
    # Accept a SchemaMapper passed positionally as `deep` (legacy call
    # pattern).  Checked by class name rather than isinstance so this works
    # without importing SchemaMapper here.
    if type(deep).__name__ == "SchemaMapper":
        mapper = deep
        deep = None
    if isinstance(iterable, type(self)):
        # Same catalog type: use the bulk C++ path.
        if mapper is not None:
            self._extend(iterable, mapper)
        else:
            self._extend(iterable, deep)
    else:
        for record in iterable:
            if mapper is not None:
                self._append(self.table.copyRecord(record, mapper))
            elif deep:
                self._append(self.table.copyRecord(record))
            else:
                self._append(record)
def __reduce__(self):
    # Pickle via FITS serialization.  Deferred import keeps lsst.afw.fits
    # out of module import time (and avoids a circular dependency).
    import lsst.afw.fits
    return lsst.afw.fits.reduceToFits(self)
def asAstropy(self, cls=None, copy=False, unviewable="copy"):
    """Return an astropy.table.Table (or subclass thereof) view into this catalog.

    Parameters
    ----------
    cls :
        Table subclass to use; `None` implies `astropy.table.Table`
        itself. Use `astropy.table.QTable` to get Quantity columns.
    copy : bool, optional
        If `True`, copy data from the LSST catalog to the astropy
        table. Not copying is usually faster, but can keep memory
        from being freed if columns are later removed from the
        Astropy view.
    unviewable : `str`, optional
        One of the following options (which is ignored if
        copy=`True` ), indicating how to handle field types (`str`
        and `Flag`) for which views cannot be constructed:

        - 'copy' (default): copy only the unviewable fields.
        - 'raise': raise ValueError if unviewable fields are present.
        - 'skip': do not include unviewable fields in the Astropy Table.

    Returns
    -------
    cls : `astropy.table.Table`
        Astropy view into the catalog.

    Raises
    ------
    ValueError
        Raised if the `unviewable` option is not a known value, or
        if the option is 'raise' and an uncopyable field is found.
    """
    # Deferred import: astropy is only needed when this conversion is used.
    import astropy.table
    if cls is None:
        cls = astropy.table.Table
    if unviewable not in ("copy", "raise", "skip"):
        raise ValueError(
            f"'unviewable'={unviewable!r} must be one of 'copy', 'raise', or 'skip'")
    ps = self.getMetadata()
    meta = ps.toOrderedDict() if ps is not None else None
    columns = []
    items = self.schema.extract("*", ordered=True)
    for name, item in items.items():
        key = item.key
        unit = item.field.getUnits() or None  # astropy wants None, not ""
        if key.getTypeString() == "String":
            # Strings cannot be viewed in place; copy them element by element.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract string "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = np.zeros(
                len(self), dtype=np.dtype((str, key.getSize())))
            for i, record in enumerate(self):
                data[i] = record.get(key)
        elif key.getTypeString() == "Flag":
            # Flags are packed bits; unpack them into a bool array.
            if not copy:
                if unviewable == "raise":
                    raise ValueError("Cannot extract packed bit columns "
                                     "unless copy=True or unviewable='copy' or 'skip'.")
                elif unviewable == "skip":
                    continue
            data = self.columns.get_bool_array(key)
        elif key.getTypeString() == "Angle":
            # NOTE(review): Angle branch reconstructed from garbled source —
            # verify against upstream lsst.afw.table (radian view, copied on
            # request).
            data = self.columns.get(key)
            unit = "radian"
            if copy:
                data = data.copy()
        elif "Array" in key.getTypeString() and key.isVariableLength():
            # Can't get columns for variable-length array fields at all.
            if unviewable == "raise":
                raise ValueError("Cannot extract variable-length array fields unless unviewable='skip'.")
            elif unviewable == "skip" or unviewable == "copy":
                continue
        else:
            data = self.columns.get(key)
            if copy:
                data = data.copy()
        columns.append(
            astropy.table.Column(
                data,
                name=name,
                unit=unit,
                description=item.field.getDoc()
            )
        )
    return cls(columns, meta=meta, copy=False)
def __dir__(self):
    """
    This custom dir is necessary due to the custom getattr below.
    Without it, not all of the methods available are returned with dir.
    """
    def recursive_get_class_dir(cls):
        """
        Return a set containing the names of all methods
        for a given class *and* all of its base classes.
        """
        result = set()
        if cls.__bases__:
            for subcls in cls.__bases__:
                result |= recursive_get_class_dir(subcls)
        result |= set(cls.__dict__.keys())
        return result
    # NOTE(review): the dir(self.columns)/dir(self.table) terms are
    # reconstructed from the garbled source — verify against upstream.
    return sorted(set(dir(self.columns)) | set(dir(self.table))
                  | recursive_get_class_dir(type(self)) | set(self.__dict__.keys()))
def __getattr__(self, name):
    # Forward unknown attribute lookups to the table first, then to the
    # column view.
    if name == "_columns":
        # NOTE(review): this guard's body is reconstructed — it initializes
        # the lazy cache slot so the fallbacks below can't recurse; verify
        # against upstream lsst.afw.table.
        self._columns = None
        return self._columns
    try:
        return getattr(self.table, name)
    except AttributeError:
        return getattr(self.columns, name)
def __str__(self):
    if self.isContiguous():
        # NOTE(review): contiguous branch reconstructed (render via the
        # astropy view) — verify against upstream lsst.afw.table.
        return str(self.asAstropy())
    else:
        # Column views need contiguous memory, so just summarize.
        fields = ' '.join(x.field.getName() for x in self.schema)
        return f"Non-contiguous afw.Catalog of {len(self)} rows.\ncolumns: {fields}"
366 return "%s\n%s" % (
type(self), self)
# Register the concrete base catalog type with the Catalog ABC/template.
Catalog.register("Base", BaseCatalog)