Coverage for python/lsst/obs/base/gen2to3/repoWalker/builders.py : 28%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
21"""Classes used in `RepoWalker` construction.
23The objects here form a temporary tree that is pruned and then transformed
24into a similar tree of `PathElementHandler` instances. See `BuilderNode`
25method documentation for more information.
26"""
from __future__ import annotations

__all__ = ["BuilderSkipInput", "BuilderTargetInput", "BuilderTree"]

from abc import ABC, abstractmethod
import os
import re
from typing import (
    Any,
    Dict,
    List,
    Optional,
    Tuple,
)

from lsst.daf.butler import DatasetType, DimensionUniverse, StorageClass, FormatterParameter
from ..translators import TranslatorFactory
from .parser import PathElementParser
from .scanner import PathElementHandler, DirectoryScanner
from .handlers import (IgnoreHandler, SubdirectoryHandler, SkipHandler,
                       TargetFileHandler)
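
# A rough sketch of how these builder classes fit together (illustrative
# only; ``inputs``, ``allKeys``, and ``scanner`` are hypothetical names, and
# the real construction logic lives in `RepoWalker` itself):
#
#   tree = BuilderTree()
#   for leaf in inputs:               # BuilderSkipInput / BuilderTargetInput
#       tree.insert(0, leaf)
#   root, messages, _ = tree.prune()  # collapse skip-only subtrees
#   # Assuming at least one target survived pruning, ``root`` is still a
#   # BuilderTree and can populate a DirectoryScanner:
#   root.fill(scanner, allKeys, {}, fileIgnoreRegEx=None, dirIgnoreRegEx=None)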


class BuilderNode(ABC):
    """Abstract interface for nodes in the temporary tree that is used to
    construct a `RepoWalker`.
    """

    @abstractmethod
    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        """Attempt to prune this node and its children from the tree.

        Returns
        -------
        replacement : `BuilderNode`
            The result of recursively pruning child nodes; often just ``self``.
        messages : `list` [`str`]
            Warning messages that should be logged by a parent node when a
            matching path element is encountered, if this node is pruned.
        prune : `bool`
            If `True`, this node may be pruned from the tree (but will not
            necessarily be - it may correspond to a path element that should
            be skipped with siblings that should not be).
        """
        raise NotImplementedError()

    @abstractmethod
    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        """Transform this node in the build tree into a corresponding
        `PathElementHandler`, recursing to any children.

        Must be called after `prune`.

        Parameters
        ----------
        parser : `PathElementParser`
            An object that matches the path element the new handler is
            responsible for and extracts a (partial) Gen2 data ID from it.
        allKeys : `dict` [`str`, `type`]
            A mapping from Gen2 data ID key to the type of its value. Will
            contain all keys that may be extracted by the given parser, and
            possibly others.
        cumulativeKeys : `dict` [`str`, `type`], optional
            A dictionary containing key strings and types for Gen2 data ID keys
            that have been extracted from previous path elements for this
            template, including those extracted by ``parser``.

        Returns
        -------
        handler : `PathElementHandler`
            A new handler object.
        """
        raise NotImplementedError()


class BuilderInput(BuilderNode):
    """An intermediate base for `BuilderNode` classes that are provided as
    direct inputs to a `RepoWalker`, and generally correspond to exactly one
    Gen2 dataset type.

    Parameters
    ----------
    template : `str`
        The complete Gen2 template to be matched (not just the template for
        one path element).
    keys : `dict` [`str`, `type`]
        A mapping from Gen2 data ID key to the type of its value.
    """
    def __init__(self, template: str, keys: Dict[str, type]):
        self.template = template
        self.keys = keys
        self.elements = self.template.split(os.path.sep)

    template: str
    """The complete Gen2 template to be matched (`str`).
    """

    keys: Dict[str, type]
    """A mapping from Gen2 data ID key to the type of its value
    (`dict` [`str`, `type`]).
    """

    elements: List[str]
    """The path elements (file or directory levels) of `template`
    (`list` of `str`).
    """


class BuilderSkipInput(BuilderInput):
    """An input to a `RepoWalker` that indicates that matched files should be
    skipped, possibly with a warning message.

    BuilderSkipInputs can be pruned. When they are not pruned, they build
    `SkipHandler` instances.

    Parameters
    ----------
    template : `str`
        The complete Gen2 template to be matched (not just the template for
        one path element).
    keys : `dict` [`str`, `type`]
        A mapping from Gen2 data ID key to the type of its value.
    message : `str`, optional
        If not `None`, a warning message that should be printed either when a
        matching file is encountered or a directory that may contain such
        files is skipped.
    isForFiles : `bool`, optional
        If `True` (default), this handler should be run on files. Otherwise it
        should be run on directories.
    """
    def __init__(self, template: str, keys: Dict[str, type], message: Optional[str] = None, *,
                 isForFiles: bool = True):
        super().__init__(template=template, keys=keys)
        self._message = message
        self._isForFiles = isForFiles

    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        # Docstring inherited from BuilderNode.
        return SkipHandler(parser=parser, isForFiles=self._isForFiles, message=self._message)

    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        # Docstring inherited from BuilderNode.
        return self, [self._message] if self._message is not None else [], True


class BuilderTargetInput(BuilderInput):
    """An input to a `RepoWalker` that matches files that correspond to
    datasets that we want to extract.

    BuilderTargetInputs can never be pruned, and always build
    `TargetFileHandler` instances.

    Parameters
    ----------
    datasetTypeName : `str`
        Name of the dataset type.
    template : `str`
        Full Gen2 filename template.
    keys : `dict` [`str`, `type`]
        Dictionary that maps Gen2 data ID key to the type of its value.
    storageClass : `StorageClass`
        `StorageClass` for the Gen3 dataset type.
    universe : `DimensionUniverse`
        All candidate dimensions for the Gen3 dataset type.
    formatter : `lsst.daf.butler.Formatter` or `str`, optional
        A Gen 3 formatter class or fully-qualified name.
    translatorFactory : `TranslatorFactory`
        Object that can be used to construct data ID translators.
    targetHandler : `PathElementHandler`, optional
        Override target handler for this dataset type.
    **kwargs
        Additional keyword arguments are passed to `Translator.makeMatching`,
        along with ``datasetTypeName`` and ``keys``.
    """
    def __init__(self, *, datasetTypeName: str, template: str, keys: Dict[str, type],
                 storageClass: StorageClass, universe: DimensionUniverse,
                 formatter: FormatterParameter, translatorFactory: TranslatorFactory,
                 targetHandler: Optional[PathElementHandler] = None, **kwargs: Any):
        # strip off [%HDU] identifiers from e.g. DECAM Community Pipeline products
        template = template.split('[%(')[0]
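        # For example (hypothetical template), "foo/bar-%(visit)d.fits[%(hdu)d]"
        # becomes "foo/bar-%(visit)d.fits"; templates without such a suffix are
        # left unchanged, because str.split then returns the whole string.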
        super().__init__(template=template, keys=keys)
        self._translator = translatorFactory.makeMatching(datasetTypeName, keys, **kwargs)
        self.datasetType = DatasetType(datasetTypeName, dimensions=self._translator.dimensionNames,
                                       storageClass=storageClass, universe=universe)
        self._formatter = formatter
        if targetHandler is None:
            targetHandler = TargetFileHandler
        self._handler = targetHandler

    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        # Docstring inherited from BuilderNode.
        return self._handler(parser=parser, translator=self._translator, datasetType=self.datasetType,
                             formatter=self._formatter)

    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        # Docstring inherited from BuilderNode.
        return self, [], False

    datasetType: DatasetType
    """The Gen3 dataset type extracted by the handler this object builds
    (`lsst.daf.butler.DatasetType`).
    """


class BuilderPrunedTree(BuilderNode):
    """A `BuilderNode` that represents a subdirectory to be skipped,
    created by pruning a `BuilderTree` that contained only `BuilderSkipInput`
    instances.

    BuilderPrunedTrees can be pruned. When they are not pruned, they
    build `SkipHandler` instances.

    Parameters
    ----------
    messages : `list` [`str`]
        A list of warning messages to be printed when the handler produced by
        this builder matches a subdirectory.
    """

    def __init__(self, messages: List[str]):
        self._messages = messages

    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        # Docstring inherited from BuilderNode.
        message = "; ".join(self._messages) if self._messages else None
        return SkipHandler(parser=parser, isForFiles=False, message=message)

    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        # Docstring inherited from BuilderNode.
        return self, self._messages, True


class BuilderDuplicateInputs(BuilderNode):
    """A `BuilderNode` that represents a collection of `BuilderInput` instances
    that all have the same template.
    """
    def __init__(self, old: BuilderInput, new: BuilderInput):
        self._children = []
        if isinstance(old, BuilderDuplicateInputs):
            self._children.extend(old._children)
        else:
            self._children.append(old)
        self._children.append(new)
        self._messages = []  # populated in prune()

    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        # Docstring inherited from BuilderNode.
        message = "; ".join(self._messages) if self._messages else None
        return SkipHandler(parser=parser, isForFiles=False, message=message)

    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        # Docstring inherited from BuilderNode.
        unprunable = []
        newChildren = []
        for child in self._children:
            newChild, childMessages, toPruneChild = child.prune()
            if toPruneChild:
                self._messages.extend(childMessages)
            else:
                unprunable.append(newChild)
            newChildren.append(newChild)
        self._children = newChildren
        if len(unprunable) == 0:
            # All children are just skips, so we can prune this node if we
            # remember their messages.
            return self, self._messages, True
        elif len(unprunable) == 1 and not self._messages:
            # Exactly one child is a target, and the others were ignored with
            # no warning messages. Tell parent node to just use that child,
            # so if we see any matching files, we just assume they're for that
            # target.
            return unprunable[0], [], False
        else:
            # Multiple targets or skips with messages, which means we won't
            # know how to handle any matching files. Replace any messages we
            # have with a single message that combines them all as well as
            # any target dataset types that they are ambiguous with.
            nested = [f"{c.datasetType.name} (target)" for c in unprunable]
            nested.extend(self._messages)
            self._messages = [f"ambiguous match: [{', '.join(nested)}]"]
            return self, self._messages, True


class BuilderTree(BuilderNode):
    """A `BuilderNode` that represents a directory.

    This is the only `BuilderNode` class that is not a leaf node. If all
    of its children can be pruned, it is replaced by a `BuilderPrunedTree`
    (which can then be pruned itself). It builds `SubdirectoryHandler`
    instances when not pruned.
    """
    def __init__(self):
        self._children = {}  # Maps template path element to BuilderNode

    def insert(self, level: int, leaf: BuilderInput):
        """Insert an input leaf node into the tree, recursively constructing
        intermediate parents in order to put it at the right level.

        Parameters
        ----------
        level : `int`
            The level ``self`` is at in the larger tree, with zero being the
            repository root. The right level for the leaf is given by the
            length of ``leaf.elements``.
        leaf : `BuilderInput`
            The leaf node to insert.
        """
        nextLevel = level + 1
        element = leaf.elements[level]
        if nextLevel == len(leaf.elements):
            conflict = self._children.get(element)
            if conflict is not None:
                # Sadly, the Gen2 butler has some actual dataset types that
                # use the exact same template.
                leaf = BuilderDuplicateInputs(conflict, leaf)
            self._children[element] = leaf
        else:
            child = self._children.setdefault(element, BuilderTree())
            child.insert(nextLevel, leaf)

    def fill(self, scanner: DirectoryScanner, allKeys: Dict[str, type], previousKeys: Dict[str, type], *,
             fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]):
        """Fill a `DirectoryScanner` instance by recursively building all
        child nodes.

        Parameters
        ----------
        scanner : `DirectoryScanner`
            Object to populate.
        allKeys : `dict` [`str`, `type`]
            Mapping from Gen2 data ID key to its value type, covering all keys
            that could be used in any child template.
        previousKeys : `dict` [`str`, `type`], optional
            A dictionary containing key strings and types for Gen2 data ID keys
            that have been extracted from previous path elements of the same
            template.
        """
        if fileIgnoreRegEx is not None:
            scanner.add(IgnoreHandler(fileIgnoreRegEx, isForFiles=True))
        if dirIgnoreRegEx is not None:
            scanner.add(IgnoreHandler(dirIgnoreRegEx, isForFiles=False))
        for template, child in self._children.items():
            parser = PathElementParser(template, allKeys, previousKeys=previousKeys)
            cumulativeKeys = previousKeys.copy()
            cumulativeKeys.update(parser.keys)
            scanner.add(child.build(parser, allKeys, cumulativeKeys, fileIgnoreRegEx=fileIgnoreRegEx,
                                    dirIgnoreRegEx=dirIgnoreRegEx))

    def prune(self) -> Tuple[BuilderNode, List[str], bool]:
        # Docstring inherited from BuilderNode.
        toPruneThis = True
        newChildren = {}
        messages = []
        # Recursively prune children.
        for template, child in list(self._children.items()):
            newChild, childMessages, toPruneChild = child.prune()
            newChildren[template] = newChild
            messages.extend(childMessages)
            if not toPruneChild:
                toPruneThis = False
        self._children = newChildren
        if toPruneThis:
            return BuilderPrunedTree(messages), messages, True
        else:
            return self, [], False

    def build(self, parser: PathElementParser, allKeys: Dict[str, type], cumulativeKeys: Dict[str, type], *,
              fileIgnoreRegEx: Optional[re.Pattern], dirIgnoreRegEx: Optional[re.Pattern]
              ) -> PathElementHandler:
        # Docstring inherited from BuilderNode.
        built = SubdirectoryHandler(parser)
        self.fill(built.scanner, allKeys, cumulativeKeys, fileIgnoreRegEx=fileIgnoreRegEx,
                  dirIgnoreRegEx=dirIgnoreRegEx)
        return built