Coverage for python/lsst/daf/butler/datastore/file_templates.py: 12%
281 statements
coverage.py v7.4.1, created at 2024-02-13 10:57 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Support for file template string expansion."""
30from __future__ import annotations
32__all__ = ("FileTemplates", "FileTemplate", "FileTemplatesConfig", "FileTemplateValidationError")
34import logging
35import os.path
36import string
37from collections.abc import Iterable, Mapping
38from types import MappingProxyType
39from typing import TYPE_CHECKING, Any, cast
41from .._config import Config
42from .._config_support import LookupKey, processLookupConfigs
43from .._dataset_ref import DatasetId, DatasetRef
44from .._exceptions import ValidationError
45from .._storage_class import StorageClass
46from ..dimensions import DataCoordinate
48if TYPE_CHECKING:
49 from .._dataset_type import DatasetType
50 from ..dimensions import DimensionRecord, DimensionUniverse
52log = logging.getLogger(__name__)
55class FileTemplateValidationError(ValidationError):
56 """Exception for file template inconsistent with associated DatasetType."""
58 pass
61class FileTemplatesConfig(Config):
62 """Configuration information for `FileTemplates`."""
64 pass
67class FileTemplates:
68 """Collection of `FileTemplate` templates.
70 Parameters
71 ----------
72 config : `FileTemplatesConfig` or `str`
73 Load configuration.
74 default : `str`, optional
75 If not `None`, a default template to use if no template has
76 been specified explicitly in the configuration.
77 universe : `DimensionUniverse`
78 The set of all known dimensions, used to normalize any lookup keys
79 involving dimensions.
81 Notes
82 -----
83 The configuration can include one level of hierarchy where an
84 instrument-specific section can be defined to override more general
85 template specifications. This is represented in YAML using a
86 key of form ``instrument<name>`` which can then define templates
87 that will be returned if a `DatasetRef` contains a matching instrument
88 name in the data ID.
90 A default fallback template can be specified using the key ``default``.
91 Defaulting can be disabled in a child configuration by defining the
92 value to be an empty string or a boolean `False`.
94 The config is parsed using the function
95 `~lsst.daf.butler._config_support.processLookupConfigs`.
96 """
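    # A minimal sketch of the configuration layout described above, written as
    # YAML. The ``default`` and ``instrument<name>`` key forms come from the
    # notes; the dataset type names and template strings are invented for
    # illustration only:
    #
    #   default: "{run}/{datasetType}/{visit:?}/{datasetType}_{id}"
    #   calexp: "{run}/{datasetType}/{visit:06d}/{datasetType}_{visit:06d}_{detector:03d}"
    #   instrument<HSC>:
    #     calexp: "{run}/{datasetType}/{visit:06d}/{datasetType}_HSC_{visit:06d}_{detector:03d}"
    #
    # A `DatasetRef` for dataset type "calexp" whose data ID has
    # instrument="HSC" would match the instrument-specific entry first; other
    # instruments fall back to the general "calexp" entry, and anything else
    # uses "default".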
98 defaultKey = LookupKey("default")
99 """Configuration key associated with the default template."""
101 def __init__(
102 self,
103 config: FileTemplatesConfig | str,
104 default: str | None = None,
105 *,
106 universe: DimensionUniverse,
107 ):
108 self.config = FileTemplatesConfig(config)
109 self._templates = {}
111 contents = processLookupConfigs(self.config, universe=universe)
113 # Determine default to use -- defaults can be disabled if
114 # we get a False or None
115 defaultValue = contents.get(self.defaultKey, default)
116 if defaultValue and not isinstance(defaultValue, str):
117 raise RuntimeError(
118 f"Default template value should be str, False, or None. Got '{defaultValue}'"
119 )
120 self.default = FileTemplate(defaultValue) if isinstance(defaultValue, str) and defaultValue else None
122 # Convert all the values to FileTemplate, handling defaults
123 for key, templateStr in contents.items():
124 if key == self.defaultKey:
125 continue
126 if not isinstance(templateStr, str):
127 raise RuntimeError(f"Unexpected value in file template key {key}: {templateStr}")
128 self._templates[key] = FileTemplate(templateStr)
130 @property
131 def templates(self) -> Mapping[LookupKey, FileTemplate]:
132 """Return collection of templates indexed by lookup key (`dict`)."""
133 return MappingProxyType(self._templates)
135 def __contains__(self, key: LookupKey) -> bool:
136 """Indicate whether the supplied key is present in the templates.
138 Parameters
139 ----------
140 key : `LookupKey`
141 Key to use to determine if a corresponding value is present
142 in the templates.
144 Returns
145 -------
146 in : `bool`
147 `True` if the supplied key is present in the templates.
148 """
149 return key in self.templates
151 def __getitem__(self, key: LookupKey) -> FileTemplate:
152 return self.templates[key]
154 def validateTemplates(
155 self, entities: Iterable[DatasetType | DatasetRef | StorageClass], logFailures: bool = False
156 ) -> None:
157 """Validate the templates.
159 Retrieves the template associated with each dataset type and
160 validates the dimensions against the template.
162 Parameters
163 ----------
164 entities : `DatasetType`, `DatasetRef`, or `StorageClass`
165 Entities to validate against the matching templates. Can be
166 differing types.
167 logFailures : `bool`, optional
168 If `True`, output a log message for every validation error
169 detected.
171 Raises
172 ------
173 FileTemplateValidationError
174 Raised if an entity failed validation.
176 Notes
177 -----
178 See `FileTemplate.validateTemplate()` for details on the validation.
179 """
180 unmatchedKeys = set(self.templates)
181 failed = []
182 for entity in entities:
183 try:
184 matchKey, template = self.getTemplateWithMatch(entity)
185 except KeyError as e:
186 # KeyError always quotes on stringification so strip here
187 errMsg = str(e).strip("\"'")
188 failed.append(errMsg)
189 if logFailures:
190 log.critical("%s", errMsg)
191 continue
193 if matchKey in unmatchedKeys:
194 unmatchedKeys.remove(matchKey)
196 try:
197 template.validateTemplate(entity)
198 except FileTemplateValidationError as e:
199 failed.append(f"{e} (via key '{matchKey}')")
200 if logFailures:
201 log.critical("Template failure with key '%s': %s", matchKey, e)
203 if logFailures and unmatchedKeys:
204 log.warning("Unchecked keys: '%s'", ", ".join([str(k) for k in unmatchedKeys]))
206 if failed:
207 if len(failed) == 1:
208 msg = str(failed[0])
209 else:
210 failMsg = ";\n".join(failed)
211 msg = f"{len(failed)} template validation failures: {failMsg}"
212 raise FileTemplateValidationError(msg)
214 def getLookupKeys(self) -> set[LookupKey]:
215 """Retrieve the look up keys for all the template entries.
217 Returns
218 -------
219 keys : `set` of `LookupKey`
220 The keys available for matching a template.
221 """
222 return set(self.templates)
224 def getTemplateWithMatch(
225 self, entity: DatasetRef | DatasetType | StorageClass
226 ) -> tuple[LookupKey, FileTemplate]:
227 """Retrieve the `FileTemplate` associated with the dataset type.
229 Also retrieves the lookup key that was a match for this template.
231 If the lookup name corresponds to a component, the base name for
232 the component will be examined if the full component name does
233 not match.
235 Parameters
236 ----------
237 entity : `DatasetType`, `DatasetRef`, or `StorageClass`
238 Instance to use to look for a corresponding template.
239 A `DatasetType` name or a `StorageClass` name will be used
240 depending on the supplied entity. Priority is given to a
241 `DatasetType` name. Supports instrument override if a
242 `DatasetRef` is provided configured with an ``instrument``
243 value for the data ID.
245 Returns
246 -------
247 matchKey : `LookupKey`
248 The key that resulted in the successful match.
249 template : `FileTemplate`
250 Template instance to use with that dataset type.
252 Raises
253 ------
254 KeyError
255 Raised if no template could be located for this Dataset type.
256 """
257 # Get the names to use for lookup
258 names = entity._lookupNames()
260 # Get a location from the templates
261 template = self.default
262 source = self.defaultKey
263 for name in names:
264 if name in self.templates:
265 template = self.templates[name]
266 source = name
267 break
269 if template is None:
270 raise KeyError(f"Unable to determine file template from supplied argument [{entity}]")
272 log.debug("Got file %s from %s via %s", template, entity, source)
274 return source, template
276 def getTemplate(self, entity: DatasetType | DatasetRef | StorageClass) -> FileTemplate:
277 """Retrieve the `FileTemplate` associated with the dataset type.
279 If the lookup name corresponds to a component, the base name for
280 the component will be examined if the full component name does
281 not match.
283 Parameters
284 ----------
285 entity : `DatasetType`, `DatasetRef`, or `StorageClass`
286 Instance to use to look for a corresponding template.
287 A `DatasetType` name or a `StorageClass` name will be used
288 depending on the supplied entity. Priority is given to a
289 `DatasetType` name. Supports instrument override if a
290 `DatasetRef` is provided configured with an ``instrument``
291 value for the data ID.
293 Returns
294 -------
295 template : `FileTemplate`
296 Template instance to use with that dataset type.
298 Raises
299 ------
300 KeyError
301 Raised if no template could be located for this Dataset type.
302 """
303 _, template = self.getTemplateWithMatch(entity)
304 return template
307class FileTemplate:
308 """Format a path template into a fully expanded path.
310 Parameters
311 ----------
312 template : `str`
313 Template string.
315 Raises
316 ------
317 FileTemplateValidationError
318 Raised if the template fails basic validation.
320 Notes
321 -----
322 The templates use the standard Format Specification Mini-Language
323 with the caveat that only named fields can be used. The field names
324 are taken from the Dimensions along with several additional fields:
326 - datasetType: `str`, `DatasetType.name`
327 - component: `str`, name of the StorageClass component
328 - run: `str`, name of the run this dataset was added with
330 At least one of `run` or the dataset `id` must be provided to ensure unique paths.
332 More detailed information can be requested from dimensions by using a dot
333 notation, so ``visit.name`` would use the name of the visit and
334 ``detector.name_in_raft`` would use the name of the detector within the
335 raft.
337 The mini-language is extended to understand a "?" in the format
338 specification. This indicates that a field is optional. If that
339 Dimension is missing, the field, along with the text before the field
340 (unless that text is a path separator), will be removed from the output path.
342 By default any "/" in a dataId value will be replaced by "_" to prevent
343 unexpected directories being created in the path. If the "/" should be
344 retained then a special "/" format specifier can be included in the
345 template.
346 """
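    # A worked illustration of the rules above (the template string and data
    # ID values are invented for this sketch): given the template
    #
    #   "{run}/{datasetType}/{visit:06d}/{datasetType}_{visit:06d}_{detector:03d}_{component:?}"
    #
    # and a dataset in run "ingest" with dataset type "raw", visit=903334 and
    # detector=20 but no component, ``format()`` would produce
    #
    #   "ingest/raw/903334/raw_903334_020"
    #
    # The optional "{component:?}" field is dropped because no component is
    # present, and the "_" literal immediately before it is dropped with it
    # because that literal is not a path separator.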
348 mandatoryFields = {"run", "id"}
349 """A set of fields, one of which must be present in a template."""
351 datasetFields = {"datasetType", "component"}
352 """Fields related to the supplied dataset, not a dimension."""
354 specialFields = mandatoryFields | datasetFields
355 """Set of special fields that are available independently of the defined
356 Dimensions."""
358 def __init__(self, template: str):
359 if not isinstance(template, str):
360 raise FileTemplateValidationError(
361 f"Template ('{template}') is not a string"
362 )
363 self.template = template
365 # Do basic validation without access to dimensions
366 self.validateTemplate(None)
368 def __eq__(self, other: Any) -> bool:
369 if not isinstance(other, FileTemplate):
370 return False
372 return self.template == other.template
374 def __str__(self) -> str:
375 return self.template
377 def __repr__(self) -> str:
378 return f'{self.__class__.__name__}("{self.template}")'
380 def grouped_fields(self) -> tuple[dict[str, set[str]], dict[str, set[str]]]:
381 """Return all the fields, grouped by their type.
383 Returns
384 -------
385 grouped : `dict` [ `str`, `set` [ `str` ] ]
386 The fields grouped by their type. The keys for this dict are
387 ``standard``, ``special``, ``subfield``, and
388 ``parent``. If field ``a.b`` is present, ``a`` will not be
389 included in ``standard`` but will be included in ``parent``.
390 grouped_optional : `dict` [ `str`, `set` [ `str` ] ]
391 As for ``grouped`` but for the optional fields.
392 """
393 fmt = string.Formatter()
394 parts = fmt.parse(self.template)
396 grouped: dict[str, set[str]] = {
397 "standard": set(),
398 "special": set(),
399 "subfield": set(),
400 "parent": set(),
401 }
402 grouped_optional: dict[str, set[str]] = {
403 "standard": set(),
404 "special": set(),
405 "subfield": set(),
406 "parent": set(),
407 }
409 for _, field_name, format_spec, _ in parts:
410 if field_name is not None and format_spec is not None:
411 subfield = None
412 key = "standard"
413 if field_name in self.specialFields:
414 key = "special"
415 elif "." in field_name:
416 # This needs to be added twice.
417 subfield = field_name
418 key = "parent"
419 field_name, _ = field_name.split(".")
421 if "?" in format_spec:
422 target = grouped_optional
423 else:
424 target = grouped
425 target[key].add(field_name)
426 if subfield is not None:
427 target["subfield"].add(subfield)
429 return grouped, grouped_optional
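    # A short illustrative call (the template string is invented for this
    # sketch; output shown in doctest style):
    #
    #   >>> t = FileTemplate("{run}/{datasetType}/{visit.name}/{detector:03d}_{component:?}")
    #   >>> grouped, grouped_optional = t.grouped_fields()
    #   >>> grouped["standard"], grouped["parent"], grouped["subfield"]
    #   ({'detector'}, {'visit'}, {'visit.name'})
    #   >>> sorted(grouped["special"])
    #   ['datasetType', 'run']
    #   >>> grouped_optional["special"]
    #   {'component'}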
431 def fields(self, optionals: bool = False, specials: bool = False, subfields: bool = False) -> set[str]:
432 """Return the field names used in this template.
434 Parameters
435 ----------
436 optionals : `bool`
437 If `True`, optional fields are included in the returned set.
438 specials : `bool`
439 If `True`, non-dimension fields are included.
440 subfields : `bool`, optional
441 If `True`, fields with syntax ``a.b`` are included. If `False`,
442 the default, only ``a`` would be returned.
444 Returns
445 -------
446 names : `set`
447 Names of fields used in this template.
449 Notes
450 -----
451 The returned set will include the special values such as `datasetType`
452 and `component`.
453 """
454 fmt = string.Formatter()
455 parts = fmt.parse(self.template)
457 names = set()
458 for _, field_name, format_spec, _ in parts:
459 if field_name is not None and format_spec is not None:
460 if not optionals and "?" in format_spec:
461 continue
463 if not specials and field_name in self.specialFields:
464 continue
466 if not subfields and "." in field_name:
467 field_name, _ = field_name.split(".")
469 names.add(field_name)
471 return names
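    # A small illustrative call (template invented for this sketch):
    #
    #   >>> t = FileTemplate("{run}/{visit}/{physical_filter:?}_{id}")
    #   >>> t.fields()
    #   {'visit'}
    #   >>> sorted(t.fields(optionals=True, specials=True))
    #   ['id', 'physical_filter', 'run', 'visit']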
473 def format(self, ref: DatasetRef) -> str:
474 """Format a template string into a full path.
476 Parameters
477 ----------
478 ref : `DatasetRef`
479 The dataset to be formatted.
481 Returns
482 -------
483 path : `str`
484 Expanded path.
486 Raises
487 ------
488 KeyError
489 Raised if the requested field is not defined and the field is
490 not optional, or if a component is specified for the dataset but
491 ``component`` was not part of the template.
492 RuntimeError
493 Raised if a template uses dimension record metadata but no
494 records are attached to the `DatasetRef`.
495 """
496 # Get the dimension values. Should all be non None.
497 # Will want to store a DatasetId in it later.
498 fields = cast(dict[str, int | str | DatasetId], dict(ref.dataId.mapping))
499 # Extra information that can be included using . syntax
500 extras: dict[str, DimensionRecord | None] = {}
501 skypix_alias: str | None = None
502 can_use_extra_records = False
503 if isinstance(ref.dataId, DataCoordinate):
504 if ref.dataId.hasRecords():
505 can_use_extra_records = True
506 skypix_alias = self._determine_skypix_alias(ref)
507 if skypix_alias is not None:
508 fields["skypix"] = fields[skypix_alias]
510 datasetType = ref.datasetType
511 fields["datasetType"], component = datasetType.nameAndComponent()
513 usedComponent = False
514 if component is not None:
515 fields["component"] = component
517 fields["run"] = ref.run
518 fields["id"] = ref.id
520 fmt = string.Formatter()
521 parts = fmt.parse(self.template)
522 output = ""
524 for literal, field_name, format_spec, _ in parts:
525 if field_name == "component":
526 usedComponent = True
528 if format_spec is None:
529 output = output + literal
530 continue
532 # field_name should only be None if format_spec was also None (handled above)
533 if field_name is None:
534 raise RuntimeError(f"Unexpected blank field_name encountered in {self.template} [{literal}]")
536 if "?" in format_spec:
537 optional = True
538 # Remove the non-standard character from the spec
539 format_spec = format_spec.replace("?", "")
540 else:
541 optional = False
543 # Check for request for additional information from the dataId
544 if "." in field_name:
545 primary, secondary = field_name.split(".")
546 if can_use_extra_records and primary not in extras and primary in fields:
547 record_key = primary
548 if primary == "skypix" and skypix_alias is not None:
549 record_key = skypix_alias
550 extras[record_key] = ref.dataId.records[record_key]
551 if record_key != primary:
552 # Make sure that htm7 and skypix both work.
553 extras[primary] = extras[record_key]
555 if primary in extras:
556 record = extras[primary]
557 # Only fill in the fields if we have a value, the
558 # KeyError will trigger below if the attribute is missing,
559 # but only if it is not optional. This is most likely
560 # a typo in the metadata field and so should be reported
561 # even if optional.
562 if hasattr(record, secondary):
563 fields[field_name] = getattr(record, secondary)
564 else:
565 # Is a log message sufficient?
566 log.info(
567 "Template field %s could not be resolved because metadata field %s"
568 " is not understood for dimension %s. Template entry will be ignored",
569 field_name,
570 secondary,
571 primary,
572 )
573 elif primary in fields:
574 # We do have an entry for the primary but do not have any
575 # secondary entries. This is likely a problem with the
576 # code failing to attach a record to the DatasetRef.
577 raise RuntimeError(
578 f"No metadata records attached to dataset {ref}"
579 f" when attempting to expand field {field_name}."
580 " Either expand the DatasetRef or change the template."
581 )
583 if field_name in fields:
584 value = fields[field_name]
585 elif optional:
586 # If this is optional ignore the format spec
587 # and do not include the literal text prior to the optional
588 # field unless it contains a "/" path separator
589 format_spec = ""
590 value = ""
591 if "/" not in literal:
592 literal = ""
593 else:
594 raise KeyError(
595 f"'{field_name}' requested in template via '{self.template}' "
596 "but not defined and not optional"
597 )
599 # Handle "/" in values since we do not want to be surprised by
600 # unexpected directories turning up
601 replace_slash = True
602 if "/" in format_spec:
603 # Remove the non-standard character from the spec
604 format_spec = format_spec.replace("/", "")
605 replace_slash = False
607 if isinstance(value, str):
608 # Replace spaces with underscores for more friendly file paths
609 value = value.replace(" ", "_")
610 if replace_slash:
611 value = value.replace("/", "_")
613 # Now use standard formatting
614 output = output + literal + format(value, format_spec)
616 # Replace periods with underscores in the non-directory part to
617 # prevent file extension confusion. Also replace # in the non-dir
618 # part to avoid confusion with URI fragments
619 head, tail = os.path.split(output)
620 tail = tail.replace(".", "_")
621 tail = tail.replace("#", "HASH")
622 output = os.path.join(head, tail)
624 # Complain if we were meant to use a component
625 if component is not None and not usedComponent:
626 raise KeyError(f"Component '{component}' specified but template {self.template} did not use it")
628 # Since this is known to be a path, normalize it in case some double
629 # slashes have crept in
630 path = os.path.normpath(output)
632 # It should not be an absolute path (may happen with optionals)
633 if os.path.isabs(path):
634 path = os.path.relpath(path, start="/")
636 return path
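    # A hedged usage sketch, not a tested recipe: ``config``, ``ref``, and
    # ``universe`` are placeholders for a `FileTemplatesConfig`, a resolved
    # `DatasetRef`, and a `DimensionUniverse`. A datastore would typically
    # resolve the template for a ref and then format it to get the relative
    # path within the datastore root:
    #
    #   templates = FileTemplates(config, universe=universe)
    #   template = templates.getTemplate(ref)
    #   relative_path = template.format(ref)
    #
    # If the template uses dot notation such as ``visit.name`` the ref's data
    # ID must have dimension records attached (``ref.dataId.hasRecords()``),
    # otherwise ``format()`` raises `RuntimeError` as documented above.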
638 def validateTemplate(self, entity: DatasetRef | DatasetType | StorageClass | None) -> None:
639 """Compare the template against supplied entity that wants to use it.
641 Parameters
642 ----------
643 entity : `DatasetType`, `DatasetRef`, or `StorageClass`
644 Entity to compare against template. If `None` is given only
645 very basic validation of templates will be performed.
647 Raises
648 ------
649 FileTemplateValidationError
650 Raised if the template is inconsistent with the supplied entity.
652 Notes
653 -----
654 Validation will always include a check that mandatory fields
655 are present and that at least one field refers to a dimension.
656 If the supplied entity includes a `DimensionGraph` then it will be
657 used to compare the available dimensions with those specified in the
658 template.
659 """
660 grouped_fields, grouped_optionals = self.grouped_fields()
662 # Check that the template has run
663 withSpecials = (
664 grouped_fields["standard"]
665 | grouped_fields["parent"]
666 | grouped_fields["special"]
667 | grouped_optionals["standard"]
668 | grouped_optionals["parent"]
669 | grouped_optionals["special"]
670 )
672 if "collection" in withSpecials:
673 raise FileTemplateValidationError(
674 "'collection' is no longer supported as a file template placeholder; use 'run' instead."
675 )
677 if not withSpecials & self.mandatoryFields:
678 raise FileTemplateValidationError(
679 f"Template '{self}' is missing a mandatory field from {self.mandatoryFields}"
680 )
682 # Check that there are some dimension fields in the template
683 # The id is allowed instead if present since that also uniquely
684 # identifies the file in the datastore.
685 allfields = (
686 grouped_fields["standard"]
687 | grouped_fields["parent"]
688 | grouped_optionals["standard"]
689 | grouped_optionals["parent"]
690 )
691 if not allfields and "id" not in withSpecials:
692 raise FileTemplateValidationError(
693 f"Template '{self}' does not seem to have any fields corresponding to dimensions."
694 )
696 # Do not allow ../ in the template to confuse where the file might
697 # end up.
698 if "../" in self.template:
699 raise FileTemplateValidationError("A file template should not include jump to parent directory.")
701 # Require that if "id" is in the template then it must exist in the
702 # file part -- this avoids templates like "{id}/fixed" where the file
703 # name is fixed but the directory has the ID.
704 if "id" in withSpecials:
705 file_part = os.path.split(self.template)[-1]
706 if "{id}" not in file_part:
707 raise FileTemplateValidationError(
708 f"Template '{self}' includes the 'id' but that ID is not part of the file name."
709 )
711 # If we do not have dimensions available then all we can do is shrug
712 if not hasattr(entity, "dimensions"):
713 return
715 # Mypy does not know about hasattr so help it out
716 if entity is None:
717 return
719 # if this entity represents a component then insist that component
720 # is present in the template. If the entity is not a component
721 # make sure that component is not mandatory.
722 try:
723 # mypy does not see the except block so complains about
724 # StorageClass not supporting isComponent
725 if entity.isComponent(): # type: ignore
726 if "component" not in withSpecials:
727 raise FileTemplateValidationError(
728 f"Template '{self}' has no component but {entity} refers to a component."
729 )
730 else:
731 mandatorySpecials = (
732 grouped_fields["standard"] | grouped_fields["parent"] | grouped_fields["special"]
733 )
734 if "component" in mandatorySpecials:
735 raise FileTemplateValidationError(
736 f"Template '{self}' has mandatory component but "
737 f"{entity} does not refer to a component."
738 )
739 except AttributeError:
740 pass
742 # From here on we need at least a DatasetType
743 # Mypy doesn't understand the AttributeError clause below
744 if isinstance(entity, StorageClass):
745 return
747 # Get the dimension links to get the full set of available field names
748 # Fall back to dataId keys if we have them but no links.
749 # dataId keys must still be present in the template
750 try:
751 minimal = set(entity.dimensions.required.names)
752 maximal = set(entity.dimensions.names)
753 except AttributeError:
754 try:
755 minimal = set(entity.dataId.keys().names) # type: ignore
756 maximal = minimal
757 except AttributeError:
758 return
760 # Replace specific skypix dimensions with generic one
761 skypix_alias = self._determine_skypix_alias(entity)
762 if skypix_alias is not None:
763 minimal.add("skypix")
764 maximal.add("skypix")
765 minimal.remove(skypix_alias)
766 maximal.remove(skypix_alias)
768 required = grouped_fields["standard"] | grouped_fields["parent"]
770 # Calculate any field usage that does not match a dimension
771 if not required.issubset(maximal):
772 raise FileTemplateValidationError(
773 f"Template '{self}' is inconsistent with {entity}: {required} is not a subset of {maximal}."
774 )
776 if not allfields.issuperset(minimal):
777 raise FileTemplateValidationError(
778 f"Template '{self}' is inconsistent with {entity}:"
779 f" {allfields} is not a superset of {minimal}."
780 )
782 return
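    # An illustrative failure mode (template string invented for this sketch):
    # a template naming neither ``run`` nor ``id`` fails the mandatory-field
    # check as soon as it is constructed, because ``__init__`` calls
    # ``validateTemplate(None)``:
    #
    #   FileTemplate("{datasetType}/{visit}/{detector}")
    #   # raises FileTemplateValidationError mentioning the mandatory
    #   # fields 'run' and 'id'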
784 def _determine_skypix_alias(self, entity: DatasetRef | DatasetType) -> str | None:
785 """Return the dimension name that refers to a sky pixel.
787 Parameters
788 ----------
789 entity : `DatasetRef` or `DatasetType`
790 The entity to examine.
792 Returns
793 -------
794 alias : `str` or `None`
795 If there is a sky pixelization in the supplied data ID, return
796 its name, else return `None`. `None` is also returned if there
797 is more than one sky pix dimension in the data ID or if the
798 data ID is not a `DataCoordinate`.
799 """
800 alias = None
802 if isinstance(entity, DatasetRef):
803 entity = entity.datasetType
805 # If there is exactly one SkyPixDimension in the data ID, alias its
806 # value with the key "skypix", so we can use that to match any
807 # skypix dimension.
808 # We restrict this behavior to the (real-world) case where the
809 # data ID is a DataCoordinate, not just a dict. That should only
810 # not be true in some test code, but that test code is a pain to
811 # update to be more like the real world while still providing our
812 # only tests of important behavior.
813 if len(entity.dimensions.skypix) == 1:
814 (alias,) = entity.dimensions.skypix.names
815 return alias