# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Support for file template string expansion."""

from __future__ import annotations

__all__ = ("FileTemplates", "FileTemplate", "FileTemplatesConfig", "FileTemplateValidationError")

import logging
import os.path
import string
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Iterable, Mapping, Optional, Set, Tuple, Union

from .config import Config
from .configSupport import LookupKey, processLookupConfigs
from .datasets import DatasetRef
from .dimensions import DataCoordinate, SkyPixDimension
from .exceptions import ValidationError
from .storageClass import StorageClass

if TYPE_CHECKING:
    from .datasets import DatasetType
    from .dimensions import DimensionUniverse

log = logging.getLogger(__name__)


class FileTemplateValidationError(ValidationError):
    """Exception for file template inconsistent with associated DatasetType."""

    pass


class FileTemplatesConfig(Config):
    """Configuration information for `FileTemplates`."""

    pass


class FileTemplates:
    """Collection of `FileTemplate` templates.

    Parameters
    ----------
    config : `FileTemplatesConfig` or `str`
        Load configuration.
    default : `str`, optional
        If not `None`, a default template to use if no template has
        been specified explicitly in the configuration.
    universe : `DimensionUniverse`
        The set of all known dimensions, used to normalize any lookup keys
        involving dimensions.

    Notes
    -----
    The configuration can include one level of hierarchy where an
    instrument-specific section can be defined to override more general
    template specifications. This is represented in YAML using a
    key of the form ``instrument<name>`` which can then define templates
    that will be returned if a `DatasetRef` contains a matching instrument
    name in the data ID.
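
    For example, a hypothetical configuration sketch (the dataset type
    names and template strings below are illustrative, not taken from a
    real repository)::

        default: "{run}/{datasetType}/{datasetType}_{visit:?}_{id}"
        calexp: "{run}/{datasetType}/{visit}/{datasetType}_{detector}_{id}"
        instrument<HSC>:
          calexp: "{run}/{datasetType}/{visit}/{detector}_hsc_{id}"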

    A default fallback template can be specified using the key ``default``.
    Defaulting can be disabled in a child configuration by defining the
    value to be an empty string or a boolean `False`.

    The config is parsed using the function
    `~lsst.daf.butler.configSupport.processLookupConfigs`.
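
    For example, a minimal lookup sketch (``config_path``, ``universe`` and
    ``datasetType`` are hypothetical placeholders for a real configuration
    file, dimension universe and dataset type)::

        templates = FileTemplates(config_path, universe=universe)
        template = templates.getTemplate(datasetType)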

    """

    defaultKey = LookupKey("default")
    """Configuration key associated with the default template."""

    def __init__(
        self,
        config: Union[FileTemplatesConfig, str],
        default: Optional[str] = None,
        *,
        universe: DimensionUniverse,
    ):
        self.config = FileTemplatesConfig(config)
        self._templates = {}

        contents = processLookupConfigs(self.config, universe=universe)

        # Determine the default to use -- defaults can be disabled if
        # we get a False or None
        defaultValue = contents.get(self.defaultKey, default)
        if defaultValue and not isinstance(defaultValue, str):
            raise RuntimeError(
                f"Default template value should be str, False, or None. Got '{defaultValue}'"
            )
        self.default = FileTemplate(defaultValue) if isinstance(defaultValue, str) and defaultValue else None

        # Convert all the values to FileTemplate, handling defaults
        for key, templateStr in contents.items():
            if key == self.defaultKey:
                continue
            if not isinstance(templateStr, str):
                raise RuntimeError(f"Unexpected value in file template key {key}: {templateStr}")
            self._templates[key] = FileTemplate(templateStr)

    @property
    def templates(self) -> Mapping[LookupKey, FileTemplate]:
        """Return collection of templates indexed by lookup key (`dict`)."""
        return MappingProxyType(self._templates)

    def __contains__(self, key: LookupKey) -> bool:
        """Indicate whether the supplied key is present in the templates.

        Parameters
        ----------
        key : `LookupKey`
            Key to use to determine if a corresponding value is present
            in the templates.

        Returns
        -------
        in : `bool`
            `True` if the supplied key is present in the templates.
        """
        return key in self.templates

    def __getitem__(self, key: LookupKey) -> FileTemplate:
        return self.templates[key]

    def validateTemplates(
        self, entities: Iterable[Union[DatasetType, DatasetRef, StorageClass]], logFailures: bool = False
    ) -> None:
        """Validate the templates.

        Retrieves the template associated with each dataset type and
        validates the dimensions against the template.

        Parameters
        ----------
        entities : iterable of `DatasetType`, `DatasetRef`, or `StorageClass`
            Entities to validate against the matching templates. Can be
            of differing types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        FileTemplateValidationError
            Raised if an entity failed validation.

        Notes
        -----
        See `FileTemplate.validateTemplate()` for details on the validation.
        """
        unmatchedKeys = set(self.templates)
        failed = []
        for entity in entities:
            try:
                matchKey, template = self.getTemplateWithMatch(entity)
            except KeyError as e:
                # KeyError always quotes on stringification so strip here
                errMsg = str(e).strip("\"'")
                failed.append(errMsg)
                if logFailures:
                    log.critical("%s", errMsg)
                continue

            if matchKey in unmatchedKeys:
                unmatchedKeys.remove(matchKey)

            try:
                template.validateTemplate(entity)
            except FileTemplateValidationError as e:
                failed.append(f"{e} (via key '{matchKey}')")
                if logFailures:
                    log.critical("Template failure with key '%s': %s", matchKey, e)

        if logFailures and unmatchedKeys:
            log.warning("Unchecked keys: %s", ", ".join([str(k) for k in unmatchedKeys]))

        if failed:
            if len(failed) == 1:
                msg = str(failed[0])
            else:
                failMsg = ";\n".join(failed)
                msg = f"{len(failed)} template validation failures: {failMsg}"
            raise FileTemplateValidationError(msg)

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the look up keys for all the template entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for matching a template.
        """
        return set(self.templates)

    def getTemplateWithMatch(
        self, entity: Union[DatasetRef, DatasetType, StorageClass]
    ) -> Tuple[LookupKey, FileTemplate]:
        """Retrieve the `FileTemplate` associated with the dataset type.

        Also retrieves the lookup key that was a match for this template.

        If the lookup name corresponds to a component the base name for
        the component will be examined if the full component name does
        not match.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Instance to use to look for a corresponding template.
            A `DatasetType` name or a `StorageClass` name will be used
            depending on the supplied entity. Priority is given to a
            `DatasetType` name. Supports instrument override if a
            `DatasetRef` is provided configured with an ``instrument``
            value for the data ID.

        Returns
        -------
        matchKey : `LookupKey`
            The key that resulted in the successful match.
        template : `FileTemplate`
            Template instance to use with that dataset type.

        Raises
        ------
        KeyError
            Raised if no template could be located for this dataset type.
        """
        # Get the names to use for lookup
        names = entity._lookupNames()

        # Get a location from the templates
        template = self.default
        source = self.defaultKey
        for name in names:
            if name in self.templates:
                template = self.templates[name]
                source = name
                break

        if template is None:
            raise KeyError(f"Unable to determine file template from supplied argument [{entity}]")

        log.debug("Got file %s from %s via %s", template, entity, source)

        return source, template

    def getTemplate(self, entity: Union[DatasetType, DatasetRef, StorageClass]) -> FileTemplate:
        """Retrieve the `FileTemplate` associated with the dataset type.

        If the lookup name corresponds to a component the base name for
        the component will be examined if the full component name does
        not match.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Instance to use to look for a corresponding template.
            A `DatasetType` name or a `StorageClass` name will be used
            depending on the supplied entity. Priority is given to a
            `DatasetType` name. Supports instrument override if a
            `DatasetRef` is provided configured with an ``instrument``
            value for the data ID.

        Returns
        -------
        template : `FileTemplate`
            Template instance to use with that dataset type.

        Raises
        ------
        KeyError
            Raised if no template could be located for this dataset type.
        """
        _, template = self.getTemplateWithMatch(entity)
        return template


class FileTemplate:
    """Format a path template into a fully expanded path.

    Parameters
    ----------
    template : `str`
        Template string.

    Raises
    ------
    FileTemplateValidationError
        Raised if the template fails basic validation.

    Notes
    -----
    The templates use the standard Format Specification Mini-Language
    with the caveat that only named fields can be used. The field names
    are taken from the Dimensions along with several additional fields:

    - datasetType: `str`, `DatasetType.name`
    - component: `str`, name of the StorageClass component
    - run: `str`, name of the run this dataset was added with
    - id: unique ID of this dataset (`DatasetRef.id`)

    At least one of `run` or `id` must always be provided to ensure
    unique paths.

    More detailed information can be requested from dimensions by using a dot
    notation, so ``visit.name`` would use the name of the visit and
    ``detector.name_in_raft`` would use the name of the detector within the
    raft.

    The mini-language is extended to understand a "?" in the format
    specification. This indicates that a field is optional. If that
    dimension is missing, the field, along with the text preceding it
    (unless that text is a path separator), will be removed from the
    output path.

    By default any "/" in a dataId value will be replaced by "_" to prevent
    unexpected directories being created in the path. If the "/" should be
    retained then a special "/" format specifier can be included in the
    template.
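
    For example, given the illustrative template (not from a real
    configuration)::

        "{run}/{datasetType}/{visit:?}/{datasetType}_{id}"

    a dataset in run ``ingest`` with dataset type ``calexp`` and no
    ``visit`` in its data ID would, after the optional field is dropped
    and the path is normalized, expand to something like
    ``ingest/calexp/calexp_<id>``.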

    """

    mandatoryFields = {"run", "id"}
    """A set of fields, one of which must be present in a template."""

    datasetFields = {"datasetType", "component"}
    """Fields related to the supplied dataset, not a dimension."""

    specialFields = mandatoryFields | datasetFields
    """Set of special fields that are available independently of the defined
    Dimensions."""

    def __init__(self, template: str):
        if not isinstance(template, str):
            raise FileTemplateValidationError(
                f"Template ('{template}') is not a string."
            )
        self.template = template

        # Do basic validation without access to dimensions
        self.validateTemplate(None)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, FileTemplate):
            return False

        return self.template == other.template

    def __str__(self) -> str:
        return self.template

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}("{self.template}")'

    def fields(self, optionals: bool = False, specials: bool = False, subfields: bool = False) -> Set[str]:
        """Return the field names used in this template.

        Parameters
        ----------
        optionals : `bool`
            If `True`, optional fields are included in the returned set.
        specials : `bool`
            If `True`, non-dimension fields are included.
        subfields : `bool`, optional
            If `True`, fields with syntax ``a.b`` are included. If `False`,
            the default, only ``a`` would be returned.

        Returns
        -------
        names : `set`
            Names of fields used in this template.

        Notes
        -----
        The returned set will include special values such as `datasetType`
        and `component` only when ``specials`` is `True`.
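
        Examples
        --------
        An illustrative sketch (the template string is hypothetical)::

            t = FileTemplate("{run}/{datasetType}/{visit:?}_{id}")
            t.fields()                # set() -- specials and optionals excluded
            t.fields(optionals=True)  # {'visit'}
            t.fields(specials=True)   # {'run', 'datasetType', 'id'}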

        """
        fmt = string.Formatter()
        parts = fmt.parse(self.template)

        names = set()
        for literal, field_name, format_spec, conversion in parts:
            if field_name is not None and format_spec is not None:
                if "?" in format_spec and not optionals:
                    continue

                if not specials and field_name in self.specialFields:
                    continue

                if "." in field_name and not subfields:
                    field_name, _ = field_name.split(".")

                names.add(field_name)

        return names

    def format(self, ref: DatasetRef) -> str:
        """Format a template string into a full path.

        Parameters
        ----------
        ref : `DatasetRef`
            The dataset to be formatted.

        Returns
        -------
        path : `str`
            Expanded path.

        Raises
        ------
        KeyError
            Raised if the requested field is not defined and the field is
            not optional, or if a ``component`` is specified but "component"
            was not part of the template.
        RuntimeError
            Raised if a template uses dimension record metadata but no
            records are attached to the `DatasetRef`.
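
        Examples
        --------
        An illustrative sketch (construction of the `DatasetRef` is elided;
        it requires a resolved ref with a suitable data ID)::

            template = FileTemplate("{run}/{datasetType}/{visit:?}_{id}")
            path = template.format(ref)  # e.g. "myrun/calexp/903334_<id>"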

        """
        # Extract defined non-None dimensions from the dataId.
        # This guards against Nones being explicitly present in the data ID
        # (which can happen if, say, an exposure has no filter), as well as
        # the case where only required dimensions are present (which in this
        # context should only happen in unit tests; in general we need all
        # dimensions to fill out templates).
        fields = {
            k: ref.dataId.get(k) for k in ref.datasetType.dimensions.names if ref.dataId.get(k) is not None
        }
        # Extra information that can be included using . syntax
        extras = {}
        if isinstance(ref.dataId, DataCoordinate):
            if ref.dataId.hasRecords():
                extras = ref.dataId.records.byName()
            skypix_alias = self._determine_skypix_alias(ref)
            if skypix_alias is not None:
                fields["skypix"] = fields[skypix_alias]
                if extras:
                    extras["skypix"] = extras[skypix_alias]

        datasetType = ref.datasetType
        fields["datasetType"], component = datasetType.nameAndComponent()

        usedComponent = False
        if component is not None:
            fields["component"] = component

        fields["run"] = ref.run
        fields["id"] = ref.id

        fmt = string.Formatter()
        parts = fmt.parse(self.template)
        output = ""

        for literal, field_name, format_spec, conversion in parts:
            if field_name == "component":
                usedComponent = True

            if format_spec is None:
                output = output + literal
                continue

            # format_spec is not None here, so a blank field name indicates
            # a malformed template.
            if field_name is None:
                raise RuntimeError(f"Unexpected blank field_name encountered in {self.template} [{literal}]")

            if "?" in format_spec:
                optional = True
                # Remove the non-standard character from the spec
                format_spec = format_spec.replace("?", "")
            else:
                optional = False

            # Check for a request for additional information from the dataId
            if "." in field_name:
                primary, secondary = field_name.split(".")
                if primary in extras:
                    record = extras[primary]
                    # Only fill in the field if the record has the attribute;
                    # otherwise the KeyError below will trigger, but only if
                    # the field is not optional. A missing attribute is most
                    # likely a typo in the metadata field and so should be
                    # reported even if the field is optional.
                    if hasattr(record, secondary):
                        fields[field_name] = getattr(record, secondary)
                    else:
                        # Is a log message sufficient?
                        log.info(
                            "Template field %s could not be resolved because metadata field %s"
                            " is not understood for dimension %s. Template entry will be ignored",
                            field_name,
                            secondary,
                            primary,
                        )
                elif primary in fields:
                    # We do have an entry for the primary but do not have any
                    # secondary entries. This is likely a problem with the
                    # code failing to attach a record to the DatasetRef.
                    raise RuntimeError(
                        f"No metadata records attached to dataset {ref}"
                        f" when attempting to expand field {field_name}."
                        " Either expand the DatasetRef or change the template."
                    )

            if field_name in fields:
                value = fields[field_name]
            elif optional:
                # If this is optional ignore the format spec
                # and do not include the literal text prior to the optional
                # field unless it contains a "/" path separator
                format_spec = ""
                value = ""
                if "/" not in literal:
                    literal = ""
            else:
                raise KeyError(
                    f"'{field_name}' requested in template via '{self.template}' "
                    "but not defined and not optional"
                )

            # Handle "/" in values since we do not want to be surprised by
            # unexpected directories turning up
            replace_slash = True
            if "/" in format_spec:
                # Remove the non-standard character from the spec
                format_spec = format_spec.replace("/", "")
                replace_slash = False

            if isinstance(value, str):
                # Replace spaces with underscores for more friendly file paths
                value = value.replace(" ", "_")
                if replace_slash:
                    value = value.replace("/", "_")

            # Now use standard formatting
            output = output + literal + format(value, format_spec)

        # Replace periods with underscores in the non-directory part to
        # prevent file extension confusion. Also replace # in the non-dir
        # part to avoid confusion with URI fragments
        head, tail = os.path.split(output)
        tail = tail.replace(".", "_")
        tail = tail.replace("#", "HASH")
        output = os.path.join(head, tail)

        # Complain if we were meant to use a component
        if component is not None and not usedComponent:
            raise KeyError(f"Component '{component}' specified but template {self.template} did not use it")

        # Since this is known to be a path, normalize it in case some double
        # slashes have crept in
        path = os.path.normpath(output)

        # It should not be an absolute path (may happen with optionals)
        if os.path.isabs(path):
            path = os.path.relpath(path, start="/")

        return path

    def validateTemplate(self, entity: Union[DatasetRef, DatasetType, StorageClass, None]) -> None:
        """Compare the template against supplied entity that wants to use it.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Entity to compare against template. If `None` is given only
            very basic validation of templates will be performed.

        Raises
        ------
        FileTemplateValidationError
            Raised if the template is inconsistent with the supplied entity.

        Notes
        -----
        Validation will always include a check that mandatory fields
        are present and that at least one field refers to a dimension.
        If the supplied entity includes a `DimensionGraph` then it will be
        used to compare the available dimensions with those specified in the
        template.
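
        Examples
        --------
        An illustrative sketch (``datasetType`` is a hypothetical
        `DatasetType` whose dimensions include ``visit``)::

            template = FileTemplate("{run}/{datasetType}/{visit}_{id}")
            template.validateTemplate(datasetType)  # raises if inconsistent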

        """
        # Check that the template has run
        withSpecials = self.fields(specials=True, optionals=True)

        if "collection" in withSpecials:
            raise FileTemplateValidationError(
                "'collection' is no longer supported as a file template placeholder; use 'run' instead."
            )

        if not withSpecials & self.mandatoryFields:
            raise FileTemplateValidationError(
                f"Template '{self}' is missing a mandatory field from {self.mandatoryFields}"
            )

        # Check that there are some dimension fields in the template
        # The id is allowed instead if present since that also uniquely
        # identifies the file in the datastore.
        allfields = self.fields(optionals=True)
        if not allfields and "id" not in withSpecials:
            raise FileTemplateValidationError(
                f"Template '{self}' does not seem to have any fields corresponding to dimensions."
            )

        # Require that if "id" is in the template then it must exist in the
        # file part -- this avoids templates like "{id}/fixed" where the file
        # name is fixed but the directory has the ID.
        if "id" in withSpecials:
            file_part = os.path.split(self.template)[-1]
            if "{id}" not in file_part:
                raise FileTemplateValidationError(
                    f"Template '{self}' includes the 'id' but that ID is not part of the file name."
                )

        # If we do not have dimensions available then all we can do is shrug
        if not hasattr(entity, "dimensions"):
            return

        # Mypy does not know about hasattr so help it out
        if entity is None:
            return

        # if this entity represents a component then insist that component
        # is present in the template. If the entity is not a component
        # make sure that component is not mandatory.
        try:
            # mypy does not see the except block so complains about
            # StorageClass not supporting isComponent
            if entity.isComponent():  # type: ignore
                if "component" not in withSpecials:
                    raise FileTemplateValidationError(
                        f"Template '{self}' has no component but {entity} refers to a component."
                    )
            else:
                mandatorySpecials = self.fields(specials=True)
                if "component" in mandatorySpecials:
                    raise FileTemplateValidationError(
                        f"Template '{self}' has mandatory component but "
                        f"{entity} does not refer to a component."
                    )
        except AttributeError:
            pass

        # From here on we need at least a DatasetType
        # Mypy doesn't understand the AttributeError clause below
        if isinstance(entity, StorageClass):
            return

        # Get the dimension links to get the full set of available field names
        # Fall back to dataId keys if we have them but no links.
        # dataId keys must still be present in the template
        try:
            minimal = set(entity.dimensions.required.names)
            maximal = set(entity.dimensions.names)
        except AttributeError:
            try:
                minimal = set(entity.dataId.keys().names)  # type: ignore
                maximal = minimal
            except AttributeError:
                return

        # Replace specific skypix dimensions with generic one
        skypix_alias = self._determine_skypix_alias(entity)
        if skypix_alias is not None:
            minimal.add("skypix")
            maximal.add("skypix")
            minimal.remove(skypix_alias)
            maximal.remove(skypix_alias)

        required = self.fields(optionals=False)

        # Calculate any field usage that does not match a dimension
        if not required.issubset(maximal):
            raise FileTemplateValidationError(
                f"Template '{self}' is inconsistent with {entity}:"
                f" {required} is not a subset of {maximal}."
            )

        if not allfields.issuperset(minimal):
            raise FileTemplateValidationError(
                f"Template '{self}' is inconsistent with {entity}:"
                f" {allfields} is not a superset of {minimal}."
            )

        return

    def _determine_skypix_alias(self, entity: Union[DatasetRef, DatasetType]) -> Optional[str]:
        """Return the dimension name that refers to a sky pixel.

        Parameters
        ----------
        entity : `DatasetRef` or `DatasetType`
            The entity to examine.

        Returns
        -------
        alias : `str`
            If there is a sky pixelization in the supplied dataId, return
            its name, else return `None`. Will also return `None` if there
            is more than one sky pix dimension in the data ID or if the
            data ID is not a `DataCoordinate`.
        """
        alias = None

        if isinstance(entity, DatasetRef):
            entity = entity.datasetType

        # If there is exactly one SkyPixDimension in the data ID, alias its
        # value with the key "skypix", so we can use that to match any
        # skypix dimension.
        # We restrict this behavior to the (real-world) case where the
        # data ID is a DataCoordinate, not just a dict. That should only
        # not be true in some test code, but that test code is a pain to
        # update to be more like the real world while still providing our
        # only tests of important behavior.
        skypix = [dimension for dimension in entity.dimensions if isinstance(dimension, SkyPixDimension)]
        if len(skypix) == 1:
            alias = skypix[0].name
        return alias