# 

# LSST Data Management System 

# Copyright 2008, 2009, 2010 LSST Corporation. 

# 

# This product includes software developed by the 

# LSST Project (http://www.lsst.org/). 

# 

# This program is free software: you can redistribute it and/or modify 

# it under the terms of the GNU General Public License as published by 

# the Free Software Foundation, either version 3 of the License, or 

# (at your option) any later version. 

# 

# This program is distributed in the hope that it will be useful, 

# but WITHOUT ANY WARRANTY; without even the implied warranty of 

# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

# GNU General Public License for more details. 

# 

# You should have received a copy of the LSST License Statement and 

# the GNU General Public License along with this program. If not, 

# see <http://www.lsstcorp.org/LegalNotices/>. 

# 

 

import copy 

import os 

from astropy.io import fits # required by _makeDefectsDict until defects are written as AFW tables 

import re 

import weakref 

import lsst.daf.persistence as dafPersist 

from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping 

import lsst.daf.base as dafBase 

import lsst.afw.geom as afwGeom 

import lsst.afw.image as afwImage 

import lsst.afw.table as afwTable 

from lsst.afw.fits import readMetadata 

import lsst.afw.cameraGeom as afwCameraGeom 

import lsst.log as lsstLog 

import lsst.pex.policy as pexPolicy 

import lsst.pex.exceptions as pexExcept 

from .exposureIdInfo import ExposureIdInfo 

from .makeRawVisitInfo import MakeRawVisitInfo 

from lsst.utils import getPackageDir 

 

__all__ = ["CameraMapper", "exposureFromImage"] 

 

 

class CameraMapper(dafPersist.Mapper): 

 

"""CameraMapper is a base class for mappers that handle images from a 

camera and products derived from them. This provides an abstraction layer 

between the data on disk and the code. 

 

Public methods: keys, queryMetadata, getDatasetTypes, map, 

canStandardize, standardize 

 

Mappers for specific data sources (e.g., CFHT Megacam, LSST 

simulations, etc.) should inherit this class. 

 

The CameraMapper manages datasets within a "root" directory. Note that 

writing to a dataset present in the input root will hide the existing 

dataset but not overwrite it. See #2160 for design discussion. 

 

A camera is assumed to consist of one or more rafts, each composed of 

multiple CCDs. Each CCD is in turn composed of one or more amplifiers 

(amps). A camera is also assumed to have a camera geometry description 

(CameraGeom object) as a policy file, a filter description (Filter class 

static configuration) as another policy file, and an optional defects 

description directory. 

 

Information from the camera geometry and defects is inserted into all 

Exposure objects returned. 

 

The mapper uses one or two registries to retrieve metadata about the 

images. The first is a registry of all raw exposures. This must contain 

the time of the observation. One or more tables (or the equivalent) 

within the registry are used to look up data identifier components that 

are not specified by the user (e.g. filter) and to return results for 

metadata queries. The second is an optional registry of all calibration 

data. This should contain validity start and end entries for each 

calibration dataset in the same timescale as the observation time. 

 

Subclasses will typically set MakeRawVisitInfoClass: 

 

MakeRawVisitInfoClass: a class variable that points to a subclass of 

MakeRawVisitInfo, a functor that creates an 

lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 

 

Subclasses must provide the following methods: 

 

_extractDetectorName(self, dataId): returns the detector name for a CCD 

(e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 

a dataset identifier referring to that CCD or a subcomponent of it. 

 

_computeCcdExposureId(self, dataId): see below 

 

_computeCoaddExposureId(self, dataId, singleFilter): see below 

 

Subclasses may also need to override the following methods: 

 

_transformId(self, dataId): transformation of a data identifier 

from colloquial usage (e.g., "ccdname") to proper/actual usage 

(e.g., "ccd"), including making suitable for path expansion (e.g. removing 

commas). The default implementation does nothing. Note that this 

method should not modify its input parameter. 

 

getShortCcdName(self, ccdName): a static method that returns a shortened 

name suitable for use as a filename. The default version converts spaces 

to underscores. 

 

_getCcdKeyVal(self, dataId): return a CCD key and value 

by which to look up defects in the defects registry. 

The default implementation returns ("ccd", detector name). 

 

_mapActualToPath(self, template, actualId): convert a template path to an 

actual path, using the actual dataset identifier. 

 

The mapper's behaviors are largely specified by the policy file. 

See the MapperDictionary.paf for descriptions of the available items. 

 

The 'exposures', 'calibrations', and 'datasets' subpolicies configure 

mappings (see Mappings class). 

 

Common default mappings for all subclasses can be specified in the 

"policy/{images,exposures,calibrations,datasets}.yaml" files. This 

provides a simple way to add a product to all camera mappers. 

 

Functions to map (provide a path to the data given a dataset 

identifier dictionary) and standardize (convert data into some standard 

format or type) may be provided in the subclass as "map_{dataset type}" 

and "std_{dataset type}", respectively. 

 

If non-Exposure datasets cannot be retrieved using standard 

daf_persistence methods alone, a "bypass_{dataset type}" function may be 

provided in the subclass to return the dataset instead of using the 

"datasets" subpolicy. 

 

Implementations of map_camera and bypass_camera that should typically be 

sufficient are provided in this base class. 
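
As a rough illustration (hypothetical camera and dataset type names, not
taken from a real obs package; the constructor and the required
_compute*ExposureId overrides are omitted), a subclass may provide::

    class MyCamMapper(CameraMapper):
        packageName = "obs_mycam"  # must be set before instantiation

        def _extractDetectorName(self, dataId):
            return "ccd%(ccd)02d" % dataId

        def bypass_fringeScale(self, datasetType, pythonType, location,
                               dataId):
            # Return the dataset directly instead of going through the
            # "datasets" subpolicy machinery.
            return 1.0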

 

Notes 

----- 

TODO: 

 

- Handle defects the same way as all other calibration products, using the 

calibration registry 

- Instead of auto-loading the camera at construction time, load it from 

the calibration registry 

- Rewrite defects as AFW tables so we don't need astropy.io.fits to 

unpersist them; then remove all mention of astropy.io.fits from this 

package. 

""" 

packageName = None 

 

# a class or subclass of MakeRawVisitInfo, a functor that makes an 

# lsst.afw.image.VisitInfo from the FITS metadata of a raw image 

MakeRawVisitInfoClass = MakeRawVisitInfo 

 

# a class or subclass of PupilFactory 

PupilFactoryClass = afwCameraGeom.PupilFactory 

 

def __init__(self, policy, repositoryDir, 

root=None, registry=None, calibRoot=None, calibRegistry=None, 

provided=None, parentRegistry=None, repositoryCfg=None): 

"""Initialize the CameraMapper. 

 

Parameters 

---------- 

policy : daf_persistence.Policy 

Can also be a pexPolicy.Policy, for backward compatibility only. 

Policy with per-camera defaults already merged. 

repositoryDir : string 

Policy repository for the subclassing module (obtained with 

getRepositoryPath() on the per-camera default dictionary). 

root : string, optional 

Path to the root directory for data. 

registry : string, optional 

Path to registry with data's metadata. 

calibRoot : string, optional 

Root directory for calibrations. 

calibRegistry : string, optional 

Path to registry with calibrations' metadata. 

provided : list of string, optional 

Keys provided by the mapper. 

parentRegistry : Registry subclass, optional 

Registry from a parent repository that may be used to look up 

data's metadata. 

repositoryCfg : daf_persistence.RepositoryCfg or None, optional 

The configuration information for the repository this mapper is 

being used with. 

""" 

 

dafPersist.Mapper.__init__(self) 

 

self.log = lsstLog.Log.getLogger("CameraMapper") 

 

if root: 

self.root = root 

elif repositoryCfg: 

self.root = repositoryCfg.root 

else: 

self.root = None 

if isinstance(policy, pexPolicy.Policy): 

policy = dafPersist.Policy(policy) 

 

repoPolicy = repositoryCfg.policy if repositoryCfg else None 

if repoPolicy is not None: 

policy.update(repoPolicy) 

 

defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base", 

"MapperDictionary.paf", 

"policy") 

dictPolicy = dafPersist.Policy(defaultPolicyFile) 

policy.merge(dictPolicy) 

 

# Levels 

self.levels = dict() 

if 'levels' in policy: 

levelsPolicy = policy['levels'] 

for key in levelsPolicy.names(True): 

self.levels[key] = set(levelsPolicy.asArray(key)) 

self.defaultLevel = policy['defaultLevel'] 

self.defaultSubLevels = dict() 

if 'defaultSubLevels' in policy: 

self.defaultSubLevels = policy['defaultSubLevels'] 

 

# Root directories 

if root is None: 

root = "." 

root = dafPersist.LogicalLocation(root).locString() 

 

self.rootStorage = dafPersist.Storage.makeFromURI(uri=root) 

 

# If the calibRoot is passed in, use that. If not and it's indicated in 

# the policy, use that. And otherwise, the calibs are in the regular 

# root. 

# If the location indicated by the calib root does not exist, do not 

# create it. 

calibStorage = None 

if calibRoot is not None: 

calibRoot = dafPersist.Storage.absolutePath(root, calibRoot) 

calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, 

create=False) 

else: 

calibRoot = policy.get('calibRoot', None) 

if calibRoot: 

calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, 

create=False) 

if calibStorage is None: 

calibStorage = self.rootStorage 

 

self.root = root 

 

# Registries 

self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath", 

self.rootStorage, searchParents=False, 

posixIfNoSql=(not parentRegistry)) 

if not self.registry: 

self.registry = parentRegistry 

needCalibRegistry = policy.get('needCalibRegistry', None) 

if needCalibRegistry: 

if calibStorage: 

self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy, 

"calibRegistryPath", calibStorage, 

posixIfNoSql=False) # NB never use posix for calibs 

else: 

raise RuntimeError( 

"'needCalibRegistry' is true in Policy, but was unable to locate a repo at " + 

"calibRoot ivar:%s or policy['calibRoot']:%s" % 

(calibRoot, policy.get('calibRoot', None))) 

else: 

self.calibRegistry = None 

 

# Dict of valid keys and their value types 

self.keyDict = dict() 

 

self._initMappings(policy, self.rootStorage, calibStorage, provided=None) 

self._initWriteRecipes() 

 

# Camera geometry 

self.cameraDataLocation = None # path to camera geometry config file 

self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir) 

 

# Defect registry and root. Defects are stored with the camera and the registry is loaded from the 

# camera package, which is on the local filesystem. 

self.defectRegistry = None 

if 'defects' in policy: 

self.defectPath = os.path.join(repositoryDir, policy['defects']) 

defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3") 

self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation) 

 

# Filter translation table 

self.filters = None 

 

# verify that the class variable packageName is set before attempting 

# to instantiate an instance 

if self.packageName is None: 

raise ValueError('class variable packageName must not be None') 

 

self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log) 

 

def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None): 

"""Initialize mappings 

 

For each of the dataset types that we want to be able to read, there 

are methods that can be created to support them: 

* map_<dataset> : determine the path for dataset 

* std_<dataset> : standardize the retrieved dataset 

* bypass_<dataset> : retrieve the dataset (bypassing the usual 

retrieval machinery) 

* query_<dataset> : query the registry 

 

Besides the dataset types explicitly listed in the policy, we create 

additional, derived datasets for additional conveniences, 

e.g., reading the header of an image, retrieving only the size of a 

catalog. 

 

Parameters 

---------- 

policy : `lsst.daf.persistence.Policy` 

Policy with per-camera defaults already merged 

rootStorage : `Storage subclass instance` 

Interface to persisted repository data. 

calibStorage : `Storage subclass instance` 

Interface to persisted calib repository data. 

provided : `list` of `str` 

Keys provided by the mapper 

""" 

# Sub-dictionaries (for exposure/calibration/dataset types) 

imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile( 

"obs_base", "ImageMappingDictionary.paf", "policy")) 

expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile( 

"obs_base", "ExposureMappingDictionary.paf", "policy")) 

calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile( 

"obs_base", "CalibrationMappingDictionary.paf", "policy")) 

dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile( 

"obs_base", "DatasetMappingDictionary.paf", "policy")) 

 

# Mappings 

mappingList = ( 

("images", imgMappingPolicy, ImageMapping), 

("exposures", expMappingPolicy, ExposureMapping), 

("calibrations", calMappingPolicy, CalibrationMapping), 

("datasets", dsMappingPolicy, DatasetMapping) 

) 

self.mappings = dict() 

for name, defPolicy, cls in mappingList: 

if name in policy: 

datasets = policy[name] 

 

# Centrally-defined datasets 

defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml") 

if os.path.exists(defaultsPath): 

datasets.merge(dafPersist.Policy(defaultsPath)) 

 

mappings = dict() 

setattr(self, name, mappings) 

for datasetType in datasets.names(True): 

subPolicy = datasets[datasetType] 

subPolicy.merge(defPolicy) 

 

if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy: 

def compositeClosure(dataId, write=False, mapper=None, mapping=None, 

subPolicy=subPolicy): 

components = subPolicy.get('composite') 

assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None 

disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None 

python = subPolicy['python'] 

butlerComposite = dafPersist.ButlerComposite(assembler=assembler, 

disassembler=disassembler, 

python=python, 

dataId=dataId, 

mapper=self) 

for name, component in components.items(): 

butlerComposite.add(id=name, 

datasetType=component.get('datasetType'), 

setter=component.get('setter', None), 

getter=component.get('getter', None), 

subset=component.get('subset', False), 

inputOnly=component.get('inputOnly', False)) 

return butlerComposite 

setattr(self, "map_" + datasetType, compositeClosure) 

# for now at least, don't set up any other handling for this dataset type. 

continue 

 

if name == "calibrations": 

mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage, 

provided=provided, dataRoot=rootStorage) 

else: 

mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided) 

self.keyDict.update(mapping.keys()) 

mappings[datasetType] = mapping 

self.mappings[datasetType] = mapping 

if not hasattr(self, "map_" + datasetType): 

def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping): 

return mapping.map(mapper, dataId, write) 

setattr(self, "map_" + datasetType, mapClosure) 

if not hasattr(self, "query_" + datasetType): 

def queryClosure(format, dataId, mapping=mapping): 

return mapping.lookup(format, dataId) 

setattr(self, "query_" + datasetType, queryClosure) 

if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType): 

def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping): 

return mapping.standardize(mapper, item, dataId) 

setattr(self, "std_" + datasetType, stdClosure) 

 

def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None): 

"""Set convenience methods on CameraMapper""" 

mapName = "map_" + datasetType + "_" + suffix 

bypassName = "bypass_" + datasetType + "_" + suffix 

queryName = "query_" + datasetType + "_" + suffix 

if not hasattr(self, mapName): 

setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType)) 

if not hasattr(self, bypassName): 

if bypassImpl is None and hasattr(self, "bypass_" + datasetType): 

bypassImpl = getattr(self, "bypass_" + datasetType) 

if bypassImpl is not None: 

setattr(self, bypassName, bypassImpl) 

if not hasattr(self, queryName): 

setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType)) 

 

# Filename of dataset 

setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId: 

[os.path.join(location.getStorage().root, p) for p in location.getLocations()]) 

# Metadata from FITS file 

if subPolicy["storage"] == "FitsStorage": # a FITS image 

setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId: 

readMetadata(location.getLocationsWithRoot()[0])) 

 

# Add support for configuring FITS compression 

addName = "add_" + datasetType 

if not hasattr(self, addName): 

setattr(self, addName, self.getImageCompressionSettings) 

 

if name == "exposures": 

setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId: 

afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))) 

setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId: 

afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0]))) 

setMethods("visitInfo", 

bypassImpl=lambda datasetType, pythonType, location, dataId: 

afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0]))) 

setMethods("filter", 

bypassImpl=lambda datasetType, pythonType, location, dataId: 

afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0]))) 

setMethods("detector", 

mapImpl=lambda dataId, write=False: 

dafPersist.ButlerLocation( 

pythonType="lsst.afw.cameraGeom.CameraConfig", 

cppType="Config", 

storageName="Internal", 

locationList="ignored", 

dataId=dataId, 

mapper=self, 

storage=None, 

), 

bypassImpl=lambda datasetType, pythonType, location, dataId: 

self.camera[self._extractDetectorName(dataId)] 

) 

setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId: 

afwImage.bboxFromMetadata( 

readMetadata(location.getLocationsWithRoot()[0], hdu=1))) 

 

elif name == "images": 

setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId: 

afwImage.bboxFromMetadata( 

readMetadata(location.getLocationsWithRoot()[0]))) 

 

if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog 

setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId: 

readMetadata(os.path.join(location.getStorage().root, 

location.getLocations()[0]), hdu=1)) 

 

# Sub-images 

if subPolicy["storage"] == "FitsStorage": 

def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping): 

subId = dataId.copy() 

del subId['bbox'] 

loc = mapping.map(mapper, subId, write) 

bbox = dataId['bbox'] 

llcX = bbox.getMinX() 

llcY = bbox.getMinY() 

width = bbox.getWidth() 

height = bbox.getHeight() 

loc.additionalData.set('llcX', llcX) 

loc.additionalData.set('llcY', llcY) 

loc.additionalData.set('width', width) 

loc.additionalData.set('height', height) 

if 'imageOrigin' in dataId: 

loc.additionalData.set('imageOrigin', 

dataId['imageOrigin']) 

return loc 

 

def querySubClosure(key, format, dataId, mapping=mapping): 

subId = dataId.copy() 

del subId['bbox'] 

return mapping.lookup(format, subId) 

setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure) 

 

if subPolicy["storage"] == "FitsCatalogStorage": 

# Length of catalog 

setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId: 

readMetadata(os.path.join(location.getStorage().root, 

location.getLocations()[0]), 

hdu=1).getScalar("NAXIS2")) 

 

# Schema of catalog 

if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets: 

setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId: 

afwTable.Schema.readFits(os.path.join(location.getStorage().root, 

location.getLocations()[0]))) 

 

def _computeCcdExposureId(self, dataId): 

"""Compute the 64-bit (long) identifier for a CCD exposure. 

 

Subclasses must override 

 

Parameters 

---------- 

dataId : `dict` 

Data identifier with visit, ccd. 
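
Notes
-----
A typical override packs the visit and detector numbers into one
integer; for example (a hypothetical layout, not a required
convention)::

    return 200*dataId['visit'] + dataId['ccd']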

""" 

raise NotImplementedError() 

 

def _computeCoaddExposureId(self, dataId, singleFilter): 

"""Compute the 64-bit (long) identifier for a coadd. 

 

Subclasses must override 

 

Parameters 

---------- 

dataId : `dict` 

Data identifier with tract and patch. 

singleFilter : `bool` 

True means the desired ID is for a single-filter coadd, in which 

case dataId must contain filter. 

""" 

raise NotImplementedError() 

 

def _search(self, path): 

"""Search for path in the associated repository's storage. 

 

Parameters 

---------- 

path : string 

Path that describes an object in the repository associated with 

this mapper. 

Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 

indicator will be stripped when searching and so will match 

filenames without the HDU indicator, e.g. 'foo.fits'. The path 

returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 

 

Returns 

------- 

string 

The path for this object in the repository. Will return None if the 

object can't be found. If the input argument path contained an HDU 

indicator, the returned path will also contain the HDU indicator. 

""" 

return self.rootStorage.search(path) 

 

def backup(self, datasetType, dataId): 

"""Rename any existing object with the given type and dataId. 

 

The CameraMapper implementation saves objects in a sequence of e.g.: 

 

- foo.fits 

- foo.fits~1 

- foo.fits~2 

 

All of the backups will be placed in the output repo, however, and will 

not be removed if they are found elsewhere in the _parent chain. This 

means that the same file will be stored twice if the previous version 

was found in an input repo. 

""" 

 

# Calling PosixStorage directly is not the long term solution in this 

# function, this is work-in-progress on epic DM-6225. The plan is for 

# parentSearch to be changed to 'search', and search only the storage 

# associated with this mapper. All searching of parents will be handled 

# by traversing the container of repositories in Butler. 

 

def firstElement(list): 

"""Get the first element in the list, or None if that can't be 

done. 

""" 

return list[0] if list is not None and len(list) else None 

 

n = 0 

newLocation = self.map(datasetType, dataId, write=True) 

newPath = newLocation.getLocations()[0] 

path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True) 

path = firstElement(path) 

oldPaths = [] 

while path is not None: 

n += 1 

oldPaths.append((n, path)) 

path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True) 

path = firstElement(path) 

for n, oldPath in reversed(oldPaths): 

self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n)) 

 

def keys(self): 

"""Return supported keys. 

 

Returns 

------- 

iterable 

List of keys usable in a dataset identifier 

""" 

return iter(self.keyDict.keys()) 

 

def getKeys(self, datasetType, level): 

"""Return a dict of supported keys and their value types for a given 

dataset type at a given level of the key hierarchy. 

 

Parameters 

---------- 

datasetType : `str` 

Dataset type or None for all dataset types. 

level : `str` or None 

Level or None for all levels or '' for the default level for the 

camera. 

 

Returns 

------- 

`dict` 

Keys are strings usable in a dataset identifier, values are their 

value types. 

""" 

 

# not sure if this is how we want to do this. what if None was intended? 

if level == '': 

level = self.getDefaultLevel() 

 

if datasetType is None: 

keyDict = copy.copy(self.keyDict) 

else: 

keyDict = self.mappings[datasetType].keys() 

if level is not None and level in self.levels: 

keyDict = copy.copy(keyDict) 

for l in self.levels[level]: 

if l in keyDict: 

del keyDict[l] 

return keyDict 

 

def getDefaultLevel(self): 

return self.defaultLevel 

 

def getDefaultSubLevel(self, level): 

if level in self.defaultSubLevels: 

return self.defaultSubLevels[level] 

return None 

 

@classmethod 

def getCameraName(cls): 

"""Return the name of the camera that this CameraMapper is for.""" 

className = str(cls) 

className = className[className.find('.'):-1] 

m = re.search(r'(\w+)Mapper', className) 

if m is None: 

m = re.search(r"class '[\w.]*?(\w+)'", className) 

name = m.group(1) 

return name[:1].lower() + name[1:] if name else '' 

 

@classmethod 

def getPackageName(cls): 

"""Return the name of the package containing this CameraMapper.""" 

if cls.packageName is None: 

raise ValueError('class variable packageName must not be None') 

return cls.packageName 

 

@classmethod 

def getPackageDir(cls): 

"""Return the base directory of this package""" 

return getPackageDir(cls.getPackageName()) 

 

def map_camera(self, dataId, write=False): 

"""Map a camera dataset.""" 

if self.camera is None: 

raise RuntimeError("No camera dataset available.") 

actualId = self._transformId(dataId) 

return dafPersist.ButlerLocation( 

pythonType="lsst.afw.cameraGeom.CameraConfig", 

cppType="Config", 

storageName="ConfigStorage", 

locationList=self.cameraDataLocation or "ignored", 

dataId=actualId, 

mapper=self, 

storage=self.rootStorage 

) 

 

def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId): 

"""Return the (preloaded) camera object. 

""" 

if self.camera is None: 

raise RuntimeError("No camera dataset available.") 

return self.camera 

 

def map_defects(self, dataId, write=False): 

"""Map defects dataset. 

 

Returns 

------- 

`lsst.daf.persistence.ButlerLocation` 

Minimal ButlerLocation containing just the locationList field 

(just enough information that bypass_defects can use it). 

""" 

defectFitsPath = self._defectLookup(dataId=dataId) 

if defectFitsPath is None: 

raise RuntimeError("No defects available for dataId=%s" % (dataId,)) 

 

return dafPersist.ButlerLocation(None, None, None, defectFitsPath, 

dataId, self, 

storage=self.rootStorage) 

 

def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId): 

"""Return a defect based on the butler location returned by map_defects 

 

Parameters 

---------- 

butlerLocation : `lsst.daf.persistence.ButlerLocation` 

locationList = path to defects FITS file 

dataId : `dict` 

Butler data ID; "ccd" must be set. 

 

Note: the name "bypass_XXX" means the butler makes no attempt to 

convert the ButlerLocation into an object, which is what we want for 

now, since that conversion is a bit tricky. 

""" 

detectorName = self._extractDetectorName(dataId) 

defectsFitsPath = butlerLocation.locationList[0] 

with fits.open(defectsFitsPath) as hduList: 

for hdu in hduList[1:]: 

if hdu.header["name"] != detectorName: 

continue 

 

defectList = [] 

for data in hdu.data: 

bbox = afwGeom.Box2I( 

afwGeom.Point2I(int(data['x0']), int(data['y0'])), 

afwGeom.Extent2I(int(data['width']), int(data['height'])), 

) 

defectList.append(afwImage.DefectBase(bbox)) 

return defectList 

 

raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath)) 

 

def map_expIdInfo(self, dataId, write=False): 

return dafPersist.ButlerLocation( 

pythonType="lsst.obs.base.ExposureIdInfo", 

cppType=None, 

storageName="Internal", 

locationList="ignored", 

dataId=dataId, 

mapper=self, 

storage=self.rootStorage 

) 

 

def bypass_expIdInfo(self, datasetType, pythonType, location, dataId): 

"""Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 

expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId) 

expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId) 

return ExposureIdInfo(expId=expId, expBits=expBits) 

 

def std_bfKernel(self, item, dataId): 

"""Disable standardization for bfKernel 

 

bfKernel is a calibration product that is a numpy array, 

unlike other calibration products that are all images; 

all calibration images are sent through _standardizeExposure 

due to CalibrationMapping, but we don't want that to happen to bfKernel 

""" 

return item 

 

def std_raw(self, item, dataId): 

"""Standardize a raw dataset by converting it to an Exposure instead 

of an Image""" 

return self._standardizeExposure(self.exposures['raw'], item, dataId, 

trimmed=False, setVisitInfo=True) 

 

def map_skypolicy(self, dataId): 

"""Map a sky policy.""" 

return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy", 

"Internal", None, None, self, 

storage=self.rootStorage) 

 

def std_skypolicy(self, item, dataId): 

"""Standardize a sky policy by returning the one we use.""" 

return self.skypolicy 

 

############################################################################### 

# 

# Utility functions 

# 

############################################################################### 

 

def _getCcdKeyVal(self, dataId): 

"""Return CCD key and value used to look a defect in the defect 

registry 

 

The default implementation simply returns ("ccd", full detector name) 

""" 

return ("ccd", self._extractDetectorName(dataId)) 

 

def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, 

posixIfNoSql=True): 

"""Set up a registry (usually SQLite3), trying a number of possible 

paths. 

 

Parameters 

---------- 

name : string 

Name of registry. 

description : `str` 

Description of registry (for log messages) 

path : string 

Path for registry. 

policy : string 

Policy that contains the registry name, used if path is None. 

policyKey : string 

Key in policy for registry path. 

storage : Storage subclass 

Repository Storage to look in. 

searchParents : bool, optional 

True if the search for a registry should follow any Butler v1 

_parent symlinks. 

posixIfNoSql : bool, optional 

If an sqlite registry is not found, will create a posix registry if 

this is True. 

 

Returns 

------- 

lsst.daf.persistence.Registry 

Registry object 

""" 

if path is None and policyKey in policy: 

path = dafPersist.LogicalLocation(policy[policyKey]).locString() 

if os.path.isabs(path): 

raise RuntimeError("Policy should not indicate an absolute path for registry.") 

if not storage.exists(path): 

newPath = storage.instanceSearch(path) 

 

newPath = newPath[0] if newPath is not None and len(newPath) else None 

if newPath is None: 

self.log.warn("Unable to locate registry at policy path (also looked in root): %s", 

path) 

path = newPath 

else: 

self.log.warn("Unable to locate registry at policy path: %s", path) 

path = None 

 

# The old Butler API indicated the registry WITH the repo folder; the new Butler expects the registry 

# to be in the repo folder. To support the old API, check whether path starts with root and, if so, strip 

# root from path. Currently only works with PosixStorage 

try: 

root = storage.root 

if path and (path.startswith(root)): 

path = path[len(root + '/'):] 

except AttributeError: 

pass 

 

# determine if there is an sqlite registry and if not, try the posix registry. 

registry = None 

 

def search(filename, description): 

"""Search for file in storage 

 

Parameters 

---------- 

filename : `str` 

Filename to search for 

description : `str` 

Description of file, for error message. 

 

Returns 

------- 

path : `str` or `None` 

Path to file, or None 

""" 

result = storage.instanceSearch(filename) 

if result: 

return result[0] 

self.log.debug("Unable to locate %s: %s", description, filename) 

return None 

 

# Search for a suitable registry database 

if path is None: 

path = search("%s.pgsql" % name, "%s in root" % description) 

if path is None: 

path = search("%s.sqlite3" % name, "%s in root" % description) 

if path is None: 

path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description) 

 

if path is not None: 

if not storage.exists(path): 

newPath = storage.instanceSearch(path) 

newPath = newPath[0] if newPath is not None and len(newPath) else None 

if newPath is not None: 

path = newPath 

localFileObj = storage.getLocalFile(path) 

self.log.info("Loading %s registry from %s", description, localFileObj.name) 

registry = dafPersist.Registry.create(localFileObj.name) 

localFileObj.close() 

elif not registry and posixIfNoSql: 

try: 

self.log.info("Loading Posix %s registry from %s", description, storage.root) 

registry = dafPersist.PosixRegistry(storage.root) 

except Exception: 

registry = None 

 

return registry 

 

def _transformId(self, dataId): 

"""Generate a standard ID dict from a camera-specific ID dict. 

 

Canonical keys include: 

- amp: amplifier name 

- ccd: CCD name (in LSST this is a combination of raft and sensor) 

The default implementation returns a copy of its input. 

 

Parameters 

---------- 

dataId : `dict` 

Dataset identifier; this must not be modified 

 

Returns 

------- 

`dict` 

Transformed dataset identifier. 

""" 

 

return dataId.copy() 

 

def _mapActualToPath(self, template, actualId): 

"""Convert a template path to an actual path, using the actual data 

identifier. This implementation is usually sufficient but can be 

overridden by the subclass. 

 

Parameters 

---------- 

template : `str` 

Template path 

actualId : `dict` 

Dataset identifier 

 

Returns 

------- 

`str` 

Pathname 
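
Notes
-----
With the default `_transformId` this is ordinary ``%`` formatting;
for example (illustrative template and values)::

    >>> "raw/v%(visit)d/c%(ccd)s.fits" % {"visit": 123, "ccd": "1,1"}
    'raw/v123/c1,1.fits'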

""" 

 

transformedId = self._transformId(actualId) 

try: 

return template % transformedId 

except Exception as e: 

raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e)) 

 

@staticmethod 

def getShortCcdName(ccdName): 

"""Convert a CCD name to a form useful as a filename 

 

The default implementation converts spaces to underscores. 
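
For example::

    >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
    'R:1,2_S:3,4'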

""" 

return ccdName.replace(" ", "_") 

 

def _extractDetectorName(self, dataId): 

"""Extract the detector (CCD) name from the dataset identifier. 

 

The name in question is the detector name used by lsst.afw.cameraGeom. 

 

Parameters 

---------- 

dataId : `dict` 

Dataset identifier. 

 

Returns 

------- 

`str` 

Detector name 

""" 

raise NotImplementedError("No _extractDetectorName() function specified") 

 

def _extractAmpId(self, dataId): 

"""Extract the amplifier identifer from a dataset identifier. 

 

.. note:: Deprecated in 11_0 

 

The amplifier identifier has two parts: the detector name for the CCD 

containing the amplifier and the index of the amplifier in the detector. 

 

Parameters 

---------- 

dataId : `dict` 

Dataset identifier 

 

Returns 

------- 

`tuple` 

Amplifier identifier 

""" 

 

trDataId = self._transformId(dataId) 

return (trDataId["ccd"], int(trDataId['amp'])) 

 

def _setAmpDetector(self, item, dataId, trimmed=True): 

"""Set the detector object in an Exposure for an amplifier. 

 

Defects are also added to the Exposure based on the detector object. 

 

Parameters 

---------- 

item : `lsst.afw.image.Exposure` 

Exposure to set the detector in. 

dataId : `dict` 

Dataset identifier 

trimmed : `bool` 

Should detector be marked as trimmed? (ignored) 

""" 

 

return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed) 

 

def _setCcdDetector(self, item, dataId, trimmed=True): 

"""Set the detector object in an Exposure for a CCD. 

 

Parameters 

---------- 

item : `lsst.afw.image.Exposure` 

Exposure to set the detector in. 

dataId : `dict` 

Dataset identifier 

trimmed : `bool` 

Should detector be marked as trimmed? (ignored) 

""" 

if item.getDetector() is not None: 

return 

 

detectorName = self._extractDetectorName(dataId) 

detector = self.camera[detectorName] 

item.setDetector(detector) 

 

def _setFilter(self, mapping, item, dataId): 

"""Set the filter object in an Exposure. If the Exposure had a FILTER 

keyword, this was already processed during load. But if it didn't, 

use the filter from the registry. 

 

Parameters 

---------- 

mapping : `lsst.obs.base.Mapping` 

Where to get the filter from. 

item : `lsst.afw.image.Exposure` 

Exposure to set the filter in. 

dataId : `dict` 

Dataset identifier. 

""" 

 

if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or 

isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)): 

return 

 

if item.getFilter().getId() != afwImage.Filter.UNKNOWN: 

return 

 

actualId = mapping.need(['filter'], dataId) 

filterName = actualId['filter'] 

if self.filters is not None and filterName in self.filters: 

filterName = self.filters[filterName] 

item.setFilter(afwImage.Filter(filterName)) 

 

# Default standardization function for exposures 

def _standardizeExposure(self, mapping, item, dataId, filter=True, 

trimmed=True, setVisitInfo=True): 

"""Default standardization function for images. 

 

This sets the Detector from the camera geometry 

and optionally sets the Filter. In both cases this saves 

having to persist some data in each exposure (or image). 

 

Parameters 

---------- 

mapping : `lsst.obs.base.Mapping` 

Where to get the values from. 

item : image-like object 

Can be any of lsst.afw.image.Exposure, 

lsst.afw.image.DecoratedImage, lsst.afw.image.Image 

or lsst.afw.image.MaskedImage 

 

dataId : `dict` 

Dataset identifier 

filter : `bool` 

Set filter? Ignored if item is already an exposure 

trimmed : `bool` 

Should detector be marked as trimmed? 

setVisitInfo : `bool` 

Should Exposure have its VisitInfo filled out from the metadata? 

 

Returns 

------- 

`lsst.afw.image.Exposure` 

The standardized Exposure. 

""" 

try: 

item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo) 

except Exception as e: 

self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e)) 

raise 

 

if mapping.level.lower() == "amp": 

self._setAmpDetector(item, dataId, trimmed) 

elif mapping.level.lower() == "ccd": 

self._setCcdDetector(item, dataId, trimmed) 

 

if filter: 

self._setFilter(mapping, item, dataId) 

 

return item 

 

def _defectLookup(self, dataId): 

"""Find the defects for a given CCD. 

 

Parameters 

---------- 

dataId : `dict` 

Dataset identifier 

 

Returns 

------- 

`str` 

Path to the defects file or None if not available. 

""" 

if self.defectRegistry is None: 

return None 

if self.registry is None: 

raise RuntimeError("No registry for defect lookup") 

 

ccdKey, ccdVal = self._getCcdKeyVal(dataId) 

 

dataIdForLookup = {'visit': dataId['visit']} 

# .lookup will fail in a posix registry because there is no template to provide. 

rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup) 

if len(rows) == 0: 

return None 

assert len(rows) == 1 

taiObs = rows[0][0] 

 

# Look up the defects for this CCD serial number that are valid at the exposure midpoint. 

rows = self.defectRegistry.executeQuery(("path",), ("defect",), 

[(ccdKey, "?")], 

("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"), 

(ccdVal, taiObs)) 

if not rows or len(rows) == 0: 

return None 

if len(rows) == 1: 

return os.path.join(self.defectPath, rows[0][0]) 

else: 

raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" % 

(ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows]))) 

 

def _makeCamera(self, policy, repositoryDir): 

"""Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 

the camera geometry 

 

Also set self.cameraDataLocation, if relevant (else it can be left 

None). 

 

This implementation assumes that policy contains an entry "camera" 

that points to the subdirectory in this package of camera data; 

specifically, that subdirectory must contain: 

- a file named `camera.py` that contains persisted camera config 

- ampInfo table FITS files, as required by 

lsst.afw.cameraGeom.makeCameraFromPath 

 

Parameters 

---------- 

policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy` 

Policy with per-camera defaults already merged 

(PexPolicy only for backward compatibility). 

repositoryDir : `str` 

Policy repository for the subclassing module (obtained with 

getRepositoryPath() on the per-camera default dictionary). 

""" 

if isinstance(policy, pexPolicy.Policy): 

policy = dafPersist.Policy(pexPolicy=policy) 

if 'camera' not in policy: 

raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera") 

cameraDataSubdir = policy['camera'] 

self.cameraDataLocation = os.path.normpath( 

os.path.join(repositoryDir, cameraDataSubdir, "camera.py")) 

cameraConfig = afwCameraGeom.CameraConfig() 

cameraConfig.load(self.cameraDataLocation) 

ampInfoPath = os.path.dirname(self.cameraDataLocation) 

return afwCameraGeom.makeCameraFromPath( 

cameraConfig=cameraConfig, 

ampInfoPath=ampInfoPath, 

shortNameFunc=self.getShortCcdName, 

pupilFactoryClass=self.PupilFactoryClass 

) 

 

def getRegistry(self): 

"""Get the registry used by this mapper. 

 

Returns 

------- 

Registry or None 

The registry used by this mapper for this mapper's repository. 

""" 

return self.registry 

 

def getImageCompressionSettings(self, datasetType, dataId): 

"""Stuff image compression settings into a daf.base.PropertySet 

 

This goes into the ButlerLocation's "additionalData", which gets 

passed into the boost::persistence framework. 

 

Parameters 

---------- 

datasetType : `str` 

Type of dataset for which to get the image compression settings. 

dataId : `dict` 

Dataset identifier. 

 

Returns 

------- 

additionalData : `lsst.daf.base.PropertySet` 

Image compression settings. 

""" 

mapping = self.mappings[datasetType] 

recipeName = mapping.recipe 

storageType = mapping.storage 

if storageType not in self._writeRecipes: 

return dafBase.PropertySet() 

if recipeName not in self._writeRecipes[storageType]: 

raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" % 

(datasetType, storageType, recipeName)) 

recipe = self._writeRecipes[storageType][recipeName].deepCopy() 

seed = hash(tuple(dataId.items())) % 2**31 

for plane in ("image", "mask", "variance"): 

if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0: 

recipe.set(plane + ".scaling.seed", seed) 

return recipe 

 

def _initWriteRecipes(self): 

"""Read the recipes for writing files 

 

These recipes are currently used for configuring FITS compression, 

but they could have wider uses for configuring different flavors 

of the storage types. A recipe is referred to by a symbolic name, 

which has associated settings. These settings are stored as a 

`PropertySet` so they can easily be passed down to the 

boost::persistence framework as the "additionalData" parameter. 

 

The list of recipes is written in YAML. A default recipe and 

some other convenient recipes are in obs_base/policy/writeRecipes.yaml 

and these may be overridden or supplemented by the individual obs_* 

packages' own policy/writeRecipes.yaml files. 

 

Recipes are grouped by the storage type. Currently, only the 

``FitsStorage`` storage type uses recipes, which it uses to 

configure FITS image compression. 

 

Each ``FitsStorage`` recipe for FITS compression should define 

"image", "mask" and "variance" entries, each of which may contain 

"compression" and "scaling" entries. Defaults will be provided for 

any missing elements under "compression" and "scaling". 

 

The allowed entries under "compression" are: 

 

* algorithm (string): compression algorithm to use 

* rows (int): number of rows per tile (0 = entire dimension) 

* columns (int): number of columns per tile (0 = entire dimension) 

* quantizeLevel (float): cfitsio quantization level 

 

The allowed entries under "scaling" are: 

 

* algorithm (string): scaling algorithm to use 

* bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 

* fuzz (bool): fuzz the values when quantising floating-point values? 

* seed (long): seed for random number generator when fuzzing 

* maskPlanes (list of string): mask planes to ignore when doing 

statistics 

* quantizeLevel: divisor of the standard deviation for STDEV_* scaling 

* quantizePad: number of stdev to allow on the low side (for 

STDEV_POSITIVE/NEGATIVE) 

* bscale: manually specified BSCALE (for MANUAL scaling) 

* bzero: manually specified BZERO (for MANUAL scaling) 

 

A very simple example YAML recipe: 

 

FitsStorage: 

default: 

image: &default 

compression: 

algorithm: GZIP_SHUFFLE 

mask: *default 

variance: *default 

""" 

recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml") 

recipes = dafPersist.Policy(recipesFile) 

supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml") 

validationMenu = {'FitsStorage': validateRecipeFitsStorage, } 

if os.path.exists(supplementsFile) and supplementsFile != recipesFile: 

supplements = dafPersist.Policy(supplementsFile) 

# Don't allow overrides, only supplements 

for entry in validationMenu: 

intersection = set(recipes[entry].names()).intersection(set(supplements.names())) 

if intersection: 

raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" % 

(supplementsFile, entry, recipesFile, intersection)) 

recipes.update(supplements) 

 

self._writeRecipes = {} 

for storageType in recipes.names(True): 

if "default" not in recipes[storageType]: 

raise RuntimeError("No 'default' recipe defined for storage type %s in %s" % 

(storageType, recipesFile)) 

self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType]) 

 

 

def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True): 

"""Generate an Exposure from an image-like object 

 

If the image is a DecoratedImage then also set its WCS and metadata 

(Image and MaskedImage are missing the necessary metadata 

and Exposure already has those set) 

 

Parameters 

---------- 

image : Image-like object 

Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or 

Exposure. 

 

Returns 

------- 

`lsst.afw.image.Exposure` 

Exposure containing input image. 

""" 

metadata = None 

if isinstance(image, afwImage.MaskedImage): 

exposure = afwImage.makeExposure(image) 

elif isinstance(image, afwImage.DecoratedImage): 

exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage())) 

metadata = image.getMetadata() 

try: 

wcs = afwGeom.makeSkyWcs(metadata, strip=True) 

exposure.setWcs(wcs) 

except pexExcept.TypeError as e: 

# raised on failure to create a wcs (and possibly others) 

if logger is None: 

logger = lsstLog.Log.getLogger("CameraMapper") 

logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:" 

" %s", e.args[0]) 

 

exposure.setMetadata(metadata) 

elif isinstance(image, afwImage.Exposure): 

# Exposure 

exposure = image 

metadata = exposure.getMetadata() 

else: 

# Image 

exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image)) 

# 

# set VisitInfo if we can 

# 

if setVisitInfo and exposure.getInfo().getVisitInfo() is None: 

if metadata is not None: 

if mapper is None: 

if not logger: 

logger = lsstLog.Log.getLogger("CameraMapper") 

logger.warn("I can only set the VisitInfo if you provide a mapper") 

else: 

exposureId = mapper._computeCcdExposureId(dataId) 

visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId) 

 

exposure.getInfo().setVisitInfo(visitInfo) 

 

return exposure 

 

 

def validateRecipeFitsStorage(recipes): 

"""Validate recipes for FitsStorage 

 

The recipes are supplemented with default values where appropriate. 

 

TODO: replace this custom validation code with Cerberus (DM-11846) 

 

Parameters 

---------- 

recipes : `lsst.daf.persistence.Policy` 

FitsStorage recipes to validate. 

 

Returns 

------- 

validated : `lsst.daf.base.PropertySet` 

Validated FitsStorage recipe. 

 

Raises 

------ 

`RuntimeError` 

If validation fails. 

""" 

# Schemas define what should be there, and the default values (and by the default 

# value, the expected type). 

compressionSchema = { 

"algorithm": "NONE", 

"rows": 1, 

"columns": 0, 

"quantizeLevel": 0.0, 

} 

scalingSchema = { 

"algorithm": "NONE", 

"bitpix": 0, 

"maskPlanes": ["NO_DATA"], 

"seed": 0, 

"quantizeLevel": 4.0, 

"quantizePad": 5.0, 

"fuzz": True, 

"bscale": 1.0, 

"bzero": 0.0, 

} 

 

def checkUnrecognized(entry, allowed, description): 

"""Check to see if the entry contains unrecognised keywords""" 

unrecognized = set(entry.keys()) - set(allowed) 

if unrecognized: 

raise RuntimeError( 

"Unrecognized entries when parsing image compression recipe %s: %s" % 

(description, unrecognized)) 

 

validated = {} 

for name in recipes.names(True): 

checkUnrecognized(recipes[name], ["image", "mask", "variance"], name) 

rr = dafBase.PropertySet() 

validated[name] = rr 

for plane in ("image", "mask", "variance"): 

checkUnrecognized(recipes[name][plane], ["compression", "scaling"], 

name + "->" + plane) 

 

for settings, schema in (("compression", compressionSchema), 

("scaling", scalingSchema)): 

prefix = plane + "." + settings 

if settings not in recipes[name][plane]: 

for key in schema: 

rr.set(prefix + "." + key, schema[key]) 

continue 

entry = recipes[name][plane][settings] 

checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings) 

for key in schema: 

value = type(schema[key])(entry[key]) if key in entry else schema[key] 

rr.set(prefix + "." + key, value) 

return validated