# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["RegistryTablesTuple", "makeRegistryTableSpecs"]

from collections import namedtuple

import sqlalchemy

from ..core.dimensions import DimensionUniverse
from ..core.dimensions.schema import addDimensionForeignKey

from ..core import ddl


RegistryTablesTuple = namedtuple(
    "RegistryTablesTuple",
    [
        "dataset",
        "dataset_composition",
        "dataset_type",
        "dataset_type_dimensions",
        "dataset_collection",
        "run",
        "quantum",
        "dataset_consumers",
        "dataset_storage",
    ]
)


def makeRegistryTableSpecs(universe: DimensionUniverse) -> RegistryTablesTuple:
    """Construct descriptions of all tables in the Registry, aside from those
    that correspond to `DimensionElement` instances.

    Parameters
    ----------
    universe : `DimensionUniverse`
        All dimensions known to the `Registry`.

    Returns
    -------
    specs : `RegistryTablesTuple`
        A named tuple containing `ddl.TableSpec` instances.
    """
    # The 'dataset' table is special: we need to add foreign key fields for
    # each dimension in the universe.
    dataset = ddl.TableSpec(
        fields=[
            ddl.FieldSpec(
                name="dataset_id",
                dtype=sqlalchemy.BigInteger,
                primaryKey=True,
                autoincrement=True,
                doc="A unique autoincrement field used as the primary key for dataset.",
            ),
            ddl.FieldSpec(
                name="dataset_type_name",
                dtype=sqlalchemy.String,
                length=128,
                nullable=False,
                doc=(
                    "The name of the DatasetType associated with this dataset; a "
                    "reference to the dataset_type table."
                ),
            ),
            ddl.FieldSpec(
                name="run_id",
                dtype=sqlalchemy.BigInteger,
                nullable=False,
                doc=(
                    "The id of the run that produced this dataset, providing access to "
                    "coarse provenance information."
                ),
            ),
            ddl.FieldSpec(
                name="quantum_id",
                dtype=sqlalchemy.BigInteger,
                doc=(
                    "The id of the quantum that produced this dataset, providing access "
                    "to fine-grained provenance information. May be null for datasets "
                    "not produced by running a PipelineTask."
                ),
            ),
            ddl.FieldSpec(
                name="dataset_ref_hash",
                dtype=ddl.Base64Bytes,
                nbytes=32,
                nullable=False,
                doc="Secure hash of the data ID (i.e. dimension link values) and dataset_type_name.",
            ),
        ],
        foreignKeys=[
            ddl.ForeignKeySpec(
                table="dataset_type",
                source=("dataset_type_name",),
                target=("dataset_type_name",),
            ),
            ddl.ForeignKeySpec(
                table="run", source=("run_id",), target=("id",), onDelete="CASCADE"
            ),
            ddl.ForeignKeySpec(
                table="quantum",
                source=("quantum_id",),
                target=("id",),
                onDelete="SET NULL",
            ),
        ],
    )
    for dimension in universe.dimensions:
        addDimensionForeignKey(dataset, dimension, primaryKey=False, nullable=True)
    # All other table specs are fully static and do not depend on
    # configuration.
    return RegistryTablesTuple(
        dataset=dataset,
        dataset_composition=ddl.TableSpec(
            doc="A self-join table that relates components of a dataset to their parents.",
            fields=[
                ddl.FieldSpec(
                    name="parent_dataset_id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    doc="Link to the dataset entry for the parent/composite dataset.",
                ),
                ddl.FieldSpec(
                    name="component_dataset_id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    doc="Link to the dataset entry for a child/component dataset.",
                ),
                ddl.FieldSpec(
                    name="component_name",
                    dtype=sqlalchemy.String,
                    length=32,
                    nullable=False,
                    doc="Name of this component within this composite.",
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(
                    table="dataset",
                    source=("parent_dataset_id",),
                    target=("dataset_id",),
                    onDelete="CASCADE",
                ),
                ddl.ForeignKeySpec(
                    table="dataset",
                    source=("component_dataset_id",),
                    target=("dataset_id",),
                    onDelete="CASCADE",
                ),
            ],
        ),
        dataset_type=ddl.TableSpec(
            doc="A Table containing the set of registered DatasetTypes and their StorageClasses.",
            fields=[
                ddl.FieldSpec(
                    name="dataset_type_name",
                    dtype=sqlalchemy.String,
                    length=128,
                    primaryKey=True,
                    nullable=False,
                    doc="Globally unique name for this DatasetType.",
                ),
                ddl.FieldSpec(
                    name="storage_class",
                    dtype=sqlalchemy.String,
                    length=64,
                    nullable=False,
                    doc=(
                        "Name of the StorageClass associated with this DatasetType. All "
                        "registries must support the full set of standard StorageClasses, "
                        "so the set of allowed StorageClasses and their properties is "
                        "maintained in the registry Python code rather than the database."
                    ),
                ),
            ],
        ),
        dataset_type_dimensions=ddl.TableSpec(
            doc=(
                "A definition table indicating which dimension fields in Dataset are "
                "non-NULL for Datasets with this DatasetType."
            ),
            fields=[
                ddl.FieldSpec(
                    name="dataset_type_name",
                    dtype=sqlalchemy.String,
                    length=128,
                    primaryKey=True,
                    doc="The name of the DatasetType.",
                ),
                ddl.FieldSpec(
                    name="dimension_name",
                    dtype=sqlalchemy.String,
                    length=32,
                    primaryKey=True,
                    doc="The name of a Dimension associated with this DatasetType.",
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(
                    table="dataset_type",
                    source=("dataset_type_name",),
                    target=("dataset_type_name",),
                )
            ],
        ),
        dataset_collection=ddl.TableSpec(
            doc=(
                "A table that associates Dataset records with Collections, "
                "which are implemented simply as string tags."
            ),
            fields=[
                ddl.FieldSpec(
                    name="dataset_id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    nullable=False,
                    doc="Link to a unique record in the dataset table.",
                ),
                ddl.FieldSpec(
                    name="dataset_ref_hash",
                    dtype=ddl.Base64Bytes,
                    nbytes=32,
                    nullable=False,
                    doc="Secure hash of the data ID (i.e. dimension link values) and dataset_type_name.",
                ),
                ddl.FieldSpec(
                    name="collection",
                    dtype=sqlalchemy.String,
                    length=128,
                    primaryKey=True,
                    nullable=False,
                    doc="Name of a Collection with which this Dataset is associated.",
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(
                    table="dataset",
                    source=("dataset_id",),
                    target=("dataset_id",),
                    onDelete="CASCADE",
                )
            ],
            unique=[("dataset_ref_hash", "collection")],
        ),
        run=ddl.TableSpec(
            doc="A table used to capture coarse provenance for all datasets.",
            fields=[
                ddl.FieldSpec(
                    name="id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    autoincrement=True,
                    doc="A unique autoincrement integer identifier for this run.",
                ),
                ddl.FieldSpec(
                    name="name",
                    dtype=sqlalchemy.String,
                    length=128,
                    doc="The name of the run.",
                ),
                ddl.FieldSpec(
                    name="start_time",
                    dtype=sqlalchemy.DateTime,
                    nullable=True,
                    doc="The start time for the run.",
                ),
                ddl.FieldSpec(
                    name="end_time",
                    dtype=sqlalchemy.DateTime,
                    nullable=True,
                    doc="The end time for the run.",
                ),
                ddl.FieldSpec(
                    name="host",
                    dtype=sqlalchemy.String,
                    length=64,
                    nullable=True,
                    doc="The system on which the run was executed.",
                ),
            ],
            unique=[("name",)],
        ),
        quantum=ddl.TableSpec(
            doc="A table used to capture fine-grained provenance for datasets produced by PipelineTasks.",
            fields=[
                ddl.FieldSpec(
                    name="id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    autoincrement=True,
                    doc="A unique autoincrement integer identifier for this quantum.",
                ),
                ddl.FieldSpec(
                    name="task",
                    dtype=sqlalchemy.String,
                    length=256,
                    doc="Fully qualified name of the SuperTask that executed this quantum.",
                ),
                ddl.FieldSpec(
                    name="run_id",
                    dtype=sqlalchemy.BigInteger,
                    doc="Link to the run this quantum is a part of.",
                ),
                ddl.FieldSpec(
                    name="start_time",
                    dtype=sqlalchemy.DateTime,
                    nullable=True,
                    doc="The start time for the quantum.",
                ),
                ddl.FieldSpec(
                    name="end_time",
                    dtype=sqlalchemy.DateTime,
                    nullable=True,
                    doc="The end time for the quantum.",
                ),
                ddl.FieldSpec(
                    name="host",
                    dtype=sqlalchemy.String,
                    length=64,
                    nullable=True,
                    doc="The system on which the quantum was executed.",
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(table="run", source=("run_id",), target=("id",), onDelete="CASCADE")
            ],
        ),
        dataset_consumers=ddl.TableSpec(
            doc="A table relating Quantum records to the Datasets they used as inputs.",
            fields=[
                ddl.FieldSpec(
                    name="quantum_id",
                    dtype=sqlalchemy.BigInteger,
                    nullable=False,
                    doc="A link to the associated Quantum.",
                ),
                ddl.FieldSpec(
                    name="dataset_id",
                    dtype=sqlalchemy.BigInteger,
                    nullable=False,
                    doc="A link to the associated Dataset.",
                ),
                ddl.FieldSpec(
                    name="actual",
                    dtype=sqlalchemy.Boolean,
                    nullable=False,
                    doc=(
                        "Whether the Dataset was actually used as an input by the Quantum "
                        "(as opposed to just predicted to be used during preflight)."
                    ),
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(
                    table="quantum",
                    source=("quantum_id",),
                    target=("id",),
                    onDelete="CASCADE",
                ),
                ddl.ForeignKeySpec(
                    table="dataset",
                    source=("dataset_id",),
                    target=("dataset_id",),
                    onDelete="CASCADE",
                ),
            ],
        ),
        dataset_storage=ddl.TableSpec(
            doc=(
                "A table that provides information on whether a Dataset is stored in "
                "one or more Datastores. The presence or absence of a record in this "
                "table itself indicates whether the Dataset is present in that "
                "Datastore."
            ),
            fields=[
                ddl.FieldSpec(
                    name="dataset_id",
                    dtype=sqlalchemy.BigInteger,
                    primaryKey=True,
                    nullable=False,
                    doc="Link to the dataset table.",
                ),
                ddl.FieldSpec(
                    name="datastore_name",
                    dtype=sqlalchemy.String,
                    length=256,
                    primaryKey=True,
                    nullable=False,
                    doc="Name of the Datastore this entry corresponds to.",
                ),
            ],
            foreignKeys=[
                ddl.ForeignKeySpec(
                    table="dataset", source=("dataset_id",), target=("dataset_id",)
                )
            ],
        ),
    )
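

# Illustrative usage (a minimal sketch, not part of the module's API): the
# commented example below assumes a default-constructible `DimensionUniverse`
# and that each `ddl.TableSpec` exposes its `FieldSpec` objects via `fields`,
# as suggested by the constructor calls above. Construction details vary
# between daf_butler versions, so treat this as a hypothetical invocation.
#
#     from lsst.daf.butler.core.dimensions import DimensionUniverse
#
#     universe = DimensionUniverse()  # assumption: default dimension configuration
#     specs = makeRegistryTableSpecs(universe)
#     # Print the column names of every table spec in the named tuple.
#     for table_name, spec in specs._asdict().items():
#         print(table_name, [field.name for field in spec.fields])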