# This file is part of astro_metadata_translator.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Implementation of the ``translate_header.py`` script.

Read file metadata from the specified files and report the translated content.
"""

 

__all__ = ("main", "process_files") 
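
# Typical command-line invocations, shown as a sketch only (the sample file
# and directory names are illustrative, not part of this module):
#
#     translate_header.py exposure.fits
#     translate_header.py --mode=table raw_data/
#     translate_header.py -n 1 --traceback exposure.fits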

 

import argparse
import logging

import os
import re
import sys
import traceback
import importlib
import yaml

from astro_metadata_translator import ObservationInfo, merge_headers, fix_header
from astro_metadata_translator.tests import read_test_file

# Prefer afw over Astropy
try:
    from lsst.afw.fits import readMetadata
    import lsst.daf.base  # noqa: F401 need PropertyBase for readMetadata

    def read_metadata(file, hdu):
        try:
            return readMetadata(file, hdu=hdu)
        except lsst.afw.fits.FitsError:
            return None

except ImportError:
    from astropy.io import fits

    def read_metadata(file, hdu):
        fits_file = fits.open(file)
        try:
            header = fits_file[hdu].header
        except IndexError:
            header = None
        return header
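
# A usage sketch for the backend-selected reader above; the file name is
# purely illustrative:
#
#     primary = read_metadata("exposure.fits", 0)
#
# With the afw backend a read error yields None; with the astropy fallback,
# None is returned only when the requested HDU index does not exist.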

 

 

# Output mode choices
OUTPUT_MODES = ("auto", "verbose", "table", "yaml", "fixed", "yamlnative", "fixednative", "none")

# Definitions for table columns
TABLE_COLUMNS = (
    {
        "format": "32.32s",
        "attr": "observation_id",
        "label": "ObsId",
    },
    {
        "format": "8.8s",
        "attr": "observation_type",
        "label": "ImgType",
    },
    {
        "format": "16.16s",
        "attr": "object",
        "label": "Object",
    },
    {
        "format": "16.16s",
        "attr": "physical_filter",
        "label": "Filter",
    },
    {
        "format": "5.1f",
        "attr": "exposure_time",
        "label": "ExpTime",
    },
)
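
# Illustration only: each table cell is rendered with the "format" spec from
# TABLE_COLUMNS, so "{:{fmt}}".format("r_band", fmt="16.16s") pads or
# truncates the (made-up) value to a fixed 16-character field.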

 

 

def build_argparser():
    """Construct an argument parser for the ``translate_header.py`` script.

    Returns
    -------
    argparser : `argparse.ArgumentParser`
        The argument parser that defines the ``translate_header.py``
        command-line interface.
    """

    parser = argparse.ArgumentParser(description="Summarize headers from astronomical data files")
    parser.add_argument("files", metavar="file", type=str, nargs="+",
                        help="File(s) from which headers will be parsed."
                             " If a directory is given it will be scanned for files matching the regular"
                             " expression defined in --regex.")
    parser.add_argument("-q", "--quiet", action="store_true",
                        help="Do not report the translation content from each header. This forces "
                             "output mode 'none'.")
    parser.add_argument("-d", "--dumphdr", action="store_true",
                        help="Dump the header in YAML format to standard output rather than translating it."
                             " This is the same as using --mode=yaml.")
    parser.add_argument("--traceback", action="store_true",
                        help="Give a detailed traceback when any errors are encountered.")
    parser.add_argument("-n", "--hdrnum", default=1,
                        help="HDU number to read. If the HDU cannot be found, a warning is issued but "
                             "translation is attempted using the primary header. "
                             "The primary header is always read and merged with this header.")
    parser.add_argument("-m", "--mode", default="auto", choices=OUTPUT_MODES,
                        help="Display mode for translated parameters. 'verbose' displays all the information"
                             " available. 'table' displays important information in tabular form."
                             " 'yaml' dumps the header in YAML format (this is equivalent to the -d option)."
                             " 'fixed' dumps the header in YAML after it has had corrections applied."
                             " Add a 'native' suffix to dump the YAML in PropertyList or Astropy native form."
                             " 'none' displays no translated header information and is an alias for the"
                             " '--quiet' option."
                             " 'auto' mode is 'verbose' for a single file and 'table' for multiple files.")
    parser.add_argument("-l", "--log", default="warn",
                        help="Python logging level to use.")

    re_default = r"\.fit[s]?\b"
    parser.add_argument("-r", "--regex", default=re_default,
                        help="When looking in a directory, regular expression to use to determine whether"
                             f" a file should be examined. Default: '{re_default}'")

    parser.add_argument("-p", "--packages", action="append", type=str,
                        help="Python packages to import to register additional translators")

    return parser
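
# A minimal sketch of driving the parser programmatically; the argument
# values are examples only:
#
#     parser = build_argparser()
#     args = parser.parse_args(["--mode=table", "raw_data/"])
#     # args.mode == "table", args.files == ["raw_data/"]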

 

 

def read_file(file, hdrnum, print_trace,
              outstream=sys.stdout, errstream=sys.stderr, output_mode="verbose",
              write_heading=False):
    """Read the specified file and process it.

    Parameters
    ----------
    file : `str`
        The file from which the header is to be read.
    hdrnum : `int`
        The HDU number to read. The primary header is always read and
        merged with the header from this HDU.
    print_trace : `bool`
        If there is an error reading the file and this parameter is `True`,
        a full traceback of the exception will be reported. If `False`, a
        one-line summary of the error condition is printed.
    outstream : `io.StringIO`, optional
        Output stream to use for standard messages. Defaults to `sys.stdout`.
    errstream : `io.StringIO`, optional
        Stream to send messages that would normally be sent to standard
        error. Defaults to `sys.stderr`.
    output_mode : `str`, optional
        Output mode to use. Must be one of "verbose", "none", "table",
        "yaml", or "fixed". "yaml" and "fixed" can be modified with a
        "native" suffix to indicate that the output should be a representation
        of the native object type representing the header (which can be
        PropertyList or an Astropy header). Without this modifier, headers
        will be dumped in simple `dict` form.
        "auto" is not allowed by this point.
    write_heading : `bool`, optional
        If `True` and in table mode, write a table heading out before writing
        the content.

    Returns
    -------
    success : `bool`
        `True` if the file was handled successfully, `False` if the file
        could not be processed.
    """
    if output_mode not in OUTPUT_MODES:
        raise ValueError(f"Output mode of '{output_mode}' is not understood.")
    if output_mode == "auto":
        raise ValueError("Output mode cannot be 'auto' here.")

    # This gets in the way in tabular mode
    if output_mode != "table":
        print(f"Analyzing {file}...", file=errstream)

    try:
        if file.endswith(".yaml"):
            md = read_test_file(file)
            if hdrnum != 0:
                # YAML can't have HDUs
                hdrnum = 0
        else:
            md = read_metadata(file, 0)
        if md is None:
            print(f"Unable to open file {file}", file=errstream)
            return False
        if hdrnum != 0:
            mdn = read_metadata(file, int(hdrnum))
            # Astropy does not allow append mode since it does not
            # convert lists to multiple cards. Overwrite for now
            if mdn is not None:
                md = merge_headers([md, mdn], mode="overwrite")
            else:
                print(f"HDU {hdrnum} was not found. Ignoring request.", file=errstream)

        if output_mode.endswith("native"):
            # Strip native and don't change type of md
            output_mode = output_mode[:-len("native")]
        else:
            # Rewrite md as simple dict for output
            md = {k: v for k, v in md.items()}

        if output_mode in ("yaml", "fixed"):

            if output_mode == "fixed":
                fix_header(md)

            # The header should be written out in the insertion order
            print(yaml.dump(md, sort_keys=False), file=outstream)
            return True

        obs_info = ObservationInfo(md, pedantic=True, filename=file)
        if output_mode == "table":
            columns = ["{:{fmt}}".format(getattr(obs_info, c["attr"]), fmt=c["format"])
                       for c in TABLE_COLUMNS]

            if write_heading:
                # Construct headings of the same width as the items
                # we have calculated. Doing this means we don't have to
                # work out for ourselves how many characters will be used
                # for non-strings (especially Quantity)
                headings = []
                separators = []
                for thiscol, defn in zip(columns, TABLE_COLUMNS):
                    width = len(thiscol)
                    headings.append("{:{w}.{w}}".format(defn["label"], w=width))
                    separators.append("-" * width)
                print(" ".join(headings), file=outstream)
                print(" ".join(separators), file=outstream)

            row = " ".join(columns)
            print(row, file=outstream)
        elif output_mode == "verbose":
            print(f"{obs_info}", file=outstream)
        elif output_mode == "none":
            pass
        else:
            raise RuntimeError(f"Output mode of '{output_mode}' not recognized but should be known.")
    except Exception as e:
        if print_trace:
            traceback.print_exc(file=outstream)
        else:
            print(repr(e), file=outstream)
        return False
    return True
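
# Sketch of a direct call with made-up arguments (normally this is driven by
# process_files below):
#
#     ok = read_file("exposure.fits", hdrnum=1, print_trace=False,
#                    output_mode="verbose")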

 

 

def process_files(files, regex, hdrnum, print_trace,
                  outstream=sys.stdout, errstream=sys.stderr,
                  output_mode="auto"):
    """Read and translate metadata from the specified files.

    Parameters
    ----------
    files : iterable of `str`
        The files or directories from which the headers are to be read.
    regex : `str`
        Regular expression string used to filter files when a directory is
        scanned.
    hdrnum : `int`
        The HDU number to read. The primary header is always read and
        merged with the header from this HDU.
    print_trace : `bool`
        If there is an error reading the file and this parameter is `True`,
        a full traceback of the exception will be reported. If `False`, a
        one-line summary of the error condition is printed.
    outstream : `io.StringIO`, optional
        Output stream to use for standard messages. Defaults to `sys.stdout`.
    errstream : `io.StringIO`, optional
        Stream to send messages that would normally be sent to standard
        error. Defaults to `sys.stderr`.
    output_mode : `str`, optional
        Output mode to use for the translated information.
        "auto" switches based on how many files are found.

    Returns
    -------
    okay : `list` of `str`
        All the files that were processed successfully.
    failed : `list` of `str`
        All the files that could not be processed.
    """
    file_regex = re.compile(regex)
    found_files = []

    # Find all the files of interest
    for file in files:
        if os.path.isdir(file):
            for root, dirs, files in os.walk(file):
                for name in files:
                    path = os.path.join(root, name)
                    if os.path.isfile(path) and file_regex.search(name):
                        found_files.append(path)
        else:
            found_files.append(file)

    # Convert "auto" to correct mode
    if output_mode == "auto":
        if len(found_files) > 1:
            output_mode = "table"
        else:
            output_mode = "verbose"

    # Process each file
    failed = []
    okay = []
    heading = True
    for path in sorted(found_files):
        isok = read_file(path, hdrnum, print_trace, outstream, errstream, output_mode,
                         heading)
        heading = False
        if isok:
            okay.append(path)
        else:
            failed.append(path)

    return okay, failed
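
# Sketch of programmatic use; the paths and regex are illustrative only:
#
#     okay, failed = process_files(["raw_data/"], r"\.fits$", 0, False,
#                                  output_mode="table")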

 

 

def main():
    """Read metadata from the supplied files and translate the content to
    standard form.

    Returns
    -------
    status : `int`
        Exit status to be passed to `sys.exit()`. 0 if any of the files
        could be translated, 1 otherwise.
    """
    args = build_argparser().parse_args()

    # Process import requests
    if args.packages:
        for m in args.packages:
            importlib.import_module(m)

    output_mode = args.mode
    if args.quiet:
        output_mode = "none"
    elif args.dumphdr:
        output_mode = "yaml"

    # Set the log level. Convert to upper case to allow the user to
    # specify --log=DEBUG or --log=debug
    numeric_level = getattr(logging, args.log.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Invalid log level: {args.log}")
    logging.basicConfig(level=numeric_level)

    # Main loop over files
    okay, failed = process_files(args.files, args.regex, args.hdrnum,
                                 args.traceback,
                                 output_mode=output_mode)

    if failed:
        print("Files with failed translations:", file=sys.stderr)
        for f in failed:
            print(f"\t{f}", file=sys.stderr)

    if okay:
        # Good status if anything was returned in okay
        return 0
    else:
        return 1