Package ivs :: Package inout :: Module fits
[hide private]
[frames] | [no frames]

Source Code for Module ivs.inout.fits

  1  # -*- coding: utf-8 -*- 
  2  """ 
  3  Read and write FITS files. 
  4  """ 
  5  import gzip 
  6  import logging 
  7  import os 
  8  import astropy.io.fits as pf 
  9   
 10  import numpy as np 
 11  from ivs.aux import loggers 
 12  from ivs.units import conversions 
 13   
 14  logger = logging.getLogger("IO.FITS") 
 15  logger.addHandler(loggers.NullHandler()) 
 16   
 17  #{ Input 
 18   
def read_spectrum(filename, return_header=False):
    """
    Read a standard 1D spectrum from the primary HDU of a FITS file.

    @param filename: FITS filename
    @type filename: str
    @param return_header: return header information as dictionary
    @type return_header: bool
    @return: wavelength, flux(, header)
    @rtype: array, array(, dict)
    """
    #-- open the file only once (the original called getdata and getheader,
    #   each of which opens the file separately); copy the data so it stays
    #   valid after the (possibly memory-mapped) file is closed
    with pf.open(filename) as ff:
        flux = np.array(ff[0].data)
        header = ff[0].header

    #-- make the equidistant wavelength grid using the FITS standard info
    #   in the header (CRPIX1 is 1-based, hence the -1)
    ref_pix = int(header["CRPIX1"]) - 1
    dnu = float(header["CDELT1"])
    nu0 = float(header["CRVAL1"]) - ref_pix*dnu
    nun = nu0 + (len(flux)-1)*dnu
    wave = np.linspace(nu0, nun, len(flux))
    #-- fix wavelengths for logarithmic sampling
    if 'ctype1' in header and header['CTYPE1']=='log(wavelength)':
        wave = np.exp(wave)

    logger.debug('Read spectrum %s'%(filename))

    if return_header:
        return wave, flux, header
    else:
        return wave, flux
50 51
def read_corot(fits_file, return_header=False, type_data='hel',
               remove_flagged=True):
    """
    Read CoRoT data from a CoRoT FITS file.

    Both SISMO and EXO data are recognised and extracted accordingly.

    type_data is one of:
        - type_data='raw'
        - type_data='hel': heliocentric unequidistant
        - type_data='helreg': heliocentric equidistant

    @param fits_file: CoRoT FITS file name
    @type fits_file: string
    @param return_header: return header information as dictionary
    @type return_header: bool
    @param type_data: type of data to return
    @type type_data: string (one of 'raw','hel' or 'helreg')
    @param remove_flagged: remove flagged datapoints
    @type remove_flagged: True
    @return: CoRoT data (times, flux, error, flags)
    @rtype: array, array, array, array(, header)
    """
    #-- read in the FITS file
    # headers: ['DATE', 'DATEJD', 'DATEHEL', 'STATUS', 'WHITEFLUX', 'WHITEFLUXDEV', 'BG', 'CORREC']
    fits_file_ = pf.open(fits_file)
    #-- grab the primary header up front: the original only assigned it inside
    #   the SISMO branch, so return_header=True raised a NameError on EXO data
    header = fits_file_[0].header
    if header['hlfccdid'][0]=='A':
        #-- SISMO data: columns are positional
        data = fits_file_[type_data].data
        times, flux, error, flags = (data.field(0), data.field(1),
                                     data.field(2), data.field(3))
        logger.debug('Read CoRoT SISMO file %s'%(fits_file))
    elif header['hlfccdid'][0]=='E':
        #-- EXO data: columns are accessed by name
        bintable = fits_file_['bintable']
        times = bintable.data.field('datehel')
        if 'blueflux' in bintable.columns.names:
            blueflux, e_blueflux = bintable.data.field('blueflux'), bintable.data.field('bluefluxdev')
            greenflux, e_greenflux = bintable.data.field('greenflux'), bintable.data.field('greenfluxdev')
            redflux, e_redflux = bintable.data.field('redflux'), bintable.data.field('redfluxdev')
            #-- chromatic light curves
            if type_data=='colors':
                flux = np.column_stack([blueflux,greenflux,redflux])
                error = np.column_stack([e_blueflux,e_greenflux,e_redflux]).min(axis=1)
            #-- white light curves
            else:
                flux = blueflux + greenflux + redflux
                error = np.sqrt(e_blueflux**2 + e_greenflux**2 + e_redflux**2)
        else:
            flux, error = bintable.data.field('whiteflux'), bintable.data.field('whitefluxdev')
        flags = bintable.data.field('status')
    #-- close in all cases (the original leaked the handle for EXO files)
    fits_file_.close()

    # remove flagged datapoints if asked
    if remove_flagged:
        keep1 = (flags==0)
        keep2 = (error!=-1)
        logger.info('Remove: flagged (%d) no valid error (%d) datapoints (%d)'%(len(keep1)-keep1.sum(),len(keep2)-keep2.sum(),len(keep1)))
        keep = keep1 & keep2
        times,flux,error,flags = times[keep], flux[keep], error[keep], flags[keep]

    # convert times to heliocentric JD
    times = conversions.convert('MJD','JD',times,jtype='corot')

    if return_header:
        return times, flux, error, flags, header
    else:
        return times, flux, error, flags
122 123
def read_fuse(ff,combine=True,return_header=False):
    """
    Read FUSE spectrum.

    Modified JD: JD-2400000.5.

    Do 'EXPEND'-'EXPSTART'

    V_GEOCEN,V_HELIO

    ANO: all night only: data obtained during orbital night (highest SNR when airglow is not an issue)
    ALL: all: highest SNR with minimal airglow contamination

    Preference of ANO over ALL for science purpose.

    Use TTAGfcal files.

    @param ff: FUSE FITS file name
    @type ff: str
    @param combine: merge, sort and clean the segments into one spectrum
    @type combine: bool
    @param return_header: also return the primary header
    @type return_header: bool
    @return: wavelength(s), flux(es), error(s)(, header)
    @rtype: array, array, array(, header)
    """
    #-- remember the filename: 'ff' is rebound to the HDUList below, and the
    #   original logged the HDUList object instead of the file name
    filename = ff
    ff = pf.open(ff)
    hdr = ff[0].header
    if hdr['SRC_TYPE']=='EE':
        logger.warning("Warning: %s is not trustworthy (see manual)"%(filename))
    waves,fluxs,errrs = [],[],[]
    for seg in range(1,len(ff)):
        #-- segments without data are skipped
        if ff[seg].data is None: continue
        waves.append(ff[seg].data.field('WAVE'))
        fluxs.append(ff[seg].data.field('FLUX'))
        errrs.append(ff[seg].data.field('ERROR'))
    ff.close()

    if combine:
        #-- merge the segments, sort on wavelength and drop non-positive fluxes
        waves = np.hstack(waves)
        fluxs = np.hstack(fluxs)
        errrs = np.hstack(errrs)
        sa = np.argsort(waves)
        waves,fluxs,errrs = waves[sa],fluxs[sa],errrs[sa]
        keep = fluxs>0
        waves,fluxs,errrs = waves[keep],fluxs[keep],errrs[keep]

    if return_header:
        return waves,fluxs,errrs,hdr
    else:
        return waves,fluxs,errrs
167
def read_iue(filename,return_header=False):
    """
    Read IUE spectrum

    Instrumental profiles: http://starbrite.jpl.nasa.gov/pds/viewInstrumentProfile.jsp?INSTRUMENT_ID=LWR&INSTRUMENT_HOST_ID=IUE

    Better only use .mxlo for reliable absolute calibration!!

    LWP

    - Large-aperture spectral resolution is best between 2700 and 2900 A
    with an average FWHM of 5.2 A and decreases to approximately 8.0 A on
    either side of this range. Small-aperture resolution is optimal
    between 2400 and 3000 A with an average FWHM of 5.5 A and decreases to
    8.1 A at the extreme wavelengths.

    SWP

    - The best resolution occurs around 1200 A, with a FWHM of 4.6 A in the
    large aperture and 3.0 A in the small aperture, and gradually worsens
    towards longer wavelengths: 6.7 A at 1900 A in the large aperture and
    6.3 A in the small. On average, the small-aperture resolution is
    approximately 10% better than the large-aperture resolution.

    @param filename: IUE FITS file name (.mxlo or .mxhi)
    @type filename: str
    @param return_header: also return the primary header
    @type return_header: bool
    @return: wavelength, flux, error(, header)
    @rtype: array, array, array(, header)
    """
    ff = pf.open(filename)
    header = ff[0].header
    if os.path.splitext(filename)[1]=='.mxlo':
        #-- low-resolution spectrum
        try:
            flux = ff[1].data.field('flux')[0]
            error = ff[1].data.field('sigma')[0]
        except KeyError:
            #-- some products only carry the absolutely calibrated flux and
            #   no error column (original used a bare except here)
            flux = ff[1].data.field('abs_cal')[0]
            error = flux
        nu0 = ff[1].data.field('wavelength')[0]
        dnu = ff[1].data.field('deltaw')[0]
        # NOTE(review): endpoint nu0+len(flux)*dnu makes the linspace step
        # slightly larger than dnu (read_spectrum uses (len-1)*dnu); kept
        # as-is to preserve existing output -- confirm against IUE docs
        nun = nu0 + len(flux)*dnu
        wavelength = np.linspace(nu0,nun,len(flux))
    elif os.path.splitext(filename)[1]=='.mxhi':
        #-- high-resolution echelle spectrum: collect the orders one by one
        orders_w = []
        orders_f = []
        orders_e = []
        for i in range(len(ff[1].data.field('abs_cal'))):
            flux = ff[1].data.field('abs_cal')[i]
            error = ff[1].data.field('noise')[i]
            quality = ff[1].data.field('quality')[i]
            npoints = ff[1].data.field('npoints')[i]
            startpix = ff[1].data.field('startpix')[i]
            #-- keep only the valid pixel range of this order
            flux = flux[startpix:startpix+npoints+1]
            error = error[startpix:startpix+npoints+1]
            quality = quality[startpix:startpix+npoints+1]
            #-- zero out flagged pixels; they are stripped below
            flux[quality!=0] = 0
            nu0 = ff[1].data.field('wavelength')[i]
            dnu = ff[1].data.field('deltaw')[i]
            nun = nu0 + len(flux)*dnu
            wavelength = np.linspace(nu0,nun,len(flux))
            #-- zero out the part of the previous order that overlaps this one
            if len(orders_f)>0:
                orders_f[-1][wavelength[0]<orders_w[-1]] = 0
            orders_w.append(wavelength)
            orders_f.append(flux)
            orders_e.append(error)
        wavelength = np.hstack(orders_w)
        flux = np.hstack(orders_f)
        error = np.hstack(orders_e)

        #-- remove the zeroed (flagged/overlapping) pixels
        wavelength = wavelength[flux!=0]
        error = error[flux!=0]
        flux = flux[flux!=0]

    logger.info("IUE spectrum %s read"%(filename))
    ff.close()
    if not return_header:
        return wavelength,flux,error
    else:
        return wavelength,flux,error,header
244 245 #} 246 247 #{ Generic reading 248
def read2recarray(fits_file,ext=1,return_header=False):
    """
    Read the contents of a FITS file to a record array.

    Should add a test that the strings were not chopped of...

    @param fits_file: FITS file name, or an already opened HDUList
    @type fits_file: str or pf.HDUList
    @param ext: extension to read
    @type ext: int
    @param return_header: also return the header (without the table-structure
    keywords TTYPE/TUNIT/TFORM)
    @type return_header: bool
    @return: data(, header)
    @rtype: record array(, dict)
    @raise TypeError: when fits_file is neither a filename nor a HDUList
    """
    #-- map FITS column formats to numpy scalar types; the original used the
    #   deprecated np.bool/np.int aliases, removed in numpy>=1.24
    dtype_translator = dict(L=bool,D=np.float64,E=np.float32,J=int)
    if isinstance(fits_file,str):
        ff = pf.open(fits_file)
    elif isinstance(fits_file,pf.HDUList):
        ff = fits_file
    else:
        #-- the original fell through silently and crashed later with NameError
        raise TypeError("fits_file must be a filename or a pf.HDUList")
    data = ff[ext].data
    names = ff[ext].columns.names
    formats = ff[ext].columns.formats
    dtypes = []
    for name,dtype in zip(names,formats):
        if 'A' in dtype:
            #-- string column: fixed 60-char width (long strings may be cut)
            dtypes.append((name,'S60'))
        else:
            dtypes.append((name,dtype_translator[dtype]))
    dtypes = np.dtype(dtypes)
    #-- cast every column separately (np.cast was removed in numpy 2.0;
    #   .astype is the supported equivalent)
    data = [data.field(name).astype(dtypes[i]) for i,name in enumerate(names)]
    data = np.rec.array(data,dtype=dtypes)
    #-- collect the header minus the table-structure keywords
    header = {}
    for key in ff[ext].header.keys():
        if 'TTYPE' in key: continue
        if 'TUNIT' in key: continue
        if 'TFORM' in key: continue
        header[key] = ff[ext].header[key]
    #-- only close the file if we opened it ourselves
    if isinstance(fits_file,str):
        ff.close()
    if not return_header:
        return data
    else:
        return data,header
284 285 286 287 #} 288 289 290 #{ Output 291
def write_primary(filename,data=None,header_dict=None):
    """
    Initiate a FITS file by writing to the primary HDU.

    If data is not given, a 1x1 zero array will be added.

    @param filename: name of the FITS file to create
    @type filename: str
    @param data: data for the primary HDU
    @type data: ndarray or None
    @param header_dict: keywords to set in the primary header
    @type header_dict: dict or None
    @return: the filename
    @rtype: str
    """
    #-- avoid a mutable default argument (original used header_dict={})
    if header_dict is None:
        header_dict = {}
    if data is None:
        data = np.array([[0]])
    hdulist = pf.HDUList([pf.PrimaryHDU(data)])
    for key in header_dict:
        #-- item assignment is the astropy idiom; the legacy two-argument
        #   Header.update(key, value) call was removed from astropy
        hdulist[0].header[key] = header_dict[key]
    hdulist.writeto(filename)
    hdulist.close()
    return filename
306 307
def write_recarray(recarr,filename,header_dict=None,units=None,ext='new',close=True):
    """
    Write or add a record array to a FITS file.

    If 'filename' refers to an existing file, the record array will be added
    (ext='new') to the HDUlist or replace an existing HDU (ext=integer). Else,
    a new file will be created.

    Units can be given as a dictionary with keys the same names as the column
    names of the record array.

    A header_dictionary can be given, it is used to update an existing header
    or create a new one if the extension is new.

    @param recarr: record array to write
    @param filename: FITS file name, or an already opened HDUList
    @param header_dict: header keywords to set on the table HDU
    @param units: per-column units ('NA' when missing)
    @param ext: 'new' to append, or an extension name/index to replace
    @param close: close the file afterwards (returns the HDUList otherwise)
    @return: the filename (close=True) or the HDUList (close=False)
    """
    #-- avoid mutable default arguments (original used {} defaults)
    if header_dict is None: header_dict = {}
    if units is None: units = {}
    #-- create the file with an empty primary HDU if it does not exist yet
    is_file = isinstance(filename,str) and os.path.isfile(filename)
    if isinstance(filename,str) and not os.path.isfile(filename):
        primary = np.array([[0]])
        hdulist = pf.HDUList([pf.PrimaryHDU(primary)])
        hdulist.writeto(filename)
        hdulist.close()

    if is_file or isinstance(filename,str):
        hdulist = pf.open(filename,mode='update')
    else:
        #-- an opened HDUList was passed in directly
        hdulist = filename

    #-- create the table HDU: translate each column's numpy dtype to a FITS
    #   column format ('fmt' instead of shadowing the builtin 'format')
    cols = []
    for i,name in enumerate(recarr.dtype.names):
        fmt = recarr.dtype[i].str.lower().replace('|','').replace('>','')
        fmt = fmt.replace('b1','L').replace('<','')
        if 's' in fmt:  # string columns become '<width>A' (PyFITS >= 3.3)
            fmt = fmt.replace('s','') + 'A'
        unit = name in units and units[name] or 'NA'
        cols.append(pf.Column(name=name,format=fmt,array=recarr[name],unit=unit))
    tbhdu = pf.BinTableHDU.from_columns(pf.ColDefs(cols))

    #-- take care of the header: long keywords need the HIERARCH convention.
    #   The prefix check is key[:8] ('HIERARCH' is 8 chars); the original
    #   compared key[:9] to the 8-char literal, so the guard never matched
    #   and already-prefixed keys were prefixed twice.
    if len(header_dict):
        for key in header_dict:
            if (len(key)>8) and (not key in tbhdu.header.keys()) and (not key[:8]=='HIERARCH'):
                key_ = 'HIERARCH '+key
            else:
                key_ = key
            tbhdu.header[key_] = header_dict[key]
    if ext!='new':
        tbhdu.header['EXTNAME'] = ext

    #-- append as a new extension, or overwrite an existing one
    extnames = [iext.header['EXTNAME'] for iext in hdulist if ('extname' in iext.header.keys()) or ('EXTNAME' in iext.header.keys())]
    if ext=='new' or not ext in extnames:
        logger.info('Creating new extension %s'%(ext))
        hdulist.append(tbhdu)
        ext = -1
    else:
        logger.info('Overwriting existing extension %s'%(ext))
        hdulist[ext] = tbhdu

    if close:
        hdulist.close()
        return filename
    else:
        return hdulist
373
def write_array(arr,filename,names=(),units=(),header_dict=None,ext='new',close=True):
    """
    Write or add an array to a FITS file.

    If 'filename' refers to an existing file, the list of arrays will be added
    (ext='new') to the HDUlist or replace an existing HDU (ext=integer). Else,
    a new file will be created.

    Names and units should be given as a list of strings, in the same order as
    the list of arrays.

    A header_dictionary can be given, it is used to update an existing header
    or create a new one if the extension is new.

    Instead of writing the file, you can give a hdulist and append to it.
    Supply a HDUList for 'filename', and set close=False

    @param arr: list of arrays, one per column
    @param filename: FITS file name, or an already opened HDUList
    @param names: column names, in the same order as arr
    @param units: column units (dict keyed on name, or a sequence)
    @param header_dict: header keywords to set on the table HDU
    @param ext: 'new' to append, or an extension index to replace
    @param close: close the file afterwards (returns the HDUList otherwise)
    @return: the filename (close=True) or the HDUList (close=False)
    """
    #-- avoid a mutable default argument (original used header_dict={})
    if header_dict is None: header_dict = {}
    #-- create the file with an empty primary HDU if it does not exist yet
    if isinstance(filename,str) and not os.path.isfile(filename):
        primary = np.array([[0]])
        hdulist = pf.HDUList([pf.PrimaryHDU(primary)])
        hdulist.writeto(filename)
        #-- close the handle (the original leaked it; cf. write_recarray)
        hdulist.close()

    if isinstance(filename,str):
        hdulist = pf.open(filename,mode='update')
    else:
        #-- an opened HDUList was passed in directly
        hdulist = filename

    #-- create the table HDU: translate each column's numpy dtype to a FITS
    #   column format ('fmt' instead of shadowing the builtin 'format')
    cols = []
    for i,name in enumerate(names):
        fmt = arr[i].dtype.str.lower().replace('|','').replace('s','a').replace('>','')
        fmt = fmt.replace('b1','L').replace('<','')
        if fmt=='f8':
            fmt = 'D'
        if isinstance(units,dict):
            unit = name in units and units[name] or 'NA'
        elif len(units)>i:
            unit = units[i]
        else:
            unit = 'NA'
        cols.append(pf.Column(name=name,format=fmt,array=arr[i],unit=unit))
    tbhdu = pf.BinTableHDU.from_columns(pf.ColDefs(cols))

    #-- append as a new extension, or overwrite an existing one
    if ext=='new' or ext==len(hdulist):
        hdulist.append(tbhdu)
        ext = -1
    else:
        hdulist[ext] = tbhdu

    #-- take care of the header:
    if len(header_dict):
        for key in header_dict:
            hdulist[ext].header[key] = header_dict[key]

    if close:
        hdulist.close()
        #-- return the filename for consistency with write_recarray
        #   (original returned None here; callers ignoring the return
        #   are unaffected)
        return filename
    else:
        return hdulist
433 434 #} 435