We extracted the following 19 code examples from open-source Python projects to illustrate how to use astropy.io.fits.getheader().
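Before the project examples, here is a minimal sketch of the basic call, assuming a hypothetical local file named example.fits. The optional ext argument selects which HDU's header is read (0 is the primary HDU), and the returned Header object behaves like a dictionary keyed by FITS keywords.

from astropy.io import fits

# Read the primary header of a (hypothetical) local file "example.fits"
header = fits.getheader("example.fits")          # ext=0 by default

# Read the header of the first extension HDU instead
ext_header = fits.getheader("example.fits", ext=1)

# Header objects support dictionary-style access to keywords
print(header.get("NAXIS"))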
def __init__(self, fitsfile, ID=0, quiet=False, clobber=False, cadence='lc', **kwargs):
    ''' '''
    # Read kwargs
    self.ID = ID
    self._season = 12
    self.mission = 'k2'
    self.clobber = clobber
    self.cadence = cadence

    # Initialize preliminary logging
    if not quiet:
        screen_level = logging.DEBUG
    else:
        screen_level = logging.CRITICAL
    everest.utils.InitLog(None, logging.DEBUG, screen_level, False)

    # Load
    self.fitsfile = fitsfile
    self.model_name = pyfits.getheader(self.fitsfile, 1)['MODEL']
    self._weights = None
    self.load_fits()
def split_galex_observations(self, original_path, path_fuv, path_nuv):
    """
    This function ...
    :param original_path:
    :param path_fuv:
    :param path_nuv:
    :return:
    """

    # Loop over the files in the path
    for path, name in fs.files_in_path(original_path, extension="fits", returns=["path", "name"]):

        # Get header
        header = getheader(path)

        # Check band
        band = header["BAND"]
        if band == 1:
            shutil.copy(path, path_fuv)
        elif band == 2:
            shutil.copy(path, path_nuv)
        else:
            raise RuntimeError("Invalid band: " + str(band))

# -----------------------------------------------------------------
def from_file(cls, path):
    """
    This function ...
    :param path:
    :return:
    """

    # Get the header and flatten it (remove references to third axis)
    header = fits.getheader(path)
    header["NAXIS"] = 2
    if "NAXIS3" in header:
        del header["NAXIS3"]

    # Iterate over a copy of the keywords so that deleting entries
    # does not interfere with the iteration itself
    for key in list(header.keys()):
        if "PLANE" in key:
            del header[key]

    return cls(header)

# -----------------------------------------------------------------
def regrid_h2(projDir='/media/DATAPART/projects/GAS/testing/',
              region_list=['L1688', 'NGC1333', 'B18', 'OrionA'],
              file_extension='DR1_rebase3',
              herDir='/media/DATAPART/projects/GAS/otherData/herschel_ayushi/',
              herFile_list=['OphL1688', 'perseus', 'Tau_B18', 'orionA-N']):
    for region_i in range(len(region_list)):
        region = region_list[region_i]
        herFilename = herFile_list[region_i]
        herColFits = herDir + '{0}/Colden_temp/{1}_colden_masked.fits'.format(region, herFilename)
        nh3ImFits = projDir + '{0}/{0}_NH3_11_{1}_mom0_QA_trim.fits'.format(region, file_extension)
        h2_hdu = fits.open(herColFits)
        nh3_hdr = fits.getheader(nh3ImFits)
        new_h2 = FITS_tools.hcongrid.hcongrid(h2_hdu[0].data, h2_hdu[0].header, nh3_hdr)
        new_h2_hdu = fits.PrimaryHDU(new_h2, nh3_hdr)
        new_h2_hduList = fits.HDUList([new_h2_hdu])
        # Fill in the region name in the output filename (the original left the
        # placeholder unformatted, so every region wrote to the same literal path)
        new_h2_hduList.writeto('nh2_regridded/{0}_NH2_regrid.fits'.format(region), clobber=True)
def getGLEAMPhase(filename):
    """
    copied from ngamsGLEAM_VO_JobPlugin
    """
    fileId = os.path.basename(filename)
    hdrs = pyfits.getheader(filename)
    gleam_phase = 1
    getf_frmfn = 0
    if 'ORIGIN' in hdrs:
        if 'WSClean' in hdrs['ORIGIN']:
            gleam_phase = 2
    else:
        getf_frmfn = 1

    if (getf_frmfn == 1 and fileId.split('_v')[1].split('.')[0] == '2'):
        # filename pattern is brittle, only use it if no fits header key: ORIGIN
        gleam_phase = 2

    return gleam_phase
def _create_reductions(self):
    ''' Detect and create valid reductions in path '''

    print('Create reductions from available data')

    wpath = os.walk(self._path)
    for w in wpath:
        subs = w[1]
        if 'raw' in subs:
            # if directory has a raw/ sub-directory, make sure it
            # has FITS files and that they are from a valid
            # sub-system
            reduction_path = w[0]
            fits_files = glob.glob(os.path.join(reduction_path, 'raw', '*.fits'))
            if len(fits_files) != 0:
                hdr = fits.getheader(fits_files[0])
                try:
                    arm = hdr['HIERARCH ESO SEQ ARM']
                    if arm == 'IRDIS':
                        instrument = 'IRDIS'
                        reduction = IRDIS.ImagingReduction(reduction_path)
                        self._IRDIS_reductions.append(reduction)
                    elif arm == 'IFS':
                        instrument = 'IFS'
                        reduction = IFS.Reduction(reduction_path)
                        self._IFS_reductions.append(reduction)
                    else:
                        raise NameError('Unknown arm {0}'.format(arm))
                except:
                    continue

                print(reduction_path)
                print(' ==> {0}, {1} files'.format(instrument, len(fits_files)))
                print()

    # merge all reductions into a single list
    self._reductions = self._IFS_reductions + self._IRDIS_reductions
def get_frame_names(path):
    """
    This function ...
    :param path:
    :return:
    """

    # Load the header
    header = fits.getheader(path)

    # Get the number of planes
    nplanes = headers.get_number_of_frames(header)

    # Initialize a dictionary to contain the frame names and corresponding descriptions
    frames = dict()

    # Look at the properties of each plane
    for i in range(nplanes):

        # Get name and description of plane
        name, description, plane_type = headers.get_frame_name_and_description(header, i, always_call_first_primary=False)
        if plane_type == "frame":
            frames[name] = description

    # Return the frames with their name and description
    return frames

# -----------------------------------------------------------------
def get_mask_names(path):
    """
    This function ...
    :param path:
    :return:
    """

    from ..tools import headers

    # Load the header
    header = fits.getheader(path)

    # Get the number of planes
    nplanes = headers.get_number_of_frames(header)

    # Initialize a dictionary to contain the mask names and corresponding descriptions
    masks = dict()

    # Look at the properties of each plane
    for i in range(nplanes):

        # Get name and description of plane
        name, description, plane_type = headers.get_frame_name_and_description(header, i, always_call_first_primary=False)
        if plane_type == "mask":
            masks[name] = description

    # Return the masks with their name and description
    return masks

# -----------------------------------------------------------------
def regrid_h2(nh3_image, h2_image):
    # Edit to write out regridded image - glue won't work if files not on same grid
    h2fits = fits.open(h2_image)
    nh3_hdr = fits.getheader(nh3_image)
    new_h2 = FITS_tools.hcongrid.hcongrid(h2fits[0].data, h2fits[0].header, nh3_hdr)
    new_h2_hdu = fits.PrimaryHDU(new_h2, nh3_hdr)
    return new_h2_hdu
def wcs(galaxy):
    return WCS(fits.getheader(galaxy.cubepath, 1))
def _getPlateFromFile(self):
    ''' Initialize a Plate from a Cube/RSS File '''

    # Load file
    try:
        self._hdr = fits.getheader(self.filename, 1)
        self.plateid = int(self._hdr['PLATEID'])
    except Exception as e:
        raise MarvinError('Could not initialize via filename: {0}'.format(e))
    else:
        self.data_origin = 'file'
        self._makePdict()
def get_vsini(file_list, vsini_filename=VSINI_FILE):
    """
    Get the vsini for every fits file in file_list. Uses the OBJECT keyword and a
    pre-tabulated vsini table. This is really only useful for my project...

    :param file_list:
    :return:
    """
    vsini = pd.read_csv(vsini_filename, sep='|', skiprows=8)[1:]
    vsini_dict = {}
    prim_vsini = []
    for fname in file_list:
        root = fname.split('/')[-1][:9]
        if root in vsini_dict:
            prim_vsini.append(vsini_dict[root])
        else:
            header = fits.getheader(fname)
            star = header['OBJECT']
            try:
                v = vsini.loc[vsini.Identifier.str.strip() == star]['vsini(km/s)'].values[0]
                prim_vsini.append(float(v) * 0.8)
                vsini_dict[root] = float(v) * 0.8
            except IndexError:
                logging.warn('No vsini found for star {}! No primary star removal will be attempted!'.format(star))
                prim_vsini.append(None)
    return prim_vsini
def has_pixelscale(fits_file):
    """
    Find pixel scale keywords in FITS file

    Parameters
    ----------
    fits_file: str
        Path to a FITS image file
    """
    header = pyfits.getheader(fits_file)
    return [key for key in PIXSCL_KEYS if key in list(header.keys())]
def test_write(self, tmpdir, fname, precision, ans):
    f = str(tmpdir.join(fname))
    self.sp.writefits(f, precision=precision)
    hdr = fits.getheader(f, ext=1)
    assert hdr['tform2'].lower() == ans
def zoom_fits(fitsfile, scalefactor, preserve_bad_pixels=True, **kwargs):
    """
    This function is used to zoom in on a FITS image by interpolating using
    scipy.ndimage.interpolation.zoom. It takes the following arguments:

    :param fitsfile: the FITS file name
    :param scalefactor: the zoom factor along all axes
    :param preserve_bad_pixels: try to set NaN pixels to NaN in the zoomed image.
        Otherwise, bad pixels will be set to zero.
    :param kwargs:
    :return:
    """

    # Get the data array and the header of the FITS file
    arr = pyfits.getdata(fitsfile)
    h = pyfits.getheader(fitsfile)

    h['CRPIX1'] = (h['CRPIX1'] - 1) * scalefactor + scalefactor / 2. + 0.5
    h['CRPIX2'] = (h['CRPIX2'] - 1) * scalefactor + scalefactor / 2. + 0.5
    if 'CD1_1' in h:
        for ii in (1, 2):
            for jj in (1, 2):
                k = "CD%i_%i" % (ii, jj)
                if k in h:  # allow for CD1_1 but not CD1_2
                    h[k] = h[k] / scalefactor
    elif 'CDELT1' in h:
        h['CDELT1'] = h['CDELT1'] / scalefactor
        h['CDELT2'] = h['CDELT2'] / scalefactor

    bad_pixels = np.isnan(arr) + np.isinf(arr)
    arr[bad_pixels] = 0

    upscaled = scipy.ndimage.zoom(arr, scalefactor, **kwargs)

    if preserve_bad_pixels:
        bp_up = scipy.ndimage.zoom(bad_pixels, scalefactor, mode='constant', cval=np.nan, order=0)
        upscaled[bp_up] = np.nan

    up_hdu = pyfits.PrimaryHDU(data=upscaled, header=h)

    return up_hdu

# -----------------------------------------------------------------
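A rough usage sketch for the zoom_fits() example above, assuming a hypothetical local file named image.fits; extra keyword arguments such as order are passed through to scipy.ndimage.zoom, and the returned PrimaryHDU can be written back out with astropy.

# Hypothetical usage: double the resolution of image.fits and save the result
zoomed_hdu = zoom_fits("image.fits", scalefactor=2.0, order=1)
zoomed_hdu.writeto("image_zoom2.fits", overwrite=True)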