Commit 2a2675a2 authored by Monica Rainer

Add keyword BJD_TDB

parent aba1331f
from __future__ import print_function, division
import numpy as np
from astroTimeLegacy import premat, daycnv, precess, helio_jd
from astropy import time, coordinates as coord, units as u
def idlMod(a, b):
"""
......@@ -501,7 +502,18 @@ def helcorr(obs_long, obs_lat, obs_alt, ra2000, dec2000, jd, equinox=2000.0, pma
# Calculate heliocentric julian date
rjd = jd-2.4e6
hjd = helio_jd(rjd, ra, dec) + 2.4e6
#hjd = helio_jd(rjd, ra, dec) + 2.4e6 # original computation (used until 4 May 2018)
tng = coord.EarthLocation.from_geodetic(obs_long,obs_lat,obs_alt)
target = coord.SkyCoord(ra,dec, unit=(u.deg,u.deg))
times = time.Time(rjd, format='mjd', scale='utc', location=tng)
ltt_bary = times.light_travel_time(target)
ltt_helio = times.light_travel_time(target, 'heliocentric')
bjd = times.tdb + ltt_bary
bjd = bjd.value + 2.4e6
hjd = times.utc + ltt_helio
hjd = hjd.value + 2.4e6
# DIURNAL VELOCITY (see IRAF task noao.astutil.rvcorrect)
# convert geodetic latitude into geocentric latitude to correct
......@@ -561,4 +573,4 @@ def helcorr(obs_long, obs_lat, obs_alt, ra2000, dec2000, jd, equinox=2000.0, pma
print('----- HELCORR.PRO - DEBUG INFO - END -----')
print('')
return corr, hjd, vbar, vdiurnal
return corr, hjd, bjd, vbar, vdiurnal
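
The new HJD/BJD computation above relies on astropy's light_travel_time. A minimal standalone sketch of the same steps, with purely illustrative site coordinates, target position and date (the pipeline takes these from the observation header):

from astropy import time, coordinates as coord, units as u

# Illustrative values only -- not the ones used by the pipeline.
obs_long, obs_lat, obs_alt = -17.889, 28.754, 2387.2   # site longitude [deg], latitude [deg], altitude [m]
ra, dec = 86.82, -51.07                                # target coordinates [deg]
rjd = 58243.5                                          # time value as built in helcorr: jd - 2.4e6, passed with format='mjd'

site = coord.EarthLocation.from_geodetic(obs_long*u.deg, obs_lat*u.deg, obs_alt*u.m)
target = coord.SkyCoord(ra, dec, unit=(u.deg, u.deg))
times = time.Time(rjd, format='mjd', scale='utc', location=site)

ltt_bary = times.light_travel_time(target)                   # barycentric light travel time
ltt_helio = times.light_travel_time(target, 'heliocentric')  # heliocentric light travel time

bjd = (times.tdb + ltt_bary).value + 2.4e6    # BJD_TDB, returned as the new bjd value
hjd = (times.utc + ltt_helio).value + 2.4e6   # HJD_UTC, replaces the old helio_jd result
print(bjd, hjd)
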
......@@ -42,14 +42,18 @@ def read_usr_config(path):
usr_cfg_path = os.path.join(os.getcwd(), 'gofio.cfg')
if os.path.isfile(usr_cfg_path):
#print 'qui'
try:
USRCONFIG = read_usr_config(usr_cfg_path)
except:
USRCONFIG = {}
USRCONFIG['OFFLINE'] = False
#print USRCONFIG
else:
#print 'qui2'
USRCONFIG ={}
USRCONFIG['OFFLINE'] = False
#print USRCONFIG
#
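
The block above can be read as a small fallback loader: use gofio.cfg from the working directory when it parses cleanly, otherwise default to OFFLINE = False. A hedged sketch of the same logic as a function (load_usr_config and the parser argument are illustrative names, not part of the pipeline):

import os

def load_usr_config(parser, filename='gofio.cfg'):
    # parser is assumed to behave like read_usr_config: path in, dict out.
    path = os.path.join(os.getcwd(), filename)
    if os.path.isfile(path):
        try:
            return parser(path)
        except Exception:      # any read/parse failure falls back to the defaults
            pass
    return {'OFFLINE': False}
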
......@@ -71,7 +75,7 @@ CONFIG['TMP_DIR'] = tempfile.gettempdir()
CONFIG['APP_DIR'] = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
#CONFIG['BASE_RAW'] = '/mnt/nfs/RAW'
CONFIG['BASE_RAW'] = '/home/monica/Documenti/gianob/gianobdrs/varie/commissioning_marzo/raw'
CONFIG['BASE_RAW'] = '/Users/monica/Documents/GOFIO/gofio/raw'
CONFIG['BASE_RED_DIR'] = os.path.join(CONFIG['APP_DIR'],'reduced')
#CONFIG['BASE_RED_DIR'] = '/mnt/nfs/REDUCED'
......@@ -150,17 +154,22 @@ CONFIG['SPEC_USED'] = (' '.join((CONFIG['KEY_DRS'],'SPEC')),'Images combined')
CONFIG['SPEC_MJD'] = (' '.join((CONFIG['KEY_DRS'],'MJD')),'MJD of the relative image')
CONFIG['SNR'] = (' '.join((CONFIG['KEY_DRS'],'SNR')),'SNR in the middle of the order',)
CONFIG['STRAIGHT_PAR'] = OrderedDict([ ('P1',' '.join((CONFIG['KEY_DRS'],'STR2DP1'))), ('P2',' '.join((CONFIG['KEY_DRS'],'STR2DP2'))), ('P3',' '.join((CONFIG['KEY_DRS'],'STR2DP3'))), ('P4',' '.join((CONFIG['KEY_DRS'],'STR2DP4'))), ('P5',' '.join((CONFIG['KEY_DRS'],'STR2DP5'))), ('P6',' '.join((CONFIG['KEY_DRS'],'STR2DP6'))) ])
CONFIG['DRS_MJD'] = (' '.join((CONFIG['KEY_DRS'],'MJD')),'MJD of the combined image')
CONFIG['DRS_MJD'] = (' '.join((CONFIG['KEY_DRS'],'MJD')),'MJD of the (combined) image')
CONFIG['BERV'] = (' '.join((CONFIG['KEY_DRS'],'BERV')),'Barycentric correction [km/s]')
CONFIG['HJD'] = (' '.join((CONFIG['KEY_DRS'],'HJD')),'HJD of the combined image')
CONFIG['HJD'] = (' '.join((CONFIG['KEY_DRS'],'HJD')),'HJD_UTC of the (combined) image')
CONFIG['BJD'] = (' '.join((CONFIG['KEY_DRS'],'BJD')),'BJD_TDB of the (combined) image')
CONFIG['AIRMASS'] = (' '.join((CONFIG['KEY_DRS'],'AIRMASS')),'Airmass of the combined image')
CONFIG['MASTERFLAT'] = (' '.join((CONFIG['KEY_DRS'],'FLAT')),'Flat-field used for the reduction')
CONFIG['MASTERLAMP'] = (' '.join((CONFIG['KEY_DRS'],'LAMP')),'Calibration lamp used for the reduction')
CONFIG['KEY_WEXT'] = (' '.join((CONFIG['KEY_DRS'],'WEXT')),'Standard extraction window [pixel]')
CONFIG['DRS_VERSION'] = (' '.join((CONFIG['KEY_DRS'],'VERSION')),'GOFIO version')
# Wavelength calibration
CONFIG['CAL_FUNC'] = {'Oliva':True, 'Poly3':False} # function used for wavelength calibration (Tino Oliva Polynomial or 3-degree polynomial)
#CONFIG['CAL_FUNC'] = {'Oliva':False, 'Poly3':True}
CONFIG['CAL_FAILED'] = (' '.join((CONFIG['KEY_DRS'],'CAL ORDER')),'Calibration successful')
CONFIG['WLFIT'] = (' '.join((CONFIG['KEY_DRS'],'CAL WLFUNC')),'Function used for wavelength calibration')
if CONFIG['CAL_FUNC']['Oliva']:
......@@ -271,7 +280,7 @@ CONFIG['SHIFT_Y'] = 20 # expected bottom position of flat signal on the lowest o
#
CONFIG['DARK_MEAN'] = {10:[10.0,10.0,10.0,10.0], 30:[25.0,25.0,25.0,25.0], 60:[50.0,50.0,50.0,50.0], 100:[80.0,80.0,80.0,80.0], 200:[800.0,800.0,800.0,800.0], 300:[800.0,800.0,800.0,800.0], 600:[800.0,800.0,800.0,800.0]} # Dark quality check: mean signal below this number (4 quadrant for 7 exposure time)
CONFIG['FLATSIGNAL'] = 300 # lowest flat signal expected to pass quality check
CONFIG['FLATSIGNAL'] = 2000 # lowest flat signal expected to pass quality check
CONFIG['SCIENCECHECK'] = [500,540,140,160] # region x1:x2,y1:y2 for images' quality check
#CONFIG['NODSIGNAL'] = 2*CONFIG['DARK_MEAN']['quadrant'][0] # lowest image signal expected to pass quality check
CONFIG['NODSIGNAL'] = 0.0 # lowest image signal expected to pass quality check
......@@ -282,25 +291,25 @@ CONFIG['RV_STEP'] = 0.2 # km/s step
CONFIG['RV_EXTEND'] = 16
if USRCONFIG['OFFLINE']:
for key in USRCONFIG.keys():
CONFIG[key] = USRCONFIG[key]
#if USRCONFIG['OFFLINE']:
# for key in USRCONFIG.keys():
# CONFIG[key] = USRCONFIG[key]
CONFIG['RAW_DIR'] = os.path.join(CONFIG['BASE_RAW'],CONFIG['DATE'])
CONFIG['OFFLINE_DIR'] = os.path.join(CONFIG['BASE_RED_DIR'],CONFIG['DATE'])
CONFIG['RED_DIR'] = os.path.join(CONFIG['OFFLINE_DIR'],'offline')
CONFIG['RED_CALIB'] = os.path.join(CONFIG['RED_DIR'],'CALIB')
CONFIG['RED_STR'] = os.path.join(CONFIG['RED_DIR'],'STR')
CONFIG['LOG_FILE'] = os.path.join(CONFIG['RED_DIR'], 'drs.log')
CONFIG['RAMP_NAME'] = 'ramps' + CONFIG['DATE'] + '.db'
CONFIG['DB_RAMP'] = os.path.join(CONFIG['BASE_RAMP'],CONFIG['RAMP_NAME'])
# CONFIG['RAW_DIR'] = os.path.join(CONFIG['BASE_RAW'],CONFIG['DATE'])
# CONFIG['OFFLINE_DIR'] = os.path.join(CONFIG['BASE_RED_DIR'],CONFIG['DATE'])
# CONFIG['RED_DIR'] = os.path.join(CONFIG['OFFLINE_DIR'],'offline')
# CONFIG['RED_CALIB'] = os.path.join(CONFIG['RED_DIR'],'CALIB')
# CONFIG['RED_STR'] = os.path.join(CONFIG['RED_DIR'],'STR')
# CONFIG['LOG_FILE'] = os.path.join(CONFIG['RED_DIR'], 'drs.log')
# CONFIG['RAMP_NAME'] = 'ramps' + CONFIG['DATE'] + '.db'
# CONFIG['DB_RAMP'] = os.path.join(CONFIG['BASE_RAMP'],CONFIG['RAMP_NAME'])
CONFIG['DB_OFFLINE'] = ''.join(('db_',CONFIG['DATE'],'_offline.db'))
CONFIG['DB_OFFLINE_PATH'] = os.path.join(CONFIG['WEBUI_DB_DIR'], CONFIG['DB_OFFLINE'])
CONFIG['DB_NIGHT'] = CONFIG['DB_OFFLINE']
CONFIG['DB_NIGHT_PATH'] = CONFIG['DB_OFFLINE_PATH']
# CONFIG['DB_OFFLINE'] = ''.join(('db_',CONFIG['DATE'],'_offline.db'))
# CONFIG['DB_OFFLINE_PATH'] = os.path.join(CONFIG['WEBUI_DB_DIR'], CONFIG['DB_OFFLINE'])
# CONFIG['DB_NIGHT'] = CONFIG['DB_OFFLINE']
# CONFIG['DB_NIGHT_PATH'] = CONFIG['DB_OFFLINE_PATH']
maskc = ''.join(('GIANOB_MASKC_',CONFIG['DATE'],'.fits'))
CONFIG['MASK_C'] = os.path.join(CONFIG['RES_DIR'], maskc)
# maskc = ''.join(('GIANOB_MASKC_',CONFIG['DATE'],'.fits'))
# CONFIG['MASK_C'] = os.path.join(CONFIG['RES_DIR'], maskc)
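
For reference, the new BJD entry follows the same (keyword, comment) convention as the existing HJD one and is written to the product header the same way. A minimal sketch with astropy.io.fits, where the KEY_DRS prefix is a placeholder assumption (the real prefix comes from CONFIG['KEY_DRS']) and the values are illustrative:

from astropy.io import fits

KEY_DRS = 'HIERARCH TNG GOFIO DRS'   # assumption: stand-in for CONFIG['KEY_DRS']
HJD = (' '.join((KEY_DRS, 'HJD')), 'HJD_UTC of the (combined) image')
BJD = (' '.join((KEY_DRS, 'BJD')), 'BJD_TDB of the (combined) image')

hdr = fits.Header()
hdr[HJD[0]] = (2458243.5119, HJD[1])   # value, comment -- as done later via heaspe[CONFIG['HJD'][0]]
hdr[BJD[0]] = (2458243.5123, BJD[1])
print(repr(hdr))
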
......@@ -375,7 +375,8 @@ class GBNodding():
extflat = varie.extract(norflat, optSpectrum[x], x1, x2, profile, fgaineff, froneff)
#print extflat
with np.errstate(divide='ignore', invalid='ignore'):
fsnr[x] = (np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))))/(fgaineff*(extflat*meanflat))
#fsnr[x] = (np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))))/(fgaineff*(extflat*meanflat))
fsnr[x] = np.true_divide(np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))),fgaineff*(extflat*meanflat))
fsnr[fsnr==np.inf] = 0
fsnr[fsnr==-np.inf] = 0
fsnr = np.nan_to_num(fsnr)
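
The change above replaces a plain "/" with np.true_divide and keeps the existing inf/NaN cleanup. A small self-contained sketch of that pattern with illustrative numbers (a zero-flux pixel no longer propagates inf or NaN into the SNR array):

import numpy as np

gaineff, froneff = 2.2, 5.0           # illustrative effective gain [e-/ADU] and read-out noise [e-]
flux = np.array([0.0, 10.0, 2500.0])  # illustrative extracted flat flux; note the zero

with np.errstate(divide='ignore', invalid='ignore'):
    signal = gaineff * flux
    fsnr = np.true_divide(np.sqrt(froneff**2 + signal), signal)

fsnr[fsnr == np.inf] = 0
fsnr[fsnr == -np.inf] = 0
fsnr = np.nan_to_num(fsnr)
print(fsnr)
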
......@@ -384,7 +385,8 @@ class GBNodding():
fsnr[x] = np.zeros(len(optSpectrum[x]))
with np.errstate(divide='ignore', invalid='ignore'):
ssnr = (np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])))/(gaineff*nspec*optSpectrum[x])
#ssnr = (np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])))/(gaineff*nspec*optSpectrum[x])
ssnr = np.true_divide(np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])),gaineff*nspec*optSpectrum[x])
snr[x] = 1.0/(np.sqrt(ssnr**2 + fsnr[x]**2))
calib_failed, coeffs, comments = varie.UNe_calibrate(extlamp,x+32,select_lines[x+32],all_lines[x+32])
......@@ -392,8 +394,10 @@ class GBNodding():
for comment in comments:
self.messages.append(comment)
keyfail = ''.join((CONFIG['CAL_FAILED'][0],str(x+32)))
if calib_failed:
heacal[keyfail] = (False,CONFIG['CAL_FAILED'][1])
self.messages.append(' *** WARNING ***')
self.messages.append('The default wavelength calibration for the order %s will be taken from the database and as such it will not be optimal for the night.' % (str(x+32),))
wcalib = db.extract_dbfile(self.dbconn,'une_calib')
......@@ -403,6 +407,7 @@ class GBNodding():
heacal[keyword] = wlc.header[keyword]
else:
heacal[keyfail] = (True,CONFIG['CAL_FAILED'][1])
for key in coeffs:
keyword = ''.join((CONFIG['WLCOEFFS'][key][0],str(x+32)))
heacal[keyword] = (coeffs[key],CONFIG['WLCOEFFS'][key][1])
......@@ -463,7 +468,7 @@ class GBNodding():
heaspe = fits.Header(hea_ima)
heaspe[CONFIG['KEYS']['FILENAME']] = calname
heaspe[CONFIG['KEYS']['IMANAME']] = calname
drs_mjd = float(heaspe[CONFIG['KEYS']['MJD']]) + (float(heaspe[CONFIG['KEYS']['EXPTIME']])/(2*86400))
drs_mjd = float(heaspe[CONFIG['KEYS']['MJD']]) + (float(heaspe[CONFIG['KEYS']['EXPTIME']])/(2.0*86400.0))
heaspe[CONFIG['DRS_MJD'][0]] = (drs_mjd,CONFIG['DRS_MJD'][1])
try:
......@@ -497,9 +502,10 @@ class GBNodding():
for hea in heacal:
heaspe[hea] = heacal[hea]
barycorr, hjd = varie.berv_corr(heaspe)
barycorr, hjd, bjd = varie.berv_corr(heaspe)
heaspe[CONFIG['BERV'][0]] = (barycorr,CONFIG['BERV'][1])
heaspe[CONFIG['HJD'][0]] = (hjd,CONFIG['HJD'][1])
heaspe[CONFIG['BJD'][0]] = (bjd,CONFIG['BJD'][1])
waves = np.zeros((CONFIG['N_ORD'],CONFIG['YCCD']))
for o in xrange(CONFIG['N_ORD']):
......@@ -514,13 +520,14 @@ class GBNodding():
#calname = os.path.join(CONFIG['RED_DIR'],calname)
#results.writeto(calname,clobber=True)
orders=np.arange(CONFIG['N_ORD'])+32
c1 = fits.Column(name='ORDER', format='I', array=orders)
c2 = fits.Column(name='WAVE', format=''.join((str(CONFIG['YCCD']),'D')), unit='nm', array=waves)
c3 = fits.Column(name='FLUX', format=''.join((str(CONFIG['YCCD']),'D')), array=optSpectrum)
c4 = fits.Column(name='SNR', format=''.join((str(CONFIG['YCCD']),'D')), array=snr)
heaspe[CONFIG['DRS_VERSION'][0]] = (CONFIG['VERSION'], CONFIG['DRS_VERSION'][1])
#tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4],header=heaspe)
tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4])
prihdu = fits.PrimaryHDU(data=None, header=heaspe)
......@@ -625,7 +632,8 @@ class GBNodding():
abcalib[o] = (afluxes[o]+bshift)/2.0
#snr.append(max(np.mean(abcalib[o][1000:1050])/np.std(abcalib[o][1000:1050]),0))
with np.errstate(divide='ignore', invalid='ignore'):
ssnr = (np.sqrt(roneff**2 + (gaineff*2*abcalib[o])))/(gaineff*2*abcalib[o])
#ssnr = (np.sqrt(roneff**2 + (gaineff*2*abcalib[o])))/(gaineff*2*abcalib[o])
ssnr = np.true_divide(np.sqrt(roneff**2 + (gaineff*2*abcalib[o])),gaineff*2*abcalib[o])
snr[o] = 1.0/(np.sqrt(ssnr**2 + fsnr[o]**2))
abcalib = np.asarray(abcalib, dtype='float32')
......@@ -658,9 +666,10 @@ class GBNodding():
heaspe[CONFIG['KEYS']['FILENAME']] = os.path.basename(abnome)
heaspe[CONFIG['KEYS']['IMANAME']] = os.path.basename(abnome)
barycorr, hjd = varie.berv_corr(heaspe)
barycorr, hjd, bjd = varie.berv_corr(heaspe)
heaspe[CONFIG['BERV'][0]] = (barycorr,CONFIG['BERV'][1])
heaspe[CONFIG['HJD'][0]] = (hjd,CONFIG['HJD'][1])
heaspe[CONFIG['BJD'][0]] = (bjd,CONFIG['BJD'][1])
try:
......@@ -715,6 +724,8 @@ class GBNodding():
c3 = fits.Column(name='FLUX', format=''.join((str(CONFIG['YCCD']),'D')), array=abcalib)
c4 = fits.Column(name='SNR', format=''.join((str(CONFIG['YCCD']),'D')), array=snr)
heaspe[CONFIG['DRS_VERSION'][0]] = (CONFIG['VERSION'], CONFIG['DRS_VERSION'][1])
#tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4],header=heaspe)
tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4])
prihdu = fits.PrimaryHDU(data=None, header=heaspe)
......
......@@ -143,7 +143,7 @@ class GBStare():
name_obj.pop(n)
#print len(self.starelist)
self.mjd = np.average(np.asarray(mjd_obj)) + (exp_common/(2*86400))
self.mjd = np.average(np.asarray(mjd_obj)) + (exp_common/(2.0*86400.0))
sky_time = abs(np.asarray(sky_time) - self.mjd)
# Skip sky images with exposure time different than Obj
......@@ -508,7 +508,8 @@ class GBStare():
extflat = varie.extract(norflat, optSpectrum[x], x1, x2, profile, fgaineff, froneff)
#print extflat
with np.errstate(divide='ignore', invalid='ignore'):
fsnr[x] = (np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))))/(fgaineff*(extflat*meanflat))
#fsnr[x] = (np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))))/(fgaineff*(extflat*meanflat))
fsnr[x] = np.true_divide(np.sqrt(froneff**2 + (fgaineff*(extflat*meanflat))),fgaineff*(extflat*meanflat))
fsnr[fsnr==np.inf] = 0
fsnr[fsnr==-np.inf] = 0
fsnr = np.nan_to_num(fsnr)
......@@ -517,7 +518,8 @@ class GBStare():
fsnr[x] = np.zeros(len(optSpectrum[x]))
with np.errstate(divide='ignore', invalid='ignore'):
ssnr = (np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])))/(gaineff*nspec*optSpectrum[x])
#ssnr = (np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])))/(gaineff*nspec*optSpectrum[x])
ssnr = np.true_divide(np.sqrt(roneff**2 + (gaineff*nspec*optSpectrum[x])),gaineff*nspec*optSpectrum[x])
snr[x] = 1.0/(np.sqrt(ssnr**2 + fsnr[x]**2))
......@@ -526,7 +528,10 @@ class GBStare():
for comment in comments:
self.messages.append(comment)
keyfail = ''.join((CONFIG['CAL_FAILED'][0],str(x+32)))
if calib_failed:
heacal[keyfail] = (False,CONFIG['CAL_FAILED'][1])
self.messages.append(' *** WARNING ***')
self.messages.append('The default wavelength calibration for the order %s will be taken from the database and as such it will not be optimal for the night.' % (str(x+32),))
wcalib = db.extract_dbfile(self.dbconn,'une_calib')
......@@ -536,6 +541,7 @@ class GBStare():
heacal[keyword] = wlc.header[keyword]
else:
heacal[keyfail] = (True,CONFIG['CAL_FAILED'][1])
for key in coeffs:
keyword = ''.join((CONFIG['WLCOEFFS'][key][0],str(x+32)))
heacal[keyword] = (coeffs[key],CONFIG['WLCOEFFS'][key][1])
......@@ -594,9 +600,10 @@ class GBStare():
for hea in heacal:
heaspe[hea] = heacal[hea]
barycorr, hjd = varie.berv_corr(heaspe)
barycorr, hjd, bjd = varie.berv_corr(heaspe)
heaspe[CONFIG['BERV'][0]] = (barycorr,CONFIG['BERV'][1])
heaspe[CONFIG['HJD'][0]] = (hjd,CONFIG['HJD'][1])
heaspe[CONFIG['BJD'][0]] = (bjd,CONFIG['BJD'][1])
waves = np.zeros((CONFIG['N_ORD'],CONFIG['YCCD']))
for o in xrange(CONFIG['N_ORD']):
......@@ -609,12 +616,15 @@ class GBStare():
#calname = os.path.join(CONFIG['RED_DIR'],calname)
#results.writeto(calname,clobber=True)
orders=np.arange(CONFIG['N_ORD'])+32
c1 = fits.Column(name='ORDER', format='I', array=orders)
c2 = fits.Column(name='WAVE', format=''.join((str(CONFIG['YCCD']),'D')), unit='nm', array=waves)
c3 = fits.Column(name='FLUX', format=''.join((str(CONFIG['YCCD']),'D')), array=optSpectrum)
c4 = fits.Column(name='SNR', format=''.join((str(CONFIG['YCCD']),'D')), array=snr)
heaspe[CONFIG['DRS_VERSION'][0]] = (CONFIG['VERSION'], CONFIG['DRS_VERSION'][1])
#tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4],header=heaspe)
tbhdu = fits.BinTableHDU.from_columns([c1, c2, c3, c4])
prihdu = fits.PrimaryHDU(data=None, header=heaspe)
......
......@@ -88,20 +88,11 @@ def badpix(image,bad_mask,inverse_mask):
def stdcombine(x,axis):
#return np.ma.sqrt((np.ma.absolute(x- np.ma.mean(x)) /CONFIG['GAIN']) + ((CONFIG['RON']/CONFIG['GAIN']) ** 2))
#return np.ma.sqrt(np.ma.mean((np.ma.absolute(x- np.ma.mean(x)) /CONFIG['GAIN']) + ((CONFIG['RON']/CONFIG['GAIN']) ** 2)))
return np.ma.sqrt(np.ma.mean((np.ma.absolute(x- np.ma.median(x)) /CONFIG['GAIN']) + ((CONFIG['RON']/CONFIG['GAIN']) ** 2)))
#return np.ma.sqrt(np.ma.mean((np.ma.absolute(x- np.ma.median(x)) /CONFIG['GAIN']) + ((CONFIG['RON']/CONFIG['GAIN']) ** 2)))
return np.ma.sqrt(np.ma.mean((np.ma.absolute(x- np.ma.median(x)) /float(CONFIG['GAIN'])) + ((float(CONFIG['RON'])/float(CONFIG['GAIN'])) ** 2)))
#-------------- Define straighten option vertical shift -------------------
def shiftY(fdata):
# search for shift defined in the straighten options in config.py
#for opt in CONFIG['STRAIGHT_OPT']:
# try:
# dy = opt.rindex('DY=')
# ypos = int(opt[dy-2:])
# shift = CONFIG['SHIFT_Y'] + ypos
# return shift
# except:
# pass
# if the shift is not defined, compute it
column = fdata[0:140,0] # the bottom 140 pixel of the first column on the left of the detector
background = np.sort(column)[20] # value of the 20th pixel after arranging them in ascending order
#print column
......@@ -158,11 +149,11 @@ def optExtract(data,gain,ron,slit_pos,ordine):
# define gaussian function:
def gaussian(x,p,c,sg):
return p * np.exp(-((x-c)/sg)**2)
return p * np.exp(-((x-c)/float(sg))**2)
# define variance function:
def var(x):
return (np.absolute(x)/gain) + (ron/gain) ** 2
return (np.absolute(x)/float(gain)) + (ron/float(gain)) ** 2
#t1 = time.time()
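
The float() casts added to var() (and to gaussian() above) guard against integer division. A tiny illustrative check, assuming the module runs under Python 2 without "from __future__ import division" (under Python 3 both lines print 2.5):

gain = 2                   # illustrative integer gain, e.g. as read from a header
ron = 5                    # illustrative integer read-out noise
print(ron / gain)          # Python 2: 2 (floored); Python 3: 2.5
print(ron / float(gain))   # 2.5 under both, as in the patched var()
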
......@@ -267,7 +258,7 @@ def optExtract(data,gain,ron,slit_pos,ordine):
outlier = True
while outlier:
#fitprofile = np.polyval(np.polyfit(np.arange(columns),profile[row],deg=2,w=1./np.sqrt(variance[row])),np.arange(columns))
fitprofile = poly.polyval(np.arange(columns),poly.polyfit(np.arange(columns),profile[row],deg=2,w=1./np.sqrt(variance[row])))
fitprofile = poly.polyval(np.arange(columns),poly.polyfit(np.arange(columns),profile[row],deg=2,w=1.0/np.sqrt(variance[row])))
sigma = np.mean((profile[row]-fitprofile)**2)
......@@ -333,8 +324,10 @@ def optExtract(data,gain,ron,slit_pos,ordine):
# first optimal extraction
with np.errstate(divide='ignore', invalid='ignore', over='ignore'):
varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
#varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
varOptFlux = 1.0/np.sum(np.true_divide(((profile[x1:x2])**2),variance[x1:x2]),axis=0)
#OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
OptFlux = np.sum(np.true_divide(profile[x1:x2]*data[x1:x2],variance[x1:x2]),axis=0)*varOptFlux
OptFlux[OptFlux==np.inf] = 0
OptFlux[OptFlux==-np.inf] = 0
OptFlux = np.nan_to_num(OptFlux)
......@@ -370,8 +363,10 @@ def optExtract(data,gain,ron,slit_pos,ordine):
profile[rworst,cworst] = np.median([profile[rworst,max(cworst-4,0):min(cworst+5,CONFIG['XCCD'])]])
with np.errstate(divide='ignore', invalid='ignore', over='ignore'):
varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
#varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
varOptFlux = 1.0/np.sum(np.true_divide(((profile[x1:x2])**2),variance[x1:x2]),axis=0)
#OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
OptFlux = np.sum(np.true_divide(profile[x1:x2]*data[x1:x2],variance[x1:x2]),axis=0)*varOptFlux
OptFlux[OptFlux==np.inf] = 0
OptFlux[OptFlux==-np.inf] = 0
......@@ -432,14 +427,16 @@ def extract(data,optflux,x1,x2,profile,gain,ron):
# define variance function:
def var(x):
return (np.absolute(x)/gain) + (ron/gain) ** 2
return (np.absolute(x)/float(gain)) + (ron/float(gain)) ** 2
variance = var(optflux*profile)
# optimal extraction
with np.errstate(divide='ignore', invalid='ignore'):
varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
#varOptFlux = 1.0/np.sum(((profile[x1:x2])**2)/variance[x1:x2],axis=0)
#OptFlux = np.sum(profile[x1:x2]*data[x1:x2]/variance[x1:x2],axis=0)*varOptFlux
varOptFlux = 1.0/np.sum(np.true_divide(((profile[x1:x2])**2),variance[x1:x2]),axis=0)
OptFlux = np.sum(np.true_divide(profile[x1:x2]*data[x1:x2],variance[x1:x2]),axis=0)*varOptFlux
# clean the extracted spectrum from NaN and infinite values
OptFlux[OptFlux==np.inf] = 0
......@@ -1146,9 +1143,10 @@ def berv_corr(hdr):
mjd = mjd + (expt/(86400.0))
barycorr, hjd, vbar, vdiurnal = baryvel.helcorr(longitude,latitude,elevation,ra,dec,mjd,equinox,pma,pmd)
return barycorr, hjd
barycorr, hjd, bjd, vbar, vdiurnal = baryvel.helcorr(longitude,latitude,elevation,ra,dec,mjd,equinox,pma,pmd)
return barycorr, hjd, bjd
#--------------------- Create s1d output -------------------
......@@ -1180,7 +1178,8 @@ def create_s1d(spectrum, header):
massimi = massimi[-250:-50]+chunk
valori = flux_old[massimi]
norm_fit = np.polyval(np.polyfit(massimi,valori,1),np.arange(len(flux_old)))
flux_old = flux_old/norm_fit
#flux_old = flux_old/norm_fit
flux_old = np.true_divide(flux_old,norm_fit)
#t2 = time.time()
#print 's1d spectrum order %s normalization: %s s' % (str(o),str(t2-t1))
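
The s1d normalization above divides each order by a straight line fitted through selected flux maxima, now via np.true_divide. A hedged sketch of that step on a synthetic spectrum; the way the high points are picked here (argsort) is only illustrative, the pipeline selects and offsets them differently:

import numpy as np

flux_old = 100.0 + np.linspace(0.0, 10.0, 2048) + np.random.normal(0.0, 1.0, 2048)  # synthetic order
massimi = np.sort(np.argsort(flux_old)[-250:-50])      # illustrative stand-in for the pipeline's maxima
valori = flux_old[massimi]
norm_fit = np.polyval(np.polyfit(massimi, valori, 1), np.arange(len(flux_old)))
flux_norm = np.true_divide(flux_old, norm_fit)         # continuum-normalized order
print(flux_norm.mean())
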
......