Merge pull request #100 from kadrlica/lsst_sims
Merge updates for LSST DP0 simulations
kadrlica authored Jun 27, 2024
2 parents f7d7d9f + e777540 commit 6883af0
Showing 14 changed files with 390 additions and 883 deletions.
1 change: 1 addition & 0 deletions .github/workflows/python-package.yml
@@ -8,6 +8,7 @@ name: build
on:
push:
pull_request:
workflow_dispatch:
schedule:
- cron: '0 0 1 * *'

2 changes: 1 addition & 1 deletion ugali/analysis/imf.py
@@ -27,7 +27,7 @@ def integrate(self, mass_min, mass_max, log_mode=True, weight=False, steps=10000
-----------
mass_min: minimum mass bound for integration (solar masses)
mass_max: maximum mass bound for integration (solar masses)
log_mode[True]: use logarithmic steps in stellar mass as oppose to linear
log_mode[True]: use logarithmic steps in stellar mass as opposed to linear
weight[False]: weight the integral by stellar mass
steps: number of numerical integration steps
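The docstring above describes the integration options; the sketch below shows what logarithmic versus linear mass steps and the optional mass weighting amount to. The power-law slope and the helper name `integrate_imf` are illustrative assumptions, not the ugali implementation.

```python
import numpy as np

def integrate_imf(mass_min, mass_max, log_mode=True, weight=False, steps=10000):
    """Numerically integrate an assumed power-law IMF dN/dm ~ m**-2.35."""
    if log_mode:
        # Logarithmic steps in stellar mass
        m = np.logspace(np.log10(mass_min), np.log10(mass_max), int(steps))
    else:
        # Linear steps in stellar mass
        m = np.linspace(mass_min, mass_max, int(steps))
    dndm = m**-2.35                      # hypothetical Salpeter-like slope
    integrand = m * dndm if weight else dndm
    return np.trapz(integrand, m)        # trapezoidal quadrature

print(integrate_imf(0.1, 100.0, log_mode=True, weight=True))
```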
4 changes: 2 additions & 2 deletions ugali/isochrone/dartmouth.py
@@ -103,8 +103,8 @@

class Dotter2008(Isochrone):
"""
KCB: currently inheriting from PadovaIsochrone because there are
several useful functions where we would basically be copying code.
Dartmouth isochrones from Dotter et al. 2008 (https://arxiv.org/abs/0804.4473)
http://stellar.dartmouth.edu
"""
_dirname = os.path.join(get_iso_dir(),'{survey}','dotter2008')
#_zsolar = 0.0163
49 changes: 34 additions & 15 deletions ugali/isochrone/model.py
@@ -96,9 +96,9 @@ class IsochroneModel(Model):
""" Abstract base class for dealing with isochrone models. """

_params = odict([
('distance_modulus', Parameter(15.0, [10.0, 30.0]) ),
('distance_modulus', Parameter(15.0, [10.0, 30.0]) ), # m-M
('age', Parameter(10.0, [0.1, 15.0]) ), # Gyr
('metallicity', Parameter(0.0002, [0.0,0.02]) ),
('metallicity', Parameter(0.0002, [0.0,0.02]) ), # Z_ini
])
_mapping = odict([
('mod','distance_modulus'),
@@ -385,7 +385,7 @@ def absolute_magnitude_martin(self, richness=1, steps=1e4, n_trials=1000, mag_br
Returns:
--------
med,lo,hi : Total absolute magnitude interval
med,[lo,hi] : Total absolute magnitude interval
"""
# ADW: This function is not quite right. It should restrict
# the catalog to the observable space using the mask in each
@@ -398,6 +398,10 @@ def absolute_magnitude_martin(self, richness=1, steps=1e4, n_trials=1000, mag_br
params.update(band_1='g',band_2='r',survey='sdss')
iso = self.__class__(**params)

# ADW: 2022-09-18: There are now direct transformation
# equations from DES to Johnson V-band...
# Appendix B.5: https://arxiv.org/abs/2101.05765

# Analytic part (below detection threshold)
# g, r are absolute magnitudes
mass_init, mass_pdf, mass_act, sdss_g, sdss_r = iso.sample(mass_steps = steps)
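This hunk works in SDSS g, r and points to DES-to-Johnson V transformations; the core of any integrated absolute-magnitude calculation is a weighted flux sum. The sketch below shows only that step, with hypothetical inputs, and is not the exact ugali formula.

```python
import numpy as np

def integrated_magnitude(abs_mags, weights):
    """Combine per-star absolute magnitudes into one integrated magnitude by
    summing fluxes weighted, e.g., by the IMF mass pdf times the richness."""
    flux = np.sum(np.asarray(weights) * 10**(-0.4 * np.asarray(abs_mags)))
    return -2.5 * np.log10(flux)

# Hypothetical inputs: three stars with equal weight
print(integrated_magnitude([5.0, 6.0, 7.0], [1.0, 1.0, 1.0]))
```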
@@ -440,10 +444,15 @@ def simulate(self, stellar_mass, distance_modulus=None, **kwargs):
"""
if distance_modulus is None: distance_modulus = self.distance_modulus
# Total number of stars in system
n = int(round(stellar_mass / self.stellar_mass()))
f_1 = scipy.interpolate.interp1d(self.mass_init, self.mag_1)
f_2 = scipy.interpolate.interp1d(self.mass_init, self.mag_2)
mass_init_sample = self.imf.sample(n, np.min(self.mass_init), np.max(self.mass_init), **kwargs)
richness = stellar_mass / self.stellar_mass()
n = int(round(richness))
#ADW 2022-08-28: Should this really be a Poisson sample of the richness?
#n = scipy.stats.poisson(richness).rvs()

mass_init,mag_1,mag_2 = self.mass_init, self.mag_1, self.mag_2
f_1 = scipy.interpolate.interp1d(mass_init, mag_1)
f_2 = scipy.interpolate.interp1d(mass_init, mag_2)
mass_init_sample = self.imf.sample(n, np.min(mass_init), np.max(mass_init), **kwargs)
mag_1_sample, mag_2_sample = f_1(mass_init_sample), f_2(mass_init_sample)
return mag_1_sample + distance_modulus, mag_2_sample + distance_modulus
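The diff introduces an explicit `richness` variable and leaves the Poisson alternative from the ADW 2022-08-28 comment commented out. The sketch below shows the two choices side by side; the numeric value of `richness` is a placeholder.

```python
import scipy.stats

richness = 350.7                                  # placeholder: stellar_mass / self.stellar_mass()

n_round = int(round(richness))                    # deterministic count used in the diff
n_poisson = scipy.stats.poisson(richness).rvs()   # stochastic alternative from the comment

print(n_round, n_poisson)
```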

@@ -907,8 +916,11 @@ def pdf(self, mag_1, mag_2, mag_err_1, mag_err_2,


def raw_separation(self,mag_1,mag_2,steps=10000):
"""
Calculate the separation in magnitude-magnitude space between points and isochrone. Uses a dense sampling of the isochrone and calculates the metric distance from any isochrone sample point.
"""
Calculate the separation in magnitude-magnitude space between
points and isochrone. Uses a dense sampling of the isochrone
and calculates the metric distance from any isochrone sample
point.
Parameters:
-----------
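A minimal sketch of the idea described in this docstring: densely sample the isochrone in magnitude-magnitude space and, for each observed point, take the distance to the nearest isochrone sample. The KD-tree approach and the function name are assumptions for illustration; the ugali implementation may differ.

```python
import numpy as np
from scipy.spatial import cKDTree

def nearest_separation(iso_mag_1, iso_mag_2, mag_1, mag_2):
    """Metric distance from each observed (mag_1, mag_2) point to the
    closest point of a densely sampled isochrone."""
    tree = cKDTree(np.column_stack([iso_mag_1, iso_mag_2]))
    dist, _ = tree.query(np.column_stack([mag_1, mag_2]))
    return dist
```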
@@ -1074,11 +1086,14 @@ def create_grid(self,abins=None,zbins=None):
if abins is None and zbins is None:
filenames = glob.glob(self.get_dirname()+'/%s_*.dat'%(self._prefix))
data = np.array([self.filename2params(f) for f in filenames])
if not len(data):
if len(data):
arange = np.unique(data[:,0])
zrange = np.unique(data[:,1])
else:
msg = "No isochrone files found in: %s"%self.get_dirname()
raise Exception(msg)
arange = np.unique(data[:,0])
zrange = np.unique(data[:,1])
logger.warn(msg)
arange = np.array([self.age])
zrange = np.array([self.metallicity])
elif abins is not None and zbins is not None:
# Age in units of Gyr
arange = np.linspace(abins[0],abins[1],abins[2]+1)
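For context on the branch above: the age-metallicity grid is either inferred from existing isochrone files (via `np.unique` on their parsed parameters, with the new fallback warning when none are found) or built from explicit `(min, max, nbins)` tuples. A toy version of the binned branch, with placeholder bin values:

```python
import numpy as np

abins = (1.0, 13.5, 25)      # placeholder age bins (Gyr)
zbins = (1e-4, 1e-3, 9)      # placeholder metallicity bins

arange = np.linspace(abins[0], abins[1], abins[2] + 1)
zrange = np.linspace(zbins[0], zbins[1], zbins[2] + 1)
aa, zz = np.meshgrid(arange, zrange)   # full grid of (age, metallicity) pairs
```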
@@ -1110,7 +1125,11 @@ def _cache(self,name=None):
filename = self.get_filename()
if filename != self.filename:
self.filename = filename
self._parse(self.filename)
if not os.path.exists(self.filename):
msg = "Filename does not exist: %s"%self.filename
logger.warn(msg)
else:
self._parse(self.filename)

def _parse(self,filename):
raise Exception("Must be implemented by subclass.")
@@ -1143,7 +1162,7 @@ def download(self,age=None,metallicity=None,outdir=None,force=False):
Parameters
----------
age : age in (Gyr)
metallicity : Z
metallicity : initial metallicity, Z
outdir : output directory (default to current directory)
force : force overwrite of file
154 changes: 108 additions & 46 deletions ugali/isochrone/parsec.py
@@ -34,6 +34,7 @@
('ps1' ,'tab_mag_odfnew/tab_mag_panstarrs1.dat'),
('acs_wfc' ,'tab_mag_odfnew/tab_mag_acs_wfc.dat'),
('lsst', 'tab_mag_odfnew/tab_mag_lsst.dat'),
('lsst_dp0', 'tab_mag_odfnew/tab_mag_lsstDP0.dat'),
])

photname_dict = odict([
@@ -42,6 +43,7 @@
('ps1' ,'Pan-STARRS1'),
('acs_wfc','HST/ACS'),
('lsst', 'LSST'),
('lsst_dp0', 'LSST'),
])

# Commented options may need to be restored for older version/isochrones.
@@ -155,6 +157,8 @@
#'.cgifields': 'dust_sourceM',
}

defaults_36 = dict(defaults_33,cmd_version=3.6)
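The new `defaults_36` line uses the `dict(base, key=value)` idiom to derive one parameter set from another, overriding only the CMD interface version. A standalone illustration with placeholder keys and values (not the full ugali defaults):

```python
defaults_33 = {'cmd_version': 3.3, 'output_kind': 0}   # placeholder base defaults
defaults_36 = dict(defaults_33, cmd_version=3.6)       # copy with one override
print(defaults_36)   # {'cmd_version': 3.6, 'output_kind': 0}
```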

class ParsecIsochrone(Isochrone):
""" Base class for PARSEC-style isochrones. """

@@ -242,7 +246,7 @@ def query_server(self,outfile,age,metallicity):
urlopen(url,timeout=2)

q = urlencode(params).encode('utf-8')
logger.debug(url+'?'+q)
logger.debug("%s?%s"%(url,q))
c = str(urlopen(url, q).read())
aa = re.compile('output\d+')
fname = aa.findall(c)
@@ -256,49 +260,93 @@ def query_server(self,outfile,age,metallicity):
cmd = 'wget --progress dot:binary %s -O %s'%(out,outfile)
logger.debug(cmd)
stdout = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT)
logger.debug(stdout)
logger.debug(str(stdout))

return outfile

@classmethod
def verify(cls, filename, survey, age, metallicity):
age = age*1e9
nlines=15
def parse_header(cls, filename, nlines=15):
header = dict(
photname = None,
columns = None
)

with open(filename,'r') as f:
lines = [f.readline() for i in range(nlines)]
if len(lines) < nlines:
msg = "Incorrect file size"
raise Exception(msg)

for i,l in enumerate(lines):
if l.startswith('# Photometric system:'): break
else:
msg = "Incorrect file header"
raise Exception(msg)
if len(lines) < nlines:
msg = "Incorrect file size"
raise Exception(msg)

try:
s = lines[i].split()[3]
assert photname_dict[survey] == s
except:
msg = "Incorrect survey:\n"+lines[i]
raise Exception(msg)
for i,l in enumerate(lines):
if l.startswith('# Photometric system:'):
try: header['photname'] = lines[i].split()[3]
except: header['photname'] = None
if not l.startswith('# '): break

try:
z = lines[-1].split()[0]
assert np.allclose(metallicity,float(z),atol=1e-5)
except:
msg = "Metallicity does not match:\n"+lines[-1]
raise Exception(msg)
header['columns'] = lines[i-1].split()[1:]

try:
a = lines[-1].split()[1]
# Need to deal with age or log-age
assert (np.allclose(age,float(a),atol=1e-2) or
np.allclose(np.log10(age),float(a),atol=1e-2))
except:
msg = "Age does not match:\n"+lines[-1]
for k,v in header.items():
if v is None:
msg = "File header missing: '%s'"%k
raise Exception(msg)

return header, lines
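Hypothetical usage of the new `parse_header` classmethod (the filename is made up for illustration): it returns a header dict with the photometric system name and the parsed column names, plus the raw header lines.

```python
header, lines = ParsecIsochrone.parse_header('iso_a10.0_z0.00010.dat', nlines=15)
print(header['photname'])   # e.g. 'LSST'
print(header['columns'])    # column names from the last '# ' header line
```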

@classmethod
def verify(cls, filename, survey, age, metallicity):
"""Verify that the isochrone file matches the isochrone
parameters. Used mostly for verifying the integrity of
a download.
Parameters
----------
filename : the downloaded filename
survey : the survey (photometric system) of the download
age : the requested age of the system
metallicity : the requested metallicity
Returns
-------
None
"""
age = age*1e9
nlines=15
header, lines = cls.parse_header(filename,nlines=nlines)

try:
assert photname_dict[survey] == header['photname']
except:
msg = "Incorrect survey:\n"+header['photname']
raise Exception(msg)

try:
try: zidx = header['columns'].index('Zini')
except ValueError: zidx = 0
z = float(lines[-1].split()[zidx])
assert np.allclose(metallicity,z,atol=1e-5)
except:
msg = "Metallicity does not match:\n"+lines[-1]
raise Exception(msg)

try:
# Need to deal with age or log-age
names = ['log(age/yr)', 'logAge', 'Age']
for name in names:
try:
aidx = header['columns'].index(name)
break
except ValueError:
aidx = 1
if aidx < 0: aidx = 1

a = lines[-1].split()[aidx]
assert (np.allclose(age,float(a),atol=1e-2) or
np.allclose(np.log10(age),float(a),atol=1e-2))
except:
msg = "Age does not match:\n"+lines[-1]
raise Exception(msg)
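A hypothetical call to the refactored `verify` after a download (the path and parameter values are assumptions): it raises an exception if the photometric system, metallicity, or age recorded in the file do not match the request.

```python
try:
    Marigo2017.verify('tmp/iso_a10.0_z0.00010.dat',
                      survey='lsst_dp0', age=10.0, metallicity=1e-4)
except Exception as e:
    print("Downloaded isochrone failed verification:", e)
```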


class Bressan2012(ParsecIsochrone):
_dirname = os.path.join(get_iso_dir(),'{survey}','bressan2012')
@@ -403,7 +451,7 @@ class Marigo2017(ParsecIsochrone):
('hb_spread',0.1,'Intrinsic spread added to horizontal branch'),
)

download_defaults = copy.deepcopy(defaults_31)
download_defaults = copy.deepcopy(defaults_36)
download_defaults['isoc_kind'] = 'parsec_CAF09_v1.2S_NOV13'

columns = dict(
@@ -442,24 +490,38 @@ class Marigo2017(ParsecIsochrone):
(27,('y',float)),
(28,('w',float)),
]),
lsst = odict([
(2, ('mass_init',float)),
(3, ('mass_act',float)),
(4, ('log_lum',float)),
(7, ('stage',int)),
(23,('u',float)),
(24,('g',float)),
(25,('r',float)),
(26,('i',float)),
(27,('z',float)),
(28,('Y',float)),
lsst_dp0 = odict([
(3, ('mass_init',float)),
(5, ('mass_act',float)),
(6, ('log_lum',float)),
(9, ('stage',int)),
(25,('u',float)),
(26,('g',float)),
(27,('r',float)),
(28,('i',float)),
(29,('z',float)),
(30,('Y',float)),
]),
)
columns['lsst'] = copy.deepcopy(columns['lsst_dp0'])
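For context, a column odict like `lsst_dp0` above maps zero-indexed file columns to `(name, dtype)` pairs that can be handed to `np.genfromtxt`. A trimmed-down sketch (only two columns shown; the filename is hypothetical and simplified relative to the ugali code):

```python
import numpy as np
from collections import OrderedDict as odict

columns = odict([
    (3,  ('mass_init', float)),
    (26, ('g', float)),
])
kwargs = dict(usecols=list(columns.keys()),      # which file columns to read
              dtype=list(columns.values()),      # structured-array field names/types
              comments='#')
# data = np.genfromtxt('output123456789.dat', **kwargs)
# data['mass_init'] and data['g'] would then be structured-array fields
```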

def _find_column_numbers(self):
""" Map from the isochrone column names to the column numbers. """
header,lines = self.parse_header(self.filename)
columns = header['columns']

def _parse(self,filename):
"""Reads an isochrone file in the Padova (Marigo et al. 2017)
"""Reads an isochrone file in the Marigo et al. 2017
format. Creates arrays with the initial stellar mass and
corresponding magnitudes for each step along the isochrone.
Parameters:
-----------
filename : name of isochrone file to parse
Returns:
--------
None
"""
try:
columns = self.columns[self.survey.lower()]
Expand All @@ -471,7 +533,7 @@ def _parse(self,filename):
self.data = np.genfromtxt(filename,**kwargs)
# cut out anomalous point:
# https://github.com/DarkEnergySurvey/ugali/issues/29
self.data = self.data[self.data['stage'] != 9]
self.data = self.data[~np.in1d(self.data['stage'], [9])]

self.mass_init = self.data['mass_init']
self.mass_act = self.data['mass_act']
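The last change swaps the equality cut `stage != 9` for `~np.in1d(...)`, so the list of excluded evolutionary stages can be extended without touching the logic. A minimal standalone illustration with made-up stage values:

```python
import numpy as np

stage = np.array([1, 4, 9, 3, 9])
excluded = [9]                      # stages to drop (anomalous points)
keep = ~np.in1d(stage, excluded)
print(stage[keep])                  # -> [1 4 3]
```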