Fix git weirdness
OmegaLambda1998 committed Jul 4, 2024
1 parent 15fb088 commit d325a71
Showing 45 changed files with 20,201 additions and 24 deletions.
49 changes: 49 additions & 0 deletions Examples/Inputs/2016gkg/2016gkg.toml
@@ -0,0 +1,49 @@
[ global ]
base_path = "../"
output_path = "../Outputs/2016gkg"
logging = true

[ data ]
data_path = "2016gkg_data.toml" # Can be absolute, or relative to base_path
max_time = 3 # Can be a single value, or a min/max specification. If min or max is specified, you can also give a range of max_times, and the code will find where the min/max flux within that range occurs. The same applies to min_time
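
For reference, the companion 2017jgh input later in this commit pairs this option with an explicit unit key; the same key could be added here if desired (it defaults to d):

max_time_unit = "d" # Optional, defaults to d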

[[ model ]]
name = "P15"
constraints.R.unit = "Rsun"
constraints.R.prior = "Normal"
constraints.R.values = [25.0, 15.0]
constraints.R.min = 0 # Truncates distribution. Defaults to -Inf.
constraints.R.max = 500

constraints.M.unit = "Msun"
constraints.M.prior = "Normal"
constraints.M.values = [1.0, 0.1]
constraints.M.min = 0
constraints.M.max = 10

constraints.v.unit = "km / s"
constraints.v.prior = "Normal"
constraints.v.values = [3e4, 1e4]
constraints.v.min = 1e4
constraints.v.max = 5e4

constraints.t.unit = "d"
constraints.t.prior = "Normal"
constraints.t.values = [-23, 3]
constraints.t.min = -30
constraints.t.max = -20
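
As a reading aid only: assuming the configuration is parsed by a standard TOML reader, the dotted constraints.*.* keys above could equivalently be written as inline tables, e.g. for the t constraint:

constraints.t = { unit = "d", prior = "Normal", values = [-23, 3], min = -30, max = -20 }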

[ fitting ]
numwalkers = 100
thinning = 1
burnin = 100
numsamples_perwalker = 200
bestfit = "median" # How to choose the best-fit value

[ plot.prior ] # show the distribution of prior values

[ plot.burnin ] # show how the walkers moved over the burn in phase

[ plot.comparison ] # compare the best fit to the data

[ plot.contour ] # show a contour plot of the posterior
31 changes: 31 additions & 0 deletions Examples/Inputs/2016gkg/2016gkg_data.toml
@@ -0,0 +1,31 @@
[ global ]
base_path = "../" # Optional, defaults to the directory containing this input file. Can be relative (to this input file's directory) or absolute. All relative paths will be assumed to be relative to base_path
filter_path = "Filters" # Defaults to base_path / Filters
output_path = "../Output" # Defaults to base_path / Output

# Data
[ data ]
# First include information about the supernova
name = "2016gkg" # Required
peak_time = true # Defaults to false. If true, all times become relative to the peak data point; alternatively, give a value and all times become relative to that value (see the sketch after this block)
zeropoint = 23.00
zeropoint_unit = "AB_mag" # Optional, defaults to AB_mag
redshift = 0.0049
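
The peak_time option can also take an explicit reference time instead of true, as the 2017jgh data file later in this commit does. A minimal sketch; the value below is a placeholder, not taken from the data:

peak_time = 57650.0 # placeholder reference time; all times become relative to this value
peak_time_unit = "d"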

[[ data.observations ]] # Now load the observations of the supernova. These can come from a single file containing all observations, or from multiple files (a sketch of a second entry follows this block)
name = "2016gkg"
path = "2016gkg/2016gkg.csv"
delimiter = ","
header.upperlimit.col = "upperlimit"
header.time.col = "time"
header.time.unit = "d"

header.magnitude.col = "magnitude"
header.magnitude.unit = "AB_mag"

header.magnitude_err.col = "e_magnitude"
header.magnitude_err.unit = "AB_mag"

header.filter.col = "band"
header.instrument.col = "instrument"
header.facility.col = "telescope"
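
To illustrate the multiple-files option mentioned above, a hedged sketch of a second observations entry; the name, file path, and column names are placeholders, and only keys already shown in this commit are reused:

[[ data.observations ]]
name = "swift" # placeholder name for a second data source
path = "2016gkg/2016gkg_swift.csv" # placeholder file
delimiter = ","
header.time.col = "MJD"
header.time.unit = "d"
header.magnitude.col = "mag"
header.magnitude.unit = "AB_mag"
header.magnitude_err.col = "e_mag"
header.magnitude_err.unit = "AB_mag"
header.filter.col = "band"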
45 changes: 45 additions & 0 deletions Examples/Inputs/2017jgh/2017jgh.toml
@@ -0,0 +1,45 @@
[ global ]
output_path = "../../Outputs/2017jgh"
data_path = "../Data"
logging = true

<include 2017jgh_data.toml>

[ data.modifications ]
max_time = 58117.5 # Can be a single value, or a min/max specification. If min or max is specified, you can also give a range of max_times, and the code will find where the min/max flux within that range occurs. The same applies to min_time
max_time_unit = "d" # Defaults to d

[[ model ]]
name = "P15"
constraints.R.unit = "Rsun"
constraints.R.prior = "Normal"
constraints.R.values = [25.0, 15.0]
constraints.R.min = 0 # Truncates distribution. Defaults to -Inf.
constraints.R.max = 500

constraints.M.unit = "Msun"
constraints.M.prior = "Normal"
constraints.M.values = [1.0, 0.1]
constraints.M.min = 0
constraints.M.max = 10

constraints.v.unit = "km / s"
constraints.v.prior = "Normal"
constraints.v.values = [3e4, 1e4]
constraints.v.min = 1e4
constraints.v.max = 5e4

constraints.t.unit = "d"
constraints.t.prior = "Normal"
constraints.t.values = [-23, 3]
constraints.t.min = -30
constraints.t.max = -20

[ fitting ]
numwalkers = 1e1
burnin = 1e2
iterations = 1e3
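
Note that TOML parses exponent notation such as 1e1 as a floating-point number, whereas the 2016gkg example earlier in this commit writes the analogous settings as plain integers. If the fitting code expects integer counts, the equivalent integer form would be:

numwalkers = 10
burnin = 100
iterations = 1000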

[ plot.comparison ] # compare the best fit to the data

[ plot.contour ] # show a contour plot of the posterior
32 changes: 32 additions & 0 deletions Examples/Inputs/2017jgh/2017jgh_data.toml
@@ -0,0 +1,32 @@
# Data
[ data ]
# First include information about the supernova
name = "2017jgh" # Required
zeropoint = 25.3
redshift = 0.079
#max_flux_err = 2.5e2 # Optional, sets the maximum allowed value for the uncertainty in the flux
#max_flux_err_unit = "µJy" # Optional, defaults to µJy
peak_time = 58127.0 # Defaults to false. If true, all times become relative to the peak data point; here an explicit value is given, so all times become relative to it
peak_time_unit = "d"

[[ data.observations ]] # Now load the observations of the supernova. These can come from a single file containing all observations, or from multiple files
name = "kepler" # Required. Human-readable name to distinguish observations
path = "2017jgh/2017jghCleaned2.csv" # Required. Accepts a path that is either relative (to Supernova) or absolute
delimiter = "," # Optional, defaults to comma
header.time.col = "time"
header.time.unit = "d"

header.flux.col = "flux"
header.flux.unit = "erg / s / cm^2 / Hz"

header.flux_err.col = "e_flux"
header.flux_err.unit = "erg / s / cm^2 / Hz"

header.upperlimit.col = "upperlimit"
upperlimit_false = ["False"]
header.facility.col = "facility"
header.instrument.col = "instrument"
header.passband.col = "band"

[[ plot.lightcurve ]]
legend = true