Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adding executables #92

Merged
merged 19 commits into from
Feb 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit. Hold shift + click to select a range
1ae0d72
set_tight_layout method has been marked as deprecated and will fail t…
Jun 9, 2023
6ea84ec
adding ntransits for old maptree bin names
Jul 9, 2023
39547dc
minor code clenaup
Aug 12, 2023
65380f6
Revert "minor code clenaup"
torresramiro350 Nov 8, 2023
0b860d6
Revert "Revert "minor code clenaup""
torresramiro350 Nov 8, 2023
f2b5819
Revert "adding ntransits for old maptree bin names"
torresramiro350 Nov 8, 2023
4ec3889
Revert "set_tight_layout method has been marked as deprecated and wil…
torresramiro350 Nov 8, 2023
d4c8174
reducing time to read in maptree with multiprocessing
torresramiro350 Nov 9, 2023
46298fd
Major code refactor for modules that read a ROOT file. First introduc…
torresramiro350 Nov 11, 2023
1404f57
minor change to response module
torresramiro350 Nov 13, 2023
49e01a4
switching the multiprocessing method and cleaning some code
torresramiro350 Nov 15, 2023
61b2d1e
removing unnecessary equation
torresramiro350 Nov 15, 2023
c609e19
reading partial arrays into memory
torresramiro350 Nov 17, 2023
8be8cf8
updating multiprocessing method for reading maptree and response ROOT…
torresramiro350 Dec 11, 2023
13aa06d
addressing test fail for checking transits
torresramiro350 Dec 11, 2023
0bf0bb4
ensure consistency in documentation of functions
torresramiro350 Jan 3, 2024
3a11939
changing documentation format
torresramiro350 Jan 14, 2024
622ee4f
adding executables
torresramiro350 Jan 17, 2024
c0e995a
adopting changes from parallelize_hal branch
torresramiro350 Jan 31, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
235 changes: 142 additions & 93 deletions hawc_hal/HAL.py

Large diffs are not rendered by default.

369 changes: 233 additions & 136 deletions hawc_hal/maptree/from_root_file.py

Large diffs are not rendered by default.

53 changes: 20 additions & 33 deletions hawc_hal/maptree/map_tree.py
Original file line number Diff line number Diff line change
@@ -1,43 +1,35 @@
from __future__ import absolute_import, division

import os
from builtins import object

import astropy.units as u
import numpy as np
import pandas as pd
import uproot
from past.utils import old_div
from threeML.io.file_utils import sanitize_filename
from threeML.io.logging import setup_logger
from threeML.io.rich_display import display

log = setup_logger(__name__)
log.propagate = False

import astropy.units as u

from ..serialize import Serialization
from .from_hdf5_file import from_hdf5_file
from .from_root_file import from_root_file

log = setup_logger(__name__)
log.propagate = False

def map_tree_factory(map_tree_file, roi, n_transits=None):

def map_tree_factory(map_tree_file, roi, n_workers: int = 1, n_transits=None):
# Sanitize files in input (expand variables and so on)
map_tree_file = sanitize_filename(map_tree_file)

if os.path.splitext(map_tree_file)[-1] == ".root":
return MapTree.from_root_file(map_tree_file, roi, n_transits, n_workers)

return MapTree.from_root_file(map_tree_file, roi, n_transits)

else:

return MapTree.from_hdf5(map_tree_file, roi, n_transits)
return MapTree.from_hdf5(map_tree_file, roi, n_transits)


class MapTree(object):
class MapTree:
def __init__(self, analysis_bins, roi, n_transits=None):

self._analysis_bins = analysis_bins
self._roi = roi

Expand All @@ -55,13 +47,12 @@ def __init__(self, analysis_bins, roi, n_transits=None):
@classmethod
# def from_hdf5(cls, map_tree_file, roi):
def from_hdf5(cls, map_tree_file, roi, n_transits):

data_analysis_bins, transits = from_hdf5_file(map_tree_file, roi, n_transits)

return cls(data_analysis_bins, roi, transits)

@classmethod
def from_root_file(cls, map_tree_file, roi, n_transits):
def from_root_file(cls, map_tree_file, roi, n_transits, n_workers: int):
"""
Create a MapTree object from a ROOT file and a ROI. Do not use this directly, use map_tree_factory instead.

Expand All @@ -70,7 +61,9 @@ def from_root_file(cls, map_tree_file, roi, n_transits):
:return:
"""

data_analysis_bins, transits = from_root_file(map_tree_file, roi, n_transits)
data_analysis_bins, transits = from_root_file(
map_tree_file, roi, n_transits, n_workers
)

return cls(data_analysis_bins, roi, transits)

Expand All @@ -86,7 +79,6 @@ def __iter__(self):
"""

for analysis_bin in self._analysis_bins:

yield analysis_bin

def __getitem__(self, item):
Expand All @@ -100,15 +92,12 @@ def __getitem__(self, item):
"""

try:

return self._analysis_bins[item]

except IndexError:

raise IndexError("Analysis bin_name with index %i does not exist" % (item))

def __len__(self):

return len(self._analysis_bins)

@property
Expand All @@ -117,16 +106,18 @@ def n_transits(self):

@property
def analysis_bins_labels(self):

return list(self._analysis_bins.keys())

def display(self):

df = pd.DataFrame()

df["Bin"] = list(self._analysis_bins.keys())
df["Nside"] = [self._analysis_bins[bin_id].nside for bin_id in self._analysis_bins]
df["Scheme"] = [self._analysis_bins[bin_id].scheme for bin_id in self._analysis_bins]
df["Nside"] = [
self._analysis_bins[bin_id].nside for bin_id in self._analysis_bins
]
df["Scheme"] = [
self._analysis_bins[bin_id].scheme for bin_id in self._analysis_bins
]

# Compute observed counts, background counts, how many pixels we have in the ROI and
# the sky area they cover
Expand All @@ -140,7 +131,6 @@ def display(self):
size = 0

for i, bin_id in enumerate(self._analysis_bins):

analysis_bin = self._analysis_bins[bin_id]

sparse_obs = analysis_bin.observation_map.as_partial()
Expand Down Expand Up @@ -190,7 +180,6 @@ def write(self, filename):
all_metas = []

for bin_id in self._analysis_bins:

analysis_bin = self._analysis_bins[bin_id]

assert bin_id == analysis_bin.name, "Bin name inconsistency: {} != {}".format(
Expand All @@ -208,23 +197,21 @@ def write(self, filename):
meta_df = pd.concat(all_metas, axis=1, keys=multi_index_keys).T

with Serialization(filename, mode="w") as serializer:

serializer.store_pandas_object("/analysis_bins", analysis_bins_df)
serializer.store_pandas_object("/analysis_bins_meta", meta_df)

# Write the ROI
if self._roi is not None:

if self._roi.to_dict()["ROI type"] == "HealpixMapROI":
ROIDict = self._roi.to_dict()
roimap = ROIDict["roimap"]
ROIDict.pop("roimap", None)
serializer.store_pandas_object("/ROI", pd.Series(roimap), **ROIDict)

else:

serializer.store_pandas_object("/ROI", pd.Series(), **self._roi.to_dict())
serializer.store_pandas_object(
"/ROI", pd.Series(), **self._roi.to_dict()
)

else:

serializer.store_pandas_object("/ROI", pd.Series())
Loading