diff --git a/corkit/__init__.py b/corkit/__init__.py
index 5b3b5f8..48084a5 100644
--- a/corkit/__init__.py
+++ b/corkit/__init__.py
@@ -1,3 +1,3 @@
-__version__ = '{{VERSION_PLACEHOLDER}}'
+__version__ = "0.0.1731770678"
 __author__ = "Jorgedavyd"
 __email__ = "jorged.encyso@gmail.com"
diff --git a/corkit/cli.py b/corkit/cli.py
index baa7fcb..479a0bb 100644
--- a/corkit/cli.py
+++ b/corkit/cli.py
@@ -2,15 +2,14 @@
 import asyncio
 from .dataset import update
 
+
 def main():
-    parser = argparse.ArgumentParser(
-        description="Corkit CLI dataset update manager."
-    )
+    parser = argparse.ArgumentParser(description="Corkit CLI dataset update manager.")
     parser.add_argument(
         "--batch-size",
         type=int,
         default=100,
-        help="Batch size for dataset updates (default: 10)"
+        help="Batch size for dataset updates (default: 100)",
     )
 
     args = parser.parse_args()
diff --git a/corkit/dataset.py b/corkit/dataset.py
index df91044..635cced 100644
--- a/corkit/dataset.py
+++ b/corkit/dataset.py
@@ -27,6 +27,7 @@
 
 __all__ = ["update", "CorKitDatasets"]
 
+
 def timeout(retries=5, delay=10):
     def decorator(func):
         @wraps(func)
@@ -38,11 +39,14 @@ async def wrapper(*args, **kwargs):
                 except (asyncio.TimeoutError, ClientPayloadError) as e:
                     print(f"Error: {e}. Retrying {attempts + 1}/{retries}...")
                     attempts += 1
-                    await asyncio.sleep(delay*attempts)
+                    await asyncio.sleep(delay * attempts)
             raise TimeoutError(f"Failed after {retries} retries.")
+
         return wrapper
+
     return decorator
+
 
 def clean_old(file_list):
     dat_files = filter(lambda filename: filename.lower().endswith(".dat"), file_list)
     fts_files = filter(
@@ -62,6 +66,7 @@ async def get_names(url: str, href):
     names = [name["href"] for name in names]
     return names
 
+
 @timeout()
 async def download_single(url: str, filepath: str):
     if not os.path.exists(filepath):
@@ -71,6 +76,7 @@
             ) as f:
                 await f.write(await response.read())
 
+
 async def update(batch_size: int = 500) -> None:
     """
     # update
@@ -223,11 +229,12 @@ async def update(batch_size: int = 500) -> None:
     print("Downloading reconstructors and their utilities...")
     download_recons()
 
+
 def download_recons():
-    root = os.path.join(DEFAULT_SAVE_DIR, 'models')
+    root = os.path.join(DEFAULT_SAVE_DIR, "models")
     os.makedirs(root, exist_ok=True)
-    url = 'https://drive.google.com/uc?id=102orHwKGr9BL6s-M4bc1a3HVgHO_JmDd'
-    output = os.path.join(root, 'partial_conv.pt')
+    url = "https://drive.google.com/uc?id=102orHwKGr9BL6s-M4bc1a3HVgHO_JmDd"
+    output = os.path.join(root, "partial_conv.pt")
     gdown.download(url, output, quiet=False)
     # await download_single("", os.path.join(DEFAULT_SAVE_DIR, "models/fourier.pt"))
diff --git a/corkit/lasco.py b/corkit/lasco.py
index d3f273b..8dfae23 100644
--- a/corkit/lasco.py
+++ b/corkit/lasco.py
@@ -28,7 +28,13 @@
     check_05,
 )
 
-from .reconstruction import dl_image, normal_model_reconstruction, transforms, cross_model_reconstruction, fourier_model_reconstruction
+from .reconstruction import (
+    dl_image,
+    normal_model_reconstruction,
+    transforms,
+    cross_model_reconstruction,
+    fourier_model_reconstruction,
+)
 from astropy.visualization import HistEqStretch, ImageNormalize
 from typing import Union, Dict, Tuple, List, Optional
 from datetime import datetime, timedelta
@@ -48,6 +54,7 @@
 
 __all__ = ["level_1", "CME", "LASCOplot", "downloader", "c3_calibrate", "c2_calibrate"]
 
+
 ##############
 #   LASCO    #
 ##############
@@ -120,7 +127,7 @@ def level_1(
         - 1023
         != 0
     ):
-        img0: NDArray= reduce_std_size(img0, header, FULL=dofull)
+        img0: NDArray = reduce_std_size(img0, header, FULL=dofull)
 
     print(
         f'LASCO-{header["detector"]}:{header["filename"]}:{header["date-obs"]}T{header["time-obs"]}...'
@@ -177,15 +184,15 @@ def level_1(
     ramp = _read_ramp_full()
     bkg = _read_bkg_full()
     forward, inverse = transforms()
-    if 'model' not in kwargs:
+    if "model" not in kwargs:
         model = normal_model_reconstruction()
     else:
-        match kwargs['model']:
-            case 'normal':
+        match kwargs["model"]:
+            case "normal":
                 model = normal_model_reconstruction()
-            case 'fourier':
+            case "fourier":
                 model = fourier_model_reconstruction()
-            case 'cross':
+            case "cross":
                 model = cross_model_reconstruction()
     args = (vig_pre, vig_post, mask, ramp, bkg, model, forward, inverse)
     for filepath in fits_files:
@@ -202,6 +209,7 @@ def level_1(
 
     return out
 
+
 def final_step(
     target_path: Optional[str],
     filetype: str,
@@ -259,7 +267,9 @@ def final_step(
         f"CorKit Level 1 calibration with python modules: level_1.py, open source level 1 implementation."
     )
     header["date"] = datetime.now().strftime("%Y/%m/%d %H:%M:%S.%f")
-    header["filename"] = os.path.basename(target_path) if target_path is not None else 'null'
+    header["filename"] = (
+        os.path.basename(target_path) if target_path is not None else "null"
+    )
     header["CRPIX1"] = crpix_x
     header["CRPIX2"] = crpix_y
     header["CROTA"] = r_hdr
@@ -310,6 +320,7 @@ def final_step(
 
     return bout, header
 
+
 class downloader:
     tools = ["c2", "c3"]
     batch_size = 2
@@ -379,6 +390,7 @@ async def __call__(self, scrap_date_list):
         for scrap_date in scrap_date_list:
             await self.downloader_pipeline(scrap_date)
 
+
 def _read_bkg_full():
     bkg_path = os.path.join(DEFAULT_SAVE_DIR, "3m_clcl_all.fts")
     with fits.open(bkg_path) as hdul:
@@ -386,16 +398,19 @@ def _read_bkg_full():
         bkg *= 0.8 / hdul[0].header["exptime"]
     return bkg
 
+
 def _read_ramp_full() -> NDArray:
     ramp_path = os.path.join(DEFAULT_SAVE_DIR, "C3ramp.fts")
     ramp = fits.getdata(ramp_path)
     return ramp
 
+
 def _read_mask_full() -> NDArray:
     msk_fn = os.path.join(DEFAULT_SAVE_DIR, "c3_cl_mask_lvl1.fts")
     mask = fits.getdata(msk_fn)
     return mask
 
+
 def _read_vig_full() -> Tuple[NDArray, NDArray]:
     vig_pre = os.path.join(DEFAULT_SAVE_DIR, "c3vig_preint_final.fts")
     vig_post = os.path.join(DEFAULT_SAVE_DIR, "c3vig_postint_final.fts")
@@ -403,6 +418,7 @@ def _read_vig_full() -> Tuple[NDArray, NDArray]:
     vig_post = fits.getdata(vig_post)
     return vig_pre, vig_post
 
+
 def c3_calibrate(img0: NDArray, header: fits.Header, *args, **kwargs):
     assert header["detector"] == "C3", "Not valid C3 fits file"
     if check_05(header):
@@ -437,7 +453,7 @@ def c3_calibrate(img0: NDArray, header: fits.Header, *args, **kwargs):
     ramp = _read_ramp_full()
     bkg = _read_bkg_full()
     forward, inverse = transforms()
-    if 'model' not in kwargs:
+    if "model" not in kwargs:
         model = normal_model_reconstruction()
 
     header.add_history("C3ramp.fts 1999/03/18")
@@ -457,7 +473,9 @@ def c3_calibrate(img0: NDArray, header: fits.Header, *args, **kwargs):
 
     vig, ramp, bkg, mask = correct_var(header, vig, ramp, bkg, mask)
 
-    img = c3_calibration_forward(img0, header, calfac, vig, mask, bkg, ramp, model, forward, inverse, **kwargs)
+    img = c3_calibration_forward(
+        img0, header, calfac, vig, mask, bkg, ramp, model, forward, inverse, **kwargs
+    )
 
     header.add_history(
         f"corkit/lasco.py c3_calibrate: (function) {__version__}, 12/04/24"
@@ -522,6 +540,7 @@ def c3_calibration_forward(
         img = dl_image(model, img.T, bkg, forward, inverse)
     return img
 
+
 def c3_calfactor(header: fits.Header, **kwargs) -> Tuple[fits.Header, float]:
     # Set calibration factor for the various filters
     filter_ = header["filter"].upper().strip()
@@ -680,6 +699,7 @@ def c2_calfactor(header: fits.Header, **kwargs) -> Tuple[fits.Header, float]:
 
     return header, cal_factor
 
+
 def c2_calibrate(
     img0: NDArray, header: fits.Header, **kwargs
 ) -> Tuple[NDArray, fits.Header]:
@@ -691,7 +711,7 @@ def c2_calibrate(
         print("This file is already a Level 1 product.")
         return img0, header
 
-    vig_full = kwargs.get('vig_full', None)
+    vig_full = kwargs.get("vig_full", None)
     header, expfac, bias = get_exp_factor(header)
     header["exptime"] *= expfac
     header["offset"] = bias
@@ -723,6 +743,7 @@ def c2_calibrate(
 
     return img, header
 
+
 def c2_calibration_forward(img, header, calfac, vig):
     if header["polar"] in [
         "PB",
diff --git a/corkit/reconstruction.py b/corkit/reconstruction.py
index aa0c215..5bb21fe 100644
--- a/corkit/reconstruction.py
+++ b/corkit/reconstruction.py
@@ -18,10 +18,14 @@ def forward(x, bkg):
         forward_eq = forward_eq(x)
         x = forward_eq(x)
         bkg = ImageNormalize(stretch=HistEqStretch(bkg[np.isfinite(bkg)]))(bkg) < 0.2
-        msk = torch.from_numpy((((x <0.168) + bkg) < 0.99).astype(np.float32)).unsqueeze(0).unsqueeze(0)
+        msk = (
+            torch.from_numpy((((x < 0.168) + bkg) < 0.99).astype(np.float32))
+            .unsqueeze(0)
+            .unsqueeze(0)
+        )
         x = torch.from_numpy(x).unsqueeze(0).unsqueeze(0)
-        x = interpolate(x, (1024,1024), mode = 'bilinear', align_corners=False)
-        msk = interpolate(msk, size = (1024,1024), mode = 'nearest')
+        x = interpolate(x, (1024, 1024), mode="bilinear", align_corners=False)
+        msk = interpolate(msk, size=(1024, 1024), mode="nearest")
         return x, forward_eq, msk
 
     def inverse(x, forward_eq, init_shape):
@@ -32,33 +36,40 @@ def inverse(x, forward_eq, init_shape):
 
 
 def load_model(path: str):
-    device = 'cuda' if torch.cuda.is_available() else 'cpu'
+    device = "cuda" if torch.cuda.is_available() else "cpu"
     match device:
-        case 'cuda':
+        case "cuda":
             return torch.jit.load(path)
-        case 'cpu':
-            return torch.jit.load(path, map_location = torch.device('cpu'))
+        case "cpu":
+            return torch.jit.load(path, map_location=torch.device("cpu"))
+
 
 def cross_model_reconstruction():
     path: str = os.path.join(DEFAULT_SAVE_DIR, "models/cross.pt")
     return load_model(path)
 
+
 def fourier_model_reconstruction():
     path: str = os.path.join(DEFAULT_SAVE_DIR, "models/fourier.pt")
     return load_model(path)
 
+
 def normal_model_reconstruction():
     path: str = os.path.join(DEFAULT_SAVE_DIR, "models/partial_conv.pt")
     return load_model(path)
 
+
 def dl_image(model, img, bkg, forward_transform, inverse_transform):
     init_shape = img.shape
     x, forward_eq, mask = forward_transform(img.astype(np.float32), bkg)
-    if len(np.where(mask == 0.)[0]) > 32*32:
-        device = 'cuda' if torch.cuda.is_available() else 'cpu'
-        x, _ = model(x.view(1, 1, 1024,1024).to(device).float(), mask.view(1, 1, 1024,1024).to(device).float())
-        x = interpolate(x, size = init_shape)
+    if len(np.where(mask == 0.0)[0]) > 32 * 32:
+        device = "cuda" if torch.cuda.is_available() else "cpu"
+        x, _ = model(
+            x.view(1, 1, 1024, 1024).to(device).float(),
+            mask.view(1, 1, 1024, 1024).to(device).float(),
+        )
+        x = interpolate(x, size=init_shape)
         x = inverse_transform(x, forward_eq, init_shape)
         return x
     else:
diff --git a/corkit/secchi.py b/corkit/secchi.py
index a9e802b..592ae4b 100644
--- a/corkit/secchi.py
+++ b/corkit/secchi.py
@@ -46,7 +46,9 @@ def __init__(self) -> None:
         self.url = (
             lambda date, name: f"https://secchi.nrl.navy.mil/postflight/cor2/L0/a/img/{date}/{name}"
         )
-        self.png_path = lambda date, hour: f"./data/SECCHI/COR2/{date}_{hour}.png"
+        self.png_path = (
+            lambda date, hour: f"./data/SECCHI/COR2/{date}_{hour}.png"
+        )
         self.fits_path = (
             lambda date, hour: f"./data/SECCHI/COR2/{date}_{hour}.fits"
         )
@@ -103,4 +105,3 @@ def data_prep(self, scrap_date):
             scrap_date[0], scrap_date[-1], timedelta(days=1)
         )
         return self.get_days(scrap_date)
-
diff --git a/corkit/utils.py b/corkit/utils.py
index dd44697..4d3430c 100644
--- a/corkit/utils.py
+++ b/corkit/utils.py
@@ -48,9 +48,11 @@ def save_to_fits(img: np.array, header: fits.Header, filepath: str):
 def save_to_png(img, filepath):
     Image.fromarray(img).save(filepath, "PNG")
 
+
 def save_to_jp2(img, filepath):
     Image.fromarray(img).save(filepath, quality_mode="lossless")
 
+
 def save(
     filepath: str,
     filename: str,
@@ -66,6 +68,7 @@ def save(
     elif filetype == "png":
         save_to_png(img, os.path.join(filepath, filename + ".png"))
 
+
 def FITS(fits_file):
     with fits.open(fits_file) as hdul:
         img0: np.array = hdul[0].data
@@ -75,6 +78,7 @@ def FITS(fits_file):
 
     return img0, header
 
+
 def get_exp_factor(header: fits.Header) -> Tuple[fits.Header, float, float]:
     tool = header["detector"].strip().lower()
     date = datetime.strptime(header["date-obs"], "%Y/%m/%d")
@@ -116,11 +120,16 @@ def get_exp_factor(header: fits.Header) -> Tuple[fits.Header, float, float]:
         header.add_history(f"Bias {bias} from None")
         return header, 1, header["offset"]
 
+
 def correct_var(header, *args):
     args = list(args)
-    if (header["r1col"] != 20) or (header["r1row"] != 1) \
-        or (header["r2col"] != 1043) or (header["r2row"] != 1024):
+    if (
+        (header["r1col"] != 20)
+        or (header["r1row"] != 1)
+        or (header["r2col"] != 1043)
+        or (header["r2row"] != 1024)
+    ):
 
         x1 = header["r1col"] - 20
         x2 = header["r2col"] - 20
@@ -134,6 +143,7 @@ def correct_var(header, *args):
 
     return args
 
+
 def apply_summing_corrections(header, *args):
     args = list(args)
     summing = np.maximum(header["sumcol"], 1) * np.maximum(header["sumrow"], 1)
@@ -151,6 +161,7 @@ def apply_summing_corrections(header, *args):
 
     return args
 
+
 def c2_warp(img: np.array, header: fits.Header) -> Tuple[np.array, fits.Header]:
     header.add_history(f"corkit/utils.py c2_warp: (function) {__version__} 12/04/24")
     gridsize = 32
@@ -187,6 +198,7 @@ def c2_warp(img: np.array, header: fits.Header) -> Tuple[np.array, fits.Header]:
     header.add_history(f"corkit/utils.py warp_tri: (function) {__version__} 12/04/24")
     return img, header
 
+
 def c3_warp(img, header):
     header.add_history(f"corkit/utils.py c3_warp: (function) {__version__} 12/04/24")
     gridsize = 32
@@ -235,6 +247,7 @@ def c3_warp(img, header):
     header.add_history(f"corkit/utils.py warp_tri: (function) {__version__} 12/04/24")
     return img[int(y1) : int(y2 + 1), int(x1) : int(x2 + 1)], header
 
+
 def warp_tri(x0, y0, xi, yi, img):
     y_new, x_new = np.meshgrid(np.arange(img.shape[1]), np.arange(img.shape[0]))
 
@@ -246,6 +259,7 @@ def warp_tri(x0, y0, xi, yi, img):
 
     return map_coordinates(img, [y_grid, x_grid], order=1)
 
+
 def c2_distortion(data, arcs):
     mm = data * 0.021
     cf = np.array(
@@ -256,6 +270,7 @@ def c2_distortion(data, arcs):
     secs = subtense("c2") if arcs is None else float(arcs)
     return secs * f1
 
+
 def distortion_coeffs(telescope: str):
     tel = telescope.upper().strip()
     if tel == "C2":
@@ -268,6 +283,7 @@ def distortion_coeffs(telescope: str):
 
     return cf
 
+
 def subtense(tool: str):
     tool = tool.strip().upper()
 
@@ -286,6 +302,7 @@ def subtense(tool: str):
     else:
         return 0
 
+
 def get_sec_pixel(header, FULL: float = False):
     sec_pix = subtense(header["detector"])
 
@@ -300,6 +317,7 @@ def get_sec_pixel(header, FULL: float = False):
 
     return sec_pix
 
+
 def solar_ephem(yymmdd, soho=False):
     dte = Time(datetime.strptime(yymmdd, "%y%m%d"))
     j2000 = Time(datetime.strptime("20000101", "%Y%m%d"))
@@ -317,6 +335,7 @@ def solar_ephem(yymmdd, soho=False):
     radius = 0.2666 / dist
     return radius
 
+
 def reduce_statistics2(img, header, **kwargs):
     if np.max(img) > 0.1:
         img[img > 0.00005] = 0.00005
@@ -328,7 +347,7 @@ def reduce_statistics2(img, header, **kwargs):
     mn = np.min(img[wmn])
     mx = np.max(img[wmn])
     medyen = np.median(img[wmn])
-    bscale: float = 1.
+    bscale: float = 1.0
     if medyen > 0:
         while medyen * bscale < 1000:
             bscale *= 10
@@ -391,6 +410,7 @@ def reduce_statistics2(img, header, **kwargs):
 
     return header
 
+
 def reduce_std_size(
     img0,
     hdr,
@@ -529,9 +549,11 @@ def reduce_std_size(
 
     return full_img
 
+
 def rebin(arr, *args):
     return tt.resize(arr, args, anti_aliasing=True)
 
+
 def offset_bias(hdr, SUM: bool = False):
     port: str = hdr["readport"].strip().upper()
     tel: str = hdr["detector"].strip().upper()
@@ -700,6 +722,7 @@ def offset_bias(hdr, SUM: bool = False):
 
 c3_occult_cntr_list = dict(zip(date_list, pos))
 
+
 def occltr_cntr(header):
     tel = header["detector"].strip().upper()
     filt = header["filter"].strip().upper()
@@ -849,6 +872,7 @@ def tai2utc(tai_time: float):
 
     return utc_time
 
+
 def date_to_seconds_since_1958(date_time):
     reference_date = datetime(1958, 1, 1)
     days_difference = (date_time - reference_date).days
@@ -878,6 +902,7 @@ def fixwrap(in_val):
     out = out + (out < 0) * max_unsigned_int
     return out
 
+
 def get_offset(utime: datetime):
     filename = os.path.join(DEFAULT_SAVE_DIR, "data/data_anal/c2_time_offsets.dat")
     offsets = {}
@@ -916,6 +941,7 @@ def get_offset(utime: datetime):
 
     return offset
 
+
 def adjust_all_date_obs(hdr):
     adj = {"date": "", "time": "", "err": ""}
 
@@ -1041,11 +1067,13 @@ def adjust_hdr(hdr):
 
     return adjusted
 
+
 def linear_interp(x1, y1, x2, y2, x):
     s = (y2 - y1) / (x2 - x1)
     y = s * (x - x1) + y1
     return y
 
+
 def get_roll_or_xy(hdr, DEGREES=False):
     adjusted = {"xpos": 0.0, "ypos": 0.0, "roll": 0.0}
     sunroll = 0.0
@@ -1317,10 +1345,12 @@ def get_sc_point(date, type, **kwargs):
 
     return RESULT
 
+
 def to_mil(date: datetime):
     start = datetime(date.year, date.month, date.day)
     return (date - start).total_seconds() * 1000
 
+
 def get_sc_att(date):
     s_year = str(date.year).strip()
     base = os.path.join(DEFAULT_SAVE_DIR, "ancil_data/attitude")
@@ -1383,6 +1413,7 @@ def get_sc_att(date):
 
     return result
 
+
 def get_crota(indate):
     intai = utc2tai(indate)
     datfile = os.path.join(
@@ -1405,6 +1436,7 @@ def get_crota(indate):
         crota = roll
     return crota
 
+
 def rot(
     A,
     ANGLE,
@@ -1533,10 +1565,12 @@ def eltheory(Rin, T, limb=0.63, center=False):
 def ne2mass(num_el):
     return electron_mass * num_el
 
+
 """
 --------------------------------------------Deprecation Warning ----------------------------------------------
 """
 
+
 def deprecation(version):
     warnings.warn(
         f"This function is deprecated and will be removed in version: {version}.",
diff --git a/scripts/autocommit.sh b/scripts/autocommit.sh
old mode 100644
new mode 100755
diff --git a/scripts/requirements.sh b/scripts/requirements.sh
old mode 100644
new mode 100755
diff --git a/scripts/test.sh b/scripts/test.sh
old mode 100644
new mode 100755
diff --git a/setup.py b/setup.py
index 1b5b620..166a2af 100644
--- a/setup.py
+++ b/setup.py
@@ -7,11 +7,13 @@
 this_directory = Path(__file__).parent
 long_description = (this_directory / "README.md").read_text()
 
+
 class CustomInstall(install):
     def run(self):
         super().run()
         subprocess.run(["corkit-update", "--batch-size", "10"], check=True)
 
+
 if __name__ == "__main__":
     setup(
         name="corkit",
@@ -37,16 +39,10 @@ def run(self):
             "pandas",
             "torch",
             "torchvision",
-            "gdown"
+            "gdown",
         ],
-        entry_points = {
-            "console_scripts": [
-                "corkit-update=corkit.cli:main"
-            ]
-        },
-        cmdclass={
-            "install": CustomInstall
-        },
+        entry_points={"console_scripts": ["corkit-update=corkit.cli:main"]},
+        cmdclass={"install": CustomInstall},
         classifiers=[
             "Development Status :: 4 - Beta",
             "Intended Audience :: Science/Research",
@@ -82,4 +78,3 @@ def run(self):
             "Typing :: Typed",
         ],
     )
-
diff --git a/tests/test_corkit.py b/tests/test_corkit.py
index 098630e..457140c 100644
--- a/tests/test_corkit.py
+++ b/tests/test_corkit.py
@@ -5,13 +5,16 @@
 from corkit.lasco import CME, level_1, downloader
 from typing import List
 
+
 async def tool_downloader(tool: str, scrap_date_list: List[datetime]) -> None:
     down = downloader(tool, "test")
     await down(scrap_date_list)
 
+
 async def gather_tasks(tools: List[str], scrap_date_list: List[datetime]) -> None:
     await asyncio.gather(*[tool_downloader(tool, scrap_date_list) for tool in tools])
 
+
 def test_downloader() -> None:
     scrap_date_list = [
         (datetime(1998, 5, 6), datetime(1998, 5, 7)),  # Solar Storm of May 1998
@@ -29,7 +32,9 @@ def test_downloader() -> None:
         level_1(file_list, path(name))
 
     cme = CME()
-    bn = lambda name: os.path.join(f"test/{name}", sorted(os.listdir(f"./test/{name}"))[0])
+    bn = lambda name: os.path.join(
+        f"test/{name}", sorted(os.listdir(f"./test/{name}"))[0]
+    )
     for name in tools:
         cme.mass(
             bn(name),
diff --git a/update.py b/update.py
index adade45..d540f3a 100644
--- a/update.py
+++ b/update.py
@@ -1,5 +1,5 @@
 from corkit.dataset import update
 import asyncio
 
-if __name__ == '__main__':
-    asyncio.run(update(batch_size = 1))
+if __name__ == "__main__":
+    asyncio.run(update(batch_size=1))