diff --git a/lassie/models/detection.py b/lassie/models/detection.py
index 527d4c25..f2bec3fa 100644
--- a/lassie/models/detection.py
+++ b/lassie/models/detection.py
@@ -8,7 +8,9 @@
 from typing import TYPE_CHECKING, Any, ClassVar, Iterator, Literal, Type, TypeVar
 from uuid import UUID, uuid4
 
+import numpy as np
 from pydantic import BaseModel, Field, PrivateAttr, computed_field
+from pyevtk.hl import pointsToVTK
 from pyrocko import io
 from pyrocko.gui import marker
 from pyrocko.model import Event, dump_events
@@ -450,6 +452,12 @@ def csv_dir(self) -> Path:
         dir.mkdir(exist_ok=True)
         return dir
 
+    @property
+    def vtk_dir(self) -> Path:
+        dir = self.rundir / "vtk"
+        dir.mkdir(exist_ok=True)
+        return dir
+
     def add(self, detection: EventDetection) -> None:
         markers_file = self.markers_dir / f"{time_to_path(detection.time)}.list"
         self.markers_dir.mkdir(exist_ok=True)
@@ -474,18 +482,24 @@ def dump_detections(self, jitter_location: float = 0.0) -> None:
         """Dump all detections to files in the detection directory."""
         logger.debug("dumping detections")
 
-        self.save_csv(self.csv_dir / "detections.csv")
-        self.save_pyrocko_events(self.rundir / "pyrocko_detections.list")
+        self.export_csv(self.csv_dir / "detections.csv")
+        self.export_pyrocko_events(self.rundir / "pyrocko_detections.list")
+
+        self.export_vtk(self.vtk_dir / "detections")
 
         if jitter_location:
-            self.save_csv(
+            self.export_csv(
                 self.csv_dir / "detections_jittered.csv",
                 jitter_location=jitter_location,
             )
-            self.save_pyrocko_events(
+            self.export_pyrocko_events(
                 self.rundir / "pyrocko_detections_jittered.list",
                 jitter_location=jitter_location,
             )
+            self.export_vtk(
+                self.vtk_dir / "detections_jittered",
+                jitter_location=jitter_location,
+            )
 
     def add_semblance(self, trace: Trace) -> None:
         """Add semblance trace to detection and save to file.
@@ -520,8 +534,8 @@ def load_rundir(cls, rundir: Path) -> EventDetections:
         console.log(f"loaded {detections.n_detections} detections")
         return detections
 
-    def save_csv(self, file: Path, jitter_location: float = 0.0) -> None:
-        """Save detections to a CSV file
+    def export_csv(self, file: Path, jitter_location: float = 0.0) -> None:
+        """Export detections to a CSV file
 
         Args:
             file (Path): output filename
@@ -539,8 +553,10 @@ def save_csv(self, file: Path, jitter_location: float = 0.0) -> None:
             )
         file.write_text("\n".join(lines))
 
-    def save_pyrocko_events(self, filename: Path, jitter_location: float = 0.0) -> None:
-        """Save Pyrocko events for all detections to a file
+    def export_pyrocko_events(
+        self, filename: Path, jitter_location: float = 0.0
+    ) -> None:
+        """Export Pyrocko events for all detections to a file
 
         Args:
             filename (Path): output filename
@@ -548,16 +564,14 @@ def save_pyrocko_events(self, filename: Path, jitter_location: float = 0.0) -> N
         logger.info("saving Pyrocko events to %s", filename)
         detections = self.detections
         if jitter_location:
-            detections = [
-                detection.jitter_location(jitter_location) for detection in detections
-            ]
+            detections = [det.jitter_location(jitter_location) for det in detections]
         dump_events(
-            [detection.as_pyrocko_event() for detection in detections],
+            [det.as_pyrocko_event() for det in detections],
             filename=str(filename),
         )
 
-    def save_pyrocko_markers(self, filename: Path) -> None:
-        """Save Pyrocko markers for all detections to a file
+    def export_pyrocko_markers(self, filename: Path) -> None:
+        """Export Pyrocko markers for all detections to a file
 
         Args:
             filename (Path): output filename
@@ -568,5 +582,33 @@ def save_pyrocko_markers(self, filename: Path) -> None:
             pyrocko_markers.extend(detection.get_pyrocko_markers())
         marker.save_markers(pyrocko_markers, str(filename))
 
+    def export_vtk(
+        self,
+        filename: Path,
+        jitter_location: float = 0.0,
+    ) -> None:
+        """Export events as VTK file
+
+        Args:
+            filename (Path): output filename, without file extension.
+            jitter_location (float): randomize detection locations by this amount.
+        """
+        detections = self.detections
+        if jitter_location:
+            detections = [det.jitter_location(jitter_location) for det in detections]
+        offsets = np.array(
+            [(det.east_shift, det.north_shift, det.depth) for det in detections]
+        )
+        pointsToVTK(
+            str(filename),
+            np.array(offsets[:, 0]),
+            np.array(offsets[:, 1]),
+            -np.array(offsets[:, 2]),
+            data={
+                "semblance": np.array([det.semblance for det in detections]),
+                "time": np.array([det.time.timestamp() for det in detections]),
+            },
+        )
+
     def __iter__(self) -> Iterator[EventDetection]:
         return iter(sorted(self.detections, key=lambda d: d.time))
diff --git a/lassie/models/station.py b/lassie/models/station.py
index 53a4c30f..9f730f9e 100644
--- a/lassie/models/station.py
+++ b/lassie/models/station.py
@@ -183,7 +183,7 @@ def get_coordinates(self, system: CoordSystem = "geographic") -> np.ndarray:
             [(*sta.effective_lat_lon, sta.effective_elevation) for sta in self]
         )
 
-    def dump_pyrocko_stations(self, filename: Path) -> None:
+    def export_pyrocko_stations(self, filename: Path) -> None:
         """Dump stations to pyrocko station yaml file.
 
         Args:
@@ -194,7 +194,7 @@ def dump_pyrocko_stations(self, filename: Path) -> None:
             filename=str(filename.expanduser()),
         )
 
-    def dump_csv(self, filename: Path) -> None:
+    def export_csv(self, filename: Path) -> None:
         """Dump stations to CSV file.
 
         Args:
@@ -208,5 +208,8 @@ def dump_csv(self, filename: Path) -> None:
                     f"{sta.lat},{sta.lon},{sta.elevation},{sta.depth}\n"
                 )
 
+    def export_vtk(self, reference: Location | None = None) -> None:
+        ...
+
     def __hash__(self) -> int:
         return hash(sta for sta in self)
diff --git a/lassie/octree.py b/lassie/octree.py
index 6101a662..f3778330 100644
--- a/lassie/octree.py
+++ b/lassie/octree.py
@@ -208,6 +208,7 @@ def check_limits(self) -> Octree:
                 f"invalid octree size limits ({self.size_initial}, {self.size_limit}),"
                 " expected size_limit <= size_initial"
             )
+        self.reference = self.reference.shifted_origin()
         return self
 
     def model_post_init(self, __context: Any) -> None:
diff --git a/lassie/search.py b/lassie/search.py
index 8f557c5b..74ab23a3 100644
--- a/lassie/search.py
+++ b/lassie/search.py
@@ -144,11 +144,11 @@ def write_config(self, path: Path | None = None) -> None:
         path.write_text(self.model_dump_json(indent=2, exclude_unset=True))
 
         logger.debug("dumping stations...")
-        self.stations.dump_pyrocko_stations(rundir / "pyrocko_stations.yaml")
+        self.stations.export_pyrocko_stations(rundir / "pyrocko_stations.yaml")
 
         csv_dir = rundir / "csv"
         csv_dir.mkdir(exist_ok=True)
-        self.stations.dump_csv(csv_dir / "stations.csv")
+        self.stations.export_csv(csv_dir / "stations.csv")
 
     @property
     def semblance_stats(self) -> SemblanceStats:
@@ -218,11 +218,11 @@ async def prepare(self) -> None:
         self.init_boundaries()
 
     async def start(self, force_rundir: bool = False) -> None:
-        await self.prepare()
-
         if not self.has_rundir():
             self.init_rundir(force=force_rundir)
 
+        await self.prepare()
+
         logger.info("starting search...")
         batch_processing_start = datetime_now()
         processing_start = datetime_now()
diff --git a/lassie/tracers/fast_marching/fast_marching.py b/lassie/tracers/fast_marching/fast_marching.py
index 062ce290..590e8b79 100644
--- a/lassie/tracers/fast_marching/fast_marching.py
+++ b/lassie/tracers/fast_marching/fast_marching.py
@@ -217,7 +217,7 @@ def save(self, path: Path) -> Path:
             raise AttributeError("travel times have not been calculated yet")
 
         file = path / self.filename if path.is_dir() else path
-        logger.debug("saving travel times to %s...", file)
+        logger.debug("saving travel times to %s", file)
 
         with zipfile.ZipFile(str(file), "w") as archive:
             archive.writestr("model.json", self.model_dump_json(indent=2))
@@ -238,7 +238,7 @@ def load(cls, file: Path) -> Self:
         Returns:
             Self: 3D travel times
         """
-        logger.debug("loading travel times from %s...", file)
+        logger.debug("loading travel times from %s", file)
         with zipfile.ZipFile(file, "r") as archive:
             path = zipfile.Path(archive)
             model_file = path / "model.json"
@@ -253,13 +253,15 @@ def _load_travel_times(self) -> np.ndarray:
         with zipfile.ZipFile(self._file, "r") as archive:
             return np.load(archive.open("travel_times.npy", "r"))
 
-    def export_vtk(self, filename: Path) -> None:
+    def export_vtk(self, filename: Path, reference: Location | None = None) -> None:
+        offset = reference.offset_from(self.center) if reference else np.zeros(3)
+
         out_file = gridToVTK(
             str(filename),
-            self._east_coords,
-            self._north_coords,
-            self._depth_coords,
-            cellData={"travel_times": self.travel_times},
+            self._east_coords + offset[0],
+            self._north_coords + offset[1],
+            -(self._depth_coords + offset[2]),
+            pointData={"travel_times": self.travel_times},
         )
         logger.debug(
             "vtk: exported travel times of %s to %s",
@@ -399,12 +401,16 @@ async def prepare(
 
         vtk_dir = rundir / "vtk"
         vtk_dir.mkdir(parents=True, exist_ok=True)
-        logger.info("exporting velocity model VTK file...")
-        velocity_model.export_vtk(vtk_dir / f"velocity-model-{self.phase}")
-
-        logger.info("exporting VTK files for travel time volumes...")
-        for station, volume in self._travel_time_volumes.items():
-            volume.export_vtk(vtk_dir / f"travel-times-{station}")
+        logger.info("exporting vtk files")
+        velocity_model.export_vtk(
+            vtk_dir / f"velocity-model-{self.phase}",
+            reference=octree.reference,
+        )
+        for volume in self._travel_time_volumes.values():
+            volume.export_vtk(
+                vtk_dir / f"travel-times-{volume.station.pretty_nsl}",
+                reference=octree.reference,
+            )
 
     def _load_cached_tavel_times(self, cache_dir: Path) -> None:
         logger.debug("loading travel times volumes from cache %s...", cache_dir)
diff --git a/lassie/tracers/fast_marching/velocity_models.py b/lassie/tracers/fast_marching/velocity_models.py
index aad3f650..9aa19ace 100644
--- a/lassie/tracers/fast_marching/velocity_models.py
+++ b/lassie/tracers/fast_marching/velocity_models.py
@@ -240,13 +240,15 @@ def resample(
         )
         return resampled_model
 
-    def export_vtk(self, filename: Path) -> None:
+    def export_vtk(self, filename: Path, reference: Location | None = None) -> None:
+        offset = reference.offset_from(self.center) if reference else np.zeros(3)
+
         out_file = gridToVTK(
             str(filename),
-            self._east_coords,
-            self._north_coords,
-            self._depth_coords,
-            cellData={"velocity": self._velocity_model},
+            self._east_coords + offset[0],
+            self._north_coords + offset[1],
+            -(self._depth_coords + offset[2]),
+            pointData={"velocity": self._velocity_model},
         )
         logger.info("vtk: exported velocity model to %s", out_file)
 
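Note: the new EventDetections.export_vtk writes the detections as a point cloud through pyevtk's pointsToVTK, which appends the .vtu extension to the given filename. A minimal sketch of how the exported file could be sanity-checked after a run; pyvista and the run directory path are illustrative assumptions, not part of this change:

    import pyvista as pv  # not a dependency of this change, used only for inspection

    # pointsToVTK writes <filename>.vtu; "my-run" stands in for an actual rundir
    detections = pv.read("my-run/vtk/detections.vtu")
    print(detections.n_points)            # one point per detection
    print(detections["semblance"].max())  # semblance values attached as point data
    print(detections["time"].min())       # detection times stored as POSIX timestamps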