Skip to content

Commit

Permalink
removed all 'random' loggers that I created without ArtifactManager
Browse files Browse the repository at this point in the history
  • Loading branch information
wangpatrick57 committed Sep 6, 2024
1 parent 7d2c6ca commit 0d6b37c
Show file tree
Hide file tree
Showing 5 changed files with 19 additions and 30 deletions.
23 changes: 10 additions & 13 deletions benchmark/tpch/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,6 @@
)
from util.shell import subprocess_run

benchmark_tpch_logger = logging.getLogger("benchmark/tpch")
benchmark_tpch_logger.setLevel(logging.INFO)


@click.group(name="tpch")
@click.pass_obj
Expand Down Expand Up @@ -75,17 +72,17 @@ def _clone(dbgym_cfg: DBGymConfig) -> None:
dbgym_cfg.cur_symlinks_build_path(mkdir=True) / "tpch-kit.link"
)
if expected_symlink_dpath.exists():
benchmark_tpch_logger.info(f"Skipping clone: {expected_symlink_dpath}")
logging.info(f"Skipping clone: {expected_symlink_dpath}")
return

benchmark_tpch_logger.info(f"Cloning: {expected_symlink_dpath}")
logging.info(f"Cloning: {expected_symlink_dpath}")
real_build_path = dbgym_cfg.cur_task_runs_build_path()
subprocess_run(
f"./tpch_setup.sh {real_build_path}", cwd=dbgym_cfg.cur_source_path()
)
symlink_dpath = link_result(dbgym_cfg, real_build_path / "tpch-kit")
assert expected_symlink_dpath.samefile(symlink_dpath)
benchmark_tpch_logger.info(f"Cloned: {expected_symlink_dpath}")
logging.info(f"Cloned: {expected_symlink_dpath}")


def _get_tpch_kit_dpath(dbgym_cfg: DBGymConfig) -> Path:
Expand All @@ -103,7 +100,7 @@ def _generate_queries(
) -> None:
tpch_kit_dpath = _get_tpch_kit_dpath(dbgym_cfg)
data_path = dbgym_cfg.cur_symlinks_data_path(mkdir=True)
benchmark_tpch_logger.info(
logging.info(
f"Generating queries: {data_path} [{seed_start}, {seed_end}]"
)
for seed in range(seed_start, seed_end + 1):
Expand All @@ -125,7 +122,7 @@ def _generate_queries(
)
queries_symlink_dpath = link_result(dbgym_cfg, real_dir)
assert queries_symlink_dpath.samefile(expected_queries_symlink_dpath)
benchmark_tpch_logger.info(
logging.info(
f"Generated queries: {data_path} [{seed_start}, {seed_end}]"
)

Expand All @@ -137,12 +134,12 @@ def _generate_data(dbgym_cfg: DBGymConfig, scale_factor: float) -> None:
data_path / f"tables_sf{get_scale_factor_string(scale_factor)}.link"
)
if expected_tables_symlink_dpath.exists():
benchmark_tpch_logger.info(
logging.info(
f"Skipping generation: {expected_tables_symlink_dpath}"
)
return

benchmark_tpch_logger.info(f"Generating: {expected_tables_symlink_dpath}")
logging.info(f"Generating: {expected_tables_symlink_dpath}")
subprocess_run(f"./dbgen -vf -s {scale_factor}", cwd=tpch_kit_dpath / "dbgen")
real_dir = dbgym_cfg.cur_task_runs_data_path(
f"tables_sf{get_scale_factor_string(scale_factor)}", mkdir=True
Expand All @@ -151,7 +148,7 @@ def _generate_data(dbgym_cfg: DBGymConfig, scale_factor: float) -> None:

tables_symlink_dpath = link_result(dbgym_cfg, real_dir)
assert tables_symlink_dpath.samefile(expected_tables_symlink_dpath)
benchmark_tpch_logger.info(f"Generated: {expected_tables_symlink_dpath}")
logging.info(f"Generated: {expected_tables_symlink_dpath}")


def _generate_workload(
Expand All @@ -165,7 +162,7 @@ def _generate_workload(
workload_name = workload_name_fn(scale_factor, seed_start, seed_end, query_subset)
expected_workload_symlink_dpath = symlink_data_dpath / (workload_name + ".link")

benchmark_tpch_logger.info(f"Generating: {expected_workload_symlink_dpath}")
logging.info(f"Generating: {expected_workload_symlink_dpath}")
real_dpath = dbgym_cfg.cur_task_runs_data_path(workload_name, mkdir=True)

queries = None
Expand Down Expand Up @@ -196,4 +193,4 @@ def _generate_workload(

workload_symlink_dpath = link_result(dbgym_cfg, real_dpath)
assert workload_symlink_dpath == expected_workload_symlink_dpath
benchmark_tpch_logger.info(f"Generated: {expected_workload_symlink_dpath}")
logging.info(f"Generated: {expected_workload_symlink_dpath}")
11 changes: 4 additions & 7 deletions dbms/postgres/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,6 @@
)
from util.shell import subprocess_run

dbms_postgres_logger = logging.getLogger("dbms/postgres")
dbms_postgres_logger.setLevel(logging.INFO)


@click.group(name="postgres")
@click.pass_obj
Expand Down Expand Up @@ -142,12 +139,12 @@ def _get_repo_symlink_path(dbgym_cfg: DBGymConfig) -> Path:
def _build_repo(dbgym_cfg: DBGymConfig, rebuild: bool) -> None:
expected_repo_symlink_dpath = _get_repo_symlink_path(dbgym_cfg)
if not rebuild and expected_repo_symlink_dpath.exists():
dbms_postgres_logger.info(
logging.info(
f"Skipping _build_repo: {expected_repo_symlink_dpath}"
)
return

dbms_postgres_logger.info(f"Setting up repo in {expected_repo_symlink_dpath}")
logging.info(f"Setting up repo in {expected_repo_symlink_dpath}")
repo_real_dpath = dbgym_cfg.cur_task_runs_build_path("repo", mkdir=True)
subprocess_run(
f"./build_repo.sh {repo_real_dpath}", cwd=dbgym_cfg.cur_source_path()
Expand All @@ -156,7 +153,7 @@ def _build_repo(dbgym_cfg: DBGymConfig, rebuild: bool) -> None:
# only link at the end so that the link only ever points to a complete repo
repo_symlink_dpath = link_result(dbgym_cfg, repo_real_dpath)
assert expected_repo_symlink_dpath.samefile(repo_symlink_dpath)
dbms_postgres_logger.info(f"Set up repo in {expected_repo_symlink_dpath}")
logging.info(f"Set up repo in {expected_repo_symlink_dpath}")


def _create_dbdata(
Expand Down Expand Up @@ -207,7 +204,7 @@ def _create_dbdata(
# Create symlink.
# Only link at the end so that the link only ever points to a complete dbdata.
dbdata_tgz_symlink_path = link_result(dbgym_cfg, dbdata_tgz_real_fpath)
dbms_postgres_logger.info(f"Created dbdata in {dbdata_tgz_symlink_path}")
logging.info(f"Created dbdata in {dbdata_tgz_symlink_path}")


def _generic_dbdata_setup(dbgym_cfg: DBGymConfig) -> None:
Expand Down
7 changes: 2 additions & 5 deletions manage/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,6 @@
parent_dpath_of_path,
)

task_logger = logging.getLogger("task")
task_logger.setLevel(logging.INFO)


# This is used in test_clean.py. It's defined here to avoid a circular import.
class MockDBGymConfig:
Expand Down Expand Up @@ -184,10 +181,10 @@ def clean_workspace(
ending_num_files = _count_files_in_workspace(dbgym_cfg)

if verbose:
task_logger.info(
logging.info(
f"Removed {starting_num_files - ending_num_files} out of {starting_num_files} files"
)
task_logger.info(
logging.info(
            f"Workspace went from {starting_num_files} to {ending_num_files} files"
)

Expand Down
3 changes: 2 additions & 1 deletion tune/protox/embedding/trainer.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import itertools
import logging
import random
from typing import Any, Callable, Iterator, Optional, Tuple, Union

Expand Down Expand Up @@ -165,7 +166,7 @@ def train(self, start_epoch: int = 1, num_epochs: int = 1) -> None:
self.initialize_dataloader()
for self.epoch in range(start_epoch, num_epochs + 1):
self.set_to_train()
c_f.LOGGER.info("TRAINING EPOCH %d" % self.epoch)
logging.info("TRAINING EPOCH %d" % self.epoch)

if not self.disable_tqdm:
pbar = tqdm.tqdm(range(self.iterations_per_epoch))
Expand Down
5 changes: 1 addition & 4 deletions util/shell.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,6 @@
from pathlib import Path
from typing import Optional

shell_util_logger = logging.getLogger("shell_util")
shell_util_logger.setLevel(logging.INFO)


def subprocess_run(
c: str,
Expand All @@ -17,7 +14,7 @@ def subprocess_run(
cwd_msg = f"(cwd: {cwd if cwd is not None else os.getcwd()})"

if verbose:
shell_util_logger.info(f"Running {cwd_msg}: {c}")
logging.info(f"Running {cwd_msg}: {c}")

with subprocess.Popen(
c,
Expand Down

0 comments on commit 0d6b37c

Please sign in to comment.