From e2cb6b65896279dfb3d957de3f8fb368face4a05 Mon Sep 17 00:00:00 2001
From: pengzhenghao
Date: Wed, 18 Oct 2023 14:30:40 -0700
Subject: [PATCH] Fix the asset remove code; Update minor docstring

---
 metadrive/pull_asset.py     | 14 +++++++++-----
 metadrive/scenario/utils.py | 25 +++++++++++++++++++++----
 setup.py                    |  3 ++-
 3 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/metadrive/pull_asset.py b/metadrive/pull_asset.py
index 97292f4ba..547d0c7ae 100644
--- a/metadrive/pull_asset.py
+++ b/metadrive/pull_asset.py
@@ -1,10 +1,10 @@
 import argparse
 import os
+import progressbar
 import shutil
 import urllib.request
 import zipfile
-
-import progressbar
+from filelock import FileLock
 
 from metadrive.constants import VERSION
 from metadrive.engine.logger import get_logger
@@ -55,17 +55,21 @@ def pull_asset(update):
         return
 
     zip_path = os.path.join(TARGET_DIR, 'assets.zip')
+    zip_lock = os.path.join(TARGET_DIR, 'assets.zip.lock')
 
     # Fetch the zip file
     logger.info("Pull assets from {}".format(ASSET_URL))
     urllib.request.urlretrieve(ASSET_URL, zip_path, MyProgressBar())
 
     # Extract the zip file to the desired location
-    with zipfile.ZipFile(zip_path, 'r') as zip_ref:
-        zip_ref.extractall(TARGET_DIR)
+    lock = FileLock(zip_lock)
+    with lock:
+        with zipfile.ZipFile(zip_path, 'r') as zip_ref:
+            zip_ref.extractall(TARGET_DIR)
 
     # Remove the downloaded zip file (optional)
-    os.remove(zip_path)
+    if os.path.exists(zip_path):
+        os.remove(zip_path)
 
     logger.info("Successfully download assets, version: {}. MetaDrive version: {}".format(asset_version(), VERSION))
 
diff --git a/metadrive/scenario/utils.py b/metadrive/scenario/utils.py
index 5f3d432e8..21cb6866b 100644
--- a/metadrive/scenario/utils.py
+++ b/metadrive/scenario/utils.py
@@ -5,7 +5,6 @@
 import matplotlib.pyplot as plt
 import numpy as np
 from matplotlib.pyplot import figure
-
 from metadrive.component.static_object.traffic_object import TrafficCone, TrafficBarrier
 from metadrive.component.traffic_light.base_traffic_light import BaseTrafficLight
 from metadrive.component.traffic_participants.cyclist import Cyclist
@@ -323,6 +322,14 @@ def convert_recorded_scenario_exported(record_episode, scenario_log_interval=0.1
 
 
 def read_scenario_data(file_path):
+    """Read a scenario pkl file and return the Scenario Description instance.
+
+    Args:
+        file_path: the path to a scenario file (usually ends with `.pkl`).
+
+    Returns:
+        The Scenario Description instance of that scenario.
+    """
     assert SD.is_scenario_file(file_path), "File: {} is not scenario file".format(file_path)
     with open(file_path, "rb") as f:
         # unpickler = CustomUnpickler(f)
@@ -333,11 +340,21 @@ def read_dataset_summary(file_folder, check_file_existence=True):
     """
-    We now support two methods to load pickle files.
+    Read the `dataset_summary.pkl` and return the metadata of each scenario in this dataset.
+
+    This function supports two methods to load pickle files. The first is the old method where we store pickle files in
+    0.pkl, 1.pkl, .... The second is the new method which uses a summary file to record important metadata of
+    each scenario.
 
-    The first is the old method where we store pickle files in 0.pkl, 1.pkl, ...
+    Args:
+        file_folder: the path to the root folder of your dataset.
+        check_file_existence: check if all scenarios registered in the summary file exist.
 
-    The second is the new method which use a summary file to record important metadata of each scenario.
+    Returns:
+        A tuple of three elements:
+        1) the summary dict mapping from scenario ID to its metadata,
+        2) the list of all scenario IDs, and
+        3) a dict mapping from scenario IDs to the folder that hosts their files.
     """
     summary_file = os.path.join(file_folder, SD.DATASET.SUMMARY_FILE)
     mapping_file = os.path.join(file_folder, SD.DATASET.MAPPING_FILE)
diff --git a/setup.py b/setup.py
index 30be66243..29530765a 100644
--- a/setup.py
+++ b/setup.py
@@ -59,7 +59,8 @@ def is_win():
     "scipy",
     "psutil",
     "geopandas",
-    "shapely"
+    "shapely",
+    "filelock"
 ]
 
 nuplan_requirement = [
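For reference, below is a minimal, self-contained sketch of the lock-guarded extract-and-remove pattern this patch introduces in pull_asset.py. It is an illustration only: it builds a tiny local zip instead of downloading the real asset archive, and the temporary directory stands in for the TARGET_DIR used by pull_asset.py. It assumes the filelock package (added to setup.py above) is installed.

import os
import tempfile
import zipfile
from filelock import FileLock

# Throwaway directory standing in for MetaDrive's asset folder.
TARGET_DIR = tempfile.mkdtemp()
zip_path = os.path.join(TARGET_DIR, "assets.zip")
zip_lock = os.path.join(TARGET_DIR, "assets.zip.lock")

# Create a small zip so the example runs offline; pull_asset.py downloads this file instead.
with zipfile.ZipFile(zip_path, "w") as zf:
    zf.writestr("assets/hello.txt", "hello")

# The file lock keeps two processes from extracting into the same folder at once.
with FileLock(zip_lock):
    with zipfile.ZipFile(zip_path, "r") as zip_ref:
        zip_ref.extractall(TARGET_DIR)

# Guarded removal, mirroring the patch: another process may already have deleted the zip.
if os.path.exists(zip_path):
    os.remove(zip_path)

The same two ideas explain the diff above: extraction is serialized through FileLock, and removal is made conditional so concurrent callers do not crash on an already-deleted file.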