From 46c7445f5f0ecef94b6f248af9190aaa15dd7634 Mon Sep 17 00:00:00 2001 From: lucaw Date: Fri, 3 May 2024 11:45:33 -0700 Subject: [PATCH 1/2] Adding in new formatting guidelines / tools. --- .csharpierignore | 6 + .editorconfig | 1 + .github/workflows/black.yaml | 10 + CONTRIBUTING.md | 22 ++ pyproject.toml | 21 ++ requirements-dev.txt | 8 + requirements-test.txt | 7 - setup.py | 2 +- tasks.py | 585 ++++++++++++----------------------- 9 files changed, 270 insertions(+), 392 deletions(-) create mode 100644 .csharpierignore create mode 100644 .github/workflows/black.yaml create mode 100644 CONTRIBUTING.md create mode 100644 pyproject.toml create mode 100644 requirements-dev.txt delete mode 100644 requirements-test.txt diff --git a/.csharpierignore b/.csharpierignore new file mode 100644 index 0000000000..5f8c6e1004 --- /dev/null +++ b/.csharpierignore @@ -0,0 +1,6 @@ +unity/Dependencies/ +unity/Assets/MagicMirror +unity/Assets/MessagePack +unity/Assets/MIConvexHull +unity/Assets/Priority Queue +unity/Assets/Plugins diff --git a/.editorconfig b/.editorconfig index 64d9295585..af21e717ca 100644 --- a/.editorconfig +++ b/.editorconfig @@ -12,6 +12,7 @@ csharp_prefer_braces = true:error [*.cs] indent_size = 4 +insert_final_newline = true # Newline settings csharp_new_line_before_open_brace = none diff --git a/.github/workflows/black.yaml b/.github/workflows/black.yaml new file mode 100644 index 0000000000..e836785818 --- /dev/null +++ b/.github/workflows/black.yaml @@ -0,0 +1,10 @@ +name: Lint + +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: psf/black@23.12.1 # If version changed, update requirements-dev.txt \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000..380a8e7d73 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,22 @@ +# Contributing + +We welcome contributions from the greater community. If you would like to make such a contribution, we recommend first submitting an [issue](https://github.com/allenai/ai2thor/issues) describing your proposed improvement. Doing so ensures we can validate your suggestions before you spend a great deal of time on them. Improvements and bug fixes should be made via a pull request from your fork of the repository at [https://github.com/allenai/ai2thor](https://github.com/allenai/ai2thor). + +All code in pull requests should adhere to the following guidelines. + +## Found a bug or want to suggest an enhancement? + +Please submit an [issue](https://github.com/allenai/ai2thor/issues) in which you note the steps +to reproduce the bug or in which you detail the enhancement. + +## Making a pull request? + +When making a pull request, we require that any code respect the following guidelines. + +### Auto-formatting + +All Python and C# code must be auto-formatted. To do this, simply run +```bash +invoke format +``` +from the root of the repository. 
\ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..e43058a5ed --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,21 @@ +[tool.black] +line-length = 100 + +include = '\.pyi?$' + +exclude = ''' +( + __pycache__ + | \.git + | \.mypy_cache + | \.pytest_cache + | \.vscode + | \.venv + | \bdist\b + | \bdoc\b +) +''' + +[tool.isort] +profile = "black" +multi_line_output = 3 diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000000..b74201cb69 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,8 @@ +pytest +pytest-timeout +pytest-cov +jsonschema +shapely +pytest-mock +dictdiffer +black==23.12.1 # If version changed, update .github/workflows/black.yaml diff --git a/requirements-test.txt b/requirements-test.txt deleted file mode 100644 index 3e1697ceb1..0000000000 --- a/requirements-test.txt +++ /dev/null @@ -1,7 +0,0 @@ -pytest -pytest-timeout -pytest-cov -jsonschema -shapely -pytest-mock -dictdiffer \ No newline at end of file diff --git a/setup.py b/setup.py index 06714db8e4..5538ab5baf 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ def _read_reqs(relpath): ] REQUIREMENTS = _read_reqs("requirements.txt") -REQUIREMENTS_TEST = _read_reqs("requirements-test.txt") +REQUIREMENTS_TEST = _read_reqs("requirements-dev.txt") setup( name="ai2thor", diff --git a/tasks.py b/tasks.py index 2a3f7003df..524604bc90 100644 --- a/tasks.py +++ b/tasks.py @@ -1,3 +1,4 @@ +import glob import os import signal import sys @@ -66,9 +67,7 @@ def add_files(zipf, start_dir, exclude_ext=()): continue arcname = os.path.relpath(fn, start_dir) - if arcname.split("/")[0].endswith( - "_BackUpThisFolder_ButDontShipItWithYourGame" - ): + if arcname.split("/")[0].endswith("_BackUpThisFolder_ButDontShipItWithYourGame"): # print("skipping %s" % arcname) continue # print("adding %s" % arcname) @@ -105,19 +104,17 @@ def push_build(build_archive_name, zip_data, include_private_scenes): ChecksumSHA256=b64encode(sha.digest()).decode("ascii"), ) logger.info("pushing sha256 %s" % (sha256_key,)) - s3.Object(bucket, sha256_key).put( - Body=sha.hexdigest(), ACL=acl, ContentType="text/plain" - ) + s3.Object(bucket, sha256_key).put(Body=sha.hexdigest(), ACL=acl, ContentType="text/plain") except botocore.exceptions.ClientError: - logger.error("caught error uploading archive %s: %s" % (build_archive_name, traceback.format_exc())) + logger.error( + "caught error uploading archive %s: %s" % (build_archive_name, traceback.format_exc()) + ) logger.info("pushed build %s to %s" % (bucket, build_archive_name)) def _webgl_local_build_path(prefix, source_dir="builds"): - return os.path.join( - os.getcwd(), "unity/{}/thor-{}-WebGL/".format(source_dir, prefix) - ) + return os.path.join(os.getcwd(), "unity/{}/thor-{}-WebGL/".format(source_dir, prefix)) def _unity_version(): @@ -134,18 +131,10 @@ def _unity_playback_engines_path(): standalone_path = None if sys.platform.startswith("darwin"): - unity_hub_path = ( - "/Applications/Unity/Hub/Editor/{}/PlaybackEngines".format( - unity_version - ) - ) + unity_hub_path = "/Applications/Unity/Hub/Editor/{}/PlaybackEngines".format(unity_version) # /Applications/Unity/2019.4.20f1/Unity.app/Contents/MacOS - standalone_path = ( - "/Applications/Unity/{}/PlaybackEngines".format( - unity_version - ) - ) + standalone_path = "/Applications/Unity/{}/PlaybackEngines".format(unity_version) elif "win" in sys.platform: raise ValueError("Windows not supported yet, verify PlaybackEnginesPath") unity_hub_path = 
"C:/PROGRA~1/Unity/Hub/Editor/{}/Editor/Data/PlaybackEngines".format( @@ -165,22 +154,19 @@ def _unity_playback_engines_path(): return unity_path + def _unity_path(): unity_version = _unity_version() standalone_path = None if sys.platform.startswith("darwin"): - unity_hub_path = ( - "/Applications/Unity/Hub/Editor/{}/Unity.app/Contents/MacOS/Unity".format( - unity_version - ) + unity_hub_path = "/Applications/Unity/Hub/Editor/{}/Unity.app/Contents/MacOS/Unity".format( + unity_version ) # /Applications/Unity/2019.4.20f1/Unity.app/Contents/MacOS - standalone_path = ( - "/Applications/Unity/{}/Unity.app/Contents/MacOS/Unity".format( - unity_version - ) + standalone_path = "/Applications/Unity/{}/Unity.app/Contents/MacOS/Unity".format( + unity_version ) # standalone_path = ( # "/Applications/Unity-{}/Unity.app/Contents/MacOS/Unity".format( @@ -188,9 +174,7 @@ def _unity_path(): # ) # ) elif "win" in sys.platform: - unity_hub_path = "C:/PROGRA~1/Unity/Hub/Editor/{}/Editor/Unity.exe".format( - unity_version - ) + unity_hub_path = "C:/PROGRA~1/Unity/Hub/Editor/{}/Editor/Unity.exe".format(unity_version) # TODO: Verify windows unity standalone path standalone_path = "C:/PROGRA~1/{}/Editor/Unity.exe".format(unity_version) elif sys.platform.startswith("linux"): @@ -247,9 +231,7 @@ def _build( elapsed = time.time() - start if elapsed > timeout: - logger.error( - f"Timeout occurred when running command:\n{command}\nKilling the process." - ) + logger.error(f"Timeout occurred when running command:\n{command}\nKilling the process.") os.kill(process.pid, signal.SIGKILL) os.waitpid(-1, os.WNOHANG) return False @@ -315,9 +297,7 @@ def class_dataset_images_for_scene(scene_name): for o in event.metadata["objects"]: if o["receptacle"] and o["receptacleObjectIds"] and o["openable"]: print("opening %s" % o["objectId"]) - env.step( - dict(action="OpenObject", objectId=o["objectId"], forceAction=True) - ) + env.step(dict(action="OpenObject", objectId=o["objectId"], forceAction=True)) event = env.step(dict(action="GetReachablePositions", gridSize=0.25)) @@ -336,9 +316,7 @@ def class_dataset_images_for_scene(scene_name): ) ) exclude_colors.update( - set( - map(tuple, np.unique(event.instance_segmentation_frame[-1], axis=0)) - ) + set(map(tuple, np.unique(event.instance_segmentation_frame[-1], axis=0))) ) exclude_colors.update( set( @@ -415,9 +393,7 @@ def class_dataset_images_for_scene(scene_name): for o in event.metadata["objects"]: if o["receptacle"] and o["receptacleObjectIds"] and o["openable"]: print("opening %s" % o["objectId"]) - env.step( - dict(action="OpenObject", objectId=o["objectId"], forceAction=True) - ) + env.step(dict(action="OpenObject", objectId=o["objectId"], forceAction=True)) for vol in visible_object_locations: point = vol["point"] @@ -461,9 +437,7 @@ def class_dataset_images_for_scene(scene_name): # print("start x %s start_y %s end_x %s end y %s" % (start_x, start_y, end_x, end_y)) print("storing %s " % object_id) img = event.cv2img[start_y:end_y, start_x:end_x, :] - dst = cv2.resize( - img, (target_size, target_size), interpolation=cv2.INTER_LANCZOS4 - ) + dst = cv2.resize(img, (target_size, target_size), interpolation=cv2.INTER_LANCZOS4) object_type = object_id.split("|")[0].lower() target_dir = os.path.join("images", scene_name, object_type) @@ -512,14 +486,14 @@ def local_build_test(context, prefix="local", arch="OSXIntel64"): @task(iterable=["scenes"]) -def local_build( - context, prefix="local", arch="OSXIntel64", scenes=None, scripts_only=False -): +def local_build(context, 
prefix="local", arch="OSXIntel64", scenes=None, scripts_only=False): import ai2thor.controller build = ai2thor.build.Build(arch, prefix, False) env = dict() - if os.path.isdir("unity/Assets/Private/Scenes") or os.path.isdir("Assets/Resources/ai2thor-objaverse/NoveltyTHOR_Assets/Scenes"): + if os.path.isdir("unity/Assets/Private/Scenes") or os.path.isdir( + "Assets/Resources/ai2thor-objaverse/NoveltyTHOR_Assets/Scenes" + ): env["INCLUDE_PRIVATE_SCENES"] = "true" build_dir = os.path.join("builds", build.name) @@ -527,9 +501,7 @@ def local_build( env["BUILD_SCRIPTS_ONLY"] = "true" if scenes: - env["BUILD_SCENES"] = ",".join( - map(ai2thor.controller.Controller.normalize_scene, scenes) - ) + env["BUILD_SCENES"] = ",".join(map(ai2thor.controller.Controller.normalize_scene, scenes)) if _build("unity", arch, build_dir, build.name, env=env): print("Build Successful") @@ -671,9 +643,7 @@ class YamlUnity3dTag(yaml.SafeLoader): def let_through(self, node): return self.construct_mapping(node) - YamlUnity3dTag.add_constructor( - "tag:unity3d.com,2011:47", YamlUnity3dTag.let_through - ) + YamlUnity3dTag.add_constructor("tag:unity3d.com,2011:47", YamlUnity3dTag.let_through) qs = yaml.load( open("unity/ProjectSettings/QualitySettings.asset").read(), @@ -694,20 +664,14 @@ def let_through(self, node): def git_commit_comment(): - comment = ( - subprocess.check_output("git log -n 1 --format=%B", shell=True) - .decode("utf8") - .strip() - ) + comment = subprocess.check_output("git log -n 1 --format=%B", shell=True).decode("utf8").strip() return comment def git_commit_id(): commit_id = ( - subprocess.check_output("git log -n 1 --format=%H", shell=True) - .decode("ascii") - .strip() + subprocess.check_output("git log -n 1 --format=%H", shell=True).decode("ascii").strip() ) return commit_id @@ -731,9 +695,9 @@ def push_pip_commit(context): pip_name = os.path.basename(g) logger.info("pushing pip file %s" % g) with open(g, "rb") as f: - s3.Object( - ai2thor.build.PYPI_S3_BUCKET, os.path.join("ai2thor", pip_name) - ).put(Body=f, ACL=acl) + s3.Object(ai2thor.build.PYPI_S3_BUCKET, os.path.join("ai2thor", pip_name)).put( + Body=f, ACL=acl + ) @task @@ -809,11 +773,7 @@ def build_pip(context, version): if ( (next_maj == current_maj + 1) or (next_maj == current_maj and next_min == current_min + 1) - or ( - next_maj == current_maj - and next_min == current_min - and next_sub >= current_sub + 1 - ) + or (next_maj == current_maj and next_min == current_min and next_sub >= current_sub + 1) ): if os.path.isdir("dist"): shutil.rmtree("dist") @@ -830,9 +790,7 @@ def build_pip(context, version): fi.write("__version__ = '%s'\n" % (version)) subprocess.check_call("python setup.py clean --all", shell=True) - subprocess.check_call( - "python setup.py sdist bdist_wheel --universal", shell=True - ) + subprocess.check_call("python setup.py sdist bdist_wheel --universal", shell=True) else: raise Exception( @@ -872,9 +830,7 @@ def build_log_push(build_info, include_private_scenes): bucket = ai2thor.build.PRIVATE_S3_BUCKET acl = "private" - s3.Object(bucket, build_log_key).put( - Body=build_log, ACL=acl, ContentType="text/plain" - ) + s3.Object(bucket, build_log_key).put(Body=build_log, ACL=acl, ContentType="text/plain") def archive_push(unity_path, build_path, build_dir, build_info, include_private_scenes): @@ -908,6 +864,7 @@ def pre_test(context): "unity/builds/%s" % c.build_name(), ) + import scripts.update_private @@ -963,9 +920,7 @@ def link_build_cache(root_dir, arch, branch): os.makedirs(os.path.dirname(branch_cache_dir), 
exist_ok=True) # -c uses MacOS clonefile - subprocess.check_call( - "cp -a -c %s %s" % (main_cache_dir, branch_cache_dir), shell=True - ) + subprocess.check_call("cp -a -c %s %s" % (main_cache_dir, branch_cache_dir), shell=True) logger.info("copying main cache complete for %s" % encoded_branch) branch_library_cache_dir = os.path.join(branch_cache_dir, "Library") @@ -1066,9 +1021,7 @@ def ci_merge_push_pytest_results(context, commit_id): s3_obj.bucket_name, s3_obj.key, ) - logger.info( - "ci_merge_push_pytest_results pytest before url check code change logging works" - ) + logger.info("ci_merge_push_pytest_results pytest before url check code change logging works") logger.info("pytest url %s" % s3_pytest_url) logger.info("s3 obj is valid: {}".format(s3_obj)) @@ -1097,9 +1050,7 @@ def ci_pytest(branch, commit_id): start_time = time.time() - proc = subprocess.run( - "pytest", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + proc = subprocess.run("pytest", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) result = dict( success=proc.returncode == 0, @@ -1114,16 +1065,17 @@ def ci_pytest(branch, commit_id): f"finished pytest for {branch} {commit_id} in {time.time() - start_time:.2f} seconds" ) -# Type hints break build server's invoke version + +# Type hints break build server's invoke version @task def ci_build( context, - commit_id = None, # Optional[str] - branch = None, # Optional[str] - skip_pip = False, # bool - novelty_thor_scenes = False, - skip_delete_tmp_dir = False, # bool - cloudrendering_first = False + commit_id=None, # Optional[str] + branch=None, # Optional[str] + skip_pip=False, # bool + novelty_thor_scenes=False, + skip_delete_tmp_dir=False, # bool + cloudrendering_first=False, ): assert (commit_id is None) == ( branch is None ): if is_travis_build: # a deploy key is used on the build server and an .ssh/config entry has been added # to point to the deploy key called ai2thor-private-github - private_url = "git@ai2thor-private-github:allenai/ai2thor-private.git" + private_url = "git@ai2thor-private-github:allenai/ai2thor-private.git" novelty_thor_url = "git@ai2thor-objaverse-github:allenai/ai2thor-objaverse.git" else: private_url = "https://github.com/allenai/ai2thor-private" - novelty_thor_url ="https://github.com/allenai/ai2thor-objaverse" - + novelty_thor_url = "https://github.com/allenai/ai2thor-objaverse" private_repos = [ scripts.update_private.Repo( url=private_url, target_dir=os.path.join(base_dir, "unity", "Assets", "Private"), delete_before_checkout=is_travis_build, ) ] novelty_thor_repo = scripts.update_private.Repo( url=novelty_thor_url, target_dir=os.path.join(base_dir, "unity", "Assets", "Resources", "ai2thor-objaverse"), delete_before_checkout=is_travis_build, ) if novelty_thor_scenes: logger.info("Including NoveltyThor scenes and making it a private build") - private_repos.append( - novelty_thor_repo - ) + private_repos.append(novelty_thor_repo) else: # Needs to be here so we overwrite any existing NoveltyTHOR repo private_repos.append( scripts.update_private.Repo( url=novelty_thor_url, target_dir=os.path.join( base_dir, "unity", "Assets", "Resources", "ai2thor-objaverse" ), delete_before_checkout=is_travis_build, commit_id="066485f29d7021ac732bed57758dea4b9d481c40", # Initial commit, empty repo. 
) ) @@ -1192,9 +1143,7 @@ def ci_build( novelty_thor_add_branches = ["new_cam_adjust"] if is_travis_build and build and build["branch"] in novelty_thor_add_branches: novelty_thor_scenes = True - private_repos.append( - novelty_thor_repo - ) + private_repos.append(novelty_thor_repo) skip_branches = ["vids", "video", "erick/cloudrendering", "it_vr"] if build and build["branch"] not in skip_branches: @@ -1203,17 +1152,13 @@ def ci_build( logger.info(f"pending build for {build['branch']} {build['commit_id']}") clean(private_repos=private_repos) subprocess.check_call("git fetch", shell=True) - subprocess.check_call( - "git checkout %s --" % build["branch"], shell=True - ) + subprocess.check_call("git checkout %s --" % build["branch"], shell=True) logger.info(f" After checkout") - subprocess.check_call( - "git checkout -qf %s" % build["commit_id"], shell=True - ) + subprocess.check_call("git checkout -qf %s" % build["commit_id"], shell=True) private_scene_options = [novelty_thor_scenes] - build_archs = ["OSXIntel64"] #, "Linux64"] + build_archs = ["OSXIntel64"] # , "Linux64"] # CloudRendering only supported with 2020.3.25 # should change this in the future to automatically install @@ -1227,9 +1172,7 @@ def ci_build( has_any_build_failed = False for include_private_scenes in private_scene_options: for arch in build_archs: - logger.info( - f"processing {arch} {build['branch']} {build['commit_id']}" - ) + logger.info(f"processing {arch} {build['branch']} {build['commit_id']}") temp_dir = arch_temp_dirs[arch] = os.path.join( os.environ["HOME"], "tmp/unity-%s-%s-%s-%s" @@ -1256,9 +1199,7 @@ def ci_build( releases_dir=rdir, ) if commit_build.exists(): - logger.info( - f"found build for commit {build['commit_id']} {arch}" - ) + logger.info(f"found build for commit {build['commit_id']} {arch}") # download the build so that we can run the tests if sys.platform.startswith("darwin"): if arch == "OSXIntel64": @@ -1269,9 +1210,7 @@ def ci_build( else: # this is done here so that when a tag build request arrives and the commit_id has already # been built, we avoid bootstrapping the cache since we short circuited on the line above - link_build_cache( - root_dir=temp_dir, arch=arch, branch=build["branch"] - ) + link_build_cache(root_dir=temp_dir, arch=arch, branch=build["branch"]) build_success = ci_build_arch( root_dir=temp_dir, @@ -1279,13 +1218,11 @@ def ci_build( commit_id=build["commit_id"], include_private_scenes=include_private_scenes, immediately_fail_and_push_log=has_any_build_failed, - timeout=60 * 60 + timeout=60 * 60, # Don't bother trying another build if one has already failed ) - has_any_build_failed = ( - has_any_build_failed or not build_success - ) + has_any_build_failed = has_any_build_failed or not build_success if build_success: logger.info( f"Build success detected for {arch} {build['commit_id']}" @@ -1297,9 +1234,7 @@ def ci_build( # the project and we can run the unit tests # waiting for all builds to complete before starting tests for arch in build_archs: - lock_file_path = os.path.join( - arch_temp_dirs[arch], "unity/Temp/UnityLockfile" - ) + lock_file_path = os.path.join(arch_temp_dirs[arch], "unity/Temp/UnityLockfile") if os.path.isfile(lock_file_path): logger.info(f"attempting to lock {lock_file_path}") lock_file = os.open(lock_file_path, os.O_RDWR) @@ -1314,9 +1249,7 @@ def ci_build( if build["tag"] is None: # its possible that the cache doesn't get linked if the builds # succeeded during an earlier run - link_build_cache( - arch_temp_dirs["OSXIntel64"], "OSXIntel64", 
build["branch"] - ) + link_build_cache(arch_temp_dirs["OSXIntel64"], "OSXIntel64", build["branch"]) # link builds directory so pytest can run logger.info("current directory pre-symlink %s" % os.getcwd()) @@ -1354,8 +1287,7 @@ def ci_build( for p in procs: if p: logger.info( - "joining proc %s for %s %s" - % (p, build["branch"], build["commit_id"]) + "joining proc %s for %s %s" % (p, build["branch"], build["commit_id"]) ) p.join() @@ -1375,17 +1307,13 @@ def ci_build( if is_travis_build: for i in range(12): b = travis_build(build["id"]) - logger.info( - "build state for %s: %s" % (build["id"], b["state"]) - ) + logger.info("build state for %s: %s" % (build["id"], b["state"])) if b["state"] != "started": break time.sleep(10) - logger.info( - "build complete %s %s" % (build["branch"], build["commit_id"]) - ) + logger.info("build complete %s %s" % (build["branch"], build["commit_id"])) fcntl.flock(lock_f, fcntl.LOCK_UN) @@ -1412,13 +1340,9 @@ def install_cloudrendering_engine(context, force=False): if os.path.isdir(full_dir): if force: shutil.rmtree(full_dir) - logger.info( - "CloudRendering engine already installed - removing due to force" - ) + logger.info("CloudRendering engine already installed - removing due to force") else: - logger.info( - "skipping installation - CloudRendering engine already installed" - ) + logger.info("skipping installation - CloudRendering engine already installed") return print("packages/CloudRendering-%s.zip" % _unity_version()) @@ -1441,9 +1365,7 @@ def ci_build_webgl(context, commit_id): arch = "WebGL" set_gi_cache_folder(arch) link_build_cache(os.getcwd(), arch, branch) - webgl_build_deploy_demo( - context, verbose=True, content_addressable=False, force=True - ) + webgl_build_deploy_demo(context, verbose=True, content_addressable=False, force=True) logger.info("finished webgl build deploy %s %s" % (branch, commit_id)) update_webgl_autodeploy_commit_id(commit_id) @@ -1533,6 +1455,7 @@ def ci_build_arch( finally: os.chdir(start_wd) + @task def poll_ci_build(context): import requests @@ -1542,9 +1465,7 @@ def poll_ci_build(context): start_datetime = datetime.datetime.utcnow() hours_before_timeout = 2 - print( - f"WAITING FOR BUILDS TO COMPLETE ({hours_before_timeout} hours before timeout)" - ) + print(f"WAITING FOR BUILDS TO COMPLETE ({hours_before_timeout} hours before timeout)") start_time = time.time() last_emit_time = 0 for i in range(360 * hours_before_timeout): @@ -1596,9 +1517,7 @@ def poll_ci_build(context): f"\nBuild DOES NOT exist for arch {plat}, expected log url: {commit_build.log_url}" ) else: - print( - f"\nBuild DOES exist for arch {plat}, log url: {commit_build.log_url}" - ) + print(f"\nBuild DOES exist for arch {plat}, log url: {commit_build.log_url}") if any_failures: print(f"\nERROR: BUILD FAILURES DETECTED") @@ -1656,9 +1575,7 @@ def build(context, local=False): if include_private_scenes: env["INCLUDE_PRIVATE_SCENES"] = "true" unity_path = "unity" - build_name = ai2thor.build.build_name( - plat.name(), version, include_private_scenes - ) + build_name = ai2thor.build.build_name(plat.name(), version, include_private_scenes) build_dir = os.path.join("builds", build_name) build_path = build_dir + ".zip" build_info = builds[plat.name()] = {} @@ -1813,9 +1730,7 @@ def get_depth( save_image_per_frame=True, ) else: - env = ai2thor.controller.Controller( - width=600, height=600, local_build=local_build - ) + env = ai2thor.controller.Controller(width=600, height=600, local_build=local_build) if scene is not None: env.reset(scene) @@ -1837,9 +1752,7 @@ 
def get_depth( from ai2thor.interact import InteractiveControllerPrompt if scene is not None: - teleport_arg = dict( - action="TeleportFull", y=0.9010001, rotation=dict(x=0, y=rotation, z=0) - ) + teleport_arg = dict(action="TeleportFull", y=0.9010001, rotation=dict(x=0, y=rotation, z=0)) if teleport is not None: teleport = [float(pos) for pos in teleport.split(",")] @@ -1895,9 +1808,7 @@ def get_depth( @task -def inspect_depth( - ctx, directory, all=False, indices=None, jet=False, under_score=False -): +def inspect_depth(ctx, directory, all=False, indices=None, jet=False, under_score=False): import numpy as np import cv2 import glob @@ -1939,15 +1850,11 @@ def sort_key_function(name): mn = np.min(raw_depth) mx = np.max(raw_depth) print("min depth value: {}, max depth: {}".format(mn, mx)) - norm = (((raw_depth - mn).astype(np.float32) / (mx - mn)) * 255.0).astype( - np.uint8 - ) + norm = (((raw_depth - mn).astype(np.float32) / (mx - mn)) * 255.0).astype(np.uint8) img = cv2.applyColorMap(norm, cv2.COLORMAP_JET) else: - grayscale = ( - 255.0 / raw_depth.max() * (raw_depth - raw_depth.min()) - ).astype(np.uint8) + grayscale = (255.0 / raw_depth.max() * (raw_depth - raw_depth.min())).astype(np.uint8) print("max {} min {}".format(raw_depth.max(), raw_depth.min())) img = grayscale @@ -1966,9 +1873,7 @@ def inspect_pixel(event, x, y, flags, param): @task -def real_2_sim( - ctx, source_dir, index, scene, output_dir, rotation=0, local_build=False, jet=False -): +def real_2_sim(ctx, source_dir, index, scene, output_dir, rotation=0, local_build=False, jet=False): import cv2 from ai2thor.util.transforms import transform_real_2_sim @@ -2056,9 +1961,7 @@ def imshow_components(labels): indices_top_left = np.where(labels == labels[0][0]) indices_top_right = np.where(labels == labels[0][img_size[1] - 1]) indices_bottom_left = np.where(labels == labels[img_size[0] - 1][0]) - indices_bottom_right = np.where( - labels == labels[img_size[0] - 1][img_size[1] - 1] - ) + indices_bottom_right = np.where(labels == labels[img_size[0] - 1][img_size[1] - 1]) indices = [ indices_top_left, @@ -2135,10 +2038,7 @@ def check_visible_objects_closed_receptacles(ctx, start_scene, end_scene): ) ) - if ( - visibility_object_id is None - and obj["objectType"] in visibility_object_types - ): + if visibility_object_id is None and obj["objectType"] in visibility_object_types: visibility_object_id = obj["objectId"] if visibility_object_id is None: @@ -2168,9 +2068,7 @@ def check_visible_objects_closed_receptacles(ctx, start_scene, end_scene): ) ) - replace_success = controller.last_event.metadata[ - "lastActionSuccess" - ] + replace_success = controller.last_event.metadata["lastActionSuccess"] if replace_success: if ( @@ -2198,9 +2096,7 @@ def list_objects_with_metadata(bucket): continuation_token = None while True: if continuation_token: - objects = s3c.list_objects_v2( - Bucket=bucket, ContinuationToken=continuation_token - ) + objects = s3c.list_objects_v2(Bucket=bucket, ContinuationToken=continuation_token) else: objects = s3c.list_objects_v2(Bucket=bucket) @@ -2271,11 +2167,7 @@ def upload_file(f_path, key): if ext in content_encoding: kwargs["ContentEncoding"] = content_encoding[ext] - if ( - not force - and key in current_objects - and etag == current_objects[key]["ETag"] - ): + if not force and key in current_objects and etag == current_objects[key]["ETag"]: if verbose: print("ETag match - skipping %s" % key) return @@ -2351,9 +2243,7 @@ def webgl_build_deploy_demo(ctx, verbose=False, force=False, content_addressable 
content_addressable=content_addressable, ) - webgl_deploy( - ctx, source_dir="builds/demo", target_dir="demo", verbose=verbose, force=force - ) + webgl_deploy(ctx, source_dir="builds/demo", target_dir="demo", verbose=verbose, force=force) if verbose: print("Deployed selected scenes to bucket's 'demo' directory") @@ -2363,13 +2253,9 @@ def webgl_build_deploy_demo(ctx, verbose=False, force=False, content_addressable living_rooms = [f"FloorPlan{200 + i}_physics" for i in range(1, 31)] bedrooms = [f"FloorPlan{300 + i}_physics" for i in range(1, 31)] bathrooms = [f"FloorPlan{400 + i}_physics" for i in range(1, 31)] - robothor_train = [ - f"FloorPlan_Train{i}_{j}" for i in range(1, 13) for j in range(1, 6) - ] + robothor_train = [f"FloorPlan_Train{i}_{j}" for i in range(1, 13) for j in range(1, 6)] robothor_val = [f"FloorPlan_Val{i}_{j}" for i in range(1, 4) for j in range(1, 6)] - scenes = ( - kitchens + living_rooms + bedrooms + bathrooms + robothor_train + robothor_val - ) + scenes = kitchens + living_rooms + bedrooms + bathrooms + robothor_train + robothor_val webgl_build( ctx, @@ -2425,9 +2311,7 @@ def webgl_deploy_all(ctx, verbose=False, individual_rooms=False): build_dir = "builds/{}".format(target_s3_dir) webgl_build(ctx, scenes=floorPlanName, directory=build_dir) - webgl_deploy( - ctx, source_dir=build_dir, target_dir=target_s3_dir, verbose=verbose - ) + webgl_deploy(ctx, source_dir=build_dir, target_dir=target_s3_dir, verbose=verbose) else: webgl_build(ctx, room_ranges=range_str, directory=build_dir) @@ -2459,10 +2343,7 @@ def webgl_s3_deploy( if all: flatten = lambda l: [item for sublist in l for item in sublist] room_numbers = flatten( - [ - [i for i in range(room_range[0], room_range[1])] - for key, room_range in rooms.items() - ] + [[i for i in range(room_range[0], room_range[1])] for key, room_range in rooms.items()] ) else: room_numbers = [s.strip() for s in scenes.split(",")] @@ -2477,9 +2358,7 @@ def webgl_s3_deploy( target_s3_dir = "{}/{}".format(target_dir, floor_plan_name) build_dir = "builds/{}".format(target_s3_dir) - webgl_build( - ctx, scenes=floor_plan_name, directory=build_dir, crowdsource_build=True - ) + webgl_build(ctx, scenes=floor_plan_name, directory=build_dir, crowdsource_build=True) if verbose: print("Deploying room '{}'...".format(floor_plan_name)) if not deploy_skip: @@ -2513,9 +2392,7 @@ def webgl_site_deploy( shutil.rmtree(output_dir) # os.mkdir(output_dir) - ignore_func = lambda d, files: [ - f for f in files if isfile(join(d, f)) and f.endswith(".meta") - ] + ignore_func = lambda d, files: [f for f in files if isfile(join(d, f)) and f.endswith(".meta")] if unity_build_dir != "": shutil.copytree(unity_build_dir, output_dir, ignore=ignore_func) @@ -2542,9 +2419,7 @@ def mock_client_request(context): import requests import cv2 - r = requests.post( - "http://127.0.0.1:9200/step", json=dict(action="MoveAhead", sequenceId=1) - ) + r = requests.post("http://127.0.0.1:9200/step", json=dict(action="MoveAhead", sequenceId=1)) payload = msgpack.unpackb(r.content, raw=False) metadata = payload["metadata"]["agents"][0] image = np.frombuffer(payload["frames"][0], dtype=np.uint8).reshape( @@ -2660,9 +2535,7 @@ def get_points(contoller, object_type, scene): print("Getting points in scene: '{}'...: ".format(scene)) controller.reset(scene) event = controller.step( - dict( - action="ObjectTypeToObjectIds", objectType=object_type.replace(" ", "") - ) + dict(action="ObjectTypeToObjectIds", objectType=object_type.replace(" ", "")) ) object_ids = event.metadata["actionReturn"] 
@@ -2673,13 +2546,11 @@ def get_points(contoller, object_type, scene): objects_types_in_scene.add(object_type) object_id = object_ids[0] - event_reachable = controller.step( - dict(action="GetReachablePositions", gridSize=0.25) - ) + event_reachable = controller.step(dict(action="GetReachablePositions", gridSize=0.25)) - target_position = controller.step( - action="GetObjectPosition", objectId=object_id - ).metadata["actionReturn"] + target_position = controller.step(action="GetObjectPosition", objectId=object_id).metadata[ + "actionReturn" + ] reachable_positions = event_reachable.metadata["actionReturn"] @@ -2700,8 +2571,7 @@ def filter_points(selected_points, point_set, minimum_distance): [ p for p in point_set - if sqr_dist(p, selected) - <= minimum_distance * minimum_distance + if sqr_dist(p, selected) <= minimum_distance * minimum_distance ] ) point_set = point_set.difference(remove_set) @@ -2828,8 +2698,7 @@ def key_sort_func(scene_name): objects = [] for objectType in targets: if filter_file is None or ( - objectType in scene_object_filter - and scene in scene_object_filter[objectType] + objectType in scene_object_filter and scene in scene_object_filter[objectType] ): dataset[scene][objectType] = [] obj = get_points(controller, objectType, scene) @@ -2838,9 +2707,7 @@ def key_sort_func(scene_name): dataset_flat = dataset_flat + objects if intermediate_directory != ".": - with open( - os.path.join(intermediate_directory, "{}.json".format(scene)), "w" - ) as f: + with open(os.path.join(intermediate_directory, "{}.json".format(scene)), "w") as f: json.dump(objects, f, indent=4) with open(os.path.join(intermediate_directory, output), "w") as f: @@ -2891,9 +2758,7 @@ def shortest_path_to_object( agentMode="bot", visibilityDistance=visibility_distance, ) - path = metrics.get_shortest_path_to_object_type( - controller, object, p, {"x": 0, "y": 0, "z": 0} - ) + path = metrics.get_shortest_path_to_object_type(controller, object, p, {"x": 0, "y": 0, "z": 0}) minimum_path_length = metrics.path_distance(path) print("Path: {}".format(path)) @@ -2980,9 +2845,7 @@ def filter_dataset(ctx, filename, output_filename, ids=False): @task -def fix_dataset_object_types( - ctx, input_file, output_file, editor_mode=False, local_build=False -): +def fix_dataset_object_types(ctx, input_file, output_file, editor_mode=False, local_build=False): import ai2thor.controller with open(input_file, "r") as f: @@ -3028,9 +2891,7 @@ def fix_dataset_object_types( @task -def test_dataset( - ctx, filename, scenes=None, objects=None, editor_mode=False, local_build=False -): +def test_dataset(ctx, filename, scenes=None, objects=None, editor_mode=False, local_build=False): import ai2thor.controller import ai2thor.util.metrics as metrics @@ -3060,9 +2921,7 @@ def test_dataset( if objects is not None: object_set = set(objects.split(",")) print("Filtering {}".format(object_set)) - filtered_dataset = [ - d for d in filtered_dataset if d["object_type"] in object_set - ] + filtered_dataset = [d for d in filtered_dataset if d["object_type"] in object_set] current_scene = None current_object = None point_counter = 0 @@ -3150,9 +3009,7 @@ def visualize_shortest_paths( dataset_filtered = [d for d in dataset if d["scene"] in scene_f_set] if object_types is not None: object_f_set = set(object_types.split(",")) - dataset_filtered = [ - d for d in dataset_filtered if d["object_type"] in object_f_set - ] + dataset_filtered = [d for d in dataset_filtered if d["object_type"] in object_f_set] print("Running for {} 
points...".format(len(dataset_filtered))) index = 0 @@ -3166,8 +3023,7 @@ def visualize_shortest_paths( previous_index = index controller.reset(current_scene) while ( - current_scene == datapoint["scene"] - and current_object == datapoint["object_type"] + current_scene == datapoint["scene"] and current_object == datapoint["object_type"] ): index += 1 if index > len(dataset_filtered) - 1: @@ -3181,9 +3037,7 @@ def visualize_shortest_paths( failed[key] = [] - print( - "Points for '{}' in scene '{}'...".format(current_object, current_scene) - ) + print("Points for '{}' in scene '{}'...".format(current_object, current_scene)) evt = controller.step( action="AddThirdPartyCamera", rotation=dict(x=90, y=0, z=0), @@ -3194,9 +3048,7 @@ def visualize_shortest_paths( sc = dataset_filtered[previous_index]["scene"] obj_type = dataset_filtered[previous_index]["object_type"] - positions = [ - d["initial_position"] for d in dataset_filtered[previous_index:index] - ] + positions = [d["initial_position"] for d in dataset_filtered[previous_index:index]] # print("{} : {} : {}".format(sc, obj_type, positions)) evt = controller.step( action="VisualizeShortestPaths", @@ -3295,9 +3147,7 @@ def key_sort_func(scene_name): for datapoint in filter_dataset: missing_datapoints_by_scene[datapoint["scene"]].append(datapoint) - partial_dataset_filenames = sorted( - glob.glob("{}/FloorPlan_*.png".format(dataset_dir)) - ) + partial_dataset_filenames = sorted(glob.glob("{}/FloorPlan_*.png".format(dataset_dir))) print("Datas") difficulty_order_map = {"easy": 0, "medium": 1, "hard": 2} @@ -3310,12 +3160,8 @@ def key_sort_func(scene_name): final_dataset = [] for scene in scenes: for object_type in targets: - arr = [ - p for p in partial_dataset[scene] if p["object_type"] == object_type - ] + [ - p - for p in missing_datapoints_by_scene[scene] - if p["object_type"] == object_type + arr = [p for p in partial_dataset[scene] if p["object_type"] == object_type] + [ + p for p in missing_datapoints_by_scene[scene] if p["object_type"] == object_type ] final_dataset = final_dataset + sorted( arr, @@ -3377,10 +3223,7 @@ def resort_dataset(ctx, dataset_path, output_path, editor_mode=False, local_buil new_dataset = [] while index < len(dataset): previous_index = index - while ( - current_scene == datapoint["scene"] - and current_object == datapoint["object_type"] - ): + while current_scene == datapoint["scene"] and current_object == datapoint["object_type"]: index += 1 if index > len(dataset) - 1: break @@ -3538,9 +3381,7 @@ def reachable_pos(ctx, scene, editor_mode=False, local_build=False): @task -def get_physics_determinism( - ctx, scene="FloorPlan1_physics", agent_mode="arm", n=100, samples=100 -): +def get_physics_determinism(ctx, scene="FloorPlan1_physics", agent_mode="arm", n=100, samples=100): import ai2thor.controller import random @@ -3587,11 +3428,7 @@ def act(controller, actions, n): controller, num_trials, ObjectPositionVarianceAverage() ): act(controller, actions, n) - print( - " actions: '{}', object_position_variance_average: {} ".format( - action_name, metric - ) - ) + print(" actions: '{}', object_position_variance_average: {} ".format(action_name, metric)) @task @@ -3630,8 +3467,7 @@ def generate_pypi_index(context): def ci_test_utf(branch, commit_id, base_dir): logger.info( - "running Unity Test framework testRunner for %s %s %s" - % (branch, commit_id, base_dir) + "running Unity Test framework testRunner for %s %s %s" % (branch, commit_id, base_dir) ) results_path, results_logfile = test_utf(base_dir) @@ -3669,18 
+3505,44 @@ def format(context): @task def format_cs(context): - install_dotnet_format(context) + # assert tool in ["format", "csharpier"] + install_dotnet_tool(context, tool="dotnet-format") + install_dotnet_tool(context, tool="csharpier") - # the following message will get emitted, this can safely be ignored - # "Warnings were encountered while loading the workspace. Set the verbosity option to the 'diagnostic' level to log warnings" + # First run csharpier as it handles long lines correctly + print("Running csharpier on whole project") subprocess.check_call( - ".dotnet/dotnet tool run dotnet-format unity/AI2-THOR-Base.csproj -w -s", + ".dotnet/dotnet tool run dotnet-csharpier unity", shell=True, ) + # Now run dotnet-format as it allows more configuration options (e.g. curly brace with no new line). + # The following message will get emitted, this can safely be ignored + # "Warnings were encountered while loading the workspace. Set the verbosity option to the 'diagnostic' level to log warnings" + for proj in glob.glob("unity/*.csproj"): + if any( + k in proj + for k in [ + "UnityStandardAssets", + "MagicMirror", + "I360Render", + "MessagePack", + "MIConvexHull", + "Priority", + "Plugins", + ] + ): + continue + + print(f"\nRunning dotnet-format on {proj}") + subprocess.check_call( + f".dotnet/dotnet tool run dotnet-format {proj} -w -s", + shell=True, + ) + @task -def install_dotnet_format(context, force=False): +def install_dotnet_tool(context, tool: str, force=False): install_dotnet(context) base_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__))) @@ -3692,14 +3554,19 @@ def install_dotnet_format(context, force=False): tools = json.loads(f.read()) # we may want to specify a version here in the future - if not force and "dotnet-format" in tools.get("tools", {}): + if not force and tool in tools.get("tools", {}): # dotnet-format already installed return - command = os.path.join(base_dir, ".dotnet/dotnet") + " tool install dotnet-format" + command = os.path.join(base_dir, ".dotnet/dotnet") + f" tool install {tool}" subprocess.check_call(command, shell=True) +@task +def install_dotnet_format(context, force=False): + install_dotnet_tool(context, tool="dotnet-format", force=force) + + @task def install_dotnet(context, force=False): import requests @@ -3730,24 +3597,18 @@ def format_py(context): except ImportError: raise Exception("black not installed - run pip install black") - subprocess.check_call( - "black -v -t py38 --exclude unity/ --exclude .git/ .", shell=True - ) + subprocess.check_call("black -v -t py38 --exclude unity/ --exclude .git/ .", shell=True) @task -def install_unity_hub( - context, target_dir=os.path.join(os.path.expanduser("~"), "local/bin") -): +def install_unity_hub(context, target_dir=os.path.join(os.path.expanduser("~"), "local/bin")): import stat import requests if not sys.platform.startswith("linux"): raise Exception("Installation only support for Linux") - res = requests.get( - "https://public-cdn.cloud.unity3d.com/hub/prod/UnityHub.AppImage" - ) + res = requests.get("https://public-cdn.cloud.unity3d.com/hub/prod/UnityHub.AppImage") res.raise_for_status() os.makedirs(target_dir, exist_ok=True) @@ -3775,9 +3636,7 @@ def install_unity_editor(context, version=None, changeset=None): unity_hub_path = None if sys.platform.startswith("linux"): - unity_hub_path = os.path.join( - os.path.expanduser("~"), "local/bin/UnityHub.AppImage" - ) + unity_hub_path = os.path.join(os.path.expanduser("~"), "local/bin/UnityHub.AppImage") elif 
sys.platform.startswith("darwin"): unity_hub_path = "/Applications/Unity\ Hub.app/Contents/MacOS/Unity\ Hub --" else: @@ -3817,24 +3676,17 @@ def generate_unity_alf(context): # with manual activation https://docs.unity3d.com/Manual/ManualActivationGuide.html alf_path = "Unity_v%s.alf" % _unity_version() - subprocess.run( - "%s -batchmode -createManualActivationFile" % _unity_path(), shell=True - ) + subprocess.run("%s -batchmode -createManualActivationFile" % _unity_path(), shell=True) assert os.path.isfile(alf_path), "ALF not found at %s" % alf_path - print( - "ALF created at %s. Activate license at: https://license.unity3d.com/manual" - % alf_path - ) + print("ALF created at %s. Activate license at: https://license.unity3d.com/manual" % alf_path) @task def activate_unity_license(context, ulf_path): assert os.path.isfile(ulf_path), "License file '%s' not found" % ulf_path - subprocess.run( - '%s -batchmode -manualLicenseFile "%s"' % (_unity_path(), ulf_path), shell=True - ) + subprocess.run('%s -batchmode -manualLicenseFile "%s"' % (_unity_path(), ulf_path), shell=True) def test_utf(base_dir=None): @@ -3851,9 +3703,11 @@ def test_utf(base_dir=None): test_results_path = os.path.join(project_path, "utf_testResults-%s.xml" % commit_id) logfile_path = os.path.join(base_dir, "thor-testResults-%s.log" % commit_id) - command = ( - "%s -runTests -testResults %s -logFile %s -testPlatform PlayMode -projectpath %s " - % (_unity_path(), test_results_path, logfile_path, project_path) + command = "%s -runTests -testResults %s -logFile %s -testPlatform PlayMode -projectpath %s " % ( + _unity_path(), + test_results_path, + logfile_path, + project_path, ) subprocess.call(command, shell=True, cwd=base_dir) @@ -3912,9 +3766,7 @@ def test_{methodname}(self): test_record_data = " pass" if test_records: test_record_data = "\n".join(test_records) - encoded_class_name = re.sub( - r"[^a-zA-Z0-9_]", "_", re.sub("_", "__", class_name) - ) + encoded_class_name = re.sub(r"[^a-zA-Z0-9_]", "_", re.sub("_", "__", class_name)) class_data.append( f""" class {encoded_class_name}: @@ -4117,9 +3969,7 @@ def test_render(ctx, editor_mode=False, local_build=False): if img is not None: print(f"img r {img[0][0][0]} g {img[0][0][1]} b {img[0][0][2]}") - print( - f"evt frame r {evt.cv2img[0][0][0]} g {evt.cv2img[0][0][1]} b {evt.cv2img[0][0][2]}" - ) + print(f"evt frame r {evt.cv2img[0][0][0]} g {evt.cv2img[0][0][1]} b {evt.cv2img[0][0][2]}") cv2.namedWindow("image") @@ -4222,9 +4072,7 @@ def walls_to_floor_poly(walls): "empty": wall["empty"] if "empty" in wall else False, "polygon": wall_to_poly(wall), } - for (wall, wall_indx) in zip( - room["walls"], range(0, len(room["walls"])) - ) + for (wall, wall_indx) in zip(room["walls"], range(0, len(room["walls"]))) ] for (room, room_i) in zip(obj["rooms"], range(len(obj["rooms"]))) ] @@ -4405,8 +4253,7 @@ def get_benchmark_title(benchmark, default_title=""): benchmarks = [load_benchmark_filename(filename) for filename in benchmark_filenames] benchmark_titles = [ - get_benchmark_title(b, "") - for (i, b) in zip(range(0, len(benchmarks)), benchmarks) + get_benchmark_title(b, "") for (i, b) in zip(range(0, len(benchmarks)), benchmarks) ] if plot_titles is not None: @@ -4432,10 +4279,7 @@ def get_benchmark_title(benchmark, default_title=""): ) all_data = reduce( list.__add__, - [ - [(x, [y[action] for y in b]) for action in all_data[0][1][0]] - for (x, b) in all_data - ], + [[(x, [y[action] for y in b]) for action in all_data[0][1][0]] for (x, b) in all_data], ) keys = [k for (k, y) in 
all_data] @@ -4612,9 +4456,7 @@ def run_benchmark_from_s3_config(ctx): client = boto3.client("s3") - response = client.list_objects_v2( - Bucket=BENCHMARKING_S3_BUCKET, Prefix="benchmark_jobs/" - ) + response = client.list_objects_v2(Bucket=BENCHMARKING_S3_BUCKET, Prefix="benchmark_jobs/") s3 = boto3.resource("s3", region_name="us-west-2") benchmark_runs = [] @@ -4632,9 +4474,7 @@ def run_benchmark_from_s3_config(ctx): BENCHMARKING_S3_BUCKET, f"procedural_houses/{procedural_house}", ) - house_json = json.loads( - house_obj.get()["Body"].read().decode("utf-8") - ) + house_json = json.loads(house_obj.get()["Body"].read().decode("utf-8")) if "id" not in house_json: house_json["id"] = procedural_house.split(".")[0] procedural_houses_transformed.append(house_json) @@ -4681,12 +4521,13 @@ def run_benchmark_from_s3_config(ctx): @task def run_benchmark_from_local_config( - ctx, config_path, - house_from_s3=False, + ctx, + config_path, + house_from_s3=False, houses_path="./unity/Assets/Resources/rooms", output="out.json", local_build=False, - arch=None + arch=None, ): import copy from ai2thor.benchmarking import BENCHMARKING_S3_BUCKET, UnityActionBenchmarkRunner @@ -4694,9 +4535,7 @@ def run_benchmark_from_local_config( if house_from_s3: client = boto3.client("s3") - response = client.list_objects_v2( - Bucket=BENCHMARKING_S3_BUCKET, Prefix="benchmark_jobs/" - ) + response = client.list_objects_v2(Bucket=BENCHMARKING_S3_BUCKET, Prefix="benchmark_jobs/") s3 = boto3.resource("s3", region_name="us-west-2") benchmark_runs = [] key = config_path @@ -4723,9 +4562,7 @@ def run_benchmark_from_local_config( BENCHMARKING_S3_BUCKET, f"procedural_houses/{procedural_house}", ) - house_json = json.loads( - house_obj.get()["Body"].read().decode("utf-8") - ) + house_json = json.loads(house_obj.get()["Body"].read().decode("utf-8")) if "id" not in house_json: house_json["id"] = procedural_house.split(".")[0] procedural_houses_transformed.append(house_json) @@ -4740,15 +4577,12 @@ def run_benchmark_from_local_config( benchmark_run_config["init_params"]["commit_id"] = None benchmark_run_config["init_params"]["local_build"] = True del benchmark_run_config["init_params"]["platform"] - - + # benchmark_run_config['verbose'] = True action_groups = copy.deepcopy(benchmark_run_config["action_groups"]) del benchmark_run_config["action_groups"] - benchmark_runs.append( - (UnityActionBenchmarkRunner(**benchmark_run_config), action_groups) - ) + benchmark_runs.append((UnityActionBenchmarkRunner(**benchmark_run_config), action_groups)) benchmark_results = [] for benchmark_runner, action_group in benchmark_runs: benchmark_result = benchmark_runner.benchmark(action_group) @@ -4786,16 +4620,12 @@ def add_daily_benchmark_config(ctx, benchmark_config_filename): # validate(benchmark_config, schema=benchmarking_config_schema) try: logger.info(f"Pushing benchmark config '{benchmark_config_basename}'") - s3.Object( - BENCHMARKING_S3_BUCKET, f"benchmark_jobs/{benchmark_config_basename}" - ).put( + s3.Object(BENCHMARKING_S3_BUCKET, f"benchmark_jobs/{benchmark_config_basename}").put( Body=json.dumps(benchmark_config, indent=4), ContentType="application/json", ) except botocore.exceptions.ClientError as e: - logger.error( - f"Caught error uploading archive '{benchmark_config_basename}': {e}" - ) + logger.error(f"Caught error uploading archive '{benchmark_config_basename}': {e}") @task @@ -4865,7 +4695,10 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): from objathor.asset_conversion.util import view_asset_in_thor 
hook_runner = ProceduralAssetHookRunner( - asset_directory=asset_dir, asset_symlink=True, verbose=True, load_file_in_unity=True + asset_directory=asset_dir, + asset_symlink=True, + verbose=True, + load_file_in_unity=True, ) controller = ai2thor.controller.Controller( # local_executable_path="unity/builds/thor-OSXIntel64-local/thor-OSXIntel64-local.app/Contents/MacOS/AI2-THOR", @@ -4880,15 +4713,15 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): visibilityScheme="Distance", action_hook_runner=hook_runner, ) - - #TODO bug why skybox is not changing? from just procedural pipeline + + # TODO bug why skybox is not changing? from just procedural pipeline evt = controller.step( - action="SetSkybox", + action="SetSkybox", color={ "r": 0, "g": 0, "b": 0, - } + }, ) angle_increment = 45 @@ -4901,7 +4734,7 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): output_dir="./output-test", rotations=rotations, house_path=house_path, - skybox_color=(0, 0, 0) + skybox_color=(0, 0, 0), ) # with open(house_path, "r") as f: @@ -4921,14 +4754,13 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): # ] # evt = controller.step(action="CreateHouse", house=house) - # print( # f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" # ) # print(f'Error: {evt.metadata["errorMessage"]}') # evt = controller.step( - # action="SetSkybox", + # action="SetSkybox", # color={ # "r": 0, # "g": 0, @@ -4936,7 +4768,6 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): # } # ) - # evt = controller.step(dict(action="LookAtObjectCenter", objectId=instance_id)) # print( @@ -4945,10 +4776,9 @@ def procedural_asset_hook_test(ctx, asset_dir, house_path, asset_id=""): # print(f'Error: {evt.metadata["errorMessage"]}') # input() + @task -def procedural_asset_cache_test( - ctx, asset_dir, house_path, asset_ids="", cache_limit=1 -): +def procedural_asset_cache_test(ctx, asset_dir, house_path, asset_ids="", cache_limit=1): import json import ai2thor.controller from ai2thor.hooks.procedural_asset_hook import ProceduralAssetHookRunner @@ -4995,28 +4825,20 @@ def procedural_asset_cache_test( evt = controller.step(action="CreateHouse", house=house) - print( - f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" - ) + print(f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}") print(f'Error: {evt.metadata["errorMessage"]}') - evt = controller.step( - dict(action="LookAtObjectCenter", objectId=f"{instance_id}_0") - ) + evt = controller.step(dict(action="LookAtObjectCenter", objectId=f"{instance_id}_0")) # while True: # pass - print( - f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" - ) + print(f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}") print(f'Error: {evt.metadata["errorMessage"]}') evt = controller.step(action="GetLRUCacheKeys") - print( - f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" - ) + print(f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}") print(f'Error: {evt.metadata["errorMessage"]}') print(f'return {evt.metadata["actionReturn"]}') @@ -5044,17 +4866,12 @@ def procedural_asset_cache_test( evt = controller.step(action="CreateHouse", house=house) - print( - f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" - ) + print(f"Action 
{controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}") print(f'Error: {evt.metadata["errorMessage"]}') controller.reset() evt = controller.step(action="GetLRUCacheKeys") - print( - f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}" - ) + print(f"Action {controller.last_action['action']} success: {evt.metadata['lastActionSuccess']}") print(f'Error: {evt.metadata["errorMessage"]}') print(f'return {evt.metadata["actionReturn"]}') - From ad8286bfcc368a48e685b3df4dd71243c3cb6fdf Mon Sep 17 00:00:00 2001 From: lucaw Date: Fri, 3 May 2024 11:53:23 -0700 Subject: [PATCH 2/2] Updating test_thor_msg.py to use objathor and moving things to `scripts`. --- .lgtm.yml | 11 -- .../test_controllers.py | 0 test_msg.py => scripts/test_msg.py | 0 test_thor_msg.py => scripts/test_thor_msg.py | 106 ++++++++---------- 4 files changed, 45 insertions(+), 72 deletions(-) delete mode 100644 .lgtm.yml rename test_controllers.py => scripts/test_controllers.py (100%) rename test_msg.py => scripts/test_msg.py (100%) rename test_thor_msg.py => scripts/test_thor_msg.py (75%) diff --git a/.lgtm.yml b/.lgtm.yml deleted file mode 100644 index 105b215128..0000000000 --- a/.lgtm.yml +++ /dev/null @@ -1,11 +0,0 @@ -extraction: - csharp: - index: - buildless: true - nuget_restore: false - -path_classifiers: - library: - - unity/Assets/**/*.cs - - exclude: unity/Assets/Scripts/*.cs - - exclude: unity/Assets/Scripts/**/*.cs diff --git a/test_controllers.py b/scripts/test_controllers.py similarity index 100% rename from test_controllers.py rename to scripts/test_controllers.py diff --git a/test_msg.py b/scripts/test_msg.py similarity index 100% rename from test_msg.py rename to scripts/test_msg.py diff --git a/test_thor_msg.py b/scripts/test_thor_msg.py similarity index 75% rename from test_thor_msg.py rename to scripts/test_thor_msg.py index 3fc127f9c1..94c376b876 100644 --- a/test_thor_msg.py +++ b/scripts/test_thor_msg.py @@ -1,15 +1,20 @@ +import glob import json import os import sys +import time + +from objathor.asset_conversion.util import ( + save_thor_asset_file, + add_default_annotations, + change_asset_paths, + load_existing_thor_asset_file, + create_runtime_asset_file, +) import ai2thor.controller -import ai2thor.fifo_server import ai2thor.wsgi_server -import msgpack -import glob -import ai2thor.util.runtime_assets as ra -import pathlib -import time + def make_single_object_house( asset_id, @@ -33,6 +38,7 @@ def make_single_object_house( ] return house + def view_asset_in_thor( asset_id, controller, @@ -45,16 +51,12 @@ def view_asset_in_thor( from PIL import Image house = make_single_object_house( - asset_id=asset_id, - house_path=house_path, - instance_id=instance_id, - skybox_color=skybox_color + asset_id=asset_id, house_path=house_path, instance_id=instance_id, skybox_color=skybox_color ) start = time.perf_counter() evt = controller.step(action="CreateHouse", house=house) end = time.perf_counter() - if not evt.metadata["lastActionSuccess"]: print(f"Action success: {evt.metadata['lastActionSuccess']}") @@ -63,12 +65,12 @@ def view_asset_in_thor( evt = controller.step(action="LookAtObjectCenter", objectId=instance_id) evt = controller.step( - action="SetSkybox", + action="SetSkybox", color={ "r": skybox_color[0], "g": skybox_color[1], "b": skybox_color[2], - } + }, ) if not evt.metadata["lastActionSuccess"]: @@ -95,36 +97,39 @@ def view_asset_in_thor( ) im = Image.fromarray(evt.frame) im.save( - os.path.join(output_dir, 
f"{rotation[0]}_{rotation[1]}_{rotation[2]}_{rotation[3]}.jpg") + os.path.join( + output_dir, f"{rotation[0]}_{rotation[1]}_{rotation[2]}_{rotation[3]}.jpg" + ) ) return evt, (end - start) + if __name__ == "__main__": width = 300 height = 300 output_dir = "./images" empty_house = "empty_house.json" - - extension = sys.argv[1] if len(sys.argv) > 1 else ".json" + + extension = sys.argv[1] if len(sys.argv) > 1 else ".json" asset_id = "Apple_1" extensions = [".json", ".msgpack", ".msgpack.gz", "gz", ".pkl.gz", ""] - controller = ai2thor.controller.Controller( - port=8200, start_unity=False, server_class=ai2thor.wsgi_server.WsgiServer, + port=8200, + start_unity=False, + server_class=ai2thor.wsgi_server.WsgiServer, # start_unity=True, local_build=True, server_class=ai2thor.fifo_server.FifoServer, scene="Procedural", gridSize=0.25, width=width, height=height, - ) objsverse_root = "./objaverse" ids = [] - dirs = glob.glob(f"{objsverse_root}/*") + dirs = glob.glob(f"{objsverse_root}/*") dirs = dirs[:1] extensions = [".pkl.gz"] @@ -139,64 +144,47 @@ def view_asset_in_thor( print(evt.metadata["errorMessage"]) # copy_to_dir = os.path.join(controller._build.base_dir, "processed_models") - copy_to_dir = evt.metadata['actionReturn'] + copy_to_dir = evt.metadata["actionReturn"] build_target_dir = os.path.join(copy_to_dir, asset_id) asset_dir = os.path.abspath(os.path.join(objsverse_root, asset_id)) for extension in extensions: print(f"---- extension {extension}") extension = extension if extension != "" else ".json" - load_file_in_unity = extension != "" and extension != ".pkl.gz" - print(f"---- running {asset_id} wit extension {extension}, load_in_unity {load_file_in_unity}") - - ra.create_runtime_asset_file( + load_file_in_unity = extension != "" and extension != ".pkl.gz" + print( + f"---- running {asset_id} wit extension {extension}, load_in_unity {load_file_in_unity}" + ) + + create_runtime_asset_file( asset_id=asset_id, - asset_directory=asset_dir, + asset_directory=asset_dir, save_dir=copy_to_dir, - asset_symlink=True, - verbose=True, + verbose=True, load_file_in_unity=load_file_in_unity, - use_extension=extension + use_extension=extension, ) - asset = ra.load_existing_thor_asset_file(build_target_dir, asset_id, force_extension=".pkl.gz") - asset = ra.add_default_annotations( - asset=asset, - asset_directory=build_target_dir, - verbose=True + asset = load_existing_thor_asset_file( + build_target_dir, asset_id, force_extension=".pkl.gz" ) - asset = ra.change_asset_pahts( - asset=asset, - save_dir=build_target_dir + asset = add_default_annotations( + asset=asset, asset_directory=build_target_dir, verbose=True ) + asset = change_asset_paths(asset=asset, save_dir=build_target_dir) print(f" -- saving asset dir {build_target_dir} name {asset_id}{extension}") - ra.save_thor_asset_file( - asset, - os.path.join(build_target_dir, f"{asset_id}{extension}") - ) - - - - - + save_thor_asset_file(asset, os.path.join(build_target_dir, f"{asset_id}{extension}")) args = {} if load_file_in_unity: args = dict( - action="CreateRuntimeAsset", - id=asset_id, - dir=copy_to_dir, - extension=extension + action="CreateRuntimeAsset", id=asset_id, dir=copy_to_dir, extension=extension ) else: args = asset - - start = time.perf_counter() print(args) - evt = controller.step( - **args - ) + evt = controller.step(**args) end = time.perf_counter() frame_time = end - start print(f"return : {controller.last_action}") @@ -204,19 +192,17 @@ def view_asset_in_thor( print(f"success: {evt.metadata['lastActionSuccess']}") 
print(evt.metadata["errorMessage"]) - angle_increment = 45 angles = [n * angle_increment for n in range(0, round(360 / angle_increment))] axes = [(0, 1, 0), (1, 0, 0)] rotations = [(x, y, z, degrees) for degrees in angles for (x, y, z) in axes] - evt, time_create_H = view_asset_in_thor( asset_id=asset_id, controller=controller, house_path="empty_house.json", rotations=rotations, - output_dir="./out_msg" + output_dir="./out_msg", ) print(f"return : {evt.metadata['actionReturn']}") @@ -235,8 +221,6 @@ def view_asset_in_thor( # filepath="/Users/alvaroh/ai2/ai2thor/objaverse/b8d24c146a6844788c0ba6f7b135e99e/b8d24c146a6844788c0ba6f7b135e99e.msgpack.gz", # outpath="/Users/alvaroh/ai2/ai2thor/objaverse/b8d24c146a6844788c0ba6f7b135e99e/out" # ) - + # print(f"error: {evt.metadata['lastActionSuccess']}") # print(evt.metadata["errorMessage"]) - - \ No newline at end of file