Merge branch 'main' into main
mcremon-meta authored Nov 21, 2024
2 parents afca3db + ce77995 commit 10a0ee0
Showing 235 changed files with 840,465 additions and 2,346 deletions.
52 changes: 45 additions & 7 deletions .ci/docker/common/install_cache.sh
@@ -12,6 +12,26 @@ set -ex
# shellcheck source=/dev/null
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"

install_ubuntu() {
echo "Preparing to build sccache from source"
apt-get update
# libssl-dev will not work as it is upgraded to libssl3 in Ubuntu-22.04.
# Instead use lib and headers from OpenSSL1.1 installed in `install_openssl.sh`
apt-get install -y cargo
echo "Checking out sccache repo"
git clone https://github.com/mozilla/sccache -b v0.8.2

cd sccache
echo "Building sccache"
cargo build --release
cp target/release/sccache /opt/cache/bin
echo "Cleaning up"
cd ..
rm -rf sccache
apt-get remove -y cargo rustc
apt-get autoclean && apt-get clean
}

install_binary() {
echo "Downloading sccache binary from S3 repo"
curl --retry 3 https://s3.amazonaws.com/ossci-linux/sccache -o /opt/cache/bin/sccache
@@ -22,15 +42,33 @@ mkdir -p /opt/cache/bin
sed -e 's|PATH="\(.*\)"|PATH="/opt/cache/bin:\1"|g' -i /etc/environment
export PATH="/opt/cache/bin:$PATH"

# NB: Install the pre-built binary from S3 as building from source
# https://github.com/pytorch/sccache has started failing mysteriously
# in which sccache server couldn't start with the following error:
# sccache: error: Invalid argument (os error 22)
install_binary
install_ubuntu

function write_sccache_stub() {
BINARY=$1
printf "#!/bin/sh\nif [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then\n exec sccache %s \"\$@\"\nelse\n exec %s \"\$@\"\nfi" "$(which "${BINARY}")" "$(which "${BINARY}")" > "/opt/cache/bin/${BINARY}"
if [ $1 == "gcc" ]; then
# Do not call sccache recursively when dumping preprocessor argument
# For some reason it's very important for the first cached nvcc invocation
cat >"/opt/cache/bin/$1" <<EOF
#!/bin/sh
if [ "\$1" = "-E" ] || [ "\$2" = "-E" ]; then
exec $(which $1) "\$@"
elif [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then
exec sccache $(which $1) "\$@"
else
exec $(which $1) "\$@"
fi
EOF
else
cat >"/opt/cache/bin/$1" <<EOF
#!/bin/sh
if [ \$(env -u LD_PRELOAD ps -p \$PPID -o comm=) != sccache ]; then
exec sccache $(which $1) "\$@"
else
exec $(which $1) "\$@"
fi
EOF
fi
chmod a+x "/opt/cache/bin/${BINARY}"
}

@@ -44,7 +82,7 @@ init_sccache() {

# NB: This function is adopted from PyTorch core at
# https://github.com/pytorch/pytorch/blob/main/.ci/pytorch/common-build.sh
as_ci_user sccache --stop-server > /dev/null 2>&1 || true
as_ci_user sccache --stop-server >/dev/null 2>&1 || true
rm -f "${SCCACHE_ERROR_LOG}" || true

# Clear sccache stats before using it
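
For reference, a minimal way to exercise the wrappers written by install_cache.sh above, sketched under the assumption that the stubs are already in /opt/cache/bin (hello.c and the compile flags are illustrative, not part of the change):

export PATH="/opt/cache/bin:$PATH"
which gcc                       # should now resolve to /opt/cache/bin/gcc
echo 'int main(void){return 0;}' > hello.c
gcc -E hello.c > /dev/null      # the gcc stub deliberately bypasses sccache for -E
gcc -c hello.c -o hello.o       # a regular compile goes through sccache
sccache --show-stats            # compile requests and cache hits/misses should move
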
1 change: 1 addition & 0 deletions .ci/docker/ubuntu/Dockerfile
@@ -57,6 +57,7 @@ COPY ./common/utils.sh utils.sh
RUN bash ./install_cache.sh && rm install_cache.sh utils.sh
ENV SCCACHE_BUCKET ossci-compiler-cache-circleci-v2
ENV SCCACHE_S3_KEY_PREFIX executorch
ENV SCCACHE_REGION us-east-1

ARG TORCH_VERSION
COPY ./common/install_pytorch.sh install_pytorch.sh
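
These ENV lines point sccache at its S3 backend; a minimal sketch of the equivalent environment for a manual run inside the image (values copied from the lines above; assumes sccache is on PATH):

export SCCACHE_BUCKET=ossci-compiler-cache-circleci-v2
export SCCACHE_S3_KEY_PREFIX=executorch
export SCCACHE_REGION=us-east-1
sccache --stop-server || true   # restart so the server picks up the backend settings
sccache --show-stats            # the stats output reports the configured cache location
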
1 change: 1 addition & 0 deletions .ci/scripts/gather_test_models.py
@@ -25,6 +25,7 @@
"resnet50": "linux.12xlarge",
"llava": "linux.12xlarge",
"llama3_2_vision_encoder": "linux.12xlarge",
# "llama3_2_text_decoder": "linux.12xlarge", # TODO: re-enable test when Huy's change is in / model gets smaller.
# This one causes timeout on smaller runner, the root cause is unclear (T161064121)
"dl3": "linux.12xlarge",
"emformer_join": "linux.12xlarge",
42 changes: 36 additions & 6 deletions .ci/scripts/test_llama.sh
@@ -9,11 +9,41 @@ set -exu
# shellcheck source=/dev/null
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"

MODEL_NAME=$1 # stories110M
BUILD_TOOL=$2 # buck2 or cmake
DTYPE=$3 # fp16, bf16, or fp32
MODE=${4:-"xnnpack+custom"} # portable or xnnpack+custom or xnnpack+custom+qe
UPLOAD_DIR=${5:-}
while [[ $# -gt 0 ]]; do
case "$1" in
-model)
MODEL_NAME="$2" # stories110M
shift 2
;;
-build_tool)
BUILD_TOOL="$2" # buck2 or cmake
shift 2
;;
-dtype)
DTYPE="$2" # fp16, bf16, or fp32
shift 2
;;
-mode)
MODE="$2" # portable or xnnpack+custom or xnnpack+custom+qe
shift 2
;;
-upload)
UPLOAD_DIR="$2"
shift 2
;;
*)
echo "Unknown option: $1"
usage
;;
esac
done

# Default mode to xnnpack+custom if not set
MODE=${MODE:-"xnnpack+custom"}

# Default UPLOAD_DIR to empty string if not set
UPLOAD_DIR="${UPLOAD_DIR:-}"

if [[ $# -lt 4 ]]; then # Assuming 4 mandatory args
echo "Expecting atleast 4 positional arguments"
echo "Usage: [...]"
@@ -150,7 +180,7 @@ cleanup_files() {
}

prepare_artifacts_upload() {
if [ -n "$UPLOAD_DIR" ]; then
if [ -n "${UPLOAD_DIR}" ]; then
echo "Preparing for uploading generated artifacs"
zip -j model.zip "${EXPORTED_MODEL_NAME}" tokenizer.bin
mkdir -p "${UPLOAD_DIR}"
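
To make the new flag-based interface of test_llama.sh concrete, a sketch of an invocation using only the options added above (the model/build/dtype/mode values are the examples given in the inline comments; -upload is optional and UPLOAD_DIR defaults to an empty string):

bash .ci/scripts/test_llama.sh \
  -model stories110M \
  -build_tool cmake \
  -dtype fp32 \
  -mode xnnpack+custom
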
4 changes: 2 additions & 2 deletions .ci/scripts/test_model.sh
@@ -87,8 +87,8 @@ test_model() {
bash examples/models/llava/install_requirements.sh
STRICT="--no-strict"
fi
if [[ "$MODEL_NAME" == "llama3_2_vision_encoder" ]]; then
# Install requirements for llama vision
if [[ "$MODEL_NAME" == "llama3_2_vision_encoder" || "$MODEL_NAME" == "llama3_2_text_decoder" ]]; then
# Install requirements for llama vision.
bash examples/models/llama3_2_vision/install_requirements.sh
fi
# python3 -m examples.portable.scripts.export --model_name="llama2" should work too
62 changes: 62 additions & 0 deletions .github/scripts/check_labels.py
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""Check whether a PR has required labels."""

import sys
from typing import Any

from github_utils import gh_delete_comment, gh_post_pr_comment
from gitutils import get_git_remote_name, get_git_repo_dir, GitRepo
from label_utils import has_required_labels, is_label_err_comment, LABEL_ERR_MSG
from trymerge import GitHubPR


def delete_all_label_err_comments(pr: "GitHubPR") -> None:
    for comment in pr.get_comments():
        if is_label_err_comment(comment):
            gh_delete_comment(pr.org, pr.project, comment.database_id)


def add_label_err_comment(pr: "GitHubPR") -> None:
    # Only make a comment if one doesn't exist already
    if not any(is_label_err_comment(comment) for comment in pr.get_comments()):
        gh_post_pr_comment(pr.org, pr.project, pr.pr_num, LABEL_ERR_MSG)


def parse_args() -> Any:
    from argparse import ArgumentParser

    parser = ArgumentParser("Check PR labels")
    parser.add_argument("pr_num", type=int)
    # add a flag to return a non-zero exit code if the PR does not have the required labels
    parser.add_argument(
        "--exit-non-zero",
        action="store_true",
        help="Return a non-zero exit code if the PR does not have the required labels",
    )

    return parser.parse_args()


def main() -> None:
    args = parse_args()
    repo = GitRepo(get_git_repo_dir(), get_git_remote_name())
    org, project = repo.gh_owner_and_name()
    pr = GitHubPR(org, project, args.pr_num)

    try:
        if not has_required_labels(pr):
            print(LABEL_ERR_MSG, flush=True)
            add_label_err_comment(pr)
            if args.exit_non_zero:
                raise RuntimeError("PR does not have required labels")
        else:
            delete_all_label_err_comments(pr)
    except Exception as e:
        if args.exit_non_zero:
            raise RuntimeError(f"Error checking labels: {e}") from e

    sys.exit(0)


if __name__ == "__main__":
    main()
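
A plausible way to invoke the new script from a workflow step, sketched with a placeholder PR number and assuming the sibling helper modules (github_utils, gitutils, label_utils, trymerge) sit next to it and GITHUB_TOKEN is exported:

cd .github/scripts
python3 check_labels.py 1234 --exit-non-zero   # exits non-zero if required labels are missing
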
6 changes: 3 additions & 3 deletions .github/scripts/github_utils.py
@@ -72,10 +72,10 @@ def gh_fetch_url(
    headers: Optional[Dict[str, str]] = None,
    data: Union[Optional[Dict[str, Any]], str] = None,
    method: Optional[str] = None,
    reader: Callable[[Any], Any] = lambda x: x.read(),
    reader: Callable[[Any], Any] = json.load,
) -> Any:
    return gh_fetch_url_and_headers(
        url, headers=headers, data=data, reader=json.load, method=method
        url, headers=headers, data=data, reader=reader, method=method
    )[1]


@@ -169,7 +169,7 @@ def gh_post_commit_comment(

def gh_delete_comment(org: str, repo: str, comment_id: int) -> None:
url = f"{GITHUB_API_URL}/repos/{org}/{repo}/issues/comments/{comment_id}"
gh_fetch_url(url, method="DELETE")
gh_fetch_url(url, method="DELETE", reader=lambda x: x.read())


def gh_fetch_merge_base(org: str, repo: str, base: str, head: str) -> str:
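
The reader override in gh_delete_comment matters because the comment-deletion endpoint returns no body, which the new json.load default could not parse; a stand-alone sketch of the underlying API behaviour (token, org/repo, and comment ID are placeholders):

# DELETE /repos/{org}/{repo}/issues/comments/{id} answers 204 No Content,
# hence gh_delete_comment passes a raw .read() reader instead of json.load.
curl -s -o /dev/null -w '%{http_code}\n' -X DELETE \
  -H "Authorization: Bearer $GITHUB_TOKEN" \
  "https://api.github.com/repos/ORG/REPO/issues/comments/COMMENT_ID"
# prints: 204
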
2 changes: 1 addition & 1 deletion .github/workflows/android-perf.yml
@@ -136,7 +136,7 @@ jobs:
fail-fast: false
with:
runner: linux.4xlarge
docker-image: executorch-ubuntu-22.04-clang12-android
docker-image: executorch-ubuntu-22.04-qnn-sdk
submodules: 'true'
timeout: 60
upload-artifact: android-models