SynapseAi 1.1.1 release
 * Modify paths to 1.1.1-94
 * Update dockerfiles with 1.1.1 content
 * Update installation scripts for 1.1.1 install versions
omrialmog committed Nov 24, 2021
1 parent 8e3df64 commit 6e6c761
Showing 16 changed files with 194 additions and 179 deletions.
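
A sketch of the image tag docker_build.sh produces after this bump (the vault.habana.ai registry host is an assumption borrowed from the installation scripts below; the actual ARTIFACTORY_URL may differ):

# IMAGE_NAME pattern: ${ARTIFACTORY_URL}/${ARTIFACTORY_REPO}/${VERSION}/${OS}/habanalabs/${MODE}-installer${TF_CPU_POSTFIX}:${VERSION}-${REVISION}
# With VERSION=1.1.1, REVISION=94, MODE=tensorflow, OS=ubuntu20.04 and TF_VERSION=2.6.2 this resolves to:
docker pull vault.habana.ai/gaudi-docker/1.1.1/ubuntu20.04/habanalabs/tensorflow-installer-tf-cpu-2.6.2:1.1.1-94
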
209 changes: 108 additions & 101 deletions README.md

Large diffs are not rendered by default.

13 changes: 5 additions & 8 deletions dockerfiles/Dockerfile_amzn2_tensorflow_installer
@@ -9,8 +9,7 @@ ARG REVISION
FROM ${BASE_NAME}:${VERSION}-${REVISION}
ARG VERSION
ARG REVISION
ARG TF_MINOR
ARG TF_VERSION=2.6.0
ARG TF_VERSION
ARG OPENMPI_VER=4.0.5
ARG ARTIFACTORY_URL

@@ -52,20 +51,18 @@ RUN wget https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/pr
rm -rf protoc-3.6.1-linux-x86_64.zip

COPY requirements-training-release.txt requirements-training-release.txt
COPY requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN wget https://bootstrap.pypa.io/get-pip.py && \
python3 get-pip.py pip==21.0.1 && \
rm -rf get-pip.py && \
pip3 install tensorflow-estimator==${TF_MINOR} \
tensorboard==${TF_MINOR} \
keras==${TF_MINOR} \
tensorflow-cpu==${TF_VERSION} \
tensorflow_text==${TF_MINOR} \
pip3 install tensorflow-cpu==${TF_VERSION} \
tensorflow-model-optimization==0.5.0 && \
pip3 install --no-deps -r requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt && \
# pycocotools has to be installed in separated process otherwise it fails with 'numpy.ufunc size changed'
pip3 install pycocotools==2.0.1 && \
pip3 install -r requirements-training-release.txt && \
rm requirements-training-release.txt
rm requirements-training-release.txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install habana-tensorflow=="${VERSION}"."${REVISION}" \
--index-url "https://${ARTIFACTORY_URL}"/artifactory/api/pypi/gaudi-python/simple && \
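
When building this Dockerfile directly rather than through docker_build.sh, TF_VERSION must now be passed explicitly since the 2.6.0 default was dropped. A minimal sketch, with <base-image> and <artifactory-host> as placeholders that docker_build.sh normally fills in:

# Supported TF_VERSION values per docker_build.sh: 2.5.2, 2.6.2, 2.7.0
docker build -f dockerfiles/Dockerfile_amzn2_tensorflow_installer \
    --build-arg BASE_NAME=<base-image> \
    --build-arg VERSION=1.1.1 \
    --build-arg REVISION=94 \
    --build-arg TF_VERSION=2.6.2 \
    --build-arg ARTIFACTORY_URL=<artifactory-host> \
    -t tensorflow-installer-tf-cpu-2.6.2:1.1.1-94 \
    dockerfiles/
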
1 change: 1 addition & 0 deletions dockerfiles/Dockerfile_centos8.3_base_installer
@@ -19,6 +19,7 @@ RUN dnf update -y && dnf install -y \
lsof \
epel-release && \
dnf install -y jemalloc && \
dnf install -y https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/java-1.8.0-openjdk-headless-1.8.0.312.b07-1.el8_4.x86_64.rpm && \
dnf groupinstall -y "Development Tools" && \
dnf clean all && rm -rf /var/cache/dnf && \
# Layer saving files to /tmp
13 changes: 5 additions & 8 deletions dockerfiles/Dockerfile_centos8.3_tensorflow_installer
@@ -9,8 +9,7 @@ ARG REVISION
FROM ${BASE_NAME}:${VERSION}-${REVISION}
ARG VERSION
ARG REVISION
ARG TF_MINOR
ARG TF_VERSION=2.6.0
ARG TF_VERSION
ARG OPENMPI_VER=4.0.5
ARG ARTIFACTORY_URL
ARG HABANA_PIP_VERSION="21.1.1"
@@ -50,20 +49,18 @@ RUN wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmp
ENV MPICC=/usr/local/openmpi/bin/mpicc

COPY requirements-training-release.txt requirements-training-release.txt
COPY requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install pip=="${HABANA_PIP_VERSION}" && \
pip3 install setuptools==41.0.0

RUN pip3 install tensorflow-estimator==${TF_MINOR} \
tensorboard==${TF_MINOR} \
keras==${TF_MINOR} \
tensorflow-cpu==${TF_VERSION} \
tensorflow_text==${TF_MINOR} \
RUN pip3 install tensorflow-cpu==${TF_VERSION} \
tensorflow-model-optimization==0.5.0 && \
pip3 install --no-deps -r requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt && \
# pycocotools has to be installed in separated process otherwise it fails with 'numpy.ufunc size changed'
pip3 install pycocotools==2.0.1 && \
pip3 install -r requirements-training-release.txt && \
rm requirements-training-release.txt
rm requirements-training-release.txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install habana-tensorflow=="${VERSION}"."${REVISION}" \
--index-url "https://${ARTIFACTORY_URL}"/artifactory/api/pypi/gaudi-python/simple && \
9 changes: 9 additions & 0 deletions dockerfiles/Dockerfile_rhel8.3_base_installer
@@ -16,6 +16,11 @@ RUN echo "[appstream]" > /etc/yum.repos.d/CentOS-Linux-AppStream.repo && \
echo "mirrorlist=http://mirrorlist.centos.org/?release=\$releasever&arch=\$basearch&repo=AppStream&infra=\$infra" >> /etc/yum.repos.d/CentOS-Linux-AppStream.repo && \
echo "gpgcheck=0" >> /etc/yum.repos.d/CentOS-Linux-AppStream.repo

RUN echo "[BaseOS]" > /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
echo "name=CentOS Linux 8 - BaseOS" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
echo "mirrorlist=http://mirrorlist.centos.org/?release=\$releasever&arch=\$basearch&repo=BaseOS&infra=\$infra" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo && \
echo "gpgcheck=0" >> /etc/yum.repos.d/CentOS-Linux-BaseOS.repo

RUN dnf install -y \
sudo \
redhat-lsb-core \
@@ -30,9 +35,13 @@ RUN dnf install -y \
llvm \
jemalloc \
cmake3 \
kernel-tools \
kernel-tools-libs \
libarchive && \
dnf clean all && rm -rf /var/cache/yum

RUN dnf update -y glib2 systemd

RUN dnf module install python38 -y && \
rm -f /etc/alternatives/python3 && \
ln -s /usr/bin/python3.8 /etc/alternatives/python3 && \
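
Once the new RUN step executes, the BaseOS repo definition should render roughly like this (a sketch of the expected file content; the \$ escapes in the Dockerfile become literal yum variables):

cat /etc/yum.repos.d/CentOS-Linux-BaseOS.repo
# [BaseOS]
# name=CentOS Linux 8 - BaseOS
# mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=BaseOS&infra=$infra
# gpgcheck=0
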
1 change: 1 addition & 0 deletions dockerfiles/Dockerfile_rhel8.3_pytorch_installer
@@ -23,6 +23,7 @@ RUN dnf install -y \
openssh-server \
curl \
redhat-lsb-core \
openmpi-devel \
cairo-devel \
iproute \
git \
13 changes: 5 additions & 8 deletions dockerfiles/Dockerfile_rhel8.3_tensorflow_installer
@@ -10,8 +10,7 @@ ARG REVISION
FROM ${BASE_NAME}:${VERSION}-${REVISION}
ARG VERSION
ARG REVISION
ARG TF_MINOR
ARG TF_VERSION=2.6.0
ARG TF_VERSION
ARG OPENMPI_VER=4.0.5
ARG ARTIFACTORY_URL
ARG HABANA_PIP_VERSION="21.1.1"
@@ -52,20 +51,18 @@ RUN wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmp
ENV MPICC=/usr/local/openmpi/bin/mpicc

COPY requirements-training-release.txt requirements-training-release.txt
COPY requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install pip=="${HABANA_PIP_VERSION}" && \
pip3 install setuptools==41.0.0

RUN pip3 install tensorflow-estimator==${TF_MINOR} \
tensorboard==${TF_MINOR} \
keras==${TF_MINOR} \
tensorflow-cpu==${TF_VERSION} \
tensorflow_text==${TF_MINOR} \
RUN pip3 install tensorflow-cpu==${TF_VERSION} \
tensorflow-model-optimization==0.5.0 && \
pip3 install --no-deps -r requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt && \
# pycocotools has to be installed in separated process otherwise it fails with 'numpy.ufunc size changed'
pip3 install pycocotools==2.0.1 && \
pip3 install -r requirements-training-release.txt && \
rm requirements-training-release.txt
rm requirements-training-release.txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install habana-tensorflow=="${VERSION}"."${REVISION}" \
--index-url "https://${ARTIFACTORY_URL}"/artifactory/api/pypi/gaudi-python/simple && \
2 changes: 1 addition & 1 deletion dockerfiles/Dockerfile_ubuntu20.04_base_installer
@@ -11,7 +11,7 @@ ARG REVISION
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV LC_CTYPE=en_US.UTF-8i
ENV LC_CTYPE=en_US.UTF-8
ENV DEBIAN_FRONTEND=noninteractive
ENV GC_KERNEL_PATH=/usr/lib/habanalabs/libtpc_kernels.so
ENV HABANA_LOGS=/var/log/habana_logs/
13 changes: 5 additions & 8 deletions dockerfiles/Dockerfile_ubuntu_tensorflow_installer
@@ -9,8 +9,7 @@ ARG REVISION
FROM ${BASE_NAME}:${VERSION}-${REVISION}
ARG VERSION
ARG REVISION
ARG TF_MINOR
ARG TF_VERSION=2.6.0
ARG TF_VERSION
ARG OPENMPI_VER=4.0.5
ARG ARTIFACTORY_URL

@@ -37,18 +36,16 @@ RUN wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmp
ENV MPICC=/usr/local/openmpi/bin/mpicc

COPY requirements-training-release.txt requirements-training-release.txt
COPY requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install pip==21.0.1 && \
pip3 install tensorflow-estimator==${TF_MINOR} \
tensorboard==${TF_MINOR} \
keras==${TF_MINOR} \
tensorflow-cpu==${TF_VERSION} \
tensorflow_text==${TF_MINOR} \
pip3 install tensorflow-cpu==${TF_VERSION} \
tensorflow-model-optimization==0.5.0 && \
pip3 install --no-deps -r requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt && \
# pycocotools has to be installed in separated process otherwise it fails with 'numpy.ufunc size changed'
pip3 install pycocotools==2.0.1 && \
pip3 install -r requirements-training-release.txt && \
rm requirements-training-release.txt
rm requirements-training-release.txt requirements-no-deps-tensorflow-cpu-"$TF_VERSION".txt

RUN python3 -m pip install habana-tensorflow=="${VERSION}"."${REVISION}" \
--index-url "https://${ARTIFACTORY_URL}"/artifactory/api/pypi/gaudi-python/simple && \
16 changes: 7 additions & 9 deletions dockerfiles/docker_build.sh
@@ -6,11 +6,11 @@
#
# HabanaLabs script for building docker images

: "${1?"Usage: $0 MODE [tensorflow,pytorch] OS [amzn2,centos8.3,rhel8.3,ubuntu18.04,ubuntu20.04] TF_VERSION(if MODE=tensorflow) [2.5.1, 2.6.0])"}"
: "${2?"Usage: $0 MODE [tensorflow,pytorch] OS [amzn2,centos8.3,rhel8.3,ubuntu18.04,ubuntu20.04] TF_VERSION(if MODE=tensorflow) [2.5.1, 2.6.0])"}"
: "${1?"Usage: $0 MODE [tensorflow,pytorch] OS [amzn2,centos8.3,rhel8.3,ubuntu18.04,ubuntu20.04] TF_VERSION(if MODE=tensorflow) [2.5.2, 2.6.2, 2.7.0])"}"
: "${2?"Usage: $0 MODE [tensorflow,pytorch] OS [amzn2,centos8.3,rhel8.3,ubuntu18.04,ubuntu20.04] TF_VERSION(if MODE=tensorflow) [2.5.2, 2.6.2, 2.7.0])"}"

VERSION="${CUSTOM_VERSION:-1.1.0}"
REVISION="${CUSTOM_REVISION:-614}"
VERSION="${CUSTOM_VERSION:-1.1.1}"
REVISION="${CUSTOM_REVISION:-94}"
MODE="$1"
OS="$2"
TF_VERSION="$3"
Expand All @@ -22,10 +22,10 @@ ARTIFACTORY_REPO="gaudi-docker"
case $MODE in
tensorflow)
case $TF_VERSION in
2.5.1|2.6.0);;
2.5.2|2.6.2|2.7.0);;
*)
echo "Provide correct TF_VERSION argument"
echo "Provided TF_VERSION: $3 - supported TF_VERSION [2.5.1, 2.6.0]"
echo "Provided TF_VERSION: $3 - supported TF_VERSION [2.5.2, 2.6.2, 2.7.0]"
exit 1;;
esac
;;
@@ -80,9 +80,8 @@ function buildDocker {
*)
esac
TF_CPU_POSTFIX="-tf-cpu-${TF_VERSION}"
TF_MINOR=${TF_VERSION%.*}.0
IMAGE_NAME="${ARTIFACTORY_URL}/${ARTIFACTORY_REPO}/${VERSION}/${OS}/habanalabs/${MODE}-installer${TF_CPU_POSTFIX}:${VERSION}-${REVISION}"
BUILDARGS+=" --build-arg ARTIFACTORY_URL="$ARTIFACTORY_URL" --build-arg TF_VERSION="$TF_VERSION" --build-arg TF_MINOR="$TF_MINOR" --build-arg VERSION="$VERSION" --build-arg REVISION="$REVISION""
BUILDARGS+=" --build-arg ARTIFACTORY_URL="$ARTIFACTORY_URL" --build-arg TF_VERSION="$TF_VERSION" --build-arg VERSION="$VERSION" --build-arg REVISION="$REVISION""
;;
pytorch)
case $OS in
@@ -112,7 +111,6 @@ echo "REVISION: $REVISION"
echo "MODE: $MODE"
echo "OS: $OS"
echo "TF_VERSION: $TF_VERSION"
echo "TF_MINOR: $TF_MINOR"
echo "PT_VERSION: $PT_VERSION"
echo "ARTIFACTORY_REPO: $ARTIFACTORY_REPO"
echo "---------------------------------------------"
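
With the updated defaults, a typical invocation looks like this (a sketch; CUSTOM_VERSION/CUSTOM_REVISION only need to be exported when overriding the new 1.1.1/94 defaults):

# Build the Ubuntu 20.04 TensorFlow installer image with one of the newly supported TF versions
./docker_build.sh tensorflow ubuntu20.04 2.7.0

# PyTorch mode still takes only MODE and OS
./docker_build.sh pytorch ubuntu20.04
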
1 change: 1 addition & 0 deletions dockerfiles/requirements-no-deps-tensorflow-cpu-2.5.2.txt
@@ -0,0 +1 @@
#No need to install tensorflow_io for tf-cpu 2.5.X, but build script require this file to proceed
2 changes: 2 additions & 0 deletions dockerfiles/requirements-no-deps-tensorflow-cpu-2.6.2.txt
@@ -0,0 +1,2 @@
tensorflow-io==0.21.0
tensorflow-io-gcs-filesystem==0.21.0
2 changes: 2 additions & 0 deletions dockerfiles/requirements-no-deps-tensorflow-cpu-2.7.0.txt
@@ -0,0 +1,2 @@
tensorflow-io==0.22.0
tensorflow-io-gcs-filesystem==0.22.0
2 changes: 1 addition & 1 deletion dockerfiles/requirements-training-release.txt
@@ -9,7 +9,7 @@ imgaug==0.4.0
keras #version not specified, as different are required for a particular version of tensorflow-cpu
cloudpickle==1.6.0
numpy>=1.18.0
tensorflow-addons==0.13.0
tensorflow-addons==0.14.0
munch==2.5.0
git+https://github.com/nvidia/dllogger@26a0f8f1958de2c0c460925ff6102a4d2486d6cc#egg=dllogger
git+https://github.com/tensorpack/tensorpack@11ca8b2c34056feb331744281000f78e3c157983
37 changes: 20 additions & 17 deletions installation_scripts/al2_tensorflow_installation.sh
@@ -35,29 +35,32 @@ echo "export OPAL_PREFIX=${MPI_ROOT}" | sudo tee -a /etc/profile.d/habanalabs.sh
echo 'export LD_LIBRARY_PATH=${MPI_ROOT}/lib:${LD_LIBRARY_PATH}' | sudo tee -a /etc/profile.d/habanalabs.sh
echo 'export PATH=${MPI_ROOT}/bin:${PATH}' | sudo tee -a /etc/profile.d/habanalabs.sh

wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-"${OPENMPI_VER}".tar.gz && \
tar -xvf openmpi-"${OPENMPI_VER}".tar.gz && \
cd openmpi-"${OPENMPI_VER}" && \
./configure --prefix="${MPI_ROOT}" && \
make -j && \
make install && \
cp LICENSE ${MPI_ROOT} && \
cd - && \
rm -rf openmpi-"${OPENMPI_VER}"* && \
/sbin/ldconfig
if [[ `${MPI_ROOT}/bin/mpirun --version` == *"$OPENMPI_VER"* ]]; then
echo "OpenMPI found. Skipping installation."
else
echo "OpenMPI not found. Installing OpenMPI ${OPENMPI_VER}.."
wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-"${OPENMPI_VER}".tar.gz && \
tar -xvf openmpi-"${OPENMPI_VER}".tar.gz && \
cd openmpi-"${OPENMPI_VER}" && \
./configure --prefix="${MPI_ROOT}" && \
make -j && \
make install && \
cp LICENSE ${MPI_ROOT} && \
cd - && \
rm -rf openmpi-"${OPENMPI_VER}"* && \
/sbin/ldconfig
fi

export MPICC=${MPI_ROOT}/bin/mpicc
${PYTHON} -m pip install --user mpi4py==3.0.3

# workaround for broken dependency in TF2.6.0 after release of TF packages for version 2.7.0
${PYTHON} -m pip install --user tensorflow-estimator==2.6.0
${PYTHON} -m pip install --user tensorboard==2.6.0
${PYTHON} -m pip install --user keras==2.6.0
#install base tensorflow package
${PYTHON} -m pip install --user tensorflow-cpu==2.6.0
${PYTHON} -m pip install --user tensorflow-cpu==2.6.2
#install tensorflow-io package with no deps, as it has broken dependency on tensorflow and would try to install non-cpu package
${PYTHON} -m pip install --user --no-deps tensorflow-io==0.21.0 tensorflow-io-gcs-filesystem==0.21.0
#install Habana tensorflow bridge & Horovod
${PYTHON} -m pip install --user habana-tensorflow==1.1.0.614 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-horovod==1.1.0.614 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-tensorflow==1.1.1.94 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-horovod==1.1.1.94 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple

source /etc/profile.d/habanalabs.sh
${PYTHON} -c 'import tensorflow as tf;import habana_frameworks.tensorflow as htf;htf.load_habana_module();x = tf.constant(2);y = x + x;assert y.numpy() == 4, "Sanity check failed: Wrong Add output";assert "HPU" in y.device, "Sanity check failed: Operation not executed on Habana";print("Sanity check passed")'
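
A quick post-install check that the script pulled the intended packages (a sketch, reusing the script's ${PYTHON} variable; expected versions are habana-tensorflow/habana-horovod 1.1.1.94 and tensorflow-cpu 2.6.2):

${PYTHON} -m pip show habana-tensorflow habana-horovod tensorflow-cpu | grep -E '^(Name|Version)'
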
39 changes: 21 additions & 18 deletions installation_scripts/u18_tensorflow_installation.sh
@@ -36,29 +36,32 @@ sudo apt update
sudo apt install -y python3.7-dev # mpi4py
sudo apt install -y wget

wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-"${OPENMPI_VER}".tar.gz && \
tar -xvf openmpi-"${OPENMPI_VER}".tar.gz && \
cd openmpi-"${OPENMPI_VER}" && \
sudo apt install -y libnuma-dev && \
./configure --prefix=$MPI_ROOT && \
make -j && \
sudo make install && \
sudo touch ~root/openmpi-4.0.5_installed && \
cd - && \
rm -rf openmpi-"${OPENMPI_VER}"* && \
sudo /sbin/ldconfig
if [[ `${MPI_ROOT}/bin/mpirun --version` == *"$OPENMPI_VER"* ]]; then
echo "OpenMPI found. Skipping installation."
else
echo "OpenMPI not found. Installing OpenMPI ${OPENMPI_VER}.."
wget --no-verbose https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-"${OPENMPI_VER}".tar.gz && \
tar -xvf openmpi-"${OPENMPI_VER}".tar.gz && \
cd openmpi-"${OPENMPI_VER}" && \
sudo apt install -y libnuma-dev && \
./configure --prefix=$MPI_ROOT && \
make -j && \
sudo make install && \
sudo touch ~root/openmpi-4.0.5_installed && \
cd - && \
rm -rf openmpi-"${OPENMPI_VER}"* && \
sudo /sbin/ldconfig
fi

${PYTHON} -m pip install --user mpi4py==3.0.3

# workaround for broken dependency in TF2.6.0 after release of TF packages for version 2.7.0
${PYTHON} -m pip install --user tensorflow-estimator==2.6.0
${PYTHON} -m pip install --user tensorboard==2.6.0
${PYTHON} -m pip install --user keras==2.6.0
#install base tensorflow package
${PYTHON} -m pip install --user tensorflow-cpu==2.6.0
${PYTHON} -m pip install --user tensorflow-cpu==2.6.2
#install tensorflow-io package with no deps, as it has broken dependency on tensorflow and would try to install non-cpu package
${PYTHON} -m pip install --user --no-deps tensorflow-io==0.21.0 tensorflow-io-gcs-filesystem==0.21.0
#install Habana tensorflow bridge & Horovod
${PYTHON} -m pip install --user habana-tensorflow==1.1.0.614 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-horovod==1.1.0.614 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-tensorflow==1.1.1.94 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple
${PYTHON} -m pip install --user habana-horovod==1.1.1.94 --extra-index-url https://vault.habana.ai/artifactory/api/pypi/gaudi-python/simple

source /etc/profile.d/habanalabs.sh
${PYTHON} -c 'import tensorflow as tf;import habana_frameworks.tensorflow as htf;htf.load_habana_module();x = tf.constant(2);y = x + x;assert y.numpy() == 4, "Sanity check failed: Wrong Add output";assert "HPU" in y.device, "Sanity check failed: Operation not executed on Habana";print("Sanity check passed")'
