Merge pull request #129 from ecobee/dev
fix output file names, add nullable dtype handling, upgrade to pandas 1.2.1
tomstesco authored Jan 20, 2021
2 parents f58698d + 2fa94fc commit 03fa12c
Showing 22 changed files with 281 additions and 115 deletions.
8 changes: 8 additions & 0 deletions .dockerignore
@@ -0,0 +1,8 @@
+*
+!Dockerfile
+!src/
+!scripts/
+!setup.py
+!requirements.txt
+!requirements_unfixed.txt
+!pytest.ini
7 changes: 4 additions & 3 deletions .env.template
@@ -1,8 +1,9 @@
PACKAGE_NAME=building-controls-simulator
-VERSION_TAG=0.3.2-alpha
+VERSION_TAG=0.3.3-alpha
+DOCKERHUB_REPOSITORY=tstesco
USER_NAME=bcs
DOCKER_IMAGE=${PACKAGE_NAME}
-LOCAL_PACKAGE_DIR=<where you cloned repo>
+LOCAL_PACKAGE_DIR=<where you cloned the repo>
DOCKER_HOME_DIR=/home/${USER_NAME}
DOCKER_LIB_DIR=${DOCKER_HOME_DIR}/lib
DOCKER_PACKAGE_DIR=${DOCKER_LIB_DIR}/${PACKAGE_NAME}
@@ -14,7 +15,7 @@ PACKAGE_DIR=/home/bcs/lib/${PACKAGE_NAME}
PYENV_ROOT=${DOCKER_HOME_DIR}/pyenv
ENERGYPLUS_INSTALL_DIR=${EXT_DIR}/EnergyPlus
ENERGYPLUSTOFMUSCRIPT=${EXT_DIR}/EnergyPlusToFMU-3.0.0/Scripts/EnergyPlusToFMU.py
-GOOGLE_APPLICATION_CREDENTIALS=${DOCKER_HOME_DIR}/.config/gcloud/application_default_credentials.json
+# GOOGLE_APPLICATION_CREDENTIALS=${DOCKER_HOME_DIR}/.config/gcloud/application_default_credentials.json
# DYD_GOOGLE_CLOUD_PROJECT=<GCP project for DYD access>
# DYD_GCS_URI_BASE=<GCP DYD bucket filter>
# DYD_METADATA_URI=<GCP DYD bucket URI to metadata file>
6 changes: 5 additions & 1 deletion .gitignore
@@ -20,11 +20,15 @@ data/
weather/
idf/
fmu/
+notebooks/
+notes/
!test/idf/v8-9-0/AZ_Phoenix_gasfurnace_crawlspace_IECC_2018_cycles.idf
!test/idf/v9-4-0/heatedbsmt_2story_2300sqft_gasfurnace_AC.idf
!test/idf/v9-4-0/heatedbsmt_1story_2000sqft_gasfurnace_AC.idf
!test/idf/v9-4-0/slab_1story_2000sqft_gasfurnace_AC.idf
!test/fmu/fmu-models/deadband/deadband.fmu
+!requirements.txt
+!requirements_unfixed.txt
__pycache__/
.pytest_cache/
.ipynb_checkpoints/
@@ -35,4 +39,4 @@ docs/source/generated/
.test.env
docker-compose.yml
build/
-notebooks/test_*
+!notebooks/test_*
2 changes: 1 addition & 1 deletion .test.env.template
@@ -10,7 +10,7 @@ ENERGYPLUS_INSTALL_VERSION=9-4-0
EPLUS_DIR=${ENERGYPLUS_INSTALL_DIR}/EnergyPlus-${ENERGYPLUS_INSTALL_VERSION}
EPLUS_IDD=${ENERGYPLUS_INSTALL_DIR}/EnergyPlus-${ENERGYPLUS_INSTALL_VERSION}/PreProcess/IDFVersionUpdater/V${ENERGYPLUS_INSTALL_VERSION}-Energy+.idd
ENERGYPLUSTOFMUSCRIPT=${EXT_DIR}/EnergyPlusToFMU-3.0.0/Scripts/EnergyPlusToFMU.py
-GOOGLE_APPLICATION_CREDENTIALS=${DOCKER_HOME_DIR}/.config/gcloud/application_default_credentials.json
+# GOOGLE_APPLICATION_CREDENTIALS=${DOCKER_HOME_DIR}/.config/gcloud/application_default_credentials.json
# DYD_GOOGLE_CLOUD_PROJECT=<GCP project for DYD access>
# DYD_GCS_URI_BASE=<GCP DYD bucket filter>
# DYD_METADATA_URI=<GCP DYD bucket URI to metadata file>
20 changes: 8 additions & 12 deletions Dockerfile
@@ -64,7 +64,8 @@ RUN sudo apt-get update && sudo apt-get upgrade -y \
python3-dev \
python3-distutils \
subversion \
-p7zip-full
+p7zip-full \
+&& sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# install nodejs and npm (for plotly)
# install pyenv https://github.com/pyenv/pyenv-installer
@@ -106,13 +107,8 @@ RUN curl -sL https://deb.nodesource.com/setup_12.x | sudo bash - \
&& sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# copying will cause rebuild at minimum to start from here
-COPY ./src "${PACKAGE_DIR}/src"
-COPY ./scripts "${PACKAGE_DIR}/scripts"
-COPY ./requirements_fixed.txt "${PACKAGE_DIR}/requirements_fixed.txt"
-COPY ./requirements_unfixed.txt "${PACKAGE_DIR}/requirements_unfixed.txt"
-COPY ./setup.py "${PACKAGE_DIR}/setup.py"
-COPY ./pytest.ini "${PACKAGE_DIR}/pytest.ini"
-COPY ./.test.env "${PACKAGE_DIR}/.test.env"
+# use .dockerignore to add files to docker image
+COPY ./ "${PACKAGE_DIR}"

# copied directory will not have user ownership by default
# install energyplus versions desired in `scripts/setup/install_ep.sh`
@@ -125,17 +121,17 @@ RUN sudo chown -R "${USER_NAME}" "${PACKAGE_DIR}" \
&& cd "${PACKAGE_DIR}" \
&& ${PYENV_ROOT}/versions/3.8.6/bin/python3.8 -m venv "${LIB_DIR}/${VENV_NAME}" \
&& . "${LIB_DIR}/${VENV_NAME}/bin/activate" \
-&& pip install --upgrade setuptools pip \
-# && pip install -r "requirements_fixed.txt" \
-&& pip install -r "requirements_unfixed.txt" \
+&& pip install --no-cache-dir --upgrade setuptools pip \
+# && pip install --no-cache-dir -r "requirements.txt" \
+&& pip install --no-cache-dir -r "requirements_unfixed.txt" \
&& pip install --editable . \
&& cd "${EXT_DIR}/PyFMI" \
&& python "setup.py" install --fmil-home="${FMIL_HOME}" \
&& cd "${EXT_DIR}" \
&& wget "https://github.com/RJT1990/pyflux/archive/0.4.15.zip" \
&& unzip "0.4.15.zip" && rm "0.4.15.zip" \
&& cd "pyflux-0.4.15" \
-&& pip install .
+&& pip install --no-cache-dir .

# install jupyter lab extensions for plotly
# if jupyter lab build fails with webpack optimization, set --minimize=False
69 changes: 47 additions & 22 deletions README.md
@@ -36,33 +36,58 @@ Copy the template files and fill in the variables mentioned below:
```bash
cp .env.template .env
cp docker-compose.yml.template docker-compose.yml
+# and if you want to run the tests
+# .test.env does not need to be edited, unless you want to inject creds
cp .test.env.template .test.env
```
Note: `docker-compose` behaviour may be slightly different on your host OS
-(Windows, Mac OS, Linux) with respect to how the expansion of environment
-variables works. If the base `docker-compose.yml` file fails on interpreting
-variables, try inlining those specific variables, e.g. replacing `${LOCAL_PACKAGE_DIR}`
-with `<where you cloned the repo to>/building-controls-simulator`.
+(Windows, Mac OS, Linux) and version of the CLI with respect to how the expansion of environment
+variables works. Make sure you have the correct version (mentioned above).

Edit in `.env.template`:
```bash
...
-LOCAL_PACKAGE_DIR=<where you cloned repo>
...
-DYD_GCS_URI_BASE=<Donate your data Google Cloud Service bucket>
-DYD_METADATA_URI=<Donate your data meta_data file Google Cloud Service URI>
-NREL_DEV_API_KEY=<your key>
-NREL_DEV_EMAIL=<your email>
+LOCAL_PACKAGE_DIR=<where you cloned the repo>
...
```

-Now you're ready to build and launch the container!
+Now you're ready to get the image and launch the container!
+If you delete the Docker image, just go through the setup here again to rebuild it.

-##### Note: Docker images may use up to 12 GB of disk space - make sure you have this available before building.
-The size of the container image can be reduced to roughly 5 GB by not installing
+### Pull Docker image from Dockerhub
+
+You can pull the latest release image from https://hub.docker.com/r/tstesco/building-controls-simulator/tags via the CLI:
+
+```bash
+docker pull tstesco/building-controls-simulator:0.3.3-alpha
+```
+
+If you are using the Dockerhub repository, make sure that your `.env` file contains
+the line
+```bash
+DOCKERHUB_REPOSITORY=tstesco
+```
+
+This allows `docker-compose.yml` to find and use the correct image. Change this
+line in `docker-compose.yml` if you want to use a locally built image.
+
+```yml
+# change this if you want to build your own image
+image: ${DOCKERHUB_REPOSITORY}/${DOCKER_IMAGE}:${VERSION_TAG}
+```
+to
+```yml
+# change this if you want to build your own image
+image: ${DOCKER_IMAGE}:${VERSION_TAG}
+```
+##### Note: Locally built Docker images may use up to 10 GB of disk space - make sure you have this available before building.
+The size of the container image can be reduced to below 5 GB by not installing
every EnergyPlus version in `scripts/setup/install_ep.sh` and not downloading
all IECC 2018 IDF files in `scripts/setup/download_IECC_idfs.sh`. Simply comment
-out the files you do not need.
+out the versions/files you do not need in the respective files.

## Run BCS with Jupyter Lab Server (recommended: option 1)

@@ -286,29 +311,29 @@ python -m pytest src/python

## Changing dependency versions

-The dependencies are pinned to exact versions in the `requirements_fixed.txt` file.
+The dependencies are pinned to exact versions in the `requirements.txt` file.
To change this simply change line (approx) 124 in the `Dockerfile` from:
```
-&& pip install -r "requirements_fixed.txt" \
-# && pip install -r "requirements_unfixed.txt" \
+&& pip install --no-cache-dir -r "requirements.txt" \
+# && pip install --no-cache-dir -r "requirements_unfixed.txt" \
```

to

```
-# && pip install -r "requirements_fixed.txt" \
-&& pip install -r "requirements_unfixed.txt" \
+# && pip install --no-cache-dir -r "requirements.txt" \
+&& pip install --no-cache-dir -r "requirements_unfixed.txt" \
```
This will install the latest satisfying versions of all dependencies. After testing that
-the dependencies are working freeze them into a new `requirements_fixed.txt` file.
+the dependencies are working, freeze them into a new `requirements.txt` file.
```
-pip freeze > requirements_fixed.txt
+pip freeze > requirements.txt
```
Several dependencies are installed from source so these must be removed from the
-`requirements_fixed.txt` file. These are:
+`requirements.txt` file. These are:
```
PyFMI
…
```
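The list above is truncated here, so only `PyFMI` is confirmed; treat the full package set as an assumption. As a minimal sketch, a post-processing script along these lines can strip the source-installed pins after running `pip freeze` (the `SOURCE_INSTALLED` set is a hypothetical placeholder you would fill in):

```python
# prune_requirements.py -- illustrative sketch, not part of this repo.
# Drops packages that are installed from source (and so should not be
# pinned) from a freshly frozen requirements.txt.
from pathlib import Path

# Hypothetical placeholder: only PyFMI is confirmed by the list above.
SOURCE_INSTALLED = {"PyFMI"}

def prune(path="requirements.txt"):
    lines = Path(path).read_text().splitlines()
    kept = [
        line
        for line in lines
        # pip freeze emits lines of the form "name==version"
        if line.split("==")[0].strip() not in SOURCE_INSTALLED
    ]
    Path(path).write_text("\n".join(kept) + "\n")

if __name__ == "__main__":
    prune()
```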
20 changes: 9 additions & 11 deletions requirements_fixed.txt → requirements.txt
@@ -20,7 +20,7 @@ click==7.1.2
coverage==5.3.1
cycler==0.10.0
Cython==0.29.21
-dask==2020.12.0
+dask==2021.1.0
decorator==4.4.2
defusedxml==0.6.0
docutils==0.16
@@ -30,14 +30,14 @@ flake8==3.8.4
fsspec==0.8.5
future==0.18.2
gcsfs==0.7.1
-google-api-core==1.24.1
+google-api-core==1.25.0
google-auth==1.24.0
google-auth-oauthlib==0.4.2
google-cloud-bigquery==2.3.1
google-cloud-bigquery-storage==2.1.0
google-cloud-core==1.5.0
google-cloud-storage==1.35.0
-google-crc32c==1.1.0
+google-crc32c==1.1.1
google-resumable-media==1.2.0
googleapis-common-protos==1.52.0
grpcio==1.34.1
@@ -72,7 +72,7 @@ munch==2.5.0
mypy-extensions==0.4.3
nbclient==0.5.1
nbconvert==6.0.7
-nbformat==5.1.0
+nbformat==5.1.2
nest-asyncio==1.4.3
notebook==6.2.0
numba==0.52.0
@@ -81,7 +81,7 @@ numexpr==2.7.2
numpy==1.19.5
oauthlib==3.1.0
packaging==20.8
-pandas==1.2.0
+pandas==1.2.1
pandas-gbq==0.14.1
pandocfilters==1.4.3
parso==0.8.1
@@ -93,7 +93,7 @@ Pillow==8.1.0
plotly==4.14.3
pluggy==0.13.1
prometheus-client==0.9.0
-prompt-toolkit==3.0.10
+prompt-toolkit==3.0.11
proto-plus==1.13.0
protobuf==3.14.0
psutil==5.8.0
@@ -107,23 +107,21 @@ pycparser==2.20
pydata-google-auth==1.1.0
pydot3k==1.0.17
pyflakes==2.2.0
-pyflux==0.4.15
-PyFMI==2.7.4
Pygments==2.7.4
pyparsing==2.4.7
pyrsistent==0.17.3
pytest==6.2.1
pytest-ordering==0.6
python-dateutil==2.8.1
pytz==2020.5
-PyYAML==5.3.1
-pyzmq==21.0.0
+PyYAML==5.4
+pyzmq==21.0.1
regex==2020.11.13
requests==2.25.1
requests-oauthlib==1.3.0
retrying==1.3.3
rsa==4.7
-scikit-learn==0.24.0
+scikit-learn==0.24.1
scipy==1.6.0
Send2Trash==1.5.0
Shapely==1.7.1
@@ -95,7 +95,9 @@ def get_output_states(self):
def get_model_name(self):
# only need the idf file name because the weather is determined from
# the combination of idf file and data_source-identifier
return f"EnergyPlus_{self.idf.idf_name.rstrip('.idf')}"
_model_name = f"EnergyPlus_{self.idf.idf_name.rstrip('.idf')}"
_model_name = _model_name.replace(".", "_")
return _model_name

@property
def timesteps_per_hour(self):
@@ -56,7 +56,9 @@ def get_output_states(self):
]

def get_model_name(self):
return f"Deadband_{self.deadband}".replace(".", "-")
_model_name = f"Deadband_{self.deadband}"
_model_name = _model_name.replace(".", "_")
return _model_name

def initialize(
self,
@@ -29,5 +29,6 @@ class FMIController(ControllerModel):
step_size_seconds = attr.ib()

def get_model_name(self):
-fmu_name = os.path.basename(self.fmu_path)
-return f"FMU_{fmu_name}"
+_model_name = os.path.basename(self.fmu_path)
+_model_name = _model_name.replace(".", "_")
+return _model_name
10 changes: 10 additions & 0 deletions src/python/BuildingControlsSimulator/DataClients/DataClient.py
@@ -232,6 +232,16 @@ def get_data(self):
[STATES.CALENDAR_EVENT],
] = pd.NA

+# finally convert dtypes to final types now that nulls in
+# non-nullable columns have been properly filled or removed
+_data = convert_spec(
+_data,
+src_spec=self.internal_spec,
+dest_spec=self.internal_spec,
+src_nullable=True,
+dest_nullable=False
+)

else:
raise ValueError(
f"ID={self.sim_config['identifier']} has no full_data_periods "
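For context on the `src_nullable`/`dest_nullable` flags added here: while missing values are still being filled, columns must live in pandas' nullable extension dtypes (`Int64`, `boolean`, ...), since plain NumPy integer and boolean dtypes cannot hold `pd.NA`; once the nulls are resolved, the frame can be cast back to non-nullable dtypes. A pandas-only sketch of that round trip (column names and dtypes are illustrative, not the actual internal spec):

```python
import pandas as pd

# Nullable extension dtypes can carry pd.NA while gaps are being filled.
df = pd.DataFrame(
    {
        "hvac_mode": pd.array([1, pd.NA, 0], dtype="Int64"),
        "is_heating": pd.array([True, pd.NA, False], dtype="boolean"),
    }
)

# Fill the remaining nulls...
df["hvac_mode"] = df["hvac_mode"].fillna(0)
df["is_heating"] = df["is_heating"].fillna(False)

# ...then cast to the final non-nullable dtypes. This raises if any
# pd.NA survived, which is exactly the guarantee wanted at this point.
df = df.astype({"hvac_mode": "int64", "is_heating": "bool"})
print(df.dtypes)
```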
@@ -34,7 +34,9 @@ def put_data(self, df, sim_name):
pass

def get_file_name(self, sim_name):
return f"{sim_name}.{self.file_extension}"
# sim_name may contain . character, replace this safely
safe_sim_name = sim_name.replace(".","_")
return f"{safe_sim_name}.{self.file_extension}"

def get_local_cache_file(self, sim_name):
if self.local_cache:
@@ -114,8 +116,6 @@ def write_data_by_extension(
index=False,
)
elif file_extension == "csv.zip":
-raise NotImplementedError("Pandas 1.2.0 has issue with writting zip files.")
-# see
_df.to_csv(
filepath_or_buffer,
compression="zip",
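The deleted guard above refers to a pandas 1.2.0 bug with writing zip-compressed CSV files; with the upgrade to pandas 1.2.1 in this commit, the plain `to_csv` call is re-enabled. A minimal sketch of the call as used here (file name and data are illustrative):

```python
import pandas as pd

df = pd.DataFrame({"t": [0, 1, 2], "temperature": [20.5, 21.0, 21.2]})

# Writing zip-compressed CSV was broken in pandas 1.2.0 and works
# again in pandas >= 1.2.1.
df.to_csv("sim_output.csv.zip", compression="zip", index=False)

# Round trip to verify the archive is readable.
df_rt = pd.read_csv("sim_output.csv.zip", compression="zip")
print(df_rt.equals(df))
```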
@@ -115,6 +115,8 @@ def read_data_static(filepath_or_buffer, data_spec, extension="parquet.gzip"):
if _col != data_spec.datetime_column
],
data_spec,
+src_nullable=True,
+dest_nullable=True,
),
)
elif extension == "csv.zip":
@@ -129,6 +131,8 @@ def read_data_static(filepath_or_buffer, data_spec, extension="parquet.gzip"):
if _col != data_spec.datetime_column
],
data_spec,
+src_nullable=True,
+dest_nullable=True,
),
)
elif extension in ["csv.gzip", "csv.gz"]:
@@ -143,6 +147,8 @@ def read_data_static(filepath_or_buffer, data_spec, extension="parquet.gzip"):
if _col != data_spec.datetime_column
],
data_spec,
+src_nullable=True,
+dest_nullable=True,
),
)
else: