Merge pull request #187 from ecobee/pre-release-0.4.0
changes for v0.4.1
tomstesco authored Apr 30, 2021
2 parents 193ed61 + 4d83b3c commit a538f1e
Showing 16 changed files with 198 additions and 94 deletions.
4 changes: 3 additions & 1 deletion .env.template
@@ -42,6 +42,8 @@ SIMULATION_EPW_DIR=${WEATHER_DIR}/simulation_epw
JUPYTER_LOG_DIR=${DOCKER_HOME_DIR}/jupyter_lab_logs
LOCAL_CACHE_DIR=${DOCKER_PACKAGE_DIR}/data
TEST_DIR=
ACADOS_SOURCE_DIR=${ACADOS_DIR}
BLASFEO_MAIN_FOLDER="${EXT_DIR}/blasfeo"
HPIPM_MAIN_FOLDER="${EXT_DIR}/hpipm"
ACADOS_DIR=${EXT_DIR}/acados
ACADOS_SOURCE_DIR=${ACADOS_DIR}
LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${HPIPM_MAIN_FOLDER}/lib:${BLASFEO_MAIN_FOLDER}/lib:${ACADOS_DIR}/lib
42 changes: 37 additions & 5 deletions Dockerfile
@@ -60,12 +60,12 @@ RUN sudo apt-get update && sudo apt-get upgrade -y \
xz-utils \
zlib1g-dev \
unzip \
# python2.7 \
python3-dev \
python3-distutils \
subversion \
p7zip-full \
bc \
gfortran \
&& sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# install nodejs and npm (for plotly)
@@ -75,7 +75,6 @@ RUN sudo apt-get update && sudo apt-get upgrade -y \
# install FMUComplianceChecker
# install EnergyPlusToFMU
# download and extract PyFMI release
# note: PyFMI 2.7.4 is the latest release that doesn't require Assimulo, which is unnecessary
# because we don't use the builtin PyFMI ODE simulation capabilities
RUN curl -sL https://deb.nodesource.com/setup_12.x | sudo bash - \
&& sudo apt-get update && sudo apt-get install -y nodejs \
@@ -99,11 +98,36 @@ RUN curl -sL https://deb.nodesource.com/setup_12.x | sudo bash - \
&& cd "${EXT_DIR}" \
&& wget "https://github.com/lbl-srg/EnergyPlusToFMU/archive/refs/tags/v3.1.0.zip" \
&& unzip "v3.1.0.zip" && rm "v3.1.0.zip" \
# install sundials; 4.1.0 is the latest supported version (dep of Assimulo)
&& cd "${EXT_DIR}" \
&& wget "https://github.com/LLNL/sundials/releases/download/v4.1.0/sundials-4.1.0.tar.gz" \
&& tar -xzf "sundials-4.1.0.tar.gz" && rm "sundials-4.1.0.tar.gz" \
&& cd "sundials-4.1.0" \
&& mkdir "build" \
&& cd "build" \
&& cmake -DCMAKE_INSTALL_PREFIX="${EXT_DIR}/sundials" .. \
&& make install \
# install lapack and blas (dep of Assimulo)
&& cd "${EXT_DIR}" \
&& wget "https://github.com/Reference-LAPACK/lapack/archive/refs/tags/v3.9.1.tar.gz" \
&& tar -xzf "v3.9.1.tar.gz" && rm "v3.9.1.tar.gz" \
&& cd "lapack-3.9.1" \
&& mkdir build \
&& cd "build" \
&& cmake -DCMAKE_INSTALL_PREFIX="${EXT_DIR}/lapack" .. \
&& cmake --build . -j --target install \
# get Assimulo source
&& cd "${EXT_DIR}" \
&& wget "https://github.com/modelon-community/Assimulo/archive/refs/tags/Assimulo-3.2.5.tar.gz" \
&& tar -xzf "Assimulo-3.2.5.tar.gz" && rm "Assimulo-3.2.5.tar.gz" \
&& mv "${EXT_DIR}/Assimulo-Assimulo-3.2.5" "${EXT_DIR}/Assimulo-3.2.5" \
# get PyFMI source
&& cd "${EXT_DIR}" \
&& wget "https://github.com/modelon-community/PyFMI/archive/refs/tags/PyFMI-2.8.6.tar.gz" \
&& tar -xzf "PyFMI-2.8.6.tar.gz" \
&& tar -xzf "PyFMI-2.8.6.tar.gz" && rm "PyFMI-2.8.6.tar.gz"\
&& mv "${EXT_DIR}/PyFMI-PyFMI-2.8.6" "${EXT_DIR}/PyFMI" \
&& rm -rf "${EXT_DIR}/PyFMI-PyFMI-2.8.6" "PyFMI-2.8.6.tar.gz" \
# make PACKAGE_DIR and cleanup
&& cd "${LIB_DIR}" \
&& mkdir "${PACKAGE_DIR}" \
&& sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

@@ -114,7 +138,6 @@ COPY ./ "${PACKAGE_DIR}"
# copied directory will not have user ownership by default
# install energyplus versions desired in `scripts/setup/install_ep.sh`
# install python dev environment
# copy .bashrc file to user home for use on startup. This can be further configured by user.
RUN sudo chown -R "${USER_NAME}" "${PACKAGE_DIR}" \
&& cd "${PACKAGE_DIR}" \
&& mv "${PACKAGE_DIR}/.vscode" "${LIB_DIR}/.vscode" \
@@ -126,9 +149,16 @@ RUN sudo chown -R "${USER_NAME}" "${PACKAGE_DIR}" \
&& pip install --no-cache-dir --upgrade setuptools pip \
# && pip install --no-cache-dir -r "requirements.txt" \
&& pip install --no-cache-dir -r "requirements_unfixed.txt" \
# install bcs
&& pip install --editable . \
# install Assimulo (dep of PyFMI 2.8+)
&& cd "${EXT_DIR}/Assimulo-3.2.5" \
&& python setup.py install --sundials-home="${HOME}/sundials" --blas-home="${HOME}/lapack/lib" --lapack-home="${HOME}/lapack" \
# install PyFMI
&& cd "${EXT_DIR}/PyFMI" \
&& python "setup.py" install --fmil-home="${FMIL_HOME}" \
&& cd "${PACKAGE_DIR}" \
&& . "scripts/setup/install_solvers.sh" \
&& cd "${EXT_DIR}" \
&& wget "https://github.com/RJT1990/pyflux/archive/0.4.15.zip" \
&& unzip "0.4.15.zip" && rm "0.4.15.zip" \
@@ -137,6 +167,7 @@ RUN sudo chown -R "${USER_NAME}" "${PACKAGE_DIR}" \

# install jupyter lab extensions for plotly
# if jupyter lab build fails with webpack optimization, set --minimize=False
# copy .rc files to user home for use on startup. This can be further configured by user.
RUN cd "${PACKAGE_DIR}" \
&& . "${LIB_DIR}/${VENV_NAME}/bin/activate" \
&& export NODE_OPTIONS="--max-old-space-size=8192" \
@@ -146,6 +177,7 @@ RUN cd "${PACKAGE_DIR}" \
&& jupyter lab build --dev-build=False --minimize=True \
&& unset NODE_OPTIONS \
&& cp "${PACKAGE_DIR}/scripts/setup/.bashrc" "$HOME/.bashrc" \
&& cp "${PACKAGE_DIR}/scripts/setup/.pdbrc" "$HOME/.pdbrc" \
&& chmod +x "${PACKAGE_DIR}/scripts/setup/jupyter_lab_bkgrnd.sh"

WORKDIR "${LIB_DIR}"
1 change: 1 addition & 0 deletions README.md
@@ -377,6 +377,7 @@ Several dependencies are installed from source so these must be removed from the
```
PyFMI
pyflux
Assimulo
hpipm-python
```
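
A minimal sketch (not part of the repository) of how the source-installed packages listed above could be filtered out of a frozen requirements list; the helper name and the lower-case normalization are assumptions:

```python
# Hypothetical helper: drop the source-installed packages listed above from a
# `pip freeze` output so they are not re-pinned in requirements files.
SOURCE_INSTALLED = {"pyfmi", "pyflux", "assimulo", "hpipm-python"}

def filter_requirements(freeze_lines):
    """Keep only requirement lines whose package was not installed from source."""
    kept = []
    for line in freeze_lines:
        name = line.split("==")[0].strip().lower()
        if name not in SOURCE_INSTALLED:
            kept.append(line)
    return kept

print(filter_requirements(["pandas==1.2.4", "PyFMI==2.8.6", "Assimulo==3.2.5"]))
# ['pandas==1.2.4']
```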
3 changes: 0 additions & 3 deletions dev_docker.sh
@@ -8,7 +8,4 @@ docker-compose run \
--rm \
--service-ports \
--volume=${LOCAL_PACKAGE_DIR}:${DOCKER_PACKAGE_DIR}:consistent\
--volume=${LOCAL_CONTROLLER_DIR}:${DOCKER_CONTROLLER_DIR}:consistent \
--volume=${LOCAL_THERMAL_DIR}:${DOCKER_THERMAL_DIR}:consistent \
--volume=/Users/tom.s/.config/gcloud:${DOCKER_HOME_DIR}/.config/gcloud:ro \
building-controls-simulator bash
6 changes: 3 additions & 3 deletions requirements.txt
@@ -29,13 +29,13 @@ defusedxml==0.7.1
docutils==0.16
entrypoints==0.3
eppy==0.5.56
flake8==3.9.0
fsspec==0.9.0
flake8==3.9.1
fsspec==2021.4.0
future==0.18.2
gcsfs==0.8.0
google-api-core==1.26.3
google-api-python-client==2.2.0
google-auth==1.28.1
google-auth==1.29.0
google-auth-httplib2==0.1.0
google-auth-oauthlib==0.4.4
google-cloud-bigquery==2.3.1
3 changes: 3 additions & 0 deletions scripts/setup/.pdbrc
@@ -0,0 +1,3 @@
import IPython
# Interactive shell
alias interacti IPython.embed()
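
A brief usage sketch for the alias above (the function below is hypothetical): once a pdb prompt is active, typing `interacti` embeds an IPython shell in the current frame.

```python
# Hypothetical illustration of the new .pdbrc alias: stop in pdb, then at the
# (Pdb) prompt type `interacti` to open an IPython shell in the current frame.
import pdb

def debug_me(x):
    pdb.set_trace()  # at the (Pdb) prompt, enter: interacti
    return x * 2

debug_me(21)
```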
2 changes: 2 additions & 0 deletions scripts/setup/install_solvers.sh
@@ -36,3 +36,5 @@ echo "BLASFEO_MAIN_FOLDER=$BLASFEO_MAIN_FOLDER"
# export LD_LIBRARY_PATH
export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HPIPM_MAIN_FOLDER}/lib:${BLASFEO_MAIN_FOLDER}/lib"
echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH"

cd "${PACKAGE_DIR}"
3 changes: 1 addition & 2 deletions scripts/setup/jupyter_lab.sh
@@ -23,9 +23,8 @@ if [ ! -d "${JUPYTER_LOG_DIR}" ]; then mkdir "${JUPYTER_LOG_DIR}"; fi
echo "jupyter-lab accessable at: http://localhost:8888/lab"
echo "jupyter-lab logs are being stored in: ${JUPYTER_LOG_DIR}/${FNAME}"

cd "${LIB_DIR}"
. "${LIB_DIR}/${VENV_NAME}/bin/activate"
jupyter-lab --ip="0.0.0.0" --no-browser > "${JUPYTER_LOG_DIR}/${FNAME}"

echo "$!" > "${JUPYTER_LOG_DIR}/JUPYTER_SERVER_PID.txt"

set +eu +o pipefail
@@ -63,6 +63,8 @@ class EnergyPlusBuildingModel(BuildingModel):
heat_on = attr.ib(default=False)
cool_on = attr.ib(default=False)

model_creation_step = attr.ib(default=True)

# for reference on how attr defaults work for mutable types (e.g. list) see:
# https://www.attrs.org/en/stable/init.html#defaults
input_states = attr.ib()
@@ -26,6 +26,7 @@ class ControllerModel(ABC):

current_t_idx = attr.ib(default=None)
current_t_start = attr.ib(default=None)
start_utc = attr.ib(default=None)

init_status = attr.ib(factory=list)
step_status = attr.ib(factory=list)
@@ -51,13 +52,9 @@ def get_model_name(self):
"""Defines human readable uniquely identifing name"""
pass

def set_status(self, status, min_log_level):
self.status = status
self.log_status(min_log_level=min_log_level)

def log_status(self, min_log_level):
if self.log_level >= min_log_level:
logging.info(f"{type(self)}: status:{str(self.status)}")
def get_step_time_utc(self):
"""For debugging use"""
return self.start_utc + pd.Timedelta(seconds=self.current_t_idx * self.step_size_seconds)

def update_settings(
self,
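
The `get_step_time_utc` helper added above reconstructs the wall-clock time of the current step from `start_utc`, `current_t_idx`, and the step size; a standalone check of that arithmetic with illustrative values:

```python
# Standalone sketch of the get_step_time_utc arithmetic added above
# (start time, step size, and index below are illustrative values).
import pandas as pd

start_utc = pd.Timestamp("2021-04-30T00:00:00Z")
step_size_seconds = 300   # 5 minute simulation steps
current_t_idx = 12        # steps completed so far

step_time_utc = start_utc + pd.Timedelta(seconds=current_t_idx * step_size_seconds)
print(step_time_utc)      # 2021-04-30 01:00:00+00:00
```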
@@ -10,6 +10,7 @@
ControllerModel,
)
from BuildingControlsSimulator.DataClients.DataStates import STATES
from BuildingControlsSimulator.ControllerModels.ControllerStatus import CONTROLLERSTATUS
from BuildingControlsSimulator.Conversions.Conversions import Conversions


@@ -141,6 +142,9 @@ def do_step(
step_weather_forecast_input,
):
"""Simulate controller time step."""
self.step_status = []
self.step_status.append(CONTROLLERSTATUS.STEP_BEGAN)

t_ctrl = self.calc_t_control(step_sensor_input)
self.step_output[STATES.TEMPERATURE_CTRL] = t_ctrl

@@ -179,7 +183,7 @@ def do_step(

self.add_step_to_output(self.step_output)
self.current_t_idx += 1

self.step_status.append(CONTROLLERSTATUS.STEP_SUCCESSFUL)
return self.step_output

def add_step_to_output(self, step_output):
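
The status bookkeeping added to `do_step` above records `STEP_BEGAN` on entry and `STEP_SUCCESSFUL` only when the step runs to completion, so a partially executed step is detectable afterwards. A self-contained stand-in sketch (the enum and check below are not the package's classes):

```python
# Stand-in sketch (not the package's classes) of the per-step status pattern
# added above: STEP_BEGAN on entry, STEP_SUCCESSFUL only on completion.
from enum import Enum, auto

class CONTROLLERSTATUS(Enum):
    STEP_BEGAN = auto()
    STEP_SUCCESSFUL = auto()

step_status = []
step_status.append(CONTROLLERSTATUS.STEP_BEGAN)
# ... controller work for the time step would happen here ...
step_status.append(CONTROLLERSTATUS.STEP_SUCCESSFUL)

if CONTROLLERSTATUS.STEP_SUCCESSFUL not in step_status:
    raise RuntimeError(f"controller step did not complete: {step_status}")
```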
@@ -151,5 +151,7 @@ def read_data_static(filepath_or_buffer, data_spec, extension="parquet.gzip"):

# get intersection of columns
_df = _df[set(data_spec.full.columns) & set(_df.columns)]
# convert datetime_column to pd datetime
_df[data_spec.datetime_column] = pd.to_datetime(_df[data_spec.datetime_column])

return _df
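
The `pd.to_datetime` call added above normalizes the datetime column regardless of how it was stored on disk; a small standalone example of that conversion (the DataFrame and column name are illustrative):

```python
# Standalone example of the pd.to_datetime normalization added above
# (DataFrame and column name are illustrative, not from the package).
import pandas as pd

_df = pd.DataFrame({"date_time": ["2021-04-30 00:00:00", "2021-04-30 00:05:00"]})
_df["date_time"] = pd.to_datetime(_df["date_time"])
print(_df["date_time"].dtype)  # datetime64[ns]
```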
22 changes: 22 additions & 0 deletions src/python/BuildingControlsSimulator/DataClients/DataSpec.py
@@ -42,6 +42,10 @@ def spec_unit_conversion(df, src_spec, dest_spec):
dest_unit == UNITS.FARHENHEIT
):
df[k] = df[k] / 10.0
elif (src_unit == UNITS.FARHENHEIT) and (
dest_unit == UNITS.FARHENHEITx10
):
df[k] = df[k] * 10.0
elif (src_unit == UNITS.FARHENHEITx10) and (dest_unit == UNITS.CELSIUS):
df[k] = Conversions.F2C(df[k] / 10.0)
elif (src_unit == UNITS.CELSIUS) and (dest_unit == UNITS.FARHENHEITx10):
@@ -802,6 +806,24 @@ def __init__(self):
"channel": CHANNELS.WEATHER,
"unit": UNITS.RELATIVE_HUMIDITY,
},
"DirectNormalRadiation":{
"internal_state": STATES.DIRECT_NORMAL_IRRADIANCE,
"dtype": "float32",
"channel": CHANNELS.WEATHER,
"unit": UNITS.WATTS_PER_METER_SQUARED,
},
"GlobalHorizontalRadiation":{
"internal_state": STATES.GLOBAL_HORIZONTAL_IRRADIANCE,
"dtype": "float32",
"channel": CHANNELS.WEATHER,
"unit": UNITS.WATTS_PER_METER_SQUARED,
},
"DiffuseHorizontalRadiation":{
"internal_state": STATES.DIFFUSE_HORIZONTAL_IRRADIANCE,
"dtype": "float32",
"channel": CHANNELS.WEATHER,
"unit": UNITS.WATTS_PER_METER_SQUARED,
},
},
)

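
The new FARHENHEIT → FARHENHEITx10 branch added above fills the previously missing direction of that conversion (multiply by 10, mirroring the existing divide-by-10 branch); a standalone check with an assumed column name:

```python
# Standalone check of the new FARHENHEIT -> FARHENHEITx10 branch added above
# (column name is an assumption; this does not import the package).
import pandas as pd

df = pd.DataFrame({"temperature_stp_cool": [72.5, 68.0]})       # degrees Fahrenheit
df["temperature_stp_cool"] = df["temperature_stp_cool"] * 10.0  # F -> Fx10
assert df["temperature_stp_cool"].tolist() == [725.0, 680.0]
```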
@@ -82,7 +82,7 @@ def get_schedule_change_points(data, sim_step_size_seconds):
if data.empty:
schedule_chg_pts = {}
return schedule_chg_pts

schedule_data = data[[STATES.DATE_TIME, STATES.SCHEDULE]].dropna(
subset=[STATES.SCHEDULE]
)
@@ -338,6 +338,37 @@ def get_schedule_change_points(data, sim_step_size_seconds):

_week_start = _week_start + pd.Timedelta(days=7)

# check for a complete schedule
# in cases where the schedule is incomplete it is because the data period
# for the simulation does not have at least 1 full week of data
# additional data is required for spin-up simulation time and
# a full schedule is generated by copying the previous day's schedule
# to any days without a schedule
for _time, _sched in schedule_chg_pts.items():
_days_active = [False] * 7
for _climate in _sched:
_days_active = [any(btup) for btup in zip(_days_active, _climate['on_day_of_week'])]

if not all(_days_active):
_active_idxs = [
_idx for _idx, _active
in enumerate(_days_active)
if _active
]
_inactive_idxs = [
_idx for _idx, _active
in enumerate(_days_active)
if not _active
]

for _idx in _inactive_idxs:
for _climate in _sched:
if _idx == 0:
# get first schedule by wrapping week
_climate['on_day_of_week'][0] = _climate['on_day_of_week'][max(_active_idxs)]
else:
_climate['on_day_of_week'][_idx] = _climate['on_day_of_week'][min(_active_idxs)]

return schedule_chg_pts

@staticmethod
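
The loop added above fills any day that has no schedule by copying the flag from an active day, wrapping the first day of the week back to the last active day; a standalone sketch of that fill on an assumed 7-day `on_day_of_week` list:

```python
# Standalone sketch of the schedule-completion fill added above, assuming a
# Monday-first, 7-element on_day_of_week list with some days missing.
on_day_of_week = [False, True, True, True, True, False, False]

active_idxs = [i for i, on in enumerate(on_day_of_week) if on]

for i, on in enumerate(on_day_of_week):
    if not on:
        if i == 0:
            # first day wraps the week: copy the last active day
            on_day_of_week[i] = on_day_of_week[max(active_idxs)]
        else:
            on_day_of_week[i] = on_day_of_week[min(active_idxs)]

print(on_day_of_week)  # every day now carries a schedule flag
```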