From 59b89793de3077a493e40995be0ea14fa1c14763 Mon Sep 17 00:00:00 2001 From: corneel27 Date: Wed, 30 Oct 2024 19:28:11 +0100 Subject: [PATCH 1/4] Version 2024.11.0.dev_a: integrate several kinds of heatpumps --- dao/config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dao/config.yaml b/dao/config.yaml index 20bb9b5..fbaf0e1 100644 --- a/dao/config.yaml +++ b/dao/config.yaml @@ -1,6 +1,6 @@ --- name: 刀 Day Ahead Optimizer -version: "2024.10.6" +version: "2024.11.0.dev_a" slug: day_ahead_opt description: Docker used by Home Assistant Community Add-ons for day ahead optimizations url: https://github.com/corneel27/day-ahead/ From e9412a75cc2099154b12a027ed4c11a38b26f11f Mon Sep 17 00:00:00 2001 From: corneel27 Date: Fri, 1 Nov 2024 19:28:36 +0100 Subject: [PATCH 2/4] Version 2024.11.0: Fixed a db-error when getting Tibber-data with the default sqlite-db --- dao/CHANGELOG.md | 2 ++ dao/config.yaml | 2 +- dao/prog/utils.py | 13 +++++++++---- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/dao/CHANGELOG.md b/dao/CHANGELOG.md index 9fd94f1..e9f73ab 100644 --- a/dao/CHANGELOG.md +++ b/dao/CHANGELOG.md @@ -1,5 +1,7 @@ # Changelog 刀 DAO # Day Ahead Optimizer +## [V2024.11.0] +- Fixed a db-error when getting Tibber-data with the default sqlite-db ## [V2024.10.6] - Fixed a few errors in the graphics for users with more than one battery diff --git a/dao/config.yaml b/dao/config.yaml index fbaf0e1..c2523cc 100644 --- a/dao/config.yaml +++ b/dao/config.yaml @@ -1,6 +1,6 @@ --- name: 刀 Day Ahead Optimizer -version: "2024.11.0.dev_a" +version: "2024.11.0" slug: day_ahead_opt description: Docker used by Home Assistant Community Add-ons for day ahead optimizations url: https://github.com/corneel27/day-ahead/ diff --git a/dao/prog/utils.py b/dao/prog/utils.py index bbe9c81..f45a5d3 100644 --- a/dao/prog/utils.py +++ b/dao/prog/utils.py @@ -102,13 +102,18 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: url = config.get(["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql") db_da_engine = config.get(['database da', "engine"], None, "mysql") db_da_server = config.get(['database da', "server"], None, "core-mariadb") - db_da_port = int(config.get(['database da', "port"], None, 3306)) - db_da_name = config.get(['database da', "database"], None, "day_ahead") + db_da_port = int(config.get(['database da', "port"], None, 0)) + if db_da_engine == "sqlite": + db_da_name = config.get(['database da', "database"], None, "day_ahead.db") + else: + db_da_name = config.get(['database da', "database"], None, "day_ahead") db_da_user = config.get(['database da', "username"], None, "day_ahead") db_da_password = config.get(['database da', "password"]) + db_da_path = config.get(['database da', "db_path"], None, "../data") db_time_zone = config.get(["time_zone"]) - db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, db_server=db_da_server, db_port=db_da_port, - db_user=db_da_user, db_password=db_da_password, db_time_zone=db_time_zone) + db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, db_server=db_da_server, + db_port=db_da_port, db_user=db_da_user, db_password=db_da_password, + db_path=db_da_path, db_time_zone=db_time_zone) prices_options = config.get(["prices"]) headers = { "Authorization": "Bearer " + tibber_options["api_token"], From f62edd60ecff19278be7df857e8a5935235b0260 Mon Sep 17 00:00:00 2001 From: corneel27 Date: Thu, 7 Nov 2024 23:47:22 +0100 Subject: [PATCH 3/4] Version 2024.11.1.dev_a: Scheduling of heatpump can 
be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) ## [V2024.11.1] - Fixed an error when getting Tibber-data when using a Tibber pulse: only data before today are stored --- dao/CHANGELOG.md | 4 ++++ dao/config.yaml | 2 +- dao/prog/utils.py | 50 ++++++++++++++++++++++++++--------------------- 3 files changed, 33 insertions(+), 23 deletions(-) diff --git a/dao/CHANGELOG.md b/dao/CHANGELOG.md index e9f73ab..0655531 100644 --- a/dao/CHANGELOG.md +++ b/dao/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog 刀 DAO # Day Ahead Optimizer +## [V2024.11.1] +- Fixed an error when getting Tibber-data when using a Tibber pulse: +only data before today are stored + ## [V2024.11.0] - Fixed a db-error when getting Tibber-data with the default sqlite-db diff --git a/dao/config.yaml b/dao/config.yaml index c2523cc..15d8cef 100644 --- a/dao/config.yaml +++ b/dao/config.yaml @@ -1,6 +1,6 @@ --- name: 刀 Day Ahead Optimizer -version: "2024.11.0" +version: "2024.11.1" slug: day_ahead_opt description: Docker used by Home Assistant Community Add-ons for day ahead optimizations url: https://github.com/corneel27/day-ahead/ diff --git a/dao/prog/utils.py b/dao/prog/utils.py index f45a5d3..f9423d5 100644 --- a/dao/prog/utils.py +++ b/dao/prog/utils.py @@ -188,6 +188,8 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: '}" ' \ '}' + now = datetime.datetime.now() + today_ts = datetime.datetime(year=now.year,month=now.month, day=now.day).timestamp() logging.debug(query) resp = post(url, headers=headers, data=query) tibber_dict = json.loads(resp.text) @@ -195,29 +197,33 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: consumption_nodes = tibber_dict['data']['viewer']['homes'][0]['consumption']['nodes'] tibber_df = pd.DataFrame(columns=['time', 'code', 'value']) for node in production_nodes: - time_stamp = str(int(get_datetime_from_str(node['from']).timestamp())) - if not (node["production"] is None): - code = "prod" - value = float(node["production"]) - logging.info(f"{node} {time_stamp} {value}") - tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] - if not (node["profit"] is None): - code = 'profit' - value = float(node["profit"]) - logging.info(f"{node} {time_stamp} {value}") - tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] + timestamp = int(get_datetime_from_str(node['from']).timestamp()) + if timestamp < today_ts: + time_stamp = str(timestamp) + if not (node["production"] is None): + code = "prod" + value = float(node["production"]) + logging.info(f"{node} {time_stamp} {value}") + tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] + if not (node["profit"] is None): + code = 'profit' + value = float(node["profit"]) + logging.info(f"{node} {time_stamp} {value}") + tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] for node in consumption_nodes: - time_stamp = str(int(get_datetime_from_str(node['from']).timestamp())) - if not (node["consumption"] is None): - code = "cons" - value = float(node["consumption"]) - logging.info(f"{node} {time_stamp} {value}") - tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] - if not (node["cost"] is None): - code = "cost" - value = float(node["cost"]) - logging.info(f"{node} {time_stamp} {value}") - tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] + timestamp = 
int(get_datetime_from_str(node['from']).timestamp()) + if timestamp < today_ts: + time_stamp = str(timestamp) + if not (node["consumption"] is None): + code = "cons" + value = float(node["consumption"]) + logging.info(f"{node} {time_stamp} {value}") + tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] + if not (node["cost"] is None): + code = "cost" + value = float(node["cost"]) + logging.info(f"{node} {time_stamp} {value}") + tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] logging.info(f"Opgehaalde data bij Tibber (database records):" f"\n{tibber_df.to_string(index=False)}") db_da.savedata(tibber_df) From 2531a6ebeb4aa2db1bdaccb2b8d36294e872645e Mon Sep 17 00:00:00 2001 From: Cees van Beek <16610333+corneel27@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:32:10 +0100 Subject: [PATCH 4/4] Develop (#168) * Version 2024.11.0.dev_a: integrate several kinds of heatpumps * Version 2024.11.0.dev_a: test pull request * Version 2024.11.0.dev: test pull request * Version 2024.11.0.dev_a: Scheduling of heatpump can be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) * Version 2024.11.0.dev: (#139) * Version 2024.11.0.dev: test pull request * Version 2024.11.0.dev_a: Scheduling of heatpump can be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) * Version 2024.11.0.dev: (#139) (#145) * Version 2024.11.0.dev: test pull request * Version 2024.11.0.dev_a: Scheduling of heatpump can be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) * Version 2024.11.1.dev_a: Scheduling of heatpump can be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) ## [V2024.11.1] - Fixed an error when getting Tibber-data when using a Tibber pulse: only data before today are stored * Version 2024.11.1.dev_b: - Scheduling of boiler can be set via a ha-entity - when boiler is heated bij the heat pump for room-heating then there can only be "one" heating function in an hour, therefore is a new setting introduced in the boiler-section: "boiler heated by heatpump". 
This setting can be "True" or "False" - the code is brought inline with PEP 8 (Style Guide for Python Code) * Implementatie on/off warmtepomp (#147) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Develop (#148) * Version 2024.11.0.dev: (#139) * Version 2024.11.0.dev: test pull request * Version 2024.11.0.dev_a: Scheduling of heatpump can be set via a ha-entity - Scheduling of heatpump can be achieved in three ways: - on/off - heat curve adjustment - calculated power - When setting a state of an entity failed then an error message is written in the log (name of the entity, new failed value) * Implementatie on/off warmtepomp (#147) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_c: - Scheduling of boiler can be set via a ha-entity - when boiler is heated bij the heat pump for room-heating then there can only be "one" heating function in an hour, therefore is a new setting introduced in the boiler-section: "boiler heated by heatpump". This setting can be "True" or "False" - the code is brought inline with PEP 8 (Style Guide for Python Code) * Version 2024.11.1.dev_c: - Fixed error in a api-call: `http://:5000/api/report/cost/deze_maand`?? - fixed error api call "netto_cost" * Version 2024.11.1.dev_d: There was a general error in api-calls, fixed. * Implementatie van hp_enabled en hp_heat_demand (#151) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Bugfix day_ahead.py & extra optie warmtepomp: min. 
draai uren (#155) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Versie 2024.11.1.dev_e (#156) (#157) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). 
--------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Versie 2024.11.1.dev_e1 (#159) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. 
als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Develop (#161) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). 
--------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. 
* Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.12.0.rc_3 Implementation of "on/off" and "power" adjustmentof heatpump * develop_cees (#162) * Develop (#161) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. 
als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.12.0.rc_3 Implementation of "on/off" and "power" adjustmentof heatpump --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Update docs.md (#163) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update 
config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Definitieve versie 2024.12.0 (#165) * format da_report * maximizing heat_needed * Aanvullingen DOCS.md (#164) * develop_cees (#162) * Develop (#161) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. 
als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.12.0.rc_3 Implementation of "on/off" and "power" adjustmentof heatpump --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Update docs.md (#163) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update 
config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Definitieve versie 2024.12.0 --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Develop cees (#166) * format da_report * maximizing heat_needed * Aanvullingen DOCS.md (#164) * develop_cees (#162) * Develop (#161) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. 
als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.12.0.rc_3 Implementation of "on/off" and "power" adjustmentof heatpump --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Update docs.md (#163) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update 
config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Definitieve versie 2024.12.0 --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Develop cees (#167) * format da_report * maximizing heat_needed * Aanvullingen DOCS.md (#164) * develop_cees (#162) * Develop (#161) * Versie 2024.11.1.dev_e (#156) * Implementatie van hp_enabled en hp_heat_demand (#151) (#152) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.11.1.dev_e: - "optimal lower level" is not used anymore: it was too difficult and too complex to understand and didn't give enough good results - The calculated cycle costs are (per battery) logged (level info). --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * bugfixje heatpump en toevoegen van optionele ha entity voor degree_days_factor (#158) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> * Bugfix: api werkte niet met deze_maand, deze_week etc. 
als geen tibber data (#160) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Version 2024.12.0.rc_3 Implementation of "on/off" and "power" adjustmentof heatpump --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Update docs.md (#163) * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update day_ahead.py Bug fixes warmtepomp Toevoegen van optie om minimale run lengte van warmtepomp te garanderen * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py Bugfix minimale run lengte warmtepomp * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update day_ahead.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update 
config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update da_report.py * Update config.yaml * Update day_ahead.py * Update da_report.py Bugfix: api did not work for deze_week, deze_maand etc. * Update da_report.py * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Update DOCS.md * Add files via upload * Update DOCS.md --------- Co-authored-by: Cees van Beek <16610333+corneel27@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> * Definitieve versie 2024.12.0 * Definitieve versie 2024.12.0 * Set version number (2024.12.0) in changelog --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> --------- Co-authored-by: soeter01 <50205733+soeter01@users.noreply.github.com> --- dao/CHANGELOG.md | 65 +- dao/DOCS.md | 427 ++-- dao/Dockerfile | 4 +- dao/config.yaml | 2 +- dao/data/options_example.json | 11 +- dao/data/options_start.json | 6 +- dao/images/COP.png | Bin 0 -> 9396 bytes dao/images/power.png | Bin 0 -> 9769 bytes dao/prog/check_db.py | 239 ++- dao/prog/da_base.py | 339 +-- dao/prog/da_config.py | 77 +- dao/prog/da_graph.py | 89 +- dao/prog/da_meteo.py | 279 ++- dao/prog/da_prices.py | 306 +-- dao/prog/da_report.py | 1378 +++++++----- dao/prog/da_scheduler.py | 7 +- dao/prog/day_ahead.py | 2592 ++++++++++++++++------- dao/prog/db_manager.py | 381 ++-- dao/prog/graphs.py | 29 +- dao/prog/utils.py | 232 +- dao/prog/version.py | 2 +- dao/requirements.txt | 6 +- dao/run/run.sh | 1 + dao/tests/data/options_sqlite.json | 2 +- dao/tests/prog/test_dao.py | 26 +- dao/webserver/app/__init__.py | 13 +- dao/webserver/app/routes.py | 62 +- dao/webserver/app/templates/home.html | 4 +- dao/webserver/app/templates/report.html | 3 +- 29 files changed, 4348 insertions(+), 2234 deletions(-) create mode 100644 dao/images/COP.png create mode 100644 dao/images/power.png diff --git a/dao/CHANGELOG.md b/dao/CHANGELOG.md index 3313c20..bafef71 100644 --- a/dao/CHANGELOG.md +++ b/dao/CHANGELOG.md @@ -1,6 +1,59 @@ # Changelog 刀 DAO # Day Ahead Optimizer + +# [V2024.12.0] +# LET OP +De energiebelasting wijzigt per 1 januari 2025.
+Neem deze over van onderstaande lijst in je instellingen:
+``` + "energy taxes delivery": { + "2022-01-01": 0.06729, + "2023-01-01": 0.12599, + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 + }, + "energy taxes redelivery": { + "2022-01-01": 0.06729, + "2023-01-01": 0.12599, + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 + }, +``` +# Breaking change +There is an extra optional parameter when calling an api-report: **expected**.
+When you call the api without `expected` or with `expected=0` (the default value), only **recorded** values +are reported and the expected part of the json-result will be empty. +When you call with the parameter `expected=1`, the expected values are reported in the expected +part of the json-result.
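+A minimal sketch of such a call, assuming the webserver runs on the default dashboard port 5000 and
+that `expected` is passed as a query parameter (host, report type and period are just examples):
+```
+import requests
+
+# Assumptions: the DAO webserver listens on the default dashboard port 5000 and the
+# report api accepts GET requests with `expected` as a query parameter.
+base = "http://localhost:5000/api/report"
+
+# Only recorded values (same result as omitting the parameter or sending expected=0)
+recorded_only = requests.get(f"{base}/cost/deze_week", params={"expected": 0}).json()
+
+# Recorded values plus the expected part for the period that is not fully recorded yet
+with_expected = requests.get(f"{base}/cost/deze_week", params={"expected": 1}).json()
+print(with_expected["recorded"])
+print(with_expected["expected"])
+```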
+For periods with the interval "hour", there will be no change. +But for periods with the interval "day" or "month" this can lead to different results. +For instance, when you call the api for the period "deze_week" without the parameter "expected", you get this result:
+```{ "message":"Success", "recorded": [{"time":"2024-12-02 00:00","value":36.7450000558},{"time":"2024-12-03 00:00","value":19.1840000708},{"time":"2024-12-04 00:00","value":36.8009995644},{"time":"2024-12-05 00:00","value":19.7590002147},{"time":"2024-12-06 00:00","value":43.3299993972},{"time":"2024-12-07 00:00","value":24.9570001736},{"time":"2024-12-08 00:00","value":6.462}], "expected" : [] }```
+But when you call it with "expected=1", you get:
+```{ "message":"Success", "recorded": [{"time":"2024-12-02 00:00","value":36.7450000558},{"time":"2024-12-03 00:00","value":19.1840000708},{"time":"2024-12-04 00:00","value":36.8009995644},{"time":"2024-12-05 00:00","value":19.7590002147},{"time":"2024-12-06 00:00","value":43.3299993972},{"time":"2024-12-07 00:00","value":24.9570001736}], "expected" : [{"time":"2024-12-08 00:00","value":14.282395}] }```
+The total consumption of "2024-12-08" is now mentioned in the expected part, because a part of that consumption is still expected. + +# Other changes +- "optimal lower level" is not used anymore: it was too complex to understand and +did not give good enough results +- The calculated cycle costs are logged per battery (level info). +- Fixed a general error in api-calls. +- There was an error reported in an api-call: `http://:5000/api/report/cost/deze_maand`.
This error has been fixed. +- Fixed an error in the api call "netto_cost" +- Scheduling of the boiler can be postponed via a ha-entity +- when the boiler is heated by the heat pump for room-heating, there can only be "one" heating +function in an hour; therefore a new setting has been introduced in the boiler-section: "boiler heated +by heatpump". This setting can be "True" or "False" +- the code has been brought in line with PEP 8 (Style Guide for Python Code) +- Scheduling of the heatpump can be set via a ha-entity +- Scheduling of the heatpump can be achieved in three ways: + - on/off + - heat curve adjustment + - calculated power +- When setting the state of an entity fails, an error message is written in the log + (name of the entity, new failed value) + ## [V2024.11.1] - Fixed an error when getting Tibber-data when using a Tibber pulse: only data before today are stored @@ -420,7 +473,7 @@ Zie DOCS.md ### Removed De functionaliteit om via de websocket in HA een berekening te starten is verwijderd. -Dat kan nu via een rest-command: /api/run +Dat kan nu via een rest-command: `/api/run` ## [v0.3.1] - 2023-09-12 @@ -495,7 +548,7 @@ Dit is hersteld. Dit heeft twee voordelen:
- het rekent veel sneller - er wordt makkelijker tussen twee "stages" geinterpoleerd.
- Als dit goed bevalt, zal het ook worden geimplementeerd voor het ontladen (van dc naar ac) en van dc naar batterij en vice versa.
+ Als dit goed bevalt, zal het ook worden geimplementeerd voor het ontladen (van `dc` naar `ac`) en van `dc` naar batterij en vice versa.
- de prijzengrafieken zijn in blokvorm en uitgelijnd met de verbruiksgrafieken @@ -531,7 +584,7 @@ De loggings zijn te vinden in data\log\dashboard.log. - versienummer in bestand _version.py - check op voldoende aantal rijen bij prognose data (dynamische prijzen en meteo) - bij 2 rijen of minder wordt er niet gerekend
- - bij 3 tot 8 rijen wordt er wel gerekend maar wordt er wel een waarschuwing afgegeven + - bij 3 tot 8 rijen wordt er wel gerekend, maar wordt er wel een waarschuwing afgegeven - een changelog - naar keuze datum-tijd of alleen tijd input helper voor aangeven wanneer een elektrische auto geladen moet zijn @@ -548,7 +601,7 @@ gaf het programma verkeerde resultaten voor dat eerste uur. Dit is gefixed. - laden auto wordt alleen uitgezet als auto thuis is (en aangesloten) - ongebruikte instellingen uit DOCS.md gehaald - navigatieknoppen in webserver bij "home" omgezet -- menu optie **Meteo** in webserver voorzien van toelichting "in ontwikkeling" +- menuoptie **Meteo** in webserver voorzien van toelichting "in ontwikkeling" - notificatie via Home Assistant toegevoegd. Zie voor meer informatie DOCS.md bij **notification entity** - in het instellingenbestand options.json is de naam van de entity aanduiding veranderd:
`"entity ready time"` wordt `"entity ready datetime"` @@ -557,8 +610,8 @@ gaf het programma verkeerde resultaten voor dat eerste uur. Dit is gefixed. ### Issues Als het programma draait in scheduler-mode wordt een websocket geopend naar HA zodat vanuit HA een -optimaliserings berekening kan worden gestart. -Als HA stopt (bijv voor een update) dan blijft de websocket "in de lucht" maar is niet meer effectief. +optimaliseringsberekening kan worden gestart. +Als HA stopt (bijv. voor een update) dan blijft de websocket "in de lucht" maar is niet meer effectief. ### Removed diff --git a/dao/DOCS.md b/dao/DOCS.md index ec8b0df..a953846 100644 --- a/dao/DOCS.md +++ b/dao/DOCS.md @@ -562,146 +562,157 @@ Dit regelt de supervisor van Home Assistant dan voor je. ---------------------------- -| Key | Subkey | Type | Default | Opmerkingen | -|---------------------------|------------------------------|------------------|------------------------------------|----------------------------------------------------| -| **homeassistant** | protocol api | string | http | Alleen invullen | -| | ip adress | string | supervisor | als addon op | -| | ip port | integer | blanco | andere machine | -| | token | string | blanco | draait | -| **database ha** | engine | string | mysql | keuze uit: mysql / sqlite / postgresql | -| | server | string | core-mariadb | default als addo met mysql als engine | -| | database | string | homeassistant | | -| | username | string | homeassistant | | -| | password | string | | | -| **database da** | engine | string | mysql | keuze uit: mysql / sqlite / postgresql | -| | server | string | core-mariadb | default als addon met mysql als engine | -| | database | string | day_ahead | | -| | username | string | day_ahead | | -| | password | string | | | -| **meteoserver-key** | | string | | | -| **prices** | source day ahead | string | nordpool | keuze uit: nordpool / entsoe / easyenergy / tibber | -| | entsoe-api-key | string | | alleen bij entsoe als source | -| | regular high | getal | | | -| | regular low | getal | | | -| | switch to low | integer | 23 | | -| | energy taxes delivery | list | | {datum : getal} | -| | energy taxes redelivery | list | | {datum : getal} | -| | cost supplier delivery | list | | {datum : getal} | -| | cost supplier redelivery | list | | {datum : getal} | -| | vat | list | | {datum : getal} | -| | last invoice | datum | | begindatum contract | -| | tax refund | boolean | | | -| **log level** | | string | "info" | keuze uit "debug", "info", "warning" of "error" | -| **use_calc_baseload** | | boolean | "False" | | +| Key | Subkey | Type | Default | Opmerkingen | +|--------------------------|------------------------------|------------------|------------------------------------|----------------------------------------------------| +| **homeassistant** | protocol api | string | http | Alleen invullen | +| | ip adress | string | supervisor | als addon op | +| | ip port | integer | blanco | andere machine | +| | token | string | blanco | draait | +| **database ha** | engine | string | mysql | keuze uit: mysql / sqlite / postgresql | +| | server | string | core-mariadb | default als addo met mysql als engine | +| | database | string | homeassistant | | +| | username | string | homeassistant | | +| | password | string | | | +| **database da** | engine | string | mysql | keuze uit: mysql / sqlite / postgresql | +| | server | string | core-mariadb | default als addon met mysql als engine | +| | database | string | day_ahead | | +| | username | string | day_ahead | | +| | 
password | string | | | +| **meteoserver-key** | | string | | | +| **prices** | source day ahead | string | nordpool | keuze uit: nordpool / entsoe / easyenergy / tibber | +| | entsoe-api-key | string | | alleen bij entsoe als source | +| | regular high | getal | | | +| | regular low | getal | | | +| | switch to low | integer | 23 | | +| | energy taxes delivery | list | | {datum : getal} | +| | energy taxes redelivery | list | | {datum : getal} | +| | cost supplier delivery | list | | {datum : getal} | +| | cost supplier redelivery | list | | {datum : getal} | +| | vat | list | | {datum : getal} | +| | last invoice | datum | | begindatum contract | +| | tax refund | boolean | | | +| **log level** | | string | "info" | keuze uit "debug", "info", "warning" of "error" | +| **use_calc_baseload** | | boolean | "False" | | | **baseload calc periode** | | getal | 56 | alleen als "use_calc_baseload" = True | -| **baseload** | | list 24 getallen | | alleen als "use_calc_baseload" = False | -| **graphical backend** | | string | "" | | -| **graphics** | style | string | "default" | kies uit lijst | -| | battery balance | boolean | "True" | | -| | prices delivery | boolean | "True" | | -| | prices redelivery | boolean | "True" | | -| | average delivery | boolean | "True" | | -| | show | boolean | "False" | | -| **strategy** | | string | "minimize cost" | "minimize cost" of "minimize consumption" | -| **notifications** | notification entity | string | "" | | -| | opstarten | boolean | "False" | -| | berekening | boolean | "False" | | -| | last activity entity | string | "" | | -| **grid** | max_power | getal | 17 | | -| **history** | save days | getal | 7 | | -| **dashboard** | port | getal | 5000 | | -| **boiler** | boiler present | boolean | "False" | | -| | entity actual temp. | string | | | -| | entity setpoint | string | | | -| | entity hysterese | string | | | -| | cop | getal | | kWh/kWh | -| | cooling rate | getal | | K/h | -| | volume | getal | | liter | -| | heating allowed below | getal | | °C | -| | elec. power | getal | | W | -| | activate service | string | | | -| | activate entity | string | | | -| **heating** | heater present | boolean | "False" | | -| | degree days factor | getal | | kWh/K.day | -| | stages | list | | {max_power, cop} | -| | ______max_power | getal | | W | -| | ______cop | getal | | kWh/kWh | -| | entity adjust heating curve | string | | kWh/kWh | -| | adjustment factor | getal | | K | -| **battery** | | list | | 0, 1 of meer {..} | -| | name | string | | | -| | name | string | | | -| | entity actual level | string | | | -| | upper limit | getal | | % | -| | lower limit | getal | | % | -| | optimal lower level | getal | | % | -| | entity actual level | string | | | -| | entity min soc end opt | string | 0 | | -| | entity max soc end opt | string | 100 | | -| | charge stages | list | | {power, efficiency} | -| | ______power | getal | | W | -| | ______efficiency | getal | | W/W (factor 0..1) | -| | discharge stages | list | | {power, efficiency} | -| | ______power | getal | | W | -| | ______efficiency | getal | | W/W (factor 0..1) | -| | reduced hours | uur-waarde paren | {} | W | -| | minimum power | getal | | W | -| | dc_to_bat efficiency | getal | | 0 .. 1.0 | -| | dc_to_bat max power | getal | 2 x max power charge | W | -| | bat_to_dc efficiency | getal | | 0 .. 
1.0 | -| | bat_to_dc max power | getal | 2 x max power discharge | W | -| | cycle cost | getal | | euro | -| | entity set power feedin | string | | input_number | -| | entity stop inverter | string | | input_datetime | -| | entity balance switch | string | | input_boolean | -| | solar | list | | 0, 1 of meer {..} pv_dc, zie solar (pv_ac) | -| **solar** | | list | | 0, 1 of meer {..} pv_ac | -| | name | string | | | -| | tilt | getal | | helling 0 ..90 | -| | orientation | getal | | -180(N) ..-90(W)..0(Z) ..90(W)..180(N) | -| | capacity | getal | | kWp | -| | yield | getal | | Wh/J/cm2 | -| | entity pv switch | string | "" | input_boolean | -| **electric vehicle** | | list | | 0, 1 of meer {..} electric vehicle | -| | name | string | | | -| | capacity | getal | | kWh | -| | entity position | string | | tracker | -| | entity actual level | string | | | -| | entity plugged in | string | | binary_sensor | -| | charging stages | list | | 2 of meer {..} | -| | ______ampere | getal | | A | -| | ______efficiency | getal | 1 | factor 0..1 | -| | charge three phase | boolean | true | true of false | -| | charge scheduler | | | | -| | _____entity set level | string | | input_number | -| | _____level margin | getal | 0 | | -| | _____entity ready datetime | string | | input_datetime | -| | entity set charging ampere | string | | input_number | -| | charge switch | string | | input_boolean | -| | entity stop laden | string | "" | input_datetime | -| **machines** | | list | | 0, 1 of meer {..} pv_ac | -| | name | string | | | -| | programs | list | | 1 of meer {..} progrma | -| | _____name | string | | | -| | _____power | list of numbers | | 0, 1 of meer numbers | -| | entity start window | string | | input_datetime, tijd | -| | entity end window | string | | input_datetime, tijd | -| | entity selected program | string | | input_select | -| | entity calculated start | string | "" | input_datetime, datum en tijd | -| | entity calculated end | string | "" | input_datetime, datum en tijd | -| **tibber** | api url | string, url | https://api.tibber.com/v1-beta/gql | desgewenst | -| | api_token | string | | | -| **report** | entities grid consumption | list of string | [] | | -| | entities grid production | list of string | [] | | -| | entities solar production ac | list of string | [] | | -| | entities solar production dc | list of string | [] | | -| | entities ev consumption | list of string | [] | | -| | entities wp consumption | list of string | [] | | -| | entities boiler consumption | list of string | [] | | -| | entities battery consumption | list of string | [] | | -| | entities battery production | list of string | [] | | -| **scheduler** | active | boolean | True | -| | | list | {time, task} | | +| **baseload** | | list 24 getallen | | alleen als "use_calc_baseload" = False | +| **graphical backend** | | string | "" | | +| **graphics** | style | string | "default" | kies uit lijst | +| | battery balance | boolean | "True" | | +| | prices delivery | boolean | "True" | | +| | prices redelivery | boolean | "True" | | +| | average delivery | boolean | "True" | | +| | show | boolean | "False" | | +| **strategy** | | string | "minimize cost" | "minimize cost" of "minimize consumption" | +| **notifications** | notification entity | string | "" | | +| | opstarten | boolean | "False" | +| | berekening | boolean | "False" | | +| | last activity entity | string | "" | | +| **grid** | max_power | getal | 17 | | +| **history** | save days | getal | 7 | | +| **dashboard** | port | getal | 5000 | | +| **boiler** | 
boiler present | boolean | "False" | | +| | entity boiler enabled | string | | bij afwezigheid wordt boiler ingepland | +| | entity actual temp. | string | | | +| | entity setpoint | string | | | +| | entity hysterese | string | | | +| | cop | getal | | kWh/kWh | +| | cooling rate | getal | | K/h | +| | volume | getal | | liter | +| | heating allowed below | getal | | °C | +| | elec. power | getal | | W | +| | boiler heated by heatpump | boolean | "True" | W | +| | activate service | string | | | +| | activate entity | string | | | +| **heating** | heater present | boolean | "False" | | +| | entity hp enabled | string | | bij afwezigheid wordt heatpump ingepland | +| | degree days factor | getal of string | | kWh/K.day of HA 'sensor' entity | +| | adjustment | string | "power" | type besturing warmtepomp | +| | entity hp heat demand | string | | | +| | entity hp heat produced | string | | | +| | entity hp switch | string | | | +| | entity avg outside temp | string | | | +| | entity hp cop | string | | als geen entity is ingevuld, wordt cop=4 kWh/kWh | +| | entity hp power | string | | als geen entity is ingevuld, wordt power=1,5 kW | +| | min run length | getal | 1 | | +| | stages | list | | {max_power, cop} | +| | ______max_power | getal | | W | +| | ______cop | getal | | kWh/kWh | +| | entity adjust heating curve | string | | kWh/kWh | +| | adjustment factor | getal | | K | +| **battery** | | list | | 0, 1 of meer {..} | +| | name | string | | | +| | name | string | | | +| | entity actual level | string | | | +| | upper limit | getal | | % | +| | lower limit | getal | | % | +| | entity actual level | string | | | +| | entity min soc end opt | string | 0 | | +| | entity max soc end opt | string | 100 | | +| | charge stages | list | | {power, efficiency} | +| | ______power | getal | | W | +| | ______efficiency | getal | | W/W (factor 0..1) | +| | discharge stages | list | | {power, efficiency} | +| | ______power | getal | | W | +| | ______efficiency | getal | | W/W (factor 0..1) | +| | reduced hours | uur-waarde paren | {} | W | +| | minimum power | getal | | W | +| | dc_to_bat efficiency | getal | | 0 .. 1.0 | +| | dc_to_bat max power | getal | 2 x max power charge | W | +| | bat_to_dc efficiency | getal | | 0 .. 
1.0 | +| | bat_to_dc max power | getal | 2 x max power discharge | W | +| | cycle cost | getal | | euro | +| | entity set power feedin | string | | input_number | +| | entity stop inverter | string | | input_datetime | +| | entity balance switch | string | | input_boolean | +| | solar | list | | 0, 1 of meer {..} pv_dc, zie solar (pv_ac) | +| **solar** | | list | | 0, 1 of meer {..} pv_ac | +| | name | string | | | +| | tilt | getal | | helling 0 ..90 | +| | orientation | getal | | -180(N) ..-90(W)..0(Z) ..90(W)..180(N) | +| | capacity | getal | | kWp | +| | yield | getal | | Wh/J/cm2 | +| | entity pv switch | string | "" | input_boolean | +| **electric vehicle** | | list | | 0, 1 of meer {..} electric vehicle | +| | name | string | | | +| | capacity | getal | | kWh | +| | entity position | string | | tracker | +| | entity actual level | string | | | +| | entity plugged in | string | | binary_sensor | +| | charging stages | list | | 2 of meer {..} | +| | ______ampere | getal | | A | +| | ______efficiency | getal | 1 | factor 0..1 | +| | charge three phase | boolean | true | true of false | +| | charge scheduler | | | | +| | _____entity set level | string | | input_number | +| | _____level margin | getal | 0 | | +| | _____entity ready datetime | string | | input_datetime | +| | entity set charging ampere | string | | input_number | +| | charge switch | string | | input_boolean | +| | entity stop laden | string | "" | input_datetime | +| **machines** | | list | | 0, 1 of meer {..} pv_ac | +| | name | string | | | +| | programs | list | | 1 of meer {..} progrma | +| | _____name | string | | | +| | _____power | list of numbers | | 0, 1 of meer numbers | +| | entity start window | string | | input_datetime, tijd | +| | entity end window | string | | input_datetime, tijd | +| | entity selected program | string | | input_select | +| | entity calculated start | string | "" | input_datetime, datum en tijd | +| | entity calculated end | string | "" | input_datetime, datum en tijd | +| **tibber** | api url | string, url | https://api.tibber.com/v1-beta/gql | desgewenst | +| | api_token | string | | | +| **report** | entities grid consumption | list of string | [] | | +| | entities grid production | list of string | [] | | +| | entities solar production ac | list of string | [] | | +| | entities solar production dc | list of string | [] | | +| | entities ev consumption | list of string | [] | | +| | entities wp consumption | list of string | [] | | +| | entities boiler consumption | list of string | [] | | +| | entities battery consumption | list of string | [] | | +| | entities battery production | list of string | [] | | +| **scheduler** | active | boolean | True | +| | | list | {time, task} | | + ### **homeassistant**
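The four `homeassistant` keys above only need to be filled in when the add-on runs on a different machine than Home Assistant itself. As a rough illustration of how they fit together, here is a minimal Python sketch of the URL and header construction (it mirrors what the refactored `da_base.py` further down in this patch does, but it is not the add-on's literal code, and the option values are made-up examples):

```python
import os

# Made-up example of the "homeassistant" options described in the table above.
options = {
    "homeassistant": {
        "protocol api": "http",
        "ip adress": "supervisor",  # key spelled as in the add-on's config
        "ip port": None,            # only set when HA runs on another machine
        "token": None,              # only set when HA runs on another machine
    }
}

ha = options["homeassistant"]
protocol = ha.get("protocol api", "http")
address = ha.get("ip adress", "supervisor")
port = ha.get("ip port")

# Without an explicit port the add-on goes through the supervisor proxy
# ("/core/"); with an ip/port it addresses the HA REST API directly.
if port is None:
    hassurl = f"{protocol}://{address}/core/"
else:
    hassurl = f"{protocol}://{address}:{port}/"

token = ha.get("token") or os.environ.get("SUPERVISOR_TOKEN", "")
headers = {"Authorization": "Bearer " + token}
print(hassurl)
```

With the defaults (add-on and Home Assistant on the same machine) the supervisor provides both the proxy URL and the token, so all four keys can stay empty.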
@@ -855,21 +866,31 @@ De meteodata worden opgehaald bij meteoserver. Ook hiervoor heb je een key nodig excl. btw, kaal, euro per kWh * regular low: idem het "lage" tarief, excl. btw, kaal , euro per kWh * switch to low: tijdstop waarop je omschakelt naar "laag tarief", default 23 - * energy taxes delivery: energiebelasting op verbruik excl. btw, euro per kWh - 2022-01-01 : 0.06729, - 2023-01-01 : 0.12599 - * energy taxes redelivery: energiebelasting op teruglevering excl. btw, euro per kWh - 2022-01-01: 0.06729, - 2023-01-01: 0.12599 - * cost supplier delivery : opslag leverancier euro per kWh, excl. btw - bijv voor Tibber: - * 2022-01-01: 0.002 - * 2023-03-01: 0.018 - * cost supplier redelivery: opslag leverancier voor teruglevering per kWh, ex btw + * energy taxes delivery: energiebelasting op verbruik excl. btw, euro per kWh:
+ * 2022-01-01 : 0.06729, + * 2023-01-01 : 0.12599, + * 2024-01-01 : 0.10880, + * 2025-01-01 : 0.10154 + * energy taxes redelivery: energiebelasting op teruglevering excl. btw, euro per kWh + * 2022-01-01 : 0.06729, + * 2023-01-01 : 0.12599, + * 2024-01-01 : 0.10880, + * 2025-01-01 : 0.10154 + * cost supplier delivery: opslag leverancier euro per kWh, excl. btw bijv voor Tibber: - * 2022-01-01: 0.002 - * 2023-03-01: 0.018 - * 2023-09-01: 0.009 + * 2022-01-01: 0.002 + * 2023-03-01: 0.018 + * 2023-09-01: 0.009 + * 2024-04-01: 0.0175, + * 2024-08-01: 0.020496 + * cost supplier redelivery: opslag leverancier voor teruglevering per kWh, ex btw + bijv voor Tibber: + * 2022-01-01: 0.002 + * 2023-03-01: 0.018 + * 2023-09-01: 0.009 + * 2024-04-01: 0.0175, + * 2024-08-01: 0.020496 + * vat: btw in % * 2022-01-01: 21 * 2022-07-01: 9 @@ -1055,30 +1076,56 @@ Zodra de timer voltooid is er wat loos. Als je aan deze functionaliteit geen beh ### **boiler**
Instellingen voor optimalisering van het elektraverbruik van je warmwater boiler
- * boiler present: True of False (default False). Als je False invult worden onderstaande boiler-instellingen genegeerd.
- * entity actual temp. : entiteit in ha die de actuele boilertemp. presenteert
- * entity setpoint: entiteit die de ingestelde boilertemp. presenteert
- * entity hysterese: entiteit die de gehanteerde hysterese voor de boiler presenteert
- * cop: cop van de boiler bijv. 3: met 1 kWh elektriciteit wordt 3 kWh warm water gemaakt (een elektrische boiler heeft een cop = 1)
- * cooling rate: gemiddelde afkoelsnelheid van de boiler in K/uur
- * volume: inhoud van de boiler in liter
- * heating allowed below: temperatuurgrens in °C waaronder de boiler mag worden opgewarmd
- * elec. power: elektrisch vermogen van de boiler in W
- * activate service: naam van de service van deze entiteit
- * activate entity: entiteit (meestal van een inputhelper) waarmee de boiler opwarmen wordt gestart
+ * `boiler present`: True of False (default False). Als je False invult worden onderstaande boiler-instellingen genegeerd.
+ * `entity boiler enabled`: entiteit in HA die aangeeft of je boiler is ingeschakeld. Tijdens vakantie kun je hiermee richting
+DAO aangeven dat de boiler niet hoeft te worden ingepland.
+ * `entity actual temp.` : entiteit in HA die de actuele boilertemp. presenteert
+ * `entity setpoint`: entiteit die de ingestelde boilertemp. presenteert
+ * `entity hysterese`: entiteit die de gehanteerde hysterese voor de boiler presenteert
+ * `cop`: de cop van de boiler, bijv. 3: met 1 kWh elektriciteit wordt 3 kWh warm water gemaakt (een elektrische boiler heeft een cop = 1)
+ * `cooling rate`: gemiddelde afkoelsnelheid van de boiler in K/uur
+ * `volume`: inhoud van de boiler in liter
+ * `heating allowed below`: temperatuurgrens in °C waaronder de boiler mag worden opgewarmd
+ * `elec. power`: elektrisch vermogen van de boiler in W
+ * `activate entity`: entiteit (meestal van een inputhelper) waarmee het opwarmen van de boiler wordt gestart
+ * `activate service`: naam van de service van deze entiteit. Voor een input_button is dat "press", voor een input_boolean is dat "turn_on".
+ * `boiler heated by heatpump`: True of False (default True). Als de boiler wordt opgewarmd door de warmtepomp, wordt bij de inzet van de warmtepomp voor verwarming
+rekening gehouden met het gebruik van de wp door de boiler en vice versa. De wp zal dan nooit tegelijk in bedrijf zijn voor de boiler en voor de verwarming.
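To make the relation between these boiler settings more tangible, here is a rough back-of-the-envelope sketch in Python. It uses only the specific heat of water plus made-up example values for the parameters listed above; it is an illustration of why `volume`, `cooling rate`, the hysteresis band, `cop` and `elec. power` are all needed, not the optimizer's actual model:

```python
# Rough illustration only: one boiler reheat cycle, estimated from the kind of
# values the settings above describe (all numbers are made-up examples).

SPECIFIC_HEAT_WATER = 1.163  # Wh per litre per Kelvin (physics, not a DAO setting)

volume = 200        # litre            ("volume")
cop = 3.0           # kWh th / kWh el  ("cop")
elec_power = 2000   # W                ("elec. power")
cooling_rate = 0.4  # K per hour       ("cooling rate")
hysteresis = 8.0    # K                (band reported by "entity hysterese")

# Time until the water has cooled through the hysteresis band and the boiler
# would normally switch on again; this is the window DAO can shift the reheat in.
hours_between_cycles = hysteresis / cooling_rate

# Thermal and electrical energy needed to get back up to the setpoint.
thermal_kwh = volume * SPECIFIC_HEAT_WATER * hysteresis / 1000
electric_kwh = thermal_kwh / cop

# How long the boiler (or the heat pump feeding it, see "boiler heated by
# heatpump") is occupied during that reheat.
run_hours = electric_kwh * 1000 / elec_power

print(f"every ~{hours_between_cycles:.0f} h: {electric_kwh:.2f} kWh in {run_hours:.1f} h")
```

The last number also shows the point of `boiler heated by heatpump`: while the heat pump is making hot water it cannot simultaneously be scheduled for space heating.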
### **heating**
Dit onderdeel is nog in ontwikkeling.
- * `heater present` : True of False (default False). Als je False invult worden onderstaande heater-instellingen genegeerd.
- * `degree days factor`: kWh/K.dag hoeveel thermische kWh is er nodig per graaddag
- zet deze op 0 als je geen wp hebt - * `stages` : een lijst met vermogens schijven van de wp: hoe hoger het vermogen hoe lager de cop + * `heater present` : True of False (default False). Als je False invult worden onderstaande heater-instellingen genegeerd en wordt een warmtepomp niet ingepland. + + * `entity hp enabled`: HA 'input_boolean' entiteit die aangeeft of je warmtepomp moet worden ingepland. Als False zal de warmtepomp nooit worden ingepland. Tijdens vakantie kun je hiermee richting +DAO aangeven dat de warmtepomp niet hoeft te worden ingepland. + * `degree days factor`: kWh/K.dag hoeveel thermische kWh is er nodig per graaddag (met 16°C als referentietemperatuur).
+ Zet deze op 0 als je geen warmtepomp hebt zodat er ook geen warmtevraag is.
+Behalve een getal kun je hier ook een HA entiteit (bijv. een input_number)
+opgeven, zodat je in HA deze factor kunt berekenen op basis van wind- en/of zonprognoses.
+ * `entity hp heat produced` : HA 'sensor' entiteit die aangeeft hoeveel kWh thermische energie vandaag al is geleverd door de warmtepomp.
+ * `entity hp heat demand` : HA 'binary_sensor' entiteit die aangeeft of de thermostaat van de on/off warmtepomp is in- of uitgeschakeld. Als "on" zal de warmtepomp worden ingepland.
+ * `adjustment`. Je hebt de keuze uit drie soorten regelingen:
+   * "on/off" : voor een aan/uit warmtepomp waarvan de stand van de aan/uit-thermostaat door HA entiteit `entity hp heat demand` wordt gegeven. HA entiteit `entity hp switch` moet dan worden gebruikt
+     om de warmtepomp aan/uit te schakelen middels een automation in HA. DAO rekent de optimale inzet van de warmtepomp uit. De COP en het vermogen waarop de warmtepomp draait hangen af van de voorspelde buitentemperatuur en dienen door
+     HA te worden berekend en aan DAO te worden doorgegeven middels de `entity hp cop` en `entity hp power` entiteiten (zie hieronder en het voorbeeld na deze lijst).
+   * "power" : voor een warmtepomp waarvan het vermogen direct kan worden gestuurd. DAO zet het optimaal berekende vermogen in kW in de HA entiteit `entity hp power` (zie hieronder).
+   * "heating curve" : voor een warmtepomp die via de stooklijn wordt geregeld. DAO verschuift de stooklijn middels `entity adjust heating curve` en `adjustment factor` (zie hieronder).
+ * `stages` : een lijst met vermogensschijven van de wp: hoe hoger het vermogen hoe lager de cop (niet voor on/off warmtepomp)
+   * `max_power`: het maximum elektrische vermogen van de betreffende schijf in W
   * `cop`: de cop van de wp behorende bij deze schijf. Dus een cop van 7 met een vermogen van 225 W betekent een thermisch vermogen van 7 x 225 = 1575 W
- * `entity adjust heating curve`: entiteit waarmee de stooklijn kan worden verschoven
+ * `entity adjust heating curve`: entiteit waarmee de stooklijn kan worden verschoven (niet voor on/off warmtepomp).
 * `adjustment factor`: float K/10% Het aantal graden voor de verschuiving van de stooklijn als de actuele
- da prijs 10% afwijkt van het daggemiddelde
+ da prijs 10% afwijkt van het daggemiddelde (niet voor on/off warmtepomp).
+ * `min run length` : minimaal aantal uren [1-5] dat de warmtepomp achter elkaar moet draaien (alleen voor on/off warmtepomp en om te voorkomen dat de warmtepomp te veel schakelt).
+ * `entity avg outside temp` : HA 'input_number' entiteit die de door DAO voorspelde buitentemperatuur in °C bevat (hiermee kun je m.b.v. een automation in HA de COP en het vermogen van een on/off warmtepomp berekenen).
+ * `entity hp cop` : HA 'sensor' entiteit die aangeeft wat de COP van de warmtepomp is bij een gegeven buitentemperatuur (alleen voor on/off warmtepomp). Bijvoorbeeld voor een Daikin Altherma 8kW:
+ ![cop.png](images/COP.png) + * `entity hp switch` : HA 'input_boolean' entiteit die de warmtepomp middels een automation in HA in- of uitschakelt. + * `entity hp power` : Bij "on/off" warmtepomp: HA 'sensor' entiteit die aangeeft op welk vermogen in kW de warmtepomp zal draaien bij een gegeven buitentemperatuur. Bij "power" warmtepomp: HA 'input_number' entiteit waarin DAO + het optimaal berekende vermogen in kW zet. Bijvoorbeeld voor een Daikin Altherma 8kW:
+ ![power.png](images/power.png)
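For the "on/off" case the text above leaves the COP and power calculation to Home Assistant. The Python sketch below shows one way such an HA-side helper could look and, as a worked example, what the `degree days factor` means; the two curves are invented round numbers (not the Daikin Altherma data from the figures) and the 16 °C reference comes from the `degree days factor` description above:

```python
# Illustrative sketch only: deriving the values an on/off heat pump setup would
# publish to "entity hp cop" and "entity hp power". Curve points are invented.

def interpolate(points: list[tuple[float, float]], t_out: float) -> float:
    """Linear interpolation over (outside_temp, value) pairs, clamped at the ends."""
    points = sorted(points)
    if t_out <= points[0][0]:
        return points[0][1]
    if t_out >= points[-1][0]:
        return points[-1][1]
    for (t0, v0), (t1, v1) in zip(points, points[1:]):
        if t0 <= t_out <= t1:
            return v0 + (v1 - v0) * (t_out - t0) / (t1 - t0)
    return points[-1][1]

COP_CURVE = [(-10, 2.2), (0, 3.0), (7, 4.0), (15, 5.0)]    # COP vs outside temp (°C)
POWER_CURVE = [(-10, 2.4), (0, 2.0), (7, 1.6), (15, 1.2)]  # electrical kW vs outside temp

def hp_values(avg_outside_temp: float, degree_days_factor: float) -> dict:
    cop = interpolate(COP_CURVE, avg_outside_temp)         # -> "entity hp cop"
    power_kw = interpolate(POWER_CURVE, avg_outside_temp)  # -> "entity hp power"
    # Worked example of "degree days factor": thermal demand for one day,
    # with 16 °C as the reference temperature mentioned above.
    degree_days = max(0.0, 16.0 - avg_outside_temp)
    heat_demand_kwh = degree_days_factor * degree_days
    return {"cop": cop, "power_kw": power_kw, "heat_demand_kwh": heat_demand_kwh}

# E.g. at 5 °C average outside temperature with the example factor of 3.6 kWh/K.day:
print(hp_values(avg_outside_temp=5.0, degree_days_factor=3.6))
```

DAO reads these values via the configured entities, plans the cheapest hours, and an HA automation then switches the heat pump through `entity hp switch` during the hours the optimizer selects.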
### **battery**
De gegevens en de instellingen van geen, een of meer batterijen
@@ -1088,7 +1135,6 @@ Dit onderdeel is nog in ontwikkeling.
 * capacity: capaciteit van de batterij in kWh
 * lower limit: onderste SoC limiet (tijdelijk)
 * upper limit: bovenste SoC limiet
- * optimal lower level: onderste SoC limiet voor langere tijd
 * entity min soc end opt: (default 0) entity in home assistant (input_number), waarmee je de
 minimale SoC in procenten kunt opgeven die de batterij aan het einde van de berekening moet hebben
 * entity max soc end opt: (default 100) entity in home assistant (input_number), waarmee je de
@@ -1440,14 +1486,45 @@ Je kunt kiezen uit:
- **dit_contractjaar** met interval maand
 Het laatste stuk **?\<parameter\>=\<waarde\>** is facultatief.
-Voorlopig is er een parameter die je kunt invullen:
-- **?cumulate=1**
+Er zijn twee parameters die je kunt invullen. De eerste opgegeven parameter begint altijd met een
+vraagteken. Een tweede parameter volgt dan met een "&".
+- **cumulate=1**
Als je cumulate opgeeft en je zet deze op "1" dan worden alle resultaten cumulatief berekend. -Bijvoorbeeld: ```/api/report/profit/vorige_week?cumulate=1``` geeft als resultaat:
-```{ "message":"Success", "recorded": [{"time":"2023-10-02 00:00","value":10.9429554939},{"time":"2023-10-03 00:00","value":19.7526173011},{"time":"2023-10-04 00:00","value":24.1756554841},{"time":"2023-10-05 00:00","value":31.4851145427},{"time":"2023-10-06 00:00","value":37.0579458385},{"time":"2023-10-07 00:00","value":38.6841635039},{"time":"2023-10-08 00:00","value":40.9582582529}], "expected" : [] }``` +Bijvoorbeeld: ```/api/report/profit/vorige_week?cumulate=1``` geeft als resultaat:
+```` +{"message":"Success", "recorded": [{"time":"2023-10-02 00:00","value":10.9429554939}, +{"time":"2023-10-03 00:00","value":19.7526173011},{"time":"2023-10-04 00:00","value":24.1756554841}, +{"time":"2023-10-05 00:00","value":31.4851145427},{"time":"2023-10-06 00:00","value":37.0579458385}, +{"time":"2023-10-07 00:00","value":38.6841635039},{"time":"2023-10-08 00:00","value":40.9582582529}], +"expected" : [] } +```` +- **expected=1**
+Als je deze parameter opgeeft en je zet de waarde op "1", dan worden ook de door DAO berekende +prognoses mee teruggegeven in het resultaat. +Bijvoorbeeld: ```/api/report/profit/vandaag?expected=1``` geeft als resultaat:
+```` +{ "message":"Success", "recorded": [{"time":"2024-12-08 00:00","value":0.0}, +{"time":"2024-12-08 01:00","value":0.0},{"time":"2024-12-08 02:00","value":0.0}, +{"time":"2024-12-08 03:00","value":0.0},{"time":"2024-12-08 04:00","value":0.0}, +{"time":"2024-12-08 05:00","value":0.0},{"time":"2024-12-08 06:00","value":0.0}, +{"time":"2024-12-08 07:00","value":0.0},{"time":"2024-12-08 08:00","value":0.0}, +{"time":"2024-12-08 09:00","value":0.0},{"time":"2024-12-08 10:00","value":0.0}, +{"time":"2024-12-08 11:00","value":0.201345665},{"time":"2024-12-08 12:00","value":0.64805301}, +{"time":"2024-12-08 13:00","value":0.0076923475},{"time":"2024-12-08 14:00","value":0.1709918615}, +{"time":"2024-12-08 15:00","value":0.427633268},{"time":"2024-12-08 16:00","value":0.0019065631}, +{"time":"2024-12-08 17:00","value":0.0},{"time":"2024-12-08 18:00","value":0.0}, +{"time":"2024-12-08 19:00","value":0.0},{"time":"2024-12-08 20:00","value":0.0},{ +"time":"2024-12-08 21:00","value":0.0},{"time":"2024-12-08 22:00","value":0.0}], +"expected" : [{"time":"2024-12-08 23:00","value":0.0}] } +```` +**Let op** +Het gebruik van deze parameter leidt soms tot onverwachte resultaten als de api bij perioden wordt gebruikt +met een interval van een dag (deze week, deze maand) of een maand (dit jaar). Wanneer op een dag of een maand +een deel van de variabele zowel "recorded" als "expected" is wordt de hele waarde weergegeven onder "expected". ## Gebruik van deze api voor presentatie in Home Assistant + ### Aanmaken van sensoren Je maakt gebruik van de restful integratie van Home Assistant (https://www.home-assistant.io/integrations/rest/). Daarvoor maak je in ```configuration.yaml``` de gewenste sensoren aan. diff --git a/dao/Dockerfile b/dao/Dockerfile index ea28d84..025a280 100644 --- a/dao/Dockerfile +++ b/dao/Dockerfile @@ -34,7 +34,9 @@ COPY miplib.tar.gz /tmp/ WORKDIR /root/dao/prog RUN echo 'platform: "$ENV_BUILD_ARCH"' \ && export BUILD_ARCH="$ENV_BUILD_ARCH" \ - && echo 'export BUILD_ARCH="$ENV_BUILD_ARCH"' >> ~/.bashrc + && echo 'export BUILD_ARCH="$ENV_BUILD_ARCH"' >> ~/.bashrc \ + && export PYTHONPATH="/root:/root/dao:/root/dao/prog" \ + && echo 'export PYTHONPATH="/root:/root/dao:/root/dao/prog"' >> ~/.bashrc RUN if [ "${BUILD_ARCH}" = "aarch64" ]; then \ tar -xvf /tmp/miplib.tar.gz -C /root/dao/prog && \ diff --git a/dao/config.yaml b/dao/config.yaml index 15d8cef..2a5aa62 100644 --- a/dao/config.yaml +++ b/dao/config.yaml @@ -1,6 +1,6 @@ --- name: 刀 Day Ahead Optimizer -version: "2024.11.1" +version: 2024.12.0 slug: day_ahead_opt description: Docker used by Home Assistant Community Add-ons for day ahead optimizations url: https://github.com/corneel27/day-ahead/ diff --git a/dao/data/options_example.json b/dao/data/options_example.json index 4b2a940..b934ad3 100644 --- a/dao/data/options_example.json +++ b/dao/data/options_example.json @@ -19,12 +19,14 @@ "energy taxes delivery": { "2022-01-01": 0.06729, "2023-01-01": 0.12599, - "2024-01-01": 0.10880 + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 }, "energy taxes redelivery": { "2022-01-01": 0.06729, "2023-01-01": 0.12599, - "2024-01-01": 0.10880 + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 }, "cost supplier delivery": { "2022-01-01": 0.002, @@ -43,7 +45,7 @@ "2022-07-01": 9, "2023-01-01": 21 }, - "last invoice": "2022-09-01", + "last invoice": "2024-09-01", "tax refund": "True" }, "logging level" : "info", @@ -115,7 +117,10 @@ }, "heating": { "heater present": "True", + "entity hp enabled": "binary_sensor.heatpump_heating", "degree days 
factor": 3.6, + "adjustment" : "power", + "//adjustment" : "on/off | power | heating curve", "stages": [ { "max_power": 225, diff --git a/dao/data/options_start.json b/dao/data/options_start.json index 9c0e12b..772bc0d 100644 --- a/dao/data/options_start.json +++ b/dao/data/options_start.json @@ -18,12 +18,14 @@ "energy taxes delivery": { "2022-01-01": 0.06729, "2023-01-01": 0.12599, - "2024-01-01": 0.10880 + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 }, "energy taxes redelivery": { "2022-01-01": 0.06729, "2023-01-01": 0.12599, - "2024-01-01": 0.10880 + "2024-01-01": 0.10880, + "2025-01-01": 0.10154 }, "cost supplier delivery": { "2022-01-01": 0.002, diff --git a/dao/images/COP.png b/dao/images/COP.png new file mode 100644 index 0000000000000000000000000000000000000000..33607fd4d1514c2bd1aa16b97d4f84031a3baab1 GIT binary patch literal 9396 zcmb7q1yodFxAveQC;}paN{0f{2r|-LQlio+jYxM3N|$tZ=cvRuq;w3OQiIembmuV4 zJ@|e1yZ>7McmMm}wPw~?=bU}^Tl?AXv-f_(UaQIz-J!e#0)dDW6=dFkKsaZ>Uy$Gi z@Z|8SP!PD_IKPpX0#yxCuLC!@mXa!xAkg<1!prw~!2N9p1s!J)h`9awkJD{mVg>>| zQB;(XeCJ`fJx}8DP9tz_|He5JQ78{TkNNWng7hwNB3rVgXdS(VZSjUWvWLq$5!7xc z{Xe!sNiBtFw61=dT0~3PboW<084%1z367?74%&s9jf>9HMn+MMt&j5$Gk#4m4tYXy zZc4wH{e*WKjc7P#IcCu)EhsabKgCcB`!0ANm)U8swbakm?;;f2+uMmjpX!<6SbPv` zcn&_Olk^Cb^D>YY^gw?2=BFhBFVJghNfJ;v6ADMd5myw%8htGk_5RM6=;-LpDF~bh z^y!6iw%GitQ*V?)*S(|4%1S{&L8qG_V20n%tU!o<PTa^Btj(rd8nCP?{V zgC9oIm%8orvcASDBtThWSegh?bW8pyW2%NLq&<OM4arck}yO<&v)-gB9vIoI<|Ct*&!M2bsi8YJ1k8zB5WL) zb)p`52K}V;rGbQjg#6yo>voh;8YRtsEdJ1=Otedzv^B2i_#Y0|es>p-5BYb$^c&E4 zrW9yuNwbI~$PY`Cccq*Lhv)r~+((q;uA3OwN{xlG+A$^CCM6z>ssAI zv#5S)z8P_KK3M5(ZIy(*CyHl|S_~`h>Ug&tc{JD_v0$4djXVE=|M>X$hZ#|0?2=^SQm%HQNtV)7ly1g!RWNgYx7?wCo0%Y! z?_t@*1pm(;=c*Lnhyo2c2Ln}hs_rz=HIK(@m_#=1Yd-cp(4D8q_eY4d{h|6LN8mN_ zlF;p4dOt%_+T>~<-1XM>DGiP>8E(^3V{MdYx>fVdI=NwXE@+A&<*%Lp29^Jd`TvXW z*>LD9E|wB*E`ndo1YYRjjkO|pwBMn3d@Z0CNHw90a;Gh{$w$hthsJ(=~ zs&qEU9HE47d~isJId9VBHNk&JCGPC3v`YifKo4Lhzzg9A;-Bf$pFu5`Q z;&?7vdfEGg+6-m$FtsXT?s@fRysy}V;TSs}A6 zLdvBF*1yNBq;oT1c%687ok2R8jxljU*j6r2f5-7*?C)lLlC3>0OR3-wvMuohF)41M z$rst4$7iPGA=xv>j%(94PWE!~0dyf>#2wB=V&w0XJ)u0XQE4v1psYqhMvWv7Yss); zqk7W;Y(X?}LC73`QP&70O#SZM%h%zm859fL$|wXDYy*v{2v+kT>ZJ7~!+uBh#j@bd ztNf0NddoJK~_m|A6=}pP_S-zN`I)nPSq90$aXy&lP8QPT>4o>YKEnVP- zrf(IqZgcR9-)IY#TBS%f^K_fEM(C<(Dmqq`D61srSJ#!X@h7tcRvW)H%-=^{-4Ud2 zzBtSNnYANkkVf%F+LJzMn1z;Oi*#%g)hp)Tm;M0N#DIFQ)m%WAdMJtA;Z@!$G|!8L zun$%3ck4Dc23J05$(36u1Vcp&$qnQ-_1;-@N1GVr1U~0i5M>ULbAcE1&edxQRtle)rLz5;el%FWZ&vFd00L1S0!Gj`z* zf#PSc4&*o8!pb#cvwp^4NyoM}b*h=wh-*-m11Ue&WA#?$x)@RKjl4#uwy1$&wOSAm zb6DDCwZFA-IrXp+Wm2VDef4% zwd)HDB21$Oxj~oW?eRSs6%-lC!>dqhr?HCCyxZc=7Z234#9R-SZ73ERM7qdXCX@3B zi|#kyqgO0>BZBb9V$eGcYIfQZ_YE2CHPmI38$_<;zWYgiHoUj3>8NRAGv8nyHQhz? zh5Al?C0C0Kf96f^NR=k~w24AmCNp2Cm2M^7=&3tJA61m>d^F>byca_WW!@s~_uv40 zu)E-m*=EY%@u8m$-zSGX5=EG@e^zKzfwxHCGF~=~JD0Z1jh&!Q?R37IMb9PbE_-@+ zi%n7cki~qF$3|NIejWF^(BrqUhxwB9Efr28Nc5^n(K`a)#2copV}R*$&DY+6ywRIl z#lM;j!Ol78+7-`vAP~Gmg`2kokhULsxX07$!i4tI^F7nMobeUcDs5|wi8wXNeerAO zcU?GO=Ezy@6;Y2GH|D$gqwANBh9kQEsJTh4XoF2UM78ber0XQ*T9ODrqsTiv zWjA}rv|?(G4I5~8zd5}Cw)=2v$tlDISL1PCuUEUp?FbHshwoW3N}ln>oI%!)=iSfp zqv}|eKl)fXI*=&$kXPDYZoGpPt2~$#|Ko)YY77D|81F!D2hu8tCiALD3}13};oex? 
zIkS-bI#sl>2EXle*{G3KeM)t@-C$E$RRs5`ae{hUe-!b4J{g}$}$8gJ%pt4Xtdv*b*td4EuJR#P?IWUPY;6>zk#g2|h(Ew$o7ZXUaK z;-2mj?7Np_2znbPMd}0{pR-6K8KW&@*u*x^mlX6MXV0M9;W=8$Igl3|RC0n}9vrH) zB5GzbUU2%Tq;yy)QH1$obz&T+E@}jjfwAwMNxN%V#yp8#wuMW=1e&e>L}WWb3`mdq z_Fb`ZJ;Cb442As*GO&hn#u+FitPYiV`jLarzZY52w&W-Ex8cTWWlePJdeg2eoM;y4 zugi#~Wz1xt>c_ej5VoQ%_F70SSP0=*dftJj{`+{0yw7NtQasR5FrTAeppT(Iwqrui z^tl*)q92R?UZmQ1_uAfisqE`=1r zyfMEDO*1k6P`4eeB1;EuJi-`dp5yW-X&xhK+xgU#VHCc~@1D^Dzva?5{m$$8TCa>t z$=WgVA z-n;5uSDE0=O97xvsbSluT8kr-@3e?u^9G|T@QD3B(K+_)vgJsB!|1!&;0Y0wuf{^; zpM>g{)CtXaf9{JNboI$cJw<6hscY>k9i?zL!%ksEKx&;ucfVm_WJ+YZo~wJX+=Jyd zg!vOc*|z&gxH!U5(atmvAYr3jKz~MlAmLNDWZ9d2mNT8WnJFbn=xr?qQl2|^5jovd z1dydr3-)vUu*u$ifgeKK=uJANRs;fsCI$^ryCU8ycJ${!d(Rjt50=E$SzWo!tyI|d zrDj8*ud1GEn^kZU6r@2{mu90ZS8lxt&4H37H_YiPeeG2ED%IkHJ!t;yHI{d|FYGSv zn+zi>l>wT|yK2qVti%F?$rG{dGt(qgBn(8T3W*dR&Cx&g;ySkXUDd|qIy25;o2?L& zu7U@k(f6Z!d*3*^MrUTy;^TYll`>eBzI`EkVg64}4Wo%FvQ}mB6;Y6xI&VDyb6@>Q z=kv=De!C;Z5r$WBPdGz%xck4@bT?AtLzTYXGzUDt1OiEZYV#-RK}?O-`UDhvc8EO= zo8#GI>ogV72>0YN0H5Ul<bF;A>n5EM&+iHh3Ztb-^F-u3`ECg z_dt8fj$ROG-_d%v`3EMsGLMtHuRLjhXgxMYVQy<+yS)5rqud_Dv5U|Cu%}5uFp&0_ zl?Zn`sfFYdm~KkjMo19X#y244b% z8I2#}_wxfzImTQlkXE83O9_KLb+=T@gw?HPNAsQ;Tm$zLunCK_6uY)Zgw!5t;ol0m z8b-j!@$S&0a8T0A+(uLnf|zWZqUcU}Tmwu1)zPCy)CSl#26Ybr#&4M;LM=CL6y7`f ztBCiflY{KD2KJ#5qK2>2??shNtv+oIg{#zqWUNB>i80^iDJuJfYk(c z%Zpvoun>8sr}K-5%p~u&>gwq^J4wl#bV>sVRj4|o+bR=rkh4QIFLJT#Gs7iS#}j;g z@#iRW{6uE;VGW1`EYCEhHy zPLbp)s#VYONHI^-p2A!p=570yes|2d6q~9QBd+OqR%$?pw#=G#f6eZV`65U+9eaX^ zFNO@JiwTk26W9%G#i+QNOrd=bNp+g3b^ucfmWn58XSINU$a5+py>RbPx;m@q@aR<_V z6Pn6{D5q1Ny){P|M*{3mdj#nAsDbv%0vQMmH>#B7C;q4m9lHf4?sLX zvV$I@n$^s9U{KKLx&F4)yHo_Ej0;e^bO2Hb-ZL`qF*mtlcR9|#JYhfuBy5Oy4~S(NW(|MF4YNoy9HF9Fc9Z23O-+sJ1Qiyh-l~=qHbM*Js0YjsRQ- zqAPF;z@Ssj6Qm#pG*acHHrxDp#9R6K3!yfl8zbQ^Yu zbe-VJhpb;;;fnI?Z;)hqS1j!Tev$dW7XY2!)YG3b)I1CXUdC;W`{i$u4!yGmVBga! 
zNR@VI3rX?CJ+tR#U~!|;Zi0J4$+Nmjj^V{y`!KGtD`qVfRN10Wdl z{GVIs%{~E%Q7719%3omJfBckLHQ{N&z&VMV2JK5Gu*-r&GC5cLDl5N=v(WbJ$Xo14 zty)b$%jMNab-#_{p_V#_zJ<&40VD(cJ@_ zwbPnQv~5e1i>pLJ4CedL+~dGgHxVF;>v{{&y8{w6<1a?wY+qoc=N&>97AqyRP&ij|QxyC^evemKxDGDpJtDJR4i;Po`TFMQIkTy0JQ zAJ2m425VJc-4NFThu7Vpqey*RbZOaa9l79i&U^(i$*3?OI#u ze9mQOFsmyU=dqzVl9H$#`TIc~UOAhM=su77o=uWWzT&S(kRCR-v@yJh?7YVP+|Jf& zrSO9yc{}nTD>P0m>5)Rxp7ug>@ry++Cr5jY-J`VS1vLP?l)GA0zK?BXC&`tNp@5pr zR59O;mF_8yX7}?;^^=<)m{74CZSVc7fwixK1k4IGTu%PYXs!1KPVj#JOYTGL zlDxkK0wi%s;Ppi544lnpsr+`Mo2Ba9YzHqs1tr%$bWm-VrBYH-62JxSkm|F^|(c zs8J7LOU3PL$+CS-@>F2&or<`%PZ#E(ymIi8Tbulp{>Jj^0QCE=KPQU zB&5K0jRJP17JkLnAqM#_qJW^Fx(`aZplQ0Q(X5F#8Y(Vc`;ssm^LVE(+S&Z~9(%f< zuIsy%t5k$yD;aspJa-;_+V&#*9_N3h6x1zVi2Zs)aIr$^pWLI}OPn@3*tYldm#|1- zSL@fTbOJ2_A`*NjY~Qast(Y*Wb?)dY|HZGY-)_)-)nO*u^dEJ!wzVDTy`A~)2`!L3 z{gK*iBR#a;gS?LDX*2IFFlTNoLC^8X%=`cS3vVqkd)c9c~)=c^?wFU)I#??xv1}bWrbZIIUu4?FTQ{ z41@=A6Wg$eXCd7(riLT++e7$vP3Fi@wYadWiW^^V;QHQ=vP>n!-tipi%RcsT+`dQ# zs~-BIg|_aQ?@JyW#KVD|%zB4xwZ(8a75ePsnaZ4Y-0jz&Xg>R$3e!O7h9MIVE)TOp z5?Ew_psbcI!_O9Ou-lw~9g|O<#9lQDJ{f7bZ&>p~a3UEdb$!MAy>L|wL5nM7`K(Ot zk;Kg{?Wrf3{)$unbYtt$k^GF8xF{dJ)Y}BiVHSS!0EA0+D?D@g8Ct86%0|#5sL{&& z?Ow%uMP2S^cqBOVrNAfL;;_rz2xRKR)rjE-Bah!CX&q862t^lDH!jU%bmv$ZJiNWt5HNh(Zd;DWvyp4{<@JwWnD8LZ)dF5bKW z3Yum|G3meRbIk(1vxMZRod8nCk|%dA%!U*}APO@`&YF-KNZ|jtKJ4EVxsG4-*KYq} z{`l|+WDNdSL6Y=HV5#!?!)&&hH!PM=d}DmvCR;04mFd-si#MxwA;p?i9E87a5qQly zP&_~<)8FQxB9`#QZHnrtN(W%;SSf@@-+u2%AWrWi2a#cxfi$ zhXHJilihr(md$kqSpsz4I?exrs)^wN@k{=0Ya2CAn- zIRu(1foeM+Db^z`^8m1?Rbt#3oGM!HZ&gL-^lHF{J$*IU zm0!ya(PDH7(X%U7KfQWYO{wWodc)PKO5Y|dxmy>ac8{H2m{#NhKW9~;B;r%6N zrs3Q+BQ;!FhE=XV+COj{U1;xenzf4aTecQ8%H_Y}_%pZE5CPq%=#XwV;y_ejhr}3&krtK9Z<|#rd zUtyU}muDocw3q9l{XQ@hAnTtgeA;hm41@+9t!phFM38X zlrwI6nbs-J$vIKEUOAIjsp#+HQ7+T|I*EpulzP1~u0_Zi5@z;&$`UuCG3YL^V1(nW zl`VCo_VkCfil^x!6|bT@7FK{#`#B1SzM7Zrf3U58Tbixt)ibY6^^cd$u71svbenug zH}s<8v;Ccpm>hXVv!xW1mFS<_R?y*U&RB&QM`-DDh6x84{MdZ8raq0!-4oUTZ;=Uk zd?~andDI&VpHE7$rhOcg)14A_^vt%OJ$dZA7EmI3)g9{!>wFI%`<*42*yxeTPVLlL zveBlhxh>I_QuN65N!qu<%pj&8I#1$YhJmd7l}RD}2b8uQE-NrYY>|DJi=)3VP5R3C ztXJLRw@zzoYv7mt4Wo2CF44!o#GDa!@+jLQ#iisxW z_>AR>*qgrTL#9r2a4_+*fbOu*B2p0;#$>RVuR1X-KF67{5rjMh6QiJno|#0gVEnt^ z+jVe#Gjw%z&2GC*>ciX)uk2v&HxMnJw&vBWp{8Bf%}aB~ad)Xb$L!b;bM+Sa!y@$| zVxfDB0!!Fc-}R@}B@3mV_yPHH)9(C7sz+pjbkBdekA{w2u0^rNG{*PDEVYEt2ZTI& z);I9p^6y!Rg4a76m%-sB1_!{ofrX{h!N4{jQ={RT3NJmVQpB(?>8ikakX*Tzk>~`6 zkn^GbmGD>jD53T8d07})val77!$^uiG!^vG=UvgeRZ+7mI4%wPeVpIV@WpQ{$?E5fLT52#g4oU*@3Tmv7n*3+8-NXwU8& z5J?{C_=qUq?p|9f@w%-m>u_XXb P8i5pLRb{HAj067zGBdyS literal 0 HcmV?d00001 diff --git a/dao/images/power.png b/dao/images/power.png new file mode 100644 index 0000000000000000000000000000000000000000..b709ec5cb335820ef4439528eb2f09357b3f51f4 GIT binary patch literal 9769 zcmbVyWmFv9wr%4OG-!|@!4e4W?ry;y65Ks#Xb2Dp?j9T(8iEB29$bREHZH-XaeI~T zocr#dH|~4q{ixBSs&?;Mwb!b(=A3JXzf+aNen$2T1Oj0z$V+R0KnSbAL4%G6TzQSS zISKqAxNFEsf+|KR{s1S)))FcbAW(HI=KTj0;2gt6Uf&%A!s+^ZAoMwxSb#wEZ3@y7 zTHYpy%edZJTUl%;i7yZdOL&`65{&KfZJl94cBZ_?Q#c^ryFfnIrW`ly$MW zh~s;74YNd!W3~xflG6~(K5e4euF>9q_u9og!>9C0eLqyOm}A=kLFQa8H`^(gD^ z{ysAs+blD1j`Q&F@WuBz2t*eghzg1$L?i==YoObJKngUu2q3yI|M5jDRF=7SwAa(} zUra`{h7yr1P!P#}NMG!e63yUE!J94C;Jk+mb!9U}E6RcG&y~g^kn16p)kOtv%p$I> za+PTP_{iKA@$%voFfP24Ez7V8^=+*rn&JDyd}tODJm;P6)S`N`WW+i&jqVs>A}la; z$z}QnP7(dYSy0!OClgXu>6^SdiImZlxtUs;O|DI1L^8LY1%ha-DNfmy4BgBysqa!) 
zGaY8ew9*`DxS2(@${vnMgQZIA(id>NT*_AT_Pa(Rcg3Rl`kN^Jb6Hlv8aZP zU3|0@S51hbD1wvy1=wsx+q21-^|q6vE|Kr|p`^GJ4oxl%^IrB z@-}A(aqpOMb>qj@_zSr`nwE>=={A>jMbIyHFm`M#vt|YGY$Dq%ds=TgI+uMkY!eExJwrD^H_$c*~X|@f*>oRS0+@UwqC;UHMCX8YdHFyWq=i zarsf6m#`i0WuS#>Ml0|2MN&3yP2so$%fK0+?k;p;!gEII_6z}A-$F2w)+&ldcrI^a zzI0=N(JkdH%~Af8C4rq6T^~YLlA8TIc6L7so~+_6dCpMnjQ*hXxgp(|;z;}45y1@w z0b8?sX`KF^;wyW~rxu}MRDM0ROc`E35T>M(oHkWpjJ##704fzj-WcCYN5W2#PmkEo zkM6_Fg&@qhaRk7NwC;`@(%ilvR}zho(mvFZeksEn+b7{J+81SG5e$S4Ic@`mBUVGY zfrXRA +`w;=;$4Jz1r6~|rTihg&=|PC&l$ESHcm(%b_tr$*!%=p1ev^xc6C8e zo4l9qh4n^|47lHodJO+UIe+-GIQ$*zu^t*?ZwZ<>D^2bPrYifq8|cR%{*V_R-^b8< zye8z7CD0Sa!O{~dP1%9kve*yWgwPIYFIt3yMe-E1Fgue|q_RN*>SMFBx+W$SD~MIh zo)zxJLPDg&X6pq=$C9;G2>zNb<2*lZm zhXx94!5agC5+ud{H4;Sq2ALZaSor^RQ8|TjOlZINh3$yd!^hYwhV(~jJh908D{pNU#xgzXRcZ^WUbq~$;sSx znx{Iasz1sG%c^9w&F%D_gB%bLSZJq5NtVvBb#~moFo5EWZq3K!Vkk(dsAiq}^Kx@n z_omBku>~xq3KRXj9K?RY*RYwLziJ)m%tS2ax9H3}Lc<-hex-9+S2Q09K7=A4Tx$Nt z=6*wp&kHRnu^G>oxs4{gxVmB{9D}D!#9bn}_kmURV2`VpPOZwJi+qlDRJ^I?_R>Cx zFJHd&RlvEXrbkjy3?0spj_tt<`=k}%JuO_vOvr1=S@HqBEy<(#8 zqPoJWc|h-R(RYP&>pwP)3!tX?;EnnG(vBSA_@x~3A1BQJJm1dK<;sm1gV*P?%gc+O zKJ@toGqUd0`6yk?qGL;_R8EjyNgquHVf#j#ZV`dr6L_NADDF5v*#zj0Lh}+g?r^hi zvY(dkM{Ixqn5m8A_DL4cM!qn8%$EOj@Oay!jzP3${jv^u+Zbt1L z!#SEi4=XEYoj1#XumOU_NzW%3EM(wpZ{F7##FmJPt_JUi*o&PNq7u1+w@(X`( zE(%fbkYuC+DQuMY=s+YueRTK6J(BWZ2pv~uZ$nZ(>)Q4P1;m?v2`0k4@b!qrb%~!R@eP~w)LJF1<2fbOX z==Pv~5?+d6llw0|Y|ida(>ea}gTGThId$sQx?49dkA=fY~!l55|%>=yBHtcif+ zhquu%c1Bs6p6qdMQwR5F`4_Xlzt(@JoMof92P@ctCOm6?va>M%eY|hzI+qHpGdP0Wa zlonnXh0M);&uNT56{%7K@KRICxVzHXu%A)B^IBuL0+iMpl$9TI=FTLavi%v~)ID9w z-94_KR>%cJ(EyGmLWa|I)5z@^Y2LuLFlQ@WVUKb-50|l*xD@Rtr;HoN6ry)upO*W; zXW6d(@wW>;uKDQxeZr>>qh1+TPhBxrCohqx8Ie7nZI7~?!HjR)7tv(ds$A_p!0+cC zyR2hvNJ`$0>|wTWNLwoI4Ue{n`KtF;K|CPGX%*R;VPRshfZd_1`{P#y2y`KVs`J3- z6`_&4vRYx(xOWayMA>s%dE)6a_F9s+bnSPAn-l-_viOsE=3w4UPuIPWYygS%XsmyL zb?j@TKw-SGH_bPp^S*Bm7JO=?^DHDzL!Dt7^)9T*$A+%wzZaK^_^P<>=V zpv4?@=I!QV!KiU~h=P`c9qZk8p9eqUzMXKUJAwCF!o`?gC^)ie^51$=5Hdz6M`N^^ zgTyJ(caxUet`~;)2X}_8NJac~*1XD`^y>~#U1tlU3VBnX*v~NNFHYl?w7`E<%3ZCG z4Ow_3rN(?43y2-Yj?c@&?HH1Ky|6&aBFG+sDQq`OZ1q|#k4ZbJVjckxBVT+Sn|9}S zp-09e*~M48H%KDwd$;$vPow8-MsJN>g-@Ibn^OBK-`Umt$*Bd(TP>IbdKF@z@7p&+ zN0WH`0+hyyZqxYAss3Sitvz5hvPb5NWf7lg<+mwrf7LeHK2{sMoyO!cQ{U}}c=z9b zGh3;8^m@5lby$-$b^zHj^3H_il@Kb(4Jd?_#Z<3Z-74G!=BoSZ8JTA)T{X70+z&S6 zV%fk8Ji?Gx|J?(|gSYWx9hyLdt>Q-fYV-izROD7xREn9zF~nM(>~qY{@&27@YzR;Zk_Hb?ggLi>JAc*7@x;xnd?@(*6oq{SShwO50D3- zq+cxJ|3Z61YNNsFFhl42Hk6;Pd*t_573+1E#$<7FFqD7YnU-!>+PDK$uZ^0@2EVP_?@{W(Co&hmS%mCaqb>@7 z8m+LrRU#Dq_|YNyy+Eqd22!LBOLHC&9G^8&%mjK3yw5!nZOCO${K+a+@^JssD=G)NVs-zpX@(DqB<2vWtu+}iE|-Yh|e zuB!zz-`V7KiGxYw>B&i7J_w|Vih!*SIG!ww6gNcI>422~Dr@_}%#KlU?-#k0SsXIx zFP=V4*L5mvd92yuE5GJ6r0^g8oR~C4IIjgk#Nk5R#6k2$eXa`@yR%X+Eh`zyG2A?d z`DVH!B$w%c+fMW7QA>So${yj}uUOlBUceV&`ir=Fbk<)aet* zs7dg~xMb>e)Wd~(`X<+SJfjOm16=R-V?*m(er+S+6`zLtg5)U?4P?#UzG`M}va%;i z4iARO@2%^9$u62@Glc6ikuw+2LJ>3oN2xBcmV!-Df7C@NCP&c_sGg zMvtMaq#OITR_?`X=+n*$j9OOm$hghrfovzD=)l3=Q(|MSCST2w?qhx>>st*R&o6-~ zG55?(29WFi8FZvp&Ew^w^;S)d1`&PrAGveirp9T8zW(o#sxZLz$>*r<8m5iD7hLWY zAKZ)G2)D}G+@w~cY5qN(I=FmSkcSp4X{v+8)PLPDK)CEHr@@gh7JT|*keAQ@W9xkv5qx_3dMu%skG0@7C@4sv2Gi8R z=yBH+M&@gc>@M(GggTc-bCSRF-H+UB2>#^-p4X7zr7+%np}z}}6*@qU6-y<~=L#W% z?emvH=Am(26Uq+JlMNpL*4~eZfi@gn2>CJIQQQwX+B>hLEaL20hpYbAfU0)jvFAR{ zMV)4SEqCqPKlfwEp$_SD8IOIuS(5zz_ka%rLP)4WuTXW7n$SVU=ZVPdh$A$^~j3*^M}4_ zB|N<2H1uW(aml7u#s>ffz@-rN^Er5C$xUBZ4R)&ETqaUrzrY*Acbne(BP5`5sMVs) zs^g*UldMXFk;Rjg?Wm^nuJ7``$5nBUb^)cduoHkZR03Z54vBg9O|LK@l!$^WSje9~ z3Db^TqixCpwkq1Wrn#P6_aKFhWK&uOPpN#C(hRV9Zj}_&*4B3BR`2FsdV4o-CmYAi 
zcy%;a{(ZTMvxBO&wLJPICKb%ZP|c!Df$}-THlr)v=!pWK{0_bs5OC@-5Y#F6c;B>P zE&6Z+b6;^8zAG5rcul2OL{b%E`4$B}U0e9@YpGg0@n39a#<_dE^`YZ%S=~_O65Uu% zF3cn$7#nQod=<5%`IM=OMp9o9Qf3X7UT#6nC278)>X790Dou*{oA zcdf48Z?x7^H;6?S_u}~lKNbk=th9%vU`=$mERshKCc2L`eaCNEnS@0IXU^9eY5V5# zWFr2&r8Ed3r8}hLfBdB&>zR?Xxhmtv-pv91&i7|NRm|u-wq3;czO()5NV==N&C3p+ z0G69L*-!Zg!A4SgP`+CfT;8SFskyNK1^~uXmT$iyE`lDH8<9C0^cPV2Z=D#0LH}f4 z3$;kp0a67O%MG%g?9c8iwlFai>x1a`o|B&d=_-v+NPG*3s%Aem%{2fHR*I1=oFO(Z1R4)suN7fg zqjVfVZU7KX9nZ4zI1w5B8~RSTY^(-;OK>rUE`bPUfzl32i!3HAs}gSa1@EgG;za5 zz<%%E8xi?0j4Sq_v}O8uiThM`!QL*$(~H6;#n|WCp;Q4Ve^wEGscc5=y>B2g+IJ%@ zmzNlODvUn(UlB-Jl(G=0oM3}!Z*;<$4A}RJu926O)jM4gShA`n=bmA}6N@dMy4D zVA`5Ck=`_|t>$s~DfnYqPoB*8=uAKh1O3fL>vuX!%Kmn)fv1@Cv+s()Hm7M|eDnmt zX27htA30M2TQDI}DHK;5*9WrV_$e&~6eFW-B6J%8+m6tTjL`Q7Z=m-$Bb5xL2x-2u z8k#p3N3n>`STCs>r7gdQWnv)LhG+$-L~0yb=6GQKL!nn>_5ccOlHa2v0tjMqYU+N2#`wzU40))LK%-ZmljUJ(^0qx_=;ObY6|gi# z21J13^mYf-$bI=|y85&%C*61s9?4ZeQgNRPv4F29hwi8~I%S=YEx+wJWNHnhe{23G zw-Q2sh_)S5p25A;W~+Hu1(vd|AlRBCQ_mLvCm|t;$`4doCbCBR-#+AA);?zV+dMsZ zF_tLvtXIJ29N5OqIlAN3C|DctZ8mo7N*ob-4&AzEs$PkUzUD1A0&mWJ0XRB@;{n=A zlS>Z(Qaig(pFe*NR1@=xvAY)ViW#(1D@o7Nm=)eEH%G%aw+Ee#-SUh8m|BsTuvqW` zI$X4j)G%*KAdry7Kri4OPl4sC{0!jh6`3A(@Cj4l#~HDwdnBe~F^}zWs&+s4HHxe5 zE8ayVxE?(BJaA#=kL=D1s~6_Clkx7U4{Q&Qzw&AfO?Ih-T~8a+Yf)JNNchukBZnCK zy`LFI_sY|na564-&J&=R9A!_MVH-8spK~2k@ed6G@B$U&)5H4N+vh$)59tb^#Pk=d zuJx-kO3rowi0Kn;9c=i43kY`o_CfS`Bmo~_0IRwx|Cv76DmSXB>+5h|I?OOFB`yUX zo5~e?0q5(^lz4W)JV1}fSIO+`_hX%_29qUmPXtI~5f0TVqJUsLqs(3jLry&5slSXM z1AEdC@CCoD7+e?}T-clF7(f&(vUA^3~f`m7BwL-6XP-8KRO&&OC)}bSUy!vjpCks z-P5w}a+hK)mH!?l;Lv>OP|M1qc{vYmw%*Ldc3uXGnX~Za;_TFhucL4ZLsm2UA@ zK#+0tD0=U(m$vIevoVUVzS85F1`H_nk>i7Pf1MJ$ifLr1x=3=qU@QnX09Te}O5-EBqaUD(!*6vQcCpVfGgK-T->} zhqaKH5$lNWQchFSunB3P|*n0 z{9ENXbcEjX1g!H+zuVEpYaFs0!shxKzi~o;I{;1`=L_=sYFcKhIZ2_RI4>PsUHMwt z6NgQvRd5a;kqqlq9?J~{hD66VAz4rBGRERt|2M|m=^v2vDqFsXQ!0ngZ5|cACFF^o zR*E9?*JL=wLuNFq8`$^o%`Az7_sGOM|MHEsd`G`Wq#_gsmc5+?eGz1~I3~vjDt9P) z%2+we_>IFLF01;{-|+2DJbAdquI9LVmFp5E+}@UvT0|lco|GX$Py3|TmmXO@Zrd?~ zhdWkF#w#$rB5dJQ`G<(e@i<0ya)jeS`SsMFJCONH-sJI+2U|7iELwntULMTX_+HpO zgc3cZs|GY?YH$)AbkOMJ_OFhL6Zw{QFA+f-{~U!qSw2McJI3&qQNllJS8e8PUSjJo z=14%fgGIc=H>*r6%hg{s5UGjSqlhT%VMqG<-TQQwN3;Zb`?_OxYOBZcl8r(iQOlX! 
z+@EbNS1tmQ(=Z#QsDQ6+$C5sI8A-TIqZ>&y2`1h$!#079u6!{xh{(beFT+R17{AxoFRO(tgsMEp*?ol=&y_X>izqE?4&V ztVa3)x=(H+C|^mR73&>*qg+{;ryb;!5)Te9<82h1OMLa;x95=EeE~L%4Z2L-{6_gh zLL4dZp-8Yej1H)9?*kgx+C0yj&U|Y0Wk<8Mua`>=by$7}Gq*n<1F_QT2?`>#?}d*^RJ0sy~?1?Q2q zD-Na6bgpjBJ|}P`{?$#s({N%KJ@mPQ72&X5vdl0^ZBlZ#A>!)@WZD)|Q zw@XrU@F|c&N?ghMhdxI^VXbQ$=dgT-W({k)CIaIeS2fos176WNDGZer)UO}Xtk8Eq zCEC1FB-d>2bbrsy9d?a_9xIvFCn0}!N5_d0&(USgX|*~M_Lrrzne}of9H#qP5XN*> zJR47qR(?)+B*`ccpD5EpDw@bq;3+CkQ!tPK$G4asXch8gw&yz`;vTY$gw6C`wJrVJ zD=6gUo#Y;lcc4?D-B^D2;V!(HtSoicE3Ntt#$U0rdd^DfywbwQFvrzeE+lGDw^pBx zOwQ*Q5ruM-D)2cjqq1T_^;ly)_=DVoLetu7;`@5uLK?l)EqxODmLzX7OJR8 z+JJqGJu8zYNBUKp$z?sF+kUJ4ARBRecSrPWw~RV@?NSBT>Da31=n&V*j%qtu^1ztD z0Sj~!)hX~m$-Nn~S80n=9W=RETQ||cZ1CHgW4nSM=>H&11AhFDiz9`Ga^F*q><~HJqVolYmZXf*R@>7s4f{ zbc2wvPus1~kQ0Tf;uZN_j0r<@bB4hZ-t~J`(*qm}YHgobxaT9V#j*Jz`pmZarUn$gF!<`?|Yk4Kik?Q-)L~f>^SSY>({pyq^fJ^_eBh6u}Ov-gkgMzZ>>Vs{VNfqpy7FQq*gob zsW3wNWh^089)|J;TL$TcB%pNe(y9iG@-+$>;*BRN+(~73;Gy~JNgCgu{}!Doim`pd z=K2wT!TrtT5@F&vo%g#~`6z+l^1q;~7+W1nZa+sZ_4wl&-95U(b!0CV)?VIF)5Jv8 zN|-Q^KX#M-5gs@4#5xMe>NOv*4W{8w3uSB2j7Hm>%6XfZw}3C}1M=n1p097-$wb!L z7z8+|Q&CqtL++#~C+X-Pa0Rx5|8BXkszCz9k^KK{y!c>O0-oPK`u56!ldE&Kpdd>e z{|N0I0zyW>j-tfwq(hKcRINq9%=5_iPAJY|YP1QRT(5uLf#81d)EAW6`F!jM?<6Q@yZ zAAv@}xQfdGS&H_dnW^p2 zpeH0-U9^Fu`}PFC9d@bJUlC1aQ8V*Z-aJgJKJ2=aI>2dr>6d2N;J!Wl!D|2bSod}A z1y9=ttB4xW)@pI#W$U+u&PlZPM{>(l#QqH?7Nf6uRjzF&U%J-kJ?)M5DCYNPYY9ti znwa3M*!$yOg3gs>L#PNZ*f(6&w{{uB&zXp~8M(t-%xaM{yRf9!qG)jX1Is!I>{R(T zzk7b^{QW|RsZ6A0q;`*LU%A&fRJ4OP+YVN4v40kx4{S&wQ_EfX4J^62;t8Fqzcx6= zx9lF=CiCo-TEfsGf;>5*Z*RdbF5%Cew%m*P(O({N-Xx~Ag--GMZwmC%Ig6D;+)|cR{Y0`f zO@D3Ujjfg}Eo{ED+YW#bfF;Ag2ZP#%qkZglb_RqEFA7%2^Q+qT$b;-QH-)J_%9f+G zP$YjeX`}b^u7k;0Cih0Bs}Y9_m#4U7Vt6kpVlIB;3GuRE^vCygFIpSN=pnc}s=am= zGZ0iE%J*t^t`ZzUkWKsJLX0*S=o;}QL_?Sb$=X%?1#vV=8sY)F8E31#=NRXw3@VTW zF0e64_V1R7e$4+%d-FdmQB2pX!MH#VEq?{r@+Ck>O#B!yQQ8<)#o+^J`oKOY8CB^D I$@fA33zq?lzyJUM literal 0 HcmV?d00001 diff --git a/dao/prog/check_db.py b/dao/prog/check_db.py index c85fc61..f1ea154 100644 --- a/dao/prog/check_db.py +++ b/dao/prog/check_db.py @@ -1,96 +1,121 @@ -from db_manager import DBmanagerObj -# from da_base import DaBase -from da_config import Config -import version +import sys import datetime -from utils import version_number, error_handling import logging -from sqlalchemy import (Table, Column, Integer, DateTime, String, BigInteger, Float, - ForeignKey, UniqueConstraint, select, desc, insert, update) -import sqlalchemy_utils +from sqlalchemy import ( + Table, + Column, + Integer, + DateTime, + String, + BigInteger, + Float, + ForeignKey, + UniqueConstraint, + select, + desc, + insert, + update, +) import pandas as pd +# from da_base import DaBase +# sys.path.append("../") +from da_config import Config +from version import __version__ +from utils import version_number, error_handling +from db_manager import DBmanagerObj -class CheckDB(): +class CheckDB: def __init__(self, file_name: str | None = None): # super().__init__(file_name) self.file_name = file_name self.config = Config(self.file_name) - self.version = version.__version__ + self.version = __version__ self.last_version = None - db_da_engine = self.config.get(['database da', "engine"], None, "mysql") + self.db_da = self.config.get_db_da(check_create=True) + self.engine = self.db_da.engine + """ + db_da_engine = self.config.get(["database da", "engine"], None, "mysql") if db_da_engine == "sqlite": - db_da_name = self.config.get(['database da', "database"], None, "day_ahead.db") + db_da_name = self.config.get( + ["database da", "database"], 
None, "day_ahead.db" + ) else: - db_da_name = self.config.get(['database da', "database"], None, "day_ahead") - db_da_server = self.config.get(['database da', "server"], None, "core-mariadb") - db_da_port = int(self.config.get(['database da', "port"], None, 0)) - db_da_user = self.config.get(['database da', "username"], None, "day_ahead") - db_da_path = self.config.get(['database da', "db_path"], None, "../data") - db_da_password = self.config.get(['database da', "password"]) + db_da_name = self.config.get(["database da", "database"], None, "day_ahead") + db_da_server = self.config.get(["database da", "server"], None, "core-mariadb") + db_da_port = int(self.config.get(["database da", "port"], None, 0)) + db_da_user = self.config.get(["database da", "username"], None, "day_ahead") + db_da_path = self.config.get(["database da", "db_path"], None, "../data") + db_da_password = self.config.get(["database da", "password"]) db_da_time_zone = self.config.get(["time_zone"]) - self.db_url = DBmanagerObj.db_url(db_dialect=db_da_engine, db_name=db_da_name, - db_server=db_da_server, db_user=db_da_user, - db_password=db_da_password, db_port=db_da_port, - db_path=db_da_path) + self.db_url = DBmanagerObj.db_url( + db_dialect=db_da_engine, + db_name=db_da_name, + db_server=db_da_server, + db_user=db_da_user, + db_password=db_da_password, + db_port=db_da_port, + db_path=db_da_path, + ) if not sqlalchemy_utils.database_exists(self.db_url): sqlalchemy_utils.create_database(self.db_url) try: - self.db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, - db_server=db_da_server, db_user=db_da_user, - db_password=db_da_password, db_port=db_da_port, - db_path=db_da_path, db_time_zone=db_da_time_zone) + self.db_da = DBmanagerObj( + db_dialect=db_da_engine, + db_name=db_da_name, + db_server=db_da_server, + db_user=db_da_user, + db_password=db_da_password, + db_port=db_da_port, + db_path=db_da_path, + db_time_zone=db_da_time_zone, + ) self.engine = self.db_da.engine except Exception as ex: error_handling(ex) logging.error("Check your credentials") + """ def upsert_variabel(self, variabel_table, record): - select_variabel = select(variabel_table.c.id).where(variabel_table.c.id == record[0]) + select_variabel = select(variabel_table.c.id).where( + variabel_table.c.id == record[0] + ) with self.engine.connect() as connection: variabel_result = connection.execute(select_variabel).first() if variabel_result: - query = update(variabel_table - ).where( - variabel_table.c.id == record[0] - ).values( - code=record[1], - name=record[2], - dim=record[3] + query = ( + update(variabel_table) + .where(variabel_table.c.id == record[0]) + .values(code=record[1], name=record[2], dim=record[3]) ) else: - query = insert(variabel_table - ).values( - id=record[0], - code=record[1], - name=record[2], - dim=record[3] + query = insert(variabel_table).values( + id=record[0], code=record[1], name=record[2], dim=record[3] ) with self.engine.connect() as connection: connection.execute(query) connection.commit() return - def check_db_da(self): + def update_db_da(self): # Defining the Engine # Create the Metadata Object metadata = self.db_da.metadata # Define the version table version_table = Table( - 'version', + "version", metadata, - Column('id', Integer, primary_key=True, autoincrement=True), - Column('moment', DateTime, unique=True), - Column('value', String(20), unique=True), + Column("id", Integer, primary_key=True, autoincrement=True), + Column("moment", DateTime, unique=True), + Column("value", String(20), unique=True), ) # 
Create the version table (if not exists) metadata.create_all(self.engine) l_version = 470 - query = select( - version_table.c.moment, - version_table.c.value - ).order_by(desc(version_table.c.moment)) + query = select(version_table.c.moment, version_table.c.value).order_by( + desc(version_table.c.moment) + ) with self.engine.connect() as connection: rows = pd.read_sql(query, connection) if len(rows) >= 1: @@ -99,20 +124,24 @@ def check_db_da(self): n_version = version_number(self.version) if l_version < n_version: - moment = datetime.datetime.fromtimestamp(round(datetime.datetime.now().timestamp())) - insert_query = insert(version_table).values(moment=moment, value=self.version) + moment = datetime.datetime.fromtimestamp( + round(datetime.datetime.now().timestamp()) + ) + insert_query = insert(version_table).values( + moment=moment, value=self.version + ) with self.engine.connect() as connection: connection.execute(insert_query) connection.commit() variabel_tabel = Table( - 'variabel', + "variabel", metadata, - Column('id', Integer, primary_key=True, autoincrement=True), - Column('code', String(10), unique=True, nullable=False), - Column('name', String(50), unique=True, nullable=False), - Column('dim', String(10), nullable=False), - sqlite_autoincrement=True # Ensure SQLite uses AUTOINCREMENT + Column("id", Integer, primary_key=True, autoincrement=True), + Column("code", String(10), unique=True, nullable=False), + Column("name", String(50), unique=True, nullable=False), + Column("dim", String(10), nullable=False), + sqlite_autoincrement=True, # Ensure SQLite uses AUTOINCREMENT ) if l_version <= 472: @@ -120,84 +149,90 @@ def check_db_da(self): # Create the version table (if not exists) variabel_tabel.create(self.engine) records = [ - [1, 'cons', 'Verbruik', 'kWh'], - [2, 'prod', 'Productie', 'kWh'], - [3, 'da', 'Tarief', 'euro/kWh'], - [4, 'gr', 'Globale straling', 'J/cm2'], - [5, 'temp', 'Temperatuur', '°C'], - [6, 'solar_rad', 'PV radiation', 'J/cm2'], - [7, 'cost', 'cost', 'euro'], - [8, 'profit', 'profit', 'euro'], - [9, 'bat_in', 'Batterij in', 'kWh'], - [10, 'bat_out', 'Batterij uit', 'kWh'], - [11, 'base', 'Basislast', 'kWh'], - [12, 'boil', 'Boiler', 'kWh'], - [13, 'wp', 'Warmtepomp', 'kWh'], - [14, 'ev', 'Elektrische auto', 'kWh'], - [15, 'pv_ac', 'Zonne energie AC', 'kWh'], - [16, 'soc', 'SoC', '%'], - [17, 'pv_dc', 'Zonne energie DC', 'kWh'], + [1, "cons", "Verbruik", "kWh"], + [2, "prod", "Productie", "kWh"], + [3, "da", "Tarief", "euro/kWh"], + [4, "gr", "Globale straling", "J/cm2"], + [5, "temp", "Temperatuur", "°C"], + [6, "solar_rad", "PV radiation", "J/cm2"], + [7, "cost", "cost", "euro"], + [8, "profit", "profit", "euro"], + [9, "bat_in", "Batterij in", "kWh"], + [10, "bat_out", "Batterij uit", "kWh"], + [11, "base", "Basislast", "kWh"], + [12, "boil", "Boiler", "kWh"], + [13, "wp", "Warmtepomp", "kWh"], + [14, "ev", "Elektrische auto", "kWh"], + [15, "pv_ac", "Zonne energie AC", "kWh"], + [16, "soc", "SoC", "%"], + [17, "pv_dc", "Zonne energie DC", "kWh"], ] for i in range(len(records)): record = records[i] self.upsert_variabel(variabel_tabel, record) - logging.info("Table \"variabel\" met inhoud gecreeerd.") + logging.info('Table "variabel" met inhoud gecreeerd.') # table "values" maken values_tabel = Table( - 'values', + "values", metadata, - Column('id', Integer, primary_key=True, autoincrement=True), - Column('variabel', Integer, ForeignKey("variabel.id", - ondelete="CASCADE"), - nullable=False), - Column('time', BigInteger, nullable=False), - Column('value', Float), 
+ Column("id", Integer, primary_key=True, autoincrement=True), + Column( + "variabel", + Integer, + ForeignKey("variabel.id", ondelete="CASCADE"), + nullable=False, + ), + Column("time", BigInteger, nullable=False), + Column("value", Float), UniqueConstraint("variabel", "time"), - sqlite_autoincrement=True # Ensure SQLite uses AUTOINCREMENT + sqlite_autoincrement=True, # Ensure SQLite uses AUTOINCREMENT ) values_tabel.create(self.engine) - logging.info("Table \"values\" gecreeerd.") + logging.info('Table "values" gecreeerd.') prognoses_tabel = Table( - 'prognoses', + "prognoses", metadata, - Column('id', Integer, primary_key=True, autoincrement=True), - Column('variabel', Integer, ForeignKey("variabel.id", - ondelete="CASCADE"), - nullable=False), - Column('time', BigInteger, nullable=False), - Column('value', Float), + Column("id", Integer, primary_key=True, autoincrement=True), + Column( + "variabel", + Integer, + ForeignKey("variabel.id", ondelete="CASCADE"), + nullable=False, + ), + Column("time", BigInteger, nullable=False), + Column("value", Float), UniqueConstraint("variabel", "time"), - sqlite_autoincrement=True # Ensure SQLite uses AUTOINCREMENT + sqlite_autoincrement=True, # Ensure SQLite uses AUTOINCREMENT ) prognoses_tabel.create(self.engine) - logging.info("Table \"prognoses\" gecreeerd.") + logging.info('Table "prognoses" gecreeerd.') if l_version < 20240307: - record = [18, 'mach', 'Apparatuur', 'kWh'] + record = [18, "mach", "Apparatuur", "kWh"] self.upsert_variabel(variabel_tabel, record) - logging.info("Table \"variabel\" geupdated.") + logging.info('Table "variabel" geupdated.') if l_version < 20240805: records_2024_8_5 = [ - [16, 'soc', 'SoC', '%'], - [19, 'soc_0', 'SoC 1', '%'], - [20, 'soc_1', 'SoC 2', '%'], - [21, 'soc_2', 'SoC 3', '%'], - [22, 'soc_3', 'SoC 4', '%'], + [16, "soc", "SoC", "%"], + [19, "soc_0", "SoC 1", "%"], + [20, "soc_1", "SoC 2", "%"], + [21, "soc_2", "SoC 3", "%"], + [22, "soc_3", "SoC 4", "%"], ] for i in range(len(records_2024_8_5)): record = records_2024_8_5[i] self.upsert_variabel(variabel_tabel, record) - logging.info("Table \"variabel\" geupdated.") + logging.info('Table "variabel" geupdated.') def main(): checkdb = CheckDB("../data/options.json") - checkdb.check_db_da() + checkdb.update_db_da() if __name__ == "__main__": diff --git a/dao/prog/da_base.py b/dao/prog/da_base.py index baba30c..ab9a2f7 100644 --- a/dao/prog/da_base.py +++ b/dao/prog/da_base.py @@ -1,8 +1,3 @@ -""" -Het programma Day Ahead Optimalisatie kun je je energieverbruik en energiekosten optimaliseren als je gebruik maakt -van dynamische prijzen. 
-Zie verder: DOCS.md -""" import datetime import sys import os @@ -22,6 +17,8 @@ from da_meteo import Meteo from da_prices import DaPrices from db_manager import DBmanagerObj +from typing import Union +from hassapi.models import StateList class NotificationHandler(Handler): @@ -45,38 +42,51 @@ class DaBase(hass.Hass): def __init__(self, file_name: str = None): self.file_name = file_name path = os.getcwd() - new_path = "/".join(list(path.split('/')[0:-2])) - if not new_path in sys.path: + new_path = "/".join(list(path.split("/")[0:-2])) + if new_path not in sys.path: sys.path.append(new_path) self.make_data_path() self.debug = False - self.generate_tasks() + self.tasks = self.generate_tasks() self.log_level = logging.INFO self.notification_entity = None try: self.config = Config(self.file_name) - except ValueError as ex: + except ValueError: self.config = None return log_level_str = self.config.get(["logging level"], None, "info") _log_level = getattr(logging, log_level_str.upper(), None) if not isinstance(_log_level, int): - raise ValueError('Invalid log level: %s' % _log_level) + raise ValueError("Invalid log level: %s" % _log_level) self.log_level = _log_level - logging.addLevelName(logging.DEBUG, 'debug') - logging.addLevelName(logging.INFO, 'info') - logging.addLevelName(logging.WARNING, 'waarschuwing') - logging.addLevelName(logging.ERROR, 'fout') - logging.addLevelName(logging.CRITICAL, 'kritiek') + logging.addLevelName(logging.DEBUG, "debug") + logging.addLevelName(logging.INFO, "info") + logging.addLevelName(logging.WARNING, "waarschuwing") + logging.addLevelName(logging.ERROR, "fout") + logging.addLevelName(logging.CRITICAL, "kritiek") logging.getLogger().setLevel(self.log_level) - self.protocol_api = self.config.get(['homeassistant', 'protocol api'], default="http") - self.ip_address = self.config.get(['homeassistant', 'ip adress'], default="supervisor") - self.ip_port = self.config.get(['homeassistant', 'ip port'], default=None) + self.protocol_api = self.config.get( + ["homeassistant", "protocol api"], default="http" + ) + self.ip_address = self.config.get( + ["homeassistant", "ip adress"], default="supervisor" + ) + self.ip_port = self.config.get(["homeassistant", "ip port"], default=None) if self.ip_port is None: self.hassurl = self.protocol_api + "://" + self.ip_address + "/core/" else: - self.hassurl = self.protocol_api + "://" + self.ip_address + ":" + str(self.ip_port) + "/" - self.hasstoken = self.config.get(['homeassistant', 'token'], default=os.environ.get("SUPERVISOR_TOKEN")) + self.hassurl = ( + self.protocol_api + + "://" + + self.ip_address + + ":" + + str(self.ip_port) + + "/" + ) + self.hasstoken = self.config.get( + ["homeassistant", "token"], default=os.environ.get("SUPERVISOR_TOKEN") + ) super().__init__(hassurl=self.hassurl, token=self.hasstoken) headers = { "Authorization": "Bearer " + self.hasstoken, @@ -85,36 +95,11 @@ def __init__(self, file_name: str = None): resp = get(self.hassurl + "api/config", headers=headers) resp_dict = json.loads(resp.text) # logging.debug(f"hass/api/config: {resp.text}") - self.config.set("latitude", resp_dict['latitude']) - self.config.set("longitude", resp_dict['longitude']) - self.config.set("time_zone", resp_dict['time_zone']) - db_da_engine = self.config.get(['database da', "engine"], None, "mysql") - db_da_server = self.config.get(['database da', "server"], None, "core-mariadb") - db_da_port = int(self.config.get(['database da', "port"], None, 0)) - if db_da_engine == "sqlite": - db_da_name = self.config.get(['database 
da', "database"], None, "day_ahead.db") - else: - db_da_name = self.config.get(['database da', "database"], None, "day_ahead") - db_da_user = self.config.get(['database da', "username"], None, "day_ahead") - db_da_password = self.config.get(['database da', "password"]) - db_da_path = self.config.get(['database da', "db_path"], None, "../data") - db_time_zone = self.config.get(["time_zone"]) - self.db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, db_server=db_da_server, - db_port=db_da_port, db_user=db_da_user, db_password=db_da_password, - db_path=db_da_path, db_time_zone=db_time_zone) - db_ha_engine = self.config.get(['database ha', "engine"], None, "mysql") - db_ha_server = self.config.get(['database ha', "server"], None, "core-mariadb") - db_ha_port = int(self.config.get(['database ha', "port"], None, 0)) - if db_ha_engine == "sqlite": - db_ha_name = self.config.get(['database ha', "database"], None, "home_assistant_v2.db") - else: - db_ha_name = self.config.get(['database ha', "database"], None, "homeassistant") - db_ha_user = self.config.get(['database ha', "username"], None, "day_ahead") - db_ha_password = self.config.get(['database ha', "password"]) - db_ha_path = self.config.get(['database ha', "db_path"], None, "/homeassistant") - self.db_ha = DBmanagerObj(db_dialect=db_ha_engine, db_name=db_ha_name, db_server=db_ha_server, - db_port=db_ha_port, db_user=db_ha_user, db_password=db_ha_password, - db_path=db_ha_path, db_time_zone=db_time_zone) + self.config.set("latitude", resp_dict["latitude"]) + self.config.set("longitude", resp_dict["longitude"]) + self.config.set("time_zone", resp_dict["time_zone"]) + self.db_da = self.config.get_db_da() + self.db_ha = self.config.get_db_ha() self.meteo = Meteo(self.config, self.db_da) self.solar = self.config.get(["solar"]) @@ -124,99 +109,119 @@ def __init__(self, file_name: str = None): self.strategy = self.config.get(["strategy"]) self.tibber_options = self.config.get(["tibber"], None, None) - self.notification_entity = self.config.get(["notifications", "notification entity"], None, None) - self.notification_opstarten = self.config.get(["notifications", "opstarten"], None, False) - if type(self.notification_opstarten) is str and self.notification_opstarten.lower() == "true": + self.notification_entity = self.config.get( + ["notifications", "notification entity"], None, None + ) + self.notification_opstarten = self.config.get( + ["notifications", "opstarten"], None, False + ) + if ( + type(self.notification_opstarten) is str + and self.notification_opstarten.lower() == "true" + ): self.notification_opstarten = True else: self.notification_opstarten = False - self.notification_berekening = self.config.get(["notifications", "berekening"], None, False) - if type(self.notification_berekening) is str and self.notification_berekening.lower() == "true": + self.notification_berekening = self.config.get( + ["notifications", "berekening"], None, False + ) + if ( + type(self.notification_berekening) is str + and self.notification_berekening.lower() == "true" + ): self.notification_berekening = True else: self.notification_berekening = False - self.last_activity_entity = self.config.get(["notifications", "last activity entity"], None, None) + self.last_activity_entity = self.config.get( + ["notifications", "last activity entity"], None, None + ) self.set_last_activity() self.graphics_options = self.config.get(["graphics"]) self.db_da.log_pool_status() - def generate_tasks(self): - self.tasks = { + def set_value(self, entity_id: str, 
value: Union[int, float, str]) -> StateList: + try: + result = super().set_value(entity_id, value) + state = self.get_state(entity_id).state + if isinstance(value, (int, float)): + if round(float(state), 5) != round(float(value), 5): + raise ValueError + else: + if state != value: + raise ValueError + except Exception: + logging.error(f"Fout bij schrijven naar {entity_id}, waarde {value}") + # error_handling(ex) + raise + return result + + @staticmethod + def generate_tasks(): + tasks = { "calc_optimum_met_debug": { "name": "Optimaliseringsberekening met debug", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "debug", - "calc"], + "cmd": ["python3", "../prog/day_ahead.py", "debug", "calc"], "object": "DaCalc", "function": "calc_optimum_met_debug", - "file_name": "calc_debug"}, + "file_name": "calc_debug", + }, "calc_optimum": { "name": "Optimaliseringsberekening zonder debug", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "calc"], + "cmd": ["python3", "../prog/day_ahead.py", "calc"], "function": "calc_optimum", - "file_name": "calc"}, + "file_name": "calc", + }, "tibber": { "name": "Verbruiksgegevens bij Tibber ophalen", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "tibber"], + "cmd": ["python3", "../prog/day_ahead.py", "tibber"], "function": "get_tibber_data", - "file_name": "tibber"}, + "file_name": "tibber", + }, "meteo": { "name": "Meteoprognoses ophalen", - "cmd": [ - "python3", - "day_ahead.py", - "meteo"], + "cmd": ["python3", "day_ahead.py", "meteo"], "function": "get_meteo_data", - "file_name": "meteo"}, + "file_name": "meteo", + }, "prices": { "name": "Day ahead prijzen ophalen", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "prices"], + "cmd": ["python3", "../prog/day_ahead.py", "prices"], "function": "get_day_ahead_prices", - "file_name": "prices"}, + "file_name": "prices", + }, "calc_baseloads": { "name": "Bereken de baseloads", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "calc_baseloads"], + "cmd": ["python3", "../prog/day_ahead.py", "calc_baseloads"], "function": "calc_baseloads", - "file_name": "baseloads"}, + "file_name": "baseloads", + }, "clean": { "name": "Bestanden opschonen", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "clean_data"], + "cmd": ["python3", "../prog/day_ahead.py", "clean_data"], "function": "clean_data", - "file_name": "clean"}, + "file_name": "clean", + }, "consolidate": { "name": "Verbruik/productie consolideren", - "cmd": [ - "python3", - "../prog/day_ahead.py", - "consolidate"], + "cmd": ["python3", "../prog/day_ahead.py", "consolidate"], "function": "consolidate_data", - "file_name": "consolidate"} + "file_name": "consolidate", + }, } + return tasks def start_logging(self): logging.debug(f"python pad:{sys.path}") logging.info(f"Day Ahead Optimalisering versie: {__version__}") - logging.info(f"Day Ahead Optimalisering gestart op: {datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')}") + logging.info( + f"Day Ahead Optimalisering gestart op: " + f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')}" + ) if self.config is not None: - logging.debug(f"Locatie: latitude {str(self.config.get(['latitude']))} " - f"longitude: {str(self.config.get(['longitude']))}") + logging.debug( + f"Locatie: latitude {str(self.config.get(['latitude']))} " + f"longitude: {str(self.config.get(['longitude']))}" + ) @staticmethod def make_data_path(): @@ -227,8 +232,11 @@ def make_data_path(): def set_last_activity(self): if self.last_activity_entity is not None: - self.call_service("set_datetime", 
entity_id=self.last_activity_entity, - datetime=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + self.call_service( + "set_datetime", + entity_id=self.last_activity_entity, + datetime=datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + ) def get_meteo_data(self, show_graph: bool = False): self.meteo.get_meteo_data(show_graph) @@ -240,6 +248,7 @@ def get_tibber_data(): @staticmethod def consolidate_data(): from da_report import Report + report = Report() start_dt = None if len(sys.argv) > 2: @@ -253,7 +262,9 @@ def consolidate_data(): report.consolidate_data(start_dt) def get_day_ahead_prices(self): - self.prices.get_prices(self.config.get(["source day ahead"], self.prices_options, "nordpool")) + self.prices.get_prices( + self.config.get(["source day ahead"], self.prices_options, "nordpool") + ) def save_df(self, tablename: str, tijd: list, df: pd.DataFrame): """ @@ -263,7 +274,7 @@ def save_df(self, tablename: str, tijd: list, df: pd.DataFrame): :param df: het dataframe met de code van de variabelen in de kolomheader :return: None """ - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + df_db = pd.DataFrame(columns=["time", "code", "value"]) df = df.reset_index(drop=True) columns = df.columns.values.tolist()[1:] for index in range(len(tijd)): @@ -271,7 +282,7 @@ def save_df(self, tablename: str, tijd: list, df: pd.DataFrame): for c in columns: db_row = [str(utc), c, float(df.loc[index, c])] df_db.loc[df_db.shape[0]] = db_row - logging.debug('Save calculated data:\n{}'.format(df_db.to_string())) + logging.debug("Save calculated data:\n{}".format(df_db.to_string())) self.db_da.savedata(df_db, tablename=tablename) return @@ -283,17 +294,17 @@ def get_calculated_baseload(weekday: int) -> list: :return: een lijst van eerder berekende baseload van 24uurvoor de betreffende dag """ in_file = "../data/baseload/baseload_" + str(weekday) + ".json" - with open(in_file, 'r') as f: + with open(in_file, "r") as f: result = json.load(f) return result def calc_da_avg(self) -> float: """ - calculates the average of the last 24 hour values of the day ahead prices + calculates the average of the last '24' hour values of the day ahead prices :return: the calculated average """ # old sql query - ''' + """ sql_avg = ( "SELECT AVG(t1.`value`) avg_da FROM " "(SELECT `time`, `value`, from_unixtime(`time`) 'begin' " @@ -301,30 +312,37 @@ def calc_da_avg(self) -> float: "WHERE `variabel`.`code` = 'da' AND `values`.`variabel` = `variabel`.`id` " "ORDER BY `time` desc LIMIT 24) t1 " ) - ''' + """ # Reflect existing tables from the database - values_table = Table('values', self.db_da.metadata, autoload_with=self.db_da.engine) - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=self.db_da.engine) + values_table = Table( + "values", self.db_da.metadata, autoload_with=self.db_da.engine + ) + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Construct the inner query - inner_query = select( - values_table.c.time, - values_table.c.value, - self.db_da.from_unixtime(values_table.c.time).label('begin') - ).where( - and_( - variabel_table.c.code == 'da', - values_table.c.variabel == variabel_table.c.id, + inner_query = ( + select( + values_table.c.time, + values_table.c.value, + self.db_da.from_unixtime(values_table.c.time).label("begin"), ) - ).order_by( - values_table.c.time.desc() - ).limit(24).alias('t1') + .where( + and_( + variabel_table.c.code == "da", + values_table.c.variabel == variabel_table.c.id, + ) + ) + 
.order_by(values_table.c.time.desc()) + .limit(24) + .alias("t1") + ) # Construct the outer query - outer_query = select( - func.avg(inner_query.c.value).label('avg_da') - ) - from sqlalchemy.dialects import mysql, postgresql + outer_query = select(func.avg(inner_query.c.value).label("avg_da")) + from sqlalchemy.dialects import mysql # , postgresql + query_str = str(inner_query.compile(dialect=mysql.dialect())) logging.debug(f"inner query p_avg: {query_str}") query_str = str(outer_query.compile(dialect=mysql.dialect())) @@ -335,17 +353,23 @@ def calc_da_avg(self) -> float: result = connection.execute(outer_query) return result.scalar() - def set_entity_value(self, entity_key: str, options: dict, value: int | float| str): + def set_entity_value( + self, entity_key: str, options: dict, value: int | float | str + ): entity_id = self.config.get([entity_key], options, None) if entity_id is not None: self.set_value(entity_id, value) - def set_entity_option(self, entity_key: str, options: dict, value: int | float| str): + def set_entity_option( + self, entity_key: str, options: dict, value: int | float | str + ): entity_id = self.config.get([entity_key], options, None) if entity_id is not None: self.select_option(entity_id, value) - def set_entity_state(self, entity_key: str, options: dict, value: int | float| str): + def set_entity_state( + self, entity_key: str, options: dict, value: int | float | str + ): entity_id = self.config.get([entity_key], options, None) if entity_id is not None: self.set_state(entity_id, value) @@ -354,6 +378,7 @@ def clean_data(self): """ takes care for cleaning folders data/log and data/images """ + def clean_folder(folder: str, pattern: str): current_time = time.time() day = 24 * 60 * 60 @@ -364,22 +389,27 @@ def clean_folder(folder: str, pattern: str): for f in list_files: if fnmatch.fnmatch(f, pattern): creation_time = os.path.getctime(f) - if (current_time - creation_time) >= self.config.get(["save days"], self.history_options, 7) * day: + if (current_time - creation_time) >= self.config.get( + ["save days"], self.history_options, 7 + ) * day: os.remove(f) logging.info(f"{f} removed") os.chdir(current_dir) + clean_folder("../data/log", "*.log") clean_folder("../data/log", "dashboard.log.*") clean_folder("../data/images", "*.png") def calc_optimum_met_debug(self): from day_ahead import DaCalc + dacalc = DaCalc(self.file_name) dacalc.debug = True dacalc.calc_optimum() def calc_optimum(self): from day_ahead import DaCalc + dacalc = DaCalc(self.file_name) dacalc.debug = False dacalc.calc_optimum() @@ -387,6 +417,7 @@ def calc_optimum(self): @staticmethod def calc_baseloads(): from da_report import Report + report = Report() report.calc_save_baseloads() @@ -400,17 +431,26 @@ def run_task_function(self, task, logfile: bool = True): run_task = self.tasks[task] file_handler = None stream_handler = None - logging.basicConfig(level=self.log_level, - format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%Y-%m-%d %H:%M:%S') + logging.basicConfig( + level=self.log_level, + format="%(asctime)s %(levelname)s: %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) logger = logging.getLogger() - formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') + formatter = logging.Formatter( + "%(asctime)s %(levelname)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S" + ) if logfile: # old_stdout = sys.stdout for handler in logger.handlers[:]: # make a copy of the list logger.removeHandler(handler) - file_name = ("../data/log/" + 
run_task["file_name"] + "_" + - datetime.datetime.now().strftime("%Y-%m-%d__%H:%M") + ".log") + file_name = ( + "../data/log/" + + run_task["file_name"] + + "_" + + datetime.datetime.now().strftime("%Y-%m-%d__%H:%M") + + ".log" + ) file_handler = logging.FileHandler(file_name) file_handler.setLevel(self.log_level) @@ -426,8 +466,11 @@ def run_task_function(self, task, logfile: bool = True): logger.addHandler(notification_handler) self.start_logging() try: - logging.info(f"Day Ahead Optimalisatie gestart: " - f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')} taak: {run_task['function']}") + logging.info( + f"Day Ahead Optimalisatie gestart: " + f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')} " + f"taak: {run_task['function']}" + ) self.db_da.log_pool_status() getattr(self, run_task["function"])() self.set_last_activity() @@ -449,12 +492,17 @@ def run_task_cmd(self, task): data = proc.stdout.decode() err = proc.stderr.decode() log_content = data + err - filename = ("../data/log/" + run_task["file_name"] + "_" + - datetime.datetime.now().strftime("%Y-%m-%d__%H:%M:%S") + ".log") + filename = ( + "../data/log/" + + run_task["file_name"] + + "_" + + datetime.datetime.now().strftime("%Y-%m-%d__%H:%M:%S") + + ".log" + ) with open(filename, "w") as f: f.write(log_content) - ''' + """ # klass = globals()["class_name"] # instance = klass() @@ -469,7 +517,8 @@ def run_task_cmd(self, task): # sys.stdout = log_file try: logging.info(f"Day Ahead Optimalisatie gestart: " - f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')} taak: {run_task['task']}") + f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')} " + f" taak: {run_task['task']}") getattr(self, run_task["task"])() self.set_last_activity() except Exception as ex: @@ -478,4 +527,4 @@ def run_task_cmd(self, task): # log_file.flush() # sys.stdout = old_stdout # log_file.close() - ''' + """ diff --git a/dao/prog/da_config.py b/dao/prog/da_config.py index 862a624..4c6938f 100644 --- a/dao/prog/da_config.py +++ b/dao/prog/da_config.py @@ -1,11 +1,16 @@ import json import logging import os +from logging import raiseExceptions + +from dao.prog.db_manager import DBmanagerObj +import sqlalchemy_utils class Config: - def parse(self, file_name:str): + @staticmethod + def parse(file_name: str): with open(file_name, "r") as file_json: try: return json.load(file_json) @@ -19,7 +24,9 @@ def __init__(self, file_name: str): file_secrets = datapath + "/secrets.json" self.secrets = self.parse(file_secrets) - def get(self, keys: list, options: dict = None, default=None) -> str | dict | list | None: + def get( + self, keys: list, options: dict = None, default=None + ) -> str | dict | list | None: if options is None: options = self.options if keys[0] in options: @@ -39,6 +46,72 @@ def get(self, keys: list, options: dict = None, default=None) -> str | dict | li def set(self, key, value): self.options[key] = value + def get_db_da(self, check_create: bool = False): + db_da_engine = self.get(["database da", "engine"], None, "mysql") + db_da_server = self.get(["database da", "server"], None, "core-mariadb") + db_da_port = int(self.get(["database da", "port"], None, 0)) + if db_da_engine == "sqlite": + db_da_name = self.get(["database da", "database"], None, "day_ahead.db") + else: + db_da_name = self.get(["database da", "database"], None, "day_ahead") + db_da_user = self.get(["database da", "username"], None, "day_ahead") + db_da_password = self.get(["database da", "password"]) + db_da_path = self.get(["database da", "db_path"], None, "../data") + 
db_time_zone = self.get(["time_zone"]) + if check_create: + db_url = DBmanagerObj.db_url( + db_dialect=db_da_engine, + db_name=db_da_name, + db_server=db_da_server, + db_user=db_da_user, + db_password=db_da_password, + db_port=db_da_port, + db_path=db_da_path, + ) + if not sqlalchemy_utils.database_exists(db_url): + sqlalchemy_utils.create_database(db_url) + try: + db_da = DBmanagerObj( + db_dialect=db_da_engine, + db_name=db_da_name, + db_server=db_da_server, + db_user=db_da_user, + db_password=db_da_password, + db_port=db_da_port, + db_path=db_da_path, + db_time_zone=db_time_zone, + ) + except Exception as ex: + logging.error("Check your settings for day_ahead database") + return db_da + + def get_db_ha(self): + db_ha_engine = self.get(["database ha", "engine"], None, "mysql") + db_ha_server = self.get(["database ha", "server"], None, "core-mariadb") + db_ha_port = int(self.get(["database ha", "port"], None, 0)) + if db_ha_engine == "sqlite": + db_ha_name = self.get(["database ha", "database"], None, "home-assistant_v2.db") + else: + db_ha_name = self.get(["database ha", "database"], None, "homeassistant") + db_ha_user = self.get(["database ha", "username"], None, "homeassistant") + db_ha_password = self.get(["database ha", "password"]) + db_ha_path = self.get(["database ha", "db_path"], None, "/homeassistant") + db_time_zone = self.get(["time_zone"]) + try: + db_ha = DBmanagerObj( + db_dialect=db_ha_engine, + db_name=db_ha_name, + db_server=db_ha_server, + db_user=db_ha_user, + db_password=db_ha_password, + db_port=db_ha_port, + db_path=db_ha_path, + db_time_zone=db_time_zone, + ) + except Exception as ex: + logging.error("Check your settings for Home Assitant database") + return db_ha + def get_config(file_name: str, keys: list, default=None): config = Config(file_name=file_name) diff --git a/dao/prog/da_graph.py b/dao/prog/da_graph.py index ef1ca40..9274902 100644 --- a/dao/prog/da_graph.py +++ b/dao/prog/da_graph.py @@ -9,8 +9,8 @@ class GraphBuilder: def __init__(self, backend=None): - plt.set_loglevel(level='warning') - pil_logger = logging.getLogger('PIL') + plt.set_loglevel(level="warning") + pil_logger = logging.getLogger("PIL") # override the logger logging level to INFO pil_logger.setLevel(logging.INFO) @@ -45,9 +45,9 @@ def build(df, options, show=True): else: ax = axis_right if "column" in serie: - data_array = df[serie['column']] + data_array = df[serie["column"]] else: - data_array = df[serie['name']] + data_array = df[serie["name"]] if ("negativ" in serie) or (("sign" in serie) and (serie["sign"] == "neg")): data_array = np.negative(data_array) s_type = serie["type"] @@ -59,44 +59,86 @@ def build(df, options, show=True): labels.append(label) plot = None if s_type == "bar": - plot = ax.bar(ind, data_array, label=label, width=width, color=color, align="edge") + plot = ax.bar( + ind, data_array, label=label, width=width, color=color, align="edge" + ) elif s_type == "line": linestyle = serie["linestyle"] - plot = ax.plot(ind, data_array, label=label, linestyle=linestyle, color=color, align="edge") + plot = ax.plot( + ind, + data_array, + label=label, + linestyle=linestyle, + color=color, + align="edge", + ) else: # stacked bar data_sum = np.sum(data_array) if data_sum >= 0: if vax == "left": - plot = ax.bar(ind, data_array, width=width, bottom=stacked_plus, label=label, color=color, - align="edge") + plot = ax.bar( + ind, + data_array, + width=width, + bottom=stacked_plus, + label=label, + color=color, + align="edge", + ) stacked_plus = stacked_plus + data_array else: - plot 
= ax.bar(ind+width, data_array, width=width, bottom=stacked_plus_right, label=label, - color=color, align="edge") + plot = ax.bar( + ind + width, + data_array, + width=width, + bottom=stacked_plus_right, + label=label, + color=color, + align="edge", + ) stacked_plus_right = stacked_plus_right + data_array elif data_sum < 0: if vax == "left": - plot = ax.bar(ind, data_array, width=width, bottom=stacked_neg, label=label, color=color, - align="edge") + plot = ax.bar( + ind, + data_array, + width=width, + bottom=stacked_neg, + label=label, + color=color, + align="edge", + ) stacked_neg = stacked_neg + data_array else: - plot = ax.bar(ind+width, data_array, width=width, bottom=stacked_neg_right, label=label, - color=color, align="edge") + plot = ax.bar( + ind + width, + data_array, + width=width, + bottom=stacked_neg_right, + label=label, + color=color, + align="edge", + ) stacked_neg_right = stacked_neg_right + data_array if plot is not None: handles.append(plot) xlabels = df[options["haxis"]["values"]].values.tolist() - axis.set_xticks(ind, labels=xlabels, ) + axis.set_xticks( + ind, + labels=xlabels, + ) if "title" in options["haxis"]: axis.set_xlabel(options["haxis"]["title"]) if len(df.index) > 8: axis.xaxis.set_major_locator(ticker.MultipleLocator(2)) axis.xaxis.set_minor_locator(ticker.MultipleLocator(1)) if len(str(xlabels[0])) > 2: - axis.set_xticks(axis.get_xticks(), axis.get_xticklabels(), rotation=45, ha='right') + axis.set_xticks( + axis.get_xticks(), axis.get_xticklabels(), rotation=45, ha="right" + ) - ylim = math.ceil(max(np.max(stacked_plus), - np.min(stacked_neg))) + ylim = math.ceil(max(np.max(stacked_plus), -np.min(stacked_neg))) # math.ceil(max(max(accu_out_p) + max(c_l_p) + max(pv_p), -min(min(base_n), min(boiler_n), # min(heatpump_n), min(ev_n), min(c_t_n), min(accu_in_n) ))) if np.min(stacked_neg) < 0: @@ -106,9 +148,9 @@ def build(df, options, show=True): axis.set_ylabel(options["vaxis"][0]["title"]) if axis_right: - ylim = math.ceil(max(np.max(stacked_plus_right), - np.min(stacked_neg_right))) - # math.ceil(max(max(accu_out_p) + max(c_l_p) + max(pv_p), -min(min(base_n), min(boiler_n), - # min(heatpump_n), min(ev_n), min(c_t_n), min(accu_in_n) ))) + ylim = math.ceil( + max(np.max(stacked_plus_right), -np.min(stacked_neg_right)) + ) if np.min(stacked_neg_right) < 0: axis_right.set_ylim([-ylim, ylim]) else: @@ -121,7 +163,12 @@ def build(df, options, show=True): axis.set_position([box.x0, box.y0, box.width * 0.8, box.height]) # Put a legend to the right of the current axis # axis.legend(loc = 'center left', bbox_to_anchor=(1, 0.5)) - axis.legend(handles=handles, labels=labels, loc='upper left', bbox_to_anchor=(1.05, 1.00)) + axis.legend( + handles=handles, + labels=labels, + loc="upper left", + bbox_to_anchor=(1.05, 1.00), + ) if show: plt.show() else: diff --git a/dao/prog/da_meteo.py b/dao/prog/da_meteo.py index 2966c9a..84cb73f 100644 --- a/dao/prog/da_meteo.py +++ b/dao/prog/da_meteo.py @@ -4,9 +4,11 @@ import logging import pandas as pd import pytz +import ephem from requests import get import matplotlib.pyplot as plt import graphs + # import os, sys # sys.path.append(os.path.abspath("../dalib")) from da_config import Config @@ -14,6 +16,7 @@ from sqlalchemy import Table, select, func, and_ +# noinspection PyUnresolvedReferences class Meteo: def __init__(self, config: Config, db_da: DBmanagerObj): self.config = config @@ -35,7 +38,9 @@ def makerefmoment(moment): return datetime.datetime.timestamp(date_ref) @staticmethod - def direct_radiation_factor(hcol: float, 
acol: float, hzon: float, azon: float) -> float: + def direct_radiation_factor( + hcol: float, acol: float, hzon: float, azon: float + ) -> float: """ berekent de omrekenfacor van directe zon straling op het collectorvlak alle parameters in radialen @@ -48,8 +53,13 @@ def direct_radiation_factor(hcol: float, acol: float, hzon: float, azon: float) if hzon <= 0: return 0 else: - return max(0.0, (math.cos(hcol) * math.sin(hzon) + math.sin(hcol) * math.cos(hzon) * math.cos( - acol - azon))) / math.sin(hzon) + return max( + 0.0, + ( + math.cos(hcol) * math.sin(hzon) + + math.sin(hcol) * math.cos(hzon) * math.cos(acol - azon) + ), + ) / math.sin(hzon) def sun_position(self, utc_time): """ @@ -59,7 +69,7 @@ def sun_position(self, utc_time): """ # param nb: latitude: noorderbreed in graden # param ol: longitude: oosterlengte in graden - ''' + """ # oude methode jd = (float(utc_time) / 86400.0) + 2440587.5 @@ -74,8 +84,8 @@ def sun_position(self, utc_time): alfa_zon = lamda_zon_deg - 2.468 * math.sin(2 * lamda_zon_rad) + 0.053 * math.sin( 4 * lamda_zon_rad) - 0.0014 * math.sin(6 * lamda_zon_rad) # in graden - delta_zon = 22.8008 * math.sin(lamda_zon_rad) + 0.5999 * pow(math.sin(lamda_zon_rad), 3) + 0.0493 * pow( - math.sin(lamda_zon_rad), 5) + delta_zon = 22.8008 * math.sin(lamda_zon_rad) + 0.5999 * pow(math.sin(lamda_zon_rad), 3) + + 0.0493 * pow(math.sin(lamda_zon_rad), 5) delta_zon_rad = math.radians(delta_zon) noorder_breedte = self.latitude ooster_lengte = self.longitude @@ -91,43 +101,53 @@ def sun_position(self, utc_time): # hoogte boven horizon h_rad = math.asin(math.sin(noorder_breedte_rad) * math.sin(delta_zon_rad) - + math.cos(noorder_breedte_rad) * math.cos(delta_zon_rad) * math.cos(h_rad)) + + math.cos(noorder_breedte_rad) * math.cos(delta_zon_rad) * + math.cos(h_rad)) a_rad = math.atan2(math.sin(h_rad), - math.cos(h_rad) * math.sin(noorder_breedte_rad) - math.tan(delta_zon_rad) * math.cos( - noorder_breedte_rad)) # links of rechts van zuid + math.cos(h_rad) * math.sin(noorder_breedte_rad) - + math.tan(delta_zon_rad) * math.cos(noorder_breedte_rad)) + # links of rechts van zuid result = {'h': h_rad, 'A': a_rad} # tot hier oude methode - ''' + """ # vanaf hier nieuwe methode - ''' + """ Declinatie en uurhoek - De in de afbeelding over deklinatie en uurhoek getekende hoeken zoals u en d leggen de stralingsrichting vast. - Op iedere datum geldt: d = constant. Deze constante kan op de n- de dag van het jaar met grote nauwkeurigheid + De in de afbeelding over deklinatie en uurhoek getekende hoeken zoals u en d leggen + de stralingsrichting vast. + Op iedere datum geldt: d = constant. Deze constante kan op de n- de dag van het jaar + met grote nauwkeurigheid worden berekend met behulp van formule 1: d = 23,44° sin {360°(284 + n)/365} (1) Eveneens op iedere datum geldt, dat: u = t x 15° (2) - met t gelijk aan de tijd in uren volgens Z.T. Met gehulp van (1) en (2) kan nu de stralingsrichting worden + met t gelijk aan de tijd in uren volgens Z.T. Met gehulp van (1) en (2) kan nu de + stralingsrichting worden gevonden op ieder gewenst tijdstip op iedere gewenste datum. Azimut en zonshoogte - De stralingsrichting is ook vast te leggen met behulp van de hoeken a en h. Zie de figuur over Azimut en - zonshoogte. In appendix A is afgeleid, hoe deze hoeken kunnen worden geschreven als functie van de zojuist - genoemde hoeken u en d. Het blijkt handiger om h te schrijven als functie van u en d en om a te schrijven als + De stralingsrichting is ook vast te leggen met behulp van de hoeken a en h. 
Zie de figuur + over Azimut en + zonshoogte. In appendix A is afgeleid, hoe deze hoeken kunnen worden geschreven als functie + van de zojuist + genoemde hoeken u en d. Het blijkt handiger om h te schrijven als functie van u en d en + om a te schrijven als functie van u, d en h. Gevonden wordt: h = arcsin (sin ф sin d – cos ф cos d cos u) (3) a = arcsin { (cos d sin u) / cos h } (4) - De hoek ф is gelijk aan de breedtegraad van de plaats op aarde, waar a en h moeten worden bepaald. + De hoek ф is gelijk aan de breedtegraad van de plaats op aarde, waar a en h moeten + worden bepaald. De waarden, die a en h aannemen, zijn nu dus plaatsafhankelijk. - ''' - ''' + """ + """ dt = datetime.datetime.fromtimestamp(utc_time) dt_start = datetime.datetime(dt.year,1,1) dif = dt - dt_start n = dif.days - d = math.radians(23.44 * math.sin(math.radians(360*(284 + n) / 365))) # declinatie in radialen + d = math.radians(23.44 * math.sin(math.radians(360*(284 + n) / 365))) # declinatie + in radialen dtz = datetime.datetime.fromtimestamp(utc_time, tz=pytz.utc) t = dtz.hour u = t * math.radians(15) #uurhoek in radialen @@ -137,9 +157,8 @@ def sun_position(self, utc_time): h_degrees = math.degrees(h) a_degrees = math.degrees(a) result = {'d': math.degrees(d), 'u': math.degrees(u), 'h': h, 'A': a} - ''' + """ - import ephem observer = ephem.Observer() observer.lat = math.radians(self.latitude) # breedtegraad observer.lon = math.radians(self.longitude) @@ -154,7 +173,7 @@ def get_dif_rad_factor(self, utc_time): cor_utc_time = float(utc_time) + 1800 # 52 graden noorderbreedte, 5 graden oosterlengte sunpos = self.sun_position(cor_utc_time) - sun_h = sunpos['h'] # hoogte boven horizon in rad + sun_h = sunpos["h"] # hoogte boven horizon in rad if sun_h > 0: # maximale theoretische straling op hor vlak value = 360 * 1.37 * math.sin(sun_h) @@ -162,7 +181,9 @@ def get_dif_rad_factor(self, utc_time): value = 0.0 return value - def solar_rad(self, utc_time: float, radiation: float, h_col: float, a_col: float) -> float: + def solar_rad( + self, utc_time: float, radiation: float, h_col: float, a_col: float + ) -> float: """ :param utc_time: utc tijd in sec :param radiation: globale straling in J/cm² @@ -176,8 +197,10 @@ def solar_rad(self, utc_time: float, radiation: float, h_col: float, a_col: floa q_tot = radiation else: sun_pos = self.sun_position(utc_time) - dir_rad_factor = min(2.0, self.direct_radiation_factor( - h_col, a_col, sun_pos['h'], sun_pos['A'])) + dir_rad_factor = min( + 2.0, + self.direct_radiation_factor(h_col, a_col, sun_pos["h"], sun_pos["A"]), + ) # maximale straling op horz.vlak q_oz = self.get_dif_rad_factor(utc_time) @@ -222,31 +245,38 @@ def solar_rad_df(self, global_rad): tilt = min(90, max(0, tilt)) hcol = math.radians(tilt) acol = math.radians(orientation) - global_rad['solar_rad'] = '' # new column empty + global_rad["solar_rad"] = "" # new column empty # make sure indexes pair with number of rows global_rad = global_rad.reset_index() for row in global_rad.itertuples(): utc_time = row.tijd radiation = float(row.gr) q_tot = self.solar_rad(int(utc_time) - 3600, radiation, hcol, acol) - global_rad.loc[(global_rad.tijd == utc_time), 'solar_rad'] = q_tot + global_rad.loc[(global_rad.tijd == utc_time), "solar_rad"] = q_tot return global_rad - def get_from_meteoserver (self, model: str) -> pd.DataFrame: - parameters = "?lat=" + str(self.latitude) + "&long=" + str(self.longitude) + "&key=" + self.meteoserver_key + def get_from_meteoserver(self, model: str) -> pd.DataFrame: + parameters = ( + "?lat=" + + 
str(self.latitude) + + "&long=" + + str(self.longitude) + + "&key=" + + self.meteoserver_key + ) if model == "harmonie": url = "https://data.meteoserver.nl/api/uurverwachting.php" else: url = "https://data.meteoserver.nl/api/uurverwachting_gfs.php" resp = get(url + parameters) - logging.debug (resp.text) + logging.debug(resp.text) try: json_object = json.loads(resp.text) except Exception as ex: logging.info(ex) logging.error(f"Geen meteodata via model: {model}") return pd.DataFrame() - if not "data" in json_object: + if "data" not in json_object: return pd.DataFrame() data = json_object["data"] @@ -257,49 +287,81 @@ def get_from_meteoserver (self, model: str) -> pd.DataFrame: # Convert a List of dictionaries using from_records() method. df = pd.DataFrame.from_records(data) df = self.solar_rad_df(df) - df1 = df[['tijd', 'tijd_nl', 'gr', 'temp', 'solar_rad']] + df1 = df[["tijd", "tijd_nl", "gr", "temp", "solar_rad"]] logging.info(f"Meteo data {model}: \n{df1.to_string(index=True)}") logging.info(f"Aantal meteorecords {model}: {len(df1)}") return df1 def get_meteo_data(self, show_graph=False): df1 = self.get_from_meteoserver("harmonie") - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + df_db = pd.DataFrame(columns=["time", "code", "value"]) count = len(df1) if count == 0: logging.error("No data recieved from meteoserver") else: df1 = df1.reset_index() # make sure indexes pair with number of rows for row in df1.itertuples(): - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'gr', float(row.gr)] - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'temp', float(row.temp)] - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'solar_rad', float(row.solar_rad)] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "gr", + float(row.gr), + ] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "temp", + float(row.temp), + ] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "solar_rad", + float(row.solar_rad), + ] if count < 39: df1 = self.get_from_meteoserver("gfs") for row in df1[count:].itertuples(): - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'gr', float(row.gr)] - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'temp', float(row.temp)] - df_db.loc[df_db.shape[0]] = [str(int(row.tijd) - 3600), 'solar_rad', float(row.solar_rad)] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "gr", + float(row.gr), + ] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "temp", + float(row.temp), + ] + df_db.loc[df_db.shape[0]] = [ + str(int(row.tijd) - 3600), + "solar_rad", + float(row.solar_rad), + ] count += 1 if count >= 48: break df_tostring = df_db # df_tostring["tijd"] = pd.to_datetime(df_tostring["time"]) - df_tostring['tijd'] = ( - df_tostring['time'].apply(lambda x: datetime.datetime.fromtimestamp(int(x)).strftime("%Y-%m-%d %H:%M"))) + df_tostring["tijd"] = df_tostring["time"].apply( + lambda x: datetime.datetime.fromtimestamp(int(x)).strftime("%Y-%m-%d %H:%M") + ) logging.debug(f"Meteo data records \n{df_tostring.to_string(index=False)}") self.db_da.savedata(df_db) - style = self.config.get(['graphics', 'style'], None, "default") + style = self.config.get(["graphics", "style"], None, "default") plt.style.use(style) - graphs.make_graph_meteo(df1, file="../data/images/meteo_" + datetime.datetime.now().strftime("%Y-%m-%d__%H-%M") - + ".png", show=show_graph) + graphs.make_graph_meteo( + df1, + file="../data/images/meteo_" + + datetime.datetime.now().strftime("%Y-%m-%d__%H-%M") + + ".png", + 
show=show_graph, + ) - ''' - url = "https://api.forecast.solar/estimate/watthours/"+str(self.latitude)+"/"+str(self.longitude)+"/45/5/5.5" + """ + url = "https://api.forecast.solar/estimate/watthours/"+str(self.latitude)+"/" + +str(self.longitude)+"/45/5/5.5" resp = get(url) + print (resp.text) json_object = json.loads(resp.text) data = json_object["result"] @@ -317,107 +379,150 @@ def get_meteo_data(self, show_graph=False): if (day != last_day): # or (last_hour < hour-1): if last_day == -1: for h in range(last_hour+1, hour): - time_h = dt.datetime(datetime_obj.year, datetime_obj.month, datetime_obj.day, h,0,0 ) + time_h = dt.datetime(datetime_obj.year, datetime_obj.month, + datetime_obj.day, h,0,0 ) time_utc = dt.datetime.timestamp(time_h) - 3600 - df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] + df_db.loc[df_db.shape[0]] = [str(int(time_utc)), + time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] else: for h in range(last_hour + 1, 24): - time_h = dt.datetime(last_datetime_obj.year,last_datetime_obj.month,last_datetime_obj.day,h,0,0) + time_h = dt.datetime(last_datetime_obj.year,last_datetime_obj.month, + last_datetime_obj.day,h,0,0) time_utc = dt.datetime.timestamp(time_h) - 3600 - df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] + df_db.loc[df_db.shape[0]] = [str(int(time_utc)), + time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] for h in range(0, hour): - time_h = dt.datetime(datetime_obj.year, datetime_obj.month, datetime_obj.day, h, 0, 0) + time_h = dt.datetime(datetime_obj.year, datetime_obj.month, + datetime_obj.day, h, 0, 0) time_utc = dt.datetime.timestamp(time_h) - 3600 - df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] + df_db.loc[df_db.shape[0]] = [str(int(time_utc)), + time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] last_value = 0 - time_h = dt.datetime(datetime_obj.year, datetime_obj.month, datetime_obj.day, hour, 0, 0) + time_h = dt.datetime(datetime_obj.year, datetime_obj.month, d + atetime_obj.day, hour, 0, 0) time_utc = dt.datetime.timestamp(time_h) -3600 - df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), 'pv', pv_w - last_value] + df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), + 'pv', pv_w - last_value] last_hour = hour last_value = pv_w last_day = day last_datetime_obj = datetime_obj for h in range(last_hour + 1, 24): - time_h = dt.datetime(last_datetime_obj.year, last_datetime_obj.month, last_datetime_obj.day, h, 0, 0) + time_h = dt.datetime(last_datetime_obj.year, last_datetime_obj.month, + last_datetime_obj.day, h, 0, 0) time_utc = dt.datetime.timestamp(time_h) - 3600 - df_db.loc[df_db.shape[0]] = [str(int(time_utc)), time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] + df_db.loc[df_db.shape[0]] = [str(int(time_utc)), + time_h.strftime("%Y-%m-%d %H:%M"), 'pv', 0] print(df_db) - graphs.make_graph_meteo(df_db, file = "../data/images/meteo" + datetime.datetime.now().strftime("%H%M") + + graphs.make_graph_meteo(df_db, file = "../data/images/meteo" + + datetime.datetime.now().strftime("%H%M") + ".png", show=show_graph) del df_db["time_str"] print(df_db) self.db_da.savedata(df_db) - ''' + """ - def calc_graaddagen(self, date: datetime.datetime = None, weighted: bool = False) -> float: + def get_avg_temperature(self, date: datetime.datetime = None) -> float: """ Berekent gewogen met temperatuur grens van 16 oC :param date: de datum waarvoor de berekening wordt gevraagd als None: vandaag - 
:param weighted : boolean, berekenen met (true) of zonder (false) weegfactor :return: berekende gewogen graaddagen """ if date is None: - date = datetime.datetime.combine(datetime.datetime.today(), datetime.datetime.min.time()) + date = datetime.datetime.combine( + datetime.datetime.today(), datetime.datetime.min.time() + ) date_utc = int(date.timestamp()) # Reflect existing tables from the database - values_table = Table('values', self.db_da.metadata, autoload_with=self.db_da.engine) - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=self.db_da.engine) + values_table = Table( + "values", self.db_da.metadata, autoload_with=self.db_da.engine + ) + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Construct the inner query - inner_query = select( - values_table.c.time, - values_table.c.value, - self.db_da.from_unixtime(values_table.c.time).label('begin') - ).where( - and_( - variabel_table.c.code == 'temp', - values_table.c.variabel == variabel_table.c.id, - values_table.c.time >= date_utc + inner_query = ( + select( + values_table.c.time, + values_table.c.value, + self.db_da.from_unixtime(values_table.c.time).label("begin"), + ) + .where( + and_( + variabel_table.c.code == "temp", + values_table.c.variabel == variabel_table.c.id, + values_table.c.time >= date_utc, + ) ) - ).order_by( - values_table.c.time.asc() - ).limit(24).alias('t1') + .order_by(values_table.c.time.asc()) + .limit(24) + .alias("t1") + ) # Construct the outer query - outer_query = select( - func.avg(inner_query.c.value).label('avg_temp') - ) + outer_query = select(func.avg(inner_query.c.value).label("avg_temp")) # Execute the query and fetch the result with self.db_da.engine.connect() as connection: result = connection.execute(outer_query) avg_temp = result.scalar() - ''' + """ sql_avg_temp = ( "SELECT AVG(t1.`value`) avg_temp FROM " "(SELECT `time`, `value`, from_unixtime(`time`) 'begin' " "FROM `values` , `variabel` " - "WHERE `variabel`.`code` = 'temp' AND `values`.`variabel` = `variabel`.`id` AND time >= " + - str(date_utc) + " " + "WHERE `variabel`.`code` = 'temp' + AND `values`.`variabel` = `variabel`.`id` + AND time >= " + str(date_utc) + " " "ORDER BY `time` ASC LIMIT 24) t1 " ) data = self.db_da.run_select_query(sql_avg_temp) avg_temp = float(data['avg_temp'].values[0]) - ''' + """ + return avg_temp + + def calc_graaddagen( + self, + date: datetime.datetime = None, + avg_temp: float | None = None, + weighted: bool = False, + ) -> float: + """ + Berekent graaddagen met temperatuur grens van 16 oC + :param date: de datum waarvoor de berekening wordt gevraagd + als None: vandaag + :param avg_temp: de gemiddelde temperatuur, default None + :param weighted: boolean, gewogen als true, default false + :return: berekende eventueel gewogen graaddagen + """ + if date is None: + date = datetime.datetime.combine( + datetime.datetime.today(), datetime.datetime.min.time() + ) + if avg_temp is None: + avg_temp = self.get_avg_temperature(date) weight_factor = 1 if weighted: mon = date.month if mon <= 2 or mon >= 11: weight_factor = 1.1 - elif mon >= 4 or mon <= 9: - weight_factor = 0.9 + elif 4 <= mon <= 9: + weight_factor = 0.8 if avg_temp >= 16: result = 0 else: result = weight_factor * (16 - avg_temp) return result - def calc_solar_rad(self, solar_opt: dict, utc_time: int, global_rad: float) -> float: + def calc_solar_rad( + self, solar_opt: dict, utc_time: int, global_rad: float + ) -> float: """ :param solar_opt: definitie van paneel met tilt: helling 
t.o.v. plat vlak in graden, 0 = vlak (horizontaal), 90 = verticaal diff --git a/dao/prog/da_prices.py b/dao/prog/da_prices.py index 3a99bd9..8ec71d0 100644 --- a/dao/prog/da_prices.py +++ b/dao/prog/da_prices.py @@ -1,4 +1,3 @@ -import math from da_config import Config import pandas as pd from db_manager import DBmanagerObj @@ -26,36 +25,43 @@ def get_time_latest_record(self, code: str) -> datetime.datetime: :param code: de code van het record :return: datum en tijd van het laatst aanwezige record """ - ''' + """ query = ("SELECT from_unixtime(`time`) tijd, `value` " "FROM `values`, `variabel` " "WHERE `variabel`.`code` = '" + code + "' and `values`.`variabel` = `variabel`.`id` " "ORDER BY `time` desc LIMIT 1") - ''' + """ # Reflect existing tables from the database with self.db_da.engine.connect() as connection: - values_table = Table('values', self.db_da.metadata, autoload_with=connection) - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=connection) + values_table = Table( + "values", self.db_da.metadata, autoload_with=connection + ) + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=connection + ) # Construct the query - query = select( - self.db_da.from_unixtime(values_table.c.time).label('tijd'), - values_table.c.value - ).where( - and_( - variabel_table.c.code == code, - values_table.c.variabel == variabel_table.c.id, + query = ( + select( + self.db_da.from_unixtime(values_table.c.time).label("tijd"), + values_table.c.value, ) - ).order_by( - values_table.c.time.desc() - ).limit(1) + .where( + and_( + variabel_table.c.code == code, + values_table.c.variabel == variabel_table.c.id, + ) + ) + .order_by(values_table.c.time.desc()) + .limit(1) + ) # Execute the query and fetch the result with self.db_da.engine.connect() as connection: result = connection.execute(query) result = result.scalar() - if type(result) == str: + if type(result) is str: result = datetime.datetime.strptime(result, "%Y-%m-%d %H:%M:%S") return result @@ -66,8 +72,7 @@ def get_prices(self, source): arg_s = sys.argv[2] start = datetime.datetime.strptime(arg_s, "%Y-%m-%d") else: - start = pd.Timestamp( - year=now.year, month=now.month, day=now.day, tz='CET') + start = pd.Timestamp(year=now.year, month=now.month, day=now.day, tz="CET") # end if len(sys.argv) > 3: arg_s = sys.argv[3] @@ -90,26 +95,33 @@ def get_prices(self, source): # day-ahead market prices (€/MWh) if source.lower() == "entsoe": start = pd.Timestamp( - year=start.year, month=start.month, day=start.day, tz='CET') - end = pd.Timestamp(year=end.year, month=end.month, - day=end.day, tz='CET') + year=start.year, month=start.month, day=start.day, tz="CET" + ) + end = pd.Timestamp(year=end.year, month=end.month, day=end.day, tz="CET") api_key = self.config.get(["prices", "entsoe-api-key"]) client = EntsoePandasClient(api_key=api_key) da_prices = pd.DataFrame() try: - da_prices = client.query_day_ahead_prices( - 'NL', start=start, end=end) + da_prices = client.query_day_ahead_prices("NL", start=start, end=end) except Exception as ex: logging.error(ex) logging.error(f"Geen data van Entsoe: tussen {start} en {end}") if len(da_prices.index) > 0: - df_db = pd.DataFrame(columns=['time', 'code', 'value']) - da_prices = da_prices.reset_index() # make sure indexes pair with number of rows - logging.info(f"Day ahead prijzen van Entsoe: \n{da_prices.to_string(index=False)}") + df_db = pd.DataFrame(columns=["time", "code", "value"]) + da_prices = ( + da_prices.reset_index() + ) # make sure indexes pair with number of rows + 
logging.info( + f"Day ahead prijzen van Entsoe: \n{da_prices.to_string(index=False)}" + ) + last_time = start for row in da_prices.itertuples(): last_time = int(datetime.datetime.timestamp(row[1])) - df_db.loc[df_db.shape[0]] = [str(last_time), 'da', row[2] / 1000] - logging.debug(f"Day ahead prijzen (source: entsoe, db-records): \n{df_db.to_string(index=False)}") + df_db.loc[df_db.shape[0]] = [str(last_time), "da", row[2] / 1000] + logging.debug( + f"Day ahead prijzen (source: entsoe, db-records): \n" + f"{df_db.to_string(index=False)}" + ) self.db_da.savedata(df_db) end_dt = datetime.datetime(end.year, end.month, end.day, 23) last_time_dt = datetime.datetime.fromtimestamp(last_time) @@ -117,7 +129,9 @@ def get_prices(self, source): if len(df_db) == 0: logging.error(f"Geen data van Entsoe tot en met {end_dt}") else: - logging.warning(f"Geen data van Entsoe tussen {last_time_dt} en {end_dt}") + logging.warning( + f"Geen data van Entsoe tussen {last_time_dt} en {end_dt}" + ) if source.lower() == "nordpool": # ophalen bij Nordpool @@ -127,108 +141,142 @@ def get_prices(self, source): else: end_date = start try: - hourly_prices_spot = prices_spot.hourly(areas=['NL'], end_date=end_date) + hourly_prices_spot = prices_spot.hourly(areas=["NL"], end_date=end_date) + except ConnectionError: + logging.error(f"Geen data van Nordpool: tussen {start} en {end}") + return except Exception as ex: - # logging.error(ex) + logging.exception(ex) logging.error(f"Geen data van Nordpool: tussen {start} en {end}") return - hourly_values = hourly_prices_spot['areas']['NL']['values'] + hourly_values = hourly_prices_spot["areas"]["NL"]["values"] s = pp.pformat(hourly_values, indent=2) logging.info(f"Day ahead prijzen van Nordpool:\n {s}") - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + df_db = pd.DataFrame(columns=["time", "code", "value"]) for hourly_value in hourly_values: - time_dt = hourly_value['start'] + time_dt = hourly_value["start"] time_ts = time_dt.timestamp() - value = hourly_value['value'] - if value == float('inf'): + value = hourly_value["value"] + if value == float("inf"): continue else: value = value / 1000 - df_db.loc[df_db.shape[0]] = [str(time_ts), 'da', value] - logging.debug(f"Day ahead prices for {end_date.strftime('%Y-%m-%d') if end_date else 'tomorrow'}" - f" (source: nordpool, db-records): \n {df_db.to_string(index=False)}") + df_db.loc[df_db.shape[0]] = [str(time_ts), "da", value] + logging.debug( + f"Day ahead prices for " + f"{end_date.strftime('%Y-%m-%d') if end_date else 'tomorrow'}" + f" (source: nordpool, db-records): \n {df_db.to_string(index=False)}" + ) if len(df_db) < 24: - logging.warning(f"Retrieve of day ahead prices for " - f"{end_date.strftime('%Y-%m-%d') if end_date else 'tomorrow'} failed") + logging.warning( + f"Retrieve of day ahead prices for " + f"{end_date.strftime('%Y-%m-%d') if end_date else 'tomorrow'} " + f"failed" + ) self.db_da.savedata(df_db) if source.lower() == "easyenergy": # ophalen bij EasyEnergy # 2022-06-25T00:00:00 - startstr = start.strftime('%Y-%m-%dT%H:%M:%S') - endstr = end.strftime('%Y-%m-%dT%H:%M:%S') - url = "https://mijn.easyenergy.com/nl/api/tariff/getapxtariffs?startTimestamp=" + \ - startstr + "&endTimestamp=" + endstr + startstr = start.strftime("%Y-%m-%dT%H:%M:%S") + endstr = end.strftime("%Y-%m-%dT%H:%M:%S") + url = ( + "https://mijn.easyenergy.com/nl/api/tariff/getapxtariffs?startTimestamp=" + + startstr + + "&endTimestamp=" + + endstr + ) resp = get(url) logging.debug(resp.text) json_object = json.loads(resp.text) df = 
pd.DataFrame.from_records(json_object) - logging.info(f"Day ahead prijzen van Easyenergy:\n {df.to_string(index=False)}") + logging.info( + f"Day ahead prijzen van Easyenergy:\n {df.to_string(index=False)}" + ) # datetime.datetime.strptime('Tue Jun 22 12:10:20 2010 EST', '%a %b %d %H:%M:%S %Y %Z') - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + df_db = pd.DataFrame(columns=["time", "code", "value"]) df = df.reset_index() # make sure indexes pair with number of rows for row in df.itertuples(): - dtime = str(int(datetime.datetime.fromisoformat(row.Timestamp).timestamp())) - df_db.loc[df_db.shape[0]] = [dtime, 'da', row.TariffReturn] + dtime = str( + int(datetime.datetime.fromisoformat(row.Timestamp).timestamp()) + ) + df_db.loc[df_db.shape[0]] = [dtime, "da", row.TariffReturn] - logging.debug(f"Day ahead prijzen (source: easy energy, db-records): \n {df_db.to_string(index=False)}") + logging.debug( + f"Day ahead prijzen (source: easy energy, db-records): \n " + f"{df_db.to_string(index=False)}" + ) self.db_da.savedata(df_db) if source.lower() == "tibber": now_ts = datetime.datetime.now().timestamp() get_ts = start.timestamp() count = 1 + math.ceil((now_ts - get_ts) / 3600) - query = '{ ' \ - '"query": ' \ - ' "{ ' \ - ' viewer { ' \ - ' homes { ' \ - ' currentSubscription { ' \ - ' priceInfo { ' \ - ' today { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' tomorrow { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' range(resolution: HOURLY, last: '+str(count)+') { ' \ - ' nodes { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - '}" ' \ - '}' + query = ( + "{ " + '"query": ' + ' "{ ' + " viewer { " + " homes { " + " currentSubscription { " + " priceInfo { " + " today { " + " energy " + " startsAt " + " } " + " tomorrow { " + " energy " + " startsAt " + " } " + " range(resolution: HOURLY, last: " + str(count) + ") { " + " nodes { " + " energy " + " startsAt " + " } " + " } " + " } " + " } " + " } " + " } " + '}" ' + "}" + ) logging.debug(query) tibber_options = self.config.get(["tibber"]) - url = self.config.get(["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql") + url = self.config.get( + ["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql" + ) headers = { "Authorization": "Bearer " + tibber_options["api_token"], "content-type": "application/json", } resp = post(url, headers=headers, data=query) tibber_dict = json.loads(resp.text) - today_nodes = tibber_dict['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['today'] - tomorrow_nodes = tibber_dict['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['tomorrow'] - range_nodes = ( - tibber_dict)['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['range']['nodes'] - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + today_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["today"] + tomorrow_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["tomorrow"] + range_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["range"]["nodes"] + df_db = pd.DataFrame(columns=["time", "code", "value"]) for lst in [today_nodes, tomorrow_nodes, range_nodes]: for node in lst: - dt = datetime.datetime.strptime(node['startsAt'], "%Y-%m-%dT%H:%M:%S.%f%z") + dt = datetime.datetime.strptime( + node["startsAt"], "%Y-%m-%dT%H:%M:%S.%f%z" + ) time_stamp = dt.timestamp() value = float(node["energy"]) 
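As an aside, the flattening of Tibber's priceInfo nodes into time/code/value rows can be exercised in isolation. The snippet below is a minimal sketch with a hard-coded sample payload: the two sample nodes are invented, while the field names "startsAt" and "energy" and the "da" code follow the structure used in this hunk.

import datetime
import pandas as pd

# Invented sample of the node structure parsed here (today/tomorrow/range nodes)
sample_nodes = [
    {"energy": 0.2134, "startsAt": "2024-11-07T13:00:00.000+01:00"},
    {"energy": 0.1987, "startsAt": "2024-11-07T14:00:00.000+01:00"},
]

df_db = pd.DataFrame(columns=["time", "code", "value"])
for node in sample_nodes:
    dt = datetime.datetime.strptime(node["startsAt"], "%Y-%m-%dT%H:%M:%S.%f%z")
    # store the hourly price under code "da", keyed on the unix timestamp of the hour
    df_db.loc[df_db.shape[0]] = [str(int(dt.timestamp())), "da", float(node["energy"])]

print(df_db.to_string(index=False))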
logging.info(f"{node} {dt} {time_stamp} {value}") - df_db.loc[df_db.shape[0]] = [time_stamp, 'da', value] - logging.debug(f"Day ahead prijzen (source: tibber, db-records): \n {df_db.to_string(index=False)}") + df_db.loc[df_db.shape[0]] = [time_stamp, "da", value] + logging.debug( + f"Day ahead prijzen (source: tibber, db-records): \n " + f"{df_db.to_string(index=False)}" + ) self.db_da.savedata(df_db) if source.lower() == "tibber": @@ -236,54 +284,68 @@ def get_prices(self, source): get_ts = start.timestamp() count = 1 + math.ceil((now_ts - get_ts) / 3600) count_str = str(count) - query = '{ ' \ - '"query": ' \ - ' "{ ' \ - ' viewer { ' \ - ' homes { ' \ - ' currentSubscription { ' \ - ' priceInfo { ' \ - ' today { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' tomorrow { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' range(resolution: HOURLY, last: '+count_str+') { ' \ - ' nodes { ' \ - ' energy ' \ - ' startsAt ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - '}" ' \ - '}' + query = ( + "{ " + '"query": ' + ' "{ ' + " viewer { " + " homes { " + " currentSubscription { " + " priceInfo { " + " today { " + " energy " + " startsAt " + " } " + " tomorrow { " + " energy " + " startsAt " + " } " + " range(resolution: HOURLY, last: " + count_str + ") { " + " nodes { " + " energy " + " startsAt " + " } " + " } " + " } " + " } " + " } " + " } " + '}" ' + "}" + ) logging.debug(query) tibber_options = self.config.get(["tibber"]) - url = self.config.get(["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql") + url = self.config.get( + ["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql" + ) headers = { "Authorization": "Bearer " + tibber_options["api_token"], "content-type": "application/json", } resp = post(url, headers=headers, data=query) tibber_dict = json.loads(resp.text) - today_nodes = tibber_dict['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['today'] - tomorrow_nodes = tibber_dict['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['tomorrow'] - range_nodes = ( - tibber_dict)['data']['viewer']['homes'][0]['currentSubscription']['priceInfo']['range']['nodes'] - df_db = pd.DataFrame(columns=['time', 'code', 'value']) + today_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["today"] + tomorrow_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["tomorrow"] + range_nodes = tibber_dict["data"]["viewer"]["homes"][0][ + "currentSubscription" + ]["priceInfo"]["range"]["nodes"] + df_db = pd.DataFrame(columns=["time", "code", "value"]) for lst in [today_nodes, tomorrow_nodes, range_nodes]: for node in lst: - dt = datetime.datetime.strptime(node['startsAt'], "%Y-%m-%dT%H:%M:%S.%f%z") + dt = datetime.datetime.strptime( + node["startsAt"], "%Y-%m-%dT%H:%M:%S.%f%z" + ) time_stamp = str(int(dt.timestamp())) value = float(node["energy"]) logging.info(f"{node} {dt} {time_stamp} {value}") - df_db.loc[df_db.shape[0]] = [time_stamp, 'da', value] - logging.debug(f"Day ahead prijzen (source: tibber, db-records): \n {df_db.to_string(index=False)}") + df_db.loc[df_db.shape[0]] = [time_stamp, "da", value] + logging.debug( + f"Day ahead prijzen (source: tibber, db-records): \n " + f"{df_db.to_string(index=False)}" + ) self.db_da.savedata(df_db) diff --git a/dao/prog/da_report.py b/dao/prog/da_report.py index f948dc5..6b0816e 100644 --- a/dao/prog/da_report.py +++ b/dao/prog/da_report.py @@ -4,7 +4,6 @@ import base64 from io import BytesIO from 
dateutil.relativedelta import relativedelta -from dao.prog.db_manager import DBmanagerObj from dao.prog.da_config import Config from dao.prog.da_graph import GraphBuilder import math @@ -18,33 +17,10 @@ class Report: periodes = {} - def __init__(self, file_name:str="../data/options.json"): + def __init__(self, file_name: str = "../data/options.json"): self.config = Config(file_name) - db_da_engine = self.config.get(['database da', "engine"], None, "mysql") - db_da_server = self.config.get(['database da', "server"], None, "core-mariadb") - db_da_port = int(self.config.get(['database da', "port"], None, 3306)) - if db_da_engine == "sqlite": - db_da_name = self.config.get(['database da', "database"], None, "day_ahead.db") - else: - db_da_name = self.config.get(['database da', "database"], None, "day_ahead") - db_da_user = self.config.get(['database da', "username"], None, "day_ahead") - db_da_password = self.config.get(['database da', "password"]) - db_da_path = self.config.get(['database da', "db_path"], None, "../data") - db_time_zone = self.config.get(["time_zone"], None, 'Europe/Amsterdam') - self.db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, db_server=db_da_server, - db_port=db_da_port, db_user=db_da_user, db_password=db_da_password, - db_time_zone=db_time_zone, db_path=db_da_path) - db_ha_engine = self.config.get(['database ha', "engine"], None, "mysql") - db_ha_server = self.config.get(['database ha', "server"], None, "core-mariadb") - db_ha_port = int(self.config.get(['database ha', "port"], None, 3306)) - db_ha_name = self.config.get(['database ha', "database"], None, "homeassistant") - db_ha_user = self.config.get(['database ha', "username"], None, "day_ahead") - db_ha_password = self.config.get(['database ha', "password"]) - db_ha_path = self.config.get(['database ha', "db_path"], None, "/homeassistant") - self.db_ha = DBmanagerObj(db_dialect=db_ha_engine, db_name=db_ha_name, db_server=db_ha_server, - db_port=db_ha_port, db_user=db_ha_user, db_password=db_ha_password, - db_time_zone=db_time_zone, db_path=db_ha_path) - + self.db_da = self.config.get_db_da() + self.db_ha = self.config.get_db_ha() self.prices_options = self.config.get(["prices"]) # eb + ode levering self.taxes_l_def = self.prices_options["energy taxes delivery"] @@ -58,117 +34,142 @@ def __init__(self, file_name:str="../data/options.json"): self.report_options = self.config.get(["report"]) self.make_periodes() self.energy_balance_dict = { - "cons": - {"dim": "kWh", - "sign": "pos", - "name": "Verbruik", - "sensors": self.config.get(["entities grid consumption"], self.report_options, []), - "color": '#00bfff' - }, - "prod": - {"dim": "kWh", - "sign": "neg", - "name": "Productie", - "sensors": self.config.get(["entities grid production"], self.report_options, []), - "color": '#0080ff' - }, - "bat_out": - {"dim": "kWh", - "sign": "pos", - "name": "Accu_uit", - "sensors": self.config.get(["entities battery production"], self.report_options, []), - "color": 'red' - }, - "bat_in": - {"dim": "kWh", - "sign": "neg", - "name": "Accu in", - "sensors": self.config.get(["entities battery consumption"], self.report_options, []), - "color": '#ff8000' - }, - "pv_ac": - {"dim": "kWh", - "sign": "pos", - "name": "PV ac", - "sensors": self.config.get(["entities solar production ac"], self.report_options, []), - "color": 'green' - }, - "ev": - {"dim": "kWh", - "sign": "neg", - "name": "Elec. 
vehicle", - "sensors": self.config.get(["entities ev consumption"], self.report_options, []), - "color": 'yellow' - }, - - "wp": - {"dim": "kWh", - "sign": "neg", - "name": "WP", - "sensors": self.config.get(["entities wp consumption"], self.report_options, []), - "color": '#a32cc4' - }, - "boil": - {"dim": "kWh", - "sign": "neg", - "name": "Boiler", - "sensors": self.config.get(["entities boiler consumption"], self.report_options, []), - "color": '#e39ff6' - }, - "base": - {"dim": "kWh", - "sign": "neg", - "name": "Baseload", - "sensors": "calc", - "function": "calc_base", - "color": "#f1a603" - }, + "cons": { + "dim": "kWh", + "sign": "pos", + "name": "Verbruik", + "sensors": self.config.get( + ["entities grid consumption"], self.report_options, [] + ), + "color": "#00bfff", + }, + "prod": { + "dim": "kWh", + "sign": "neg", + "name": "Productie", + "sensors": self.config.get( + ["entities grid production"], self.report_options, [] + ), + "color": "#0080ff", + }, + "bat_out": { + "dim": "kWh", + "sign": "pos", + "name": "Accu_uit", + "sensors": self.config.get( + ["entities battery production"], self.report_options, [] + ), + "color": "red", + }, + "bat_in": { + "dim": "kWh", + "sign": "neg", + "name": "Accu in", + "sensors": self.config.get( + ["entities battery consumption"], self.report_options, [] + ), + "color": "#ff8000", + }, + "pv_ac": { + "dim": "kWh", + "sign": "pos", + "name": "PV ac", + "sensors": self.config.get( + ["entities solar production ac"], self.report_options, [] + ), + "color": "green", + }, + "ev": { + "dim": "kWh", + "sign": "neg", + "name": "Elec. vehicle", + "sensors": self.config.get( + ["entities ev consumption"], self.report_options, [] + ), + "color": "yellow", + }, + "wp": { + "dim": "kWh", + "sign": "neg", + "name": "WP", + "sensors": self.config.get( + ["entities wp consumption"], self.report_options, [] + ), + "color": "#a32cc4", + }, + "boil": { + "dim": "kWh", + "sign": "neg", + "name": "Boiler", + "sensors": self.config.get( + ["entities boiler consumption"], self.report_options, [] + ), + "color": "#e39ff6", + }, + "base": { + "dim": "kWh", + "sign": "neg", + "name": "Baseload", + "sensors": "calc", + "function": "calc_base", + "color": "#f1a603", + }, } self.grid_dict = { - "cons": - {"dim": "kWh", - "sign": "pos", - "name": "Verbruik", - "sensors": self.config.get(["entities grid consumption"], self.report_options, []), - }, - "prod": - {"dim": "kWh", - "sign": "neg", - "name": "Productie", - "sensors": self.config.get(["entities grid production"], self.report_options, []), - }, - "cost": - {"dim": "eur", - "sign": "neg", - "name": "Kosten", - "sensors": "calc", - "function": "calc_cost" - }, - "profit": - {"dim": "eur", - "sign": "pos", - "name": "Opbrengst", - "sensors": "calc", - "function": "calc_cost" - }, + "cons": { + "dim": "kWh", + "sign": "pos", + "name": "Verbruik", + "sensors": self.config.get( + ["entities grid consumption"], self.report_options, [] + ), + }, + "prod": { + "dim": "kWh", + "sign": "neg", + "name": "Productie", + "sensors": self.config.get( + ["entities grid production"], self.report_options, [] + ), + }, + "cost": { + "dim": "eur", + "sign": "neg", + "name": "Kosten", + "sensors": "calc", + "function": "calc_cost", + }, + "profit": { + "dim": "eur", + "sign": "pos", + "name": "Opbrengst", + "sensors": "calc", + "function": "calc_cost", + }, } self.balance_graph_options = { "title": "Energiebalans", - "style": self.config.get(['graphics', 'style']), - "haxis": { - "values": "#interval" - }, - "vaxis": [{ - "title": "kWh" 
- }], - "series_keys": ["base", "wp", "boil", "ev", "bat_in", "prod", "pv_ac", "bat_out", "cons"], + "style": self.config.get(["graphics", "style"]), + "haxis": {"values": "#interval"}, + "vaxis": [{"title": "kWh"}], + "series_keys": [ + "base", + "wp", + "boil", + "ev", + "bat_in", + "prod", + "pv_ac", + "bat_out", + "cons", + ], "series": [], } for key in self.balance_graph_options["series_keys"]: # key, serie in self.energy_balance_dict.items(): serie = self.energy_balance_dict[key] - serie["column"] = serie['name'] - serie["type"] = "stacked", + serie["column"] = serie["name"] + serie["type"] = ("stacked",) serie["title"] = serie["name"] self.balance_graph_options["series"].append(serie) return @@ -189,7 +190,9 @@ def create_dict(name, _vanaf, _tot, interval): self.periodes.update(create_dict("morgen", vanaf_m, tot_m, interval="uur")) # vandaag en morgen - self.periodes.update(create_dict("vandaag en morgen", vanaf, tot_m, interval="uur")) + self.periodes.update( + create_dict("vandaag en morgen", vanaf, tot_m, interval="uur") + ) # gisteren tot_g = vanaf @@ -219,7 +222,9 @@ def create_dict(name, _vanaf, _tot, interval): # dit jaar vanaf = datetime.datetime(now.year, 1, 1) - tot = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(days=1) + tot = datetime.datetime(now.year, now.month, now.day) + datetime.timedelta( + days=1 + ) self.periodes.update(create_dict("dit jaar", vanaf, tot, "maand")) # vorig jaar @@ -229,7 +234,8 @@ def create_dict(name, _vanaf, _tot, interval): # dit contractjaar vanaf = datetime.datetime.strptime( - self.prices_options['last invoice'], "%Y-%m-%d") + self.prices_options["last invoice"], "%Y-%m-%d" + ) now = datetime.datetime.now() tot = datetime.datetime(now.year, now.month, now.day) tot = tot + datetime.timedelta(days=1) @@ -241,8 +247,14 @@ def create_dict(name, _vanaf, _tot, interval): self.periodes.update(create_dict("365 dagen", vanaf, tot, "maand")) return - def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.datetime, - col_name: str, agg: str = "uur") -> pd.DataFrame: + def get_sensor_data( + self, + sensor: str, + vanaf: datetime.datetime, + tot: datetime.datetime, + col_name: str, + agg: str = "uur", + ) -> pd.DataFrame: """ Retrieves and aggregates sensordata from ha database :param sensor: name off the sensor in ha @@ -252,13 +264,14 @@ def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.d :param agg: "maand", "dag" or "uur" :return: dataframe with the data """ - ''' + """ if agg == "uur": sql = "SELECT FROM_UNIXTIME(t2.`start_ts`) 'tijd', " \ "FROM_UNIXTIME(t2.`start_ts`) 'tot', " \ "round(greatest(t2.`state` - t1.`state`, 0),3) '" + col_name + "' " \ "FROM `statistics` t1,`statistics` t2, `statistics_meta` " \ - "WHERE statistics_meta.`id` = t1.`metadata_id` AND statistics_meta.`id` = t2.`metadata_id` " \ + "WHERE statistics_meta.`id` = t1.`metadata_id` + AND statistics_meta.`id` = t2.`metadata_id` " \ "AND statistics_meta.`statistic_id` = '" + sensor + "' " \ "AND (t2.`start_ts` = t1.`start_ts` + 3600) " \ "AND t1.`state` IS NOT null AND t2.`state` IS NOT null " \ @@ -272,7 +285,8 @@ def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.d "MAX(FROM_UNIXTIME(t2.`start_ts`)) 'tot', " \ "ROUND(sum(greatest(t2.`state` - t1.`state`, 0)),3) '" + col_name + "' " \ "FROM `statistics` t1,`statistics` t2, `statistics_meta` " \ - "WHERE statistics_meta.`id` = t1.`metadata_id` AND statistics_meta.`id` = t2.`metadata_id` " \ + "WHERE statistics_meta.`id` = 
t1.`metadata_id` + AND statistics_meta.`id` = t2.`metadata_id` " \ "AND statistics_meta.`statistic_id` = '" + sensor + "' " \ "AND (t2.`start_ts` = t1.`start_ts` + 3600) " \ "AND t1.`state` IS NOT null AND t2.`state` IS NOT null " \ @@ -285,7 +299,8 @@ def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.d "MAX(FROM_UNIXTIME(t2.`start_ts`)) AS 'tot', " \ "ROUND(sum(greatest(t2.`state` - t1.`state`, 0)),3) '" + col_name + "' " \ "FROM `statistics` t1,`statistics` t2, `statistics_meta` " \ - "WHERE statistics_meta.`id` = t1.`metadata_id` AND statistics_meta.`id` = t2.`metadata_id` " \ + "WHERE statistics_meta.`id` = t1.`metadata_id` + AND statistics_meta.`id` = t2.`metadata_id` " \ "AND statistics_meta.`statistic_id` = '" + sensor + "' " \ "AND (t2.`start_ts` = t1.`start_ts` + 3600) " \ "AND t1.`state` IS NOT null AND t2.`state` IS NOT null " \ @@ -296,64 +311,72 @@ def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.d df = self.db_ha.run_select_query(sql) # print(df_sensor) return df - ''' - - statistics = Table('statistics', self.db_ha.metadata, autoload_with=self.db_ha.engine) - statistics_meta = Table('statistics_meta', self.db_ha.metadata, autoload_with=self.db_ha.engine) + """ + statistics = Table( + "statistics", self.db_ha.metadata, autoload_with=self.db_ha.engine + ) + statistics_meta = Table( + "statistics_meta", self.db_ha.metadata, autoload_with=self.db_ha.engine + ) # Define aliases for the tables - t1 = statistics.alias('t1') - t2 = statistics.alias('t2') + t1 = statistics.alias("t1") + t2 = statistics.alias("t2") # Define parameters - start_ts_param1 = vanaf.strftime('%Y-%m-%d %H:%M:%S') # '2024-01-01 00:00:00' - start_ts_param2 = tot.strftime('%Y-%m-%d %H:%M:%S') # '2024-05-23 00:00:00' + start_ts_param1 = vanaf.strftime("%Y-%m-%d %H:%M:%S") # '2024-01-01 00:00:00' + start_ts_param2 = tot.strftime("%Y-%m-%d %H:%M:%S") # '2024-05-23 00:00:00' if agg == "maand": - column = self.db_ha.month(t2.c.start_ts).label('maand') + column = self.db_ha.month(t2.c.start_ts).label("maand") elif agg == "dag": - column = self.db_ha.day(t2.c.start_ts).label('dag') + column = self.db_ha.day(t2.c.start_ts).label("dag") else: # interval == "uur" - column = self.db_ha.hour(t2.c.start_ts).label('uur') + column = self.db_ha.hour(t2.c.start_ts).label("uur") if agg == "uur": - columns = [column, - self.db_ha.from_unixtime(t2.c.start_ts).label('tijd'), - self.db_ha.from_unixtime(t2.c.start_ts).label('tot'), - case( - (t2.c.state > t1.c.state, t2.c.state - t1.c.state), - else_=0 - ).label(col_name) - ] + columns = [ + column, + self.db_ha.from_unixtime(t2.c.start_ts).label("tijd"), + self.db_ha.from_unixtime(t2.c.start_ts).label("tot"), + case((t2.c.state > t1.c.state, t2.c.state - t1.c.state), else_=0).label( + col_name + ), + ] else: - columns = [column, - func.min(self.db_ha.from_unixtime(t2.c.start_ts)).label('tijd'), - func.max(self.db_ha.from_unixtime(t2.c.start_ts)).label('tot'), - func.sum(case((t2.c.state > t1.c.state, t2.c.state - t1.c.state), - else_=0) - ).label(col_name) - ] + columns = [ + column, + func.min(self.db_ha.from_unixtime(t2.c.start_ts)).label("tijd"), + func.max(self.db_ha.from_unixtime(t2.c.start_ts)).label("tot"), + func.sum( + case((t2.c.state > t1.c.state, t2.c.state - t1.c.state), else_=0) + ).label(col_name), + ] # Build the query to retrieve raw data - query = select( - *columns - ).select_from( - t1.join(t2, t2.c.start_ts == t1.c.start_ts + 3600) - .join(statistics_meta, - (statistics_meta.c.id == t1.c.metadata_id) & - 
(statistics_meta.c.id == t2.c.metadata_id)) - ).where( - (statistics_meta.c.statistic_id == sensor) & - (t1.c.state.isnot(None)) & - (t2.c.state.isnot(None)) & - (t1.c.start_ts >= self.db_ha.unix_timestamp(start_ts_param1) - 3600) & - (t1.c.start_ts < self.db_ha.unix_timestamp(start_ts_param2) - 3600) + query = ( + select(*columns) + .select_from( + t1.join(t2, t2.c.start_ts == t1.c.start_ts + 3600).join( + statistics_meta, + (statistics_meta.c.id == t1.c.metadata_id) + & (statistics_meta.c.id == t2.c.metadata_id), + ) + ) + .where( + (statistics_meta.c.statistic_id == sensor) + & (t1.c.state.isnot(None)) + & (t2.c.state.isnot(None)) + & (t1.c.start_ts >= self.db_ha.unix_timestamp(start_ts_param1) - 3600) + & (t1.c.start_ts < self.db_ha.unix_timestamp(start_ts_param2) - 3600) + ) ) if agg != "uur": query = query.group_by(agg) - from sqlalchemy.dialects import postgresql, sqlite, mysql - query_str = str(query.compile(dialect=sqlite.dialect())) + from sqlalchemy.dialects import sqlite # , postgresql, mysql + query_str = str(query.compile(dialect=sqlite.dialect())) + logging.debug(f"query get sensor data:\n {query_str}") # Execute the query and load results into a DataFrame with self.db_ha.engine.connect() as connection: df_raw = pd.read_sql(query, connection) @@ -367,28 +390,54 @@ def get_sensor_data(self, sensor: str, vanaf: datetime.datetime, tot: datetime.d return df_raw @staticmethod - def aggregate_data(df_raw: pd.DataFrame, col_name: str, agg: str = "uur") -> pd.DataFrame: - df_raw['tot'] = df_raw.apply(lambda x: datetime.datetime.fromtimestamp(x['tijd']), axis=1) + def aggregate_data( + df_raw: pd.DataFrame, col_name: str, agg: str = "uur" + ) -> pd.DataFrame: + df_raw["tot"] = df_raw.apply( + lambda x: datetime.datetime.fromtimestamp(x["tijd"]), axis=1 + ) + if len(df_raw) > 0: # Extract year and month or day if agg == "maand": - df_raw['maand'] = df_raw['start_ts_t2'].dt.to_period('M') - df_aggregated = df_raw.groupby('maand').agg( - maand=('start_ts_t2', lambda x: f"{x.dt.year.iloc[0]}-{x.dt.month.iloc[0]:2}"), - tijd=('start_ts_t2', lambda x: x.dt.strftime('%Y-%m-01 00:00:00').iloc[0]), - tot=('start_ts_t2', 'max'), - col_name=(col_name, 'sum') - ).reset_index(drop=True) + df_raw["maand"] = df_raw["start_ts_t2"].dt.to_period("M") + df_aggregated = ( + df_raw.groupby("maand") + .agg( + maand=( + "start_ts_t2", + lambda x: f"{x.dt.year.iloc[0]}-{x.dt.month.iloc[0]:2}", + ), + tijd=( + "start_ts_t2", + lambda x: x.dt.strftime("%Y-%m-01 00:00:00").iloc[0], + ), + tot=("start_ts_t2", "max"), + col_name=(col_name, "sum"), + ) + .reset_index(drop=True) + ) elif agg == "dag": - df_raw['dag'] = df_raw['start_ts_t2'].dt.to_period('D') - df_aggregated = df_raw.groupby('dag').agg( - dag=('start_ts_t2', lambda x: f"{x.dt.year.iloc[0]}-{x.dt.month.iloc[0]:2}-{x.dt.day.iloc[0]:2}"), - tijd=('start_ts_t2', lambda x: x.dt.strftime('%Y-%m-01 00:00:00').iloc[0]), - tot=('start_ts_t2', 'max'), - col_name=(col_name, 'sum') - ).reset_index(drop=True) + df_raw["dag"] = df_raw["start_ts_t2"].dt.to_period("D") + df_aggregated = ( + df_raw.groupby("dag") + .agg( + dag=( + "start_ts_t2", + lambda x: f"{x.dt.year.iloc[0]}-{x.dt.month.iloc[0]:2}-" + f"{x.dt.day.iloc[0]:2}", + ), + tijd=( + "start_ts_t2", + lambda x: x.dt.strftime("%Y-%m-01 00:00:00").iloc[0], + ), + tot=("start_ts_t2", "max"), + col_name=(col_name, "sum"), + ) + .reset_index(drop=True) + ) else: # agg == "uur" - df_raw['uur'] = df_raw['start_ts_t2'].dt.to_period('h') + df_raw["uur"] = df_raw["start_ts_t2"].dt.to_period("h") df_aggregated = 
df_raw else: df_aggregated = pd.DataFrame(columns=[agg, "tijd", "tot", col_name]) @@ -400,7 +449,9 @@ def aggregate_data(df_raw: pd.DataFrame, col_name: str, agg: str = "uur") -> pd. return df_aggregated @staticmethod - def copy_col_df(copy_from: pd.DataFrame, copy_to: pd.DataFrame, col_name: str) -> pd.DataFrame: + def copy_col_df( + copy_from: pd.DataFrame, copy_to: pd.DataFrame, col_name: str + ) -> pd.DataFrame: """ kopieert kolom "col_name" van copy_from naar copy_to, :param copy_from: @@ -412,15 +463,20 @@ def copy_col_df(copy_from: pd.DataFrame, copy_to: pd.DataFrame, col_name: str) - if dt == float: copy_to[col_name] = 0.0 else: - copy_to[col_name] = '' + copy_to[col_name] = "" # copy_from = copy_from.reset_index() for row in copy_from.itertuples(): copy_to.at[row.tijd, col_name] = copy_from.at[row.tijd, col_name] return copy_to @staticmethod - def add_col_df(add_from: pd.DataFrame, add_to: pd.DataFrame, col_name_from: str, col_name_to: str = None, - negation: bool = False) -> pd.DataFrame: + def add_col_df( + add_from: pd.DataFrame, + add_to: pd.DataFrame, + col_name_from: str, + col_name_to: str = None, + negation: bool = False, + ) -> pd.DataFrame: # add_from = add_from.reset_index() if add_from is None: return add_to @@ -433,8 +489,10 @@ def add_col_df(add_from: pd.DataFrame, add_to: pd.DataFrame, col_name_from: str, # add_to[col_name_to] =add_to[col_name_to] + add_from[col_name_from] col_index = add_from.columns.get_loc(col_name_from) + 1 for row in add_from.itertuples(): + # add_from.at[row.tijd, col_name_from]) add_to.at[row.tijd, col_name_to] = ( - add_to.at[row.tijd, col_name_to] + factor * row[col_index]) # add_from.at[row.tijd, col_name_from]) + add_to.at[row.tijd, col_name_to] + factor * row[col_index] + ) return add_to def get_latest_present(self, code: str) -> datetime.datetime: @@ -442,7 +500,7 @@ def get_latest_present(self, code: str) -> datetime.datetime: :param code: de code van de variabele :return: datetime van het laatste record """ - ''' + """ sql = "SELECT `time`, `variabel`.`id`, `value` \ FROM `values` , `variabel` \ WHERE `variabel`.`code` = '" + code + "' \ @@ -450,34 +508,43 @@ def get_latest_present(self, code: str) -> datetime.datetime: ORDER BY `time` DESC \ LIMIT 1;" data = self.db_da.run_select_query(sql) - ''' - values_table = Table('values', self.db_da.metadata, autoload_with=self.db_da.engine) + """ + values_table = Table( + "values", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the values table - t1 = values_table.alias('t1') - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=self.db_da.engine) + t1 = values_table.alias("t1") + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the variabel table - v1 = variabel_table.alias('v1') - - query = select( - t1.c.time, - v1.c.id, - t1.c.value - ).where( - and_( - v1.c.code == code, - t1.c.variabel == v1.c.id, + v1 = variabel_table.alias("v1") + + query = ( + select(t1.c.time, v1.c.id, t1.c.value) + .where( + and_( + v1.c.code == code, + t1.c.variabel == v1.c.id, + ) ) - ).order_by(t1.c.time) + .order_by(t1.c.time) + ) with self.db_da.engine.connect() as connection: result_row = connection.execute(query).first() if result_row is not None: - result = datetime.datetime.fromtimestamp(dict(result_row)['time']) + result = datetime.datetime.fromtimestamp(dict(result_row)["time"]) else: result = datetime.datetime(year=2020, month=1, day=1) return result - def get_sensor_sum(self, sensor_list: list, vanaf: 
datetime.datetime, - tot: datetime.datetime, col_name: str) -> pd.DataFrame: + def get_sensor_sum( + self, + sensor_list: list, + vanaf: datetime.datetime, + tot: datetime.datetime, + col_name: str, + ) -> pd.DataFrame: """ Berekent een dataframe met sum van de waarden van de sensoren in de list :param sensor_list: een list of strings met de entiteiten van de sensoren @@ -495,17 +562,24 @@ def get_sensor_sum(self, sensor_list: list, vanaf: datetime.datetime, result = df else: result = self.add_col_df(df, result, col_name) - counter = + 1 + counter = +1 return result - def calc_cost(self, vanaf: datetime.datetime, tot: datetime.datetime, code: str) -> pd.DataFrame: - cons_df = self.get_sensor_sum(self.grid_dict["cons"]["sensors"], vanaf, tot, "cons") - prod_df = self.get_sensor_sum(self.grid_dict["prod"]["sensors"], vanaf, tot, "prod") + def calc_cost( + self, vanaf: datetime.datetime, tot: datetime.datetime + ) -> pd.DataFrame: + cons_df = self.get_sensor_sum( + self.grid_dict["cons"]["sensors"], vanaf, tot, "cons" + ) + prod_df = self.get_sensor_sum( + self.grid_dict["prod"]["sensors"], vanaf, tot, "prod" + ) + da_df = self.get_price_data(vanaf, tot) da_df.index = pd.to_datetime(da_df["time"]) data = self.copy_col_df(cons_df, da_df, "cons") data = self.copy_col_df(prod_df, data, "prod") - result = pd.DataFrame(columns=['time', 'code', 'value']) + result = pd.DataFrame(columns=["time", "code", "value"]) for row in data.itertuples(): cost = row.cons * row.da_cons db_row = [str(int(row.time.timestamp())), "cost", cost] @@ -522,7 +596,7 @@ def consolidate_data(self, _start=None, _end=None) -> None: tot = datetime.datetime(now.year, now.month, now.day) else: tot = _end - for code, categorie in itertools.chain(self.grid_dict.items()): # self.energy_balance_dict.items()): + for code, categorie in itertools.chain(self.grid_dict.items()): if _start is None: start = self.get_latest_present(code) + datetime.timedelta(hours=1) else: @@ -535,11 +609,16 @@ def consolidate_data(self, _start=None, _end=None) -> None: data = self.get_sensor_sum(categorie["sensors"], start, tot, code) if data is None: continue - df_db = pd.DataFrame(columns=['time', 'code', 'value', 'tijd']) + df_db = pd.DataFrame(columns=["time", "code", "value", "tijd"]) data = data.rename(columns={code: "value"}) data["tijd"] = pd.to_datetime(data["tijd"]) for row in data.itertuples(): - db_row = [str(int(row.tijd.timestamp())), code, float(row.value), row.tijd] + db_row = [ + str(int(row.tijd.timestamp())), + code, + float(row.value), + row.tijd, + ] # print(db_row) df_db.loc[df_db.shape[0]] = db_row print(df_db) @@ -548,8 +627,25 @@ def consolidate_data(self, _start=None, _end=None) -> None: def recalc_df_ha(self, org_data_df: pd.DataFrame, interval: str) -> pd.DataFrame: from dao.prog.utils import get_value_from_dict + + def get_datasoort(ds): + for s in ds: + if s == "expected": + return "expected" + return "recorded" + fi_df = pd.DataFrame( - columns=[interval, "vanaf", "tot", "consumption", "production", "cost", "profit", "datasoort"]) + columns=[ + interval, + "vanaf", + "tot", + "consumption", + "production", + "cost", + "profit", + "datasoort", + ] + ) if len(org_data_df.index) == 0: return fi_df old_dagstr = "" @@ -582,11 +678,28 @@ def recalc_df_ha(self, org_data_df: pd.DataFrame, interval: str) -> pd.DataFrame col_3 = (row.consumption * (row.price + taxes_l + ol_l)) * (1 + btw / 100) col_4 = (row.production * (row.price + taxes_t + ol_t)) * (1 + btw / 100) col_5 = row.datasoort - fi_df.loc[fi_df.shape[0]] = [tijd_str, 
row.tijd, row.tijd + - datetime.timedelta(hours=1), col_1, col_2, col_3, col_4, col_5] + fi_df.loc[fi_df.shape[0]] = [ + tijd_str, + row.tijd, + row.tijd + datetime.timedelta(hours=1), + col_1, + col_2, + col_3, + col_4, + col_5, + ] if interval != "uur": - fi_df = fi_df.groupby([interval], as_index=False).agg({"vanaf": 'min', "tot": 'max', "consumption": 'sum', - "production": 'sum', "cost": 'sum', "profit": 'sum'}) + fi_df = fi_df.groupby([interval], as_index=False).agg( + { + "vanaf": "min", + "tot": "max", + "consumption": "sum", + "production": "sum", + "cost": "sum", + "profit": "sum", + "datasoort": get_datasoort, + } + ) return fi_df def aggregate_balance_df(self, df: pd.DataFrame, interval: str): @@ -605,14 +718,26 @@ def aggregate_balance_df(self, df: pd.DataFrame, interval: str): tijd_str = str(row.vanaf)[0:10] else: tijd_str = str(row.vanaf)[0:7] # jaar maand - result.loc[result.shape[0]] = [tijd_str, row.vanaf, row.vanaf + datetime.timedelta(hours=1), row.datasoort, - row.cons, row.prod, row.bat_out, row.bat_in, row.pv_ac, row.ev, row.wp, - row.boil, row.base] + result.loc[result.shape[0]] = [ + tijd_str, + row.vanaf, + row.vanaf + datetime.timedelta(hours=1), + row.datasoort, + row.cons, + row.prod, + row.bat_out, + row.bat_in, + row.pv_ac, + row.ev, + row.wp, + row.boil, + row.base, + ] if interval != "uur": - agg_dict = {"vanaf": 'min'} + agg_dict = {"vanaf": "min"} for key, categorie in self.energy_balance_dict.items(): - agg_dict[key] = 'sum' + agg_dict[key] = "sum" result = result.groupby([interval], as_index=False).agg(agg_dict) return result @@ -621,7 +746,16 @@ def aggregate_balance_df(self, df: pd.DataFrame, interval: str): def calc_base(df: pd.DataFrame) -> pd.DataFrame: base_load = [] for row in df.itertuples(): - base_load.append(row.cons - row.prod + row.bat_out - row.bat_in + row.pv_ac - row.ev - row.wp - row.boil) + base_load.append( + row.cons + - row.prod + + row.bat_out + - row.bat_in + + row.pv_ac + - row.ev + - row.wp + - row.boil + ) result = df.assign(base=base_load) return result @@ -637,7 +771,7 @@ def tijd_at_interval(interval: str, moment: datetime.datetime) -> str | int: return moment.hour else: # uur result = moment - return result.strftime('%Y-%m-%d %H:%M:%S') + return result.strftime("%Y-%m-%d %H:%M:%S") def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): periode_d = self.periodes[periode] @@ -645,8 +779,9 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): tot = _tot if _tot else periode_d["tot"] interval = periode_d["interval"] result = pd.DataFrame(columns=[interval, "tijd"]) - last_realised_moment = datetime.datetime.fromtimestamp(math.floor(datetime.datetime.now(). 
- timestamp() / 3600) * 3600) + last_realised_moment = datetime.datetime.fromtimestamp( + math.floor(datetime.datetime.now().timestamp() / 3600) * 3600 + ) moment = vanaf while moment < tot: if interval == "maand": @@ -665,30 +800,36 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): moment = old_moment + relativedelta(months=1) result.loc[result.shape[0]] = [tijd_str, old_moment] result.index = pd.to_datetime(result["tijd"]) - values_table = Table('values', self.db_da.metadata, autoload_with=self.db_da.engine) + values_table = Table( + "values", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the values table - t1 = values_table.alias('t1') - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=self.db_da.engine) + t1 = values_table.alias("t1") + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the variabel table - v1 = variabel_table.alias('v1') + v1 = variabel_table.alias("v1") if interval == "maand": - column = self.db_da.month(t1.c.time).label('maand') + column = self.db_da.month(t1.c.time).label("maand") elif interval == "dag": - column = self.db_da.day(t1.c.time).label('dag') + column = self.db_da.day(t1.c.time).label("dag") else: # interval == "uur" - column = self.db_da.hour(t1.c.time).label('uur') + column = self.db_da.hour(t1.c.time).label("uur") for key, categorie in self.energy_balance_dict.items(): result[key] = 0.0 - ''' + """ if interval == "maand": - sql = "SELECT concat(year(from_unixtime(t1.`time`)),LPAD(MONTH(from_unixtime(t1.`time`)),3, ' ')) "\ + sql = "SELECT concat(year(from_unixtime(t1.`time`)), + LPAD(MONTH(from_unixtime(t1.`time`)),3, ' ')) "\ "AS 'maand', " \ "date_format(from_unixtime(t1.`time`),'%Y-%m-01 00:00:00') AS 'tijd', " \ "MAX(from_unixtime(t1.`time`)) AS 'tot', " \ "sum(t1.`value`) " + key + " " \ "FROM `values` AS t1, `variabel`AS v1 " \ "WHERE (v1.`code` = '" + key + "') AND (v1.id = t1.variabel) AND " \ - "t1.`time` >= UNIX_TIMESTAMP('"+str(vanaf)+"') AND t1.`time` < UNIX_TIMESTAMP('"+str(tot)+"') " \ + "t1.`time` >= UNIX_TIMESTAMP('"+str(vanaf)+"') + AND t1.`time` < UNIX_TIMESTAMP('"+str(tot)+"') " \ "GROUP BY maand;" elif interval == "dag": sql = "SELECT date(from_unixtime(t1.`time`)) AS 'dag', " \ @@ -697,7 +838,8 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): "sum(t1.`value`) " + key + " " \ "FROM `values` AS t1, `variabel`AS v1 " \ "WHERE (v1.`code` = '" + key + "') AND (v1.id = t1.variabel) AND " \ - "t1.`time` >= UNIX_TIMESTAMP('"+str(vanaf)+"') AND t1.`time` < UNIX_TIMESTAMP('"+str(tot)+"') " \ + "t1.`time` >= UNIX_TIMESTAMP('"+str(vanaf)+"') + AND t1.`time` < UNIX_TIMESTAMP('"+str(tot)+"') " \ "GROUP BY dag;" else: # interval == "uur" sql = "SELECT from_unixtime(t1.`time`) AS 'uur', " \ @@ -705,33 +847,43 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): "t1.`value` '" + key + "' " \ "FROM `values` AS t1, `variabel`AS v1 " \ "WHERE (v1.`code` = '" + key + "') AND (v1.id = t1.variabel) AND " \ - "t1.`time`>= UNIX_TIMESTAMP('" + str(vanaf) + "') AND t1.`time` < UNIX_TIMESTAMP('" + str(tot) + "');" + "t1.`time`>= UNIX_TIMESTAMP('" + str(vanaf) + "') + AND t1.`time` < UNIX_TIMESTAMP('" + str(tot) + "');" # print(sql) code_result = self.db_da.run_select_query(sql) - ''' + """ - query = select( - column, - func.min(self.db_da.from_unixtime(t1.c.time)).label('vanaf'), - func.max(self.db_da.from_unixtime(t1.c.time)).label('tot'), - func.sum(t1.c.value).label(key) - ).where( - and_( - 
v1.c.code == key, - t1.c.variabel == v1.c.id, - t1.c.time >= self.db_da.unix_timestamp(vanaf.strftime('%Y-%m-%d %H:%M:%S')), - t1.c.time < self.db_da.unix_timestamp(tot.strftime('%Y-%m-%d %H:%M:%S')) + query = ( + select( + column, + func.min(self.db_da.from_unixtime(t1.c.time)).label("vanaf"), + func.max(self.db_da.from_unixtime(t1.c.time)).label("tot"), + func.sum(t1.c.value).label(key), + ) + .where( + and_( + v1.c.code == key, + t1.c.variabel == v1.c.id, + t1.c.time + >= self.db_da.unix_timestamp( + vanaf.strftime("%Y-%m-%d %H:%M:%S") + ), + t1.c.time + < self.db_da.unix_timestamp(tot.strftime("%Y-%m-%d %H:%M:%S")), + ) ) - ).group_by(interval) + .group_by(interval) + ) with self.db_da.engine.connect() as connection: code_result = pd.read_sql(query, connection) - code_result['vanaf'] = pd.to_datetime(code_result['vanaf']) - code_result['tijd'] = pd.to_datetime(code_result['vanaf']) - code_result['tot'] = pd.to_datetime(code_result['tot']) + code_result["vanaf"] = pd.to_datetime(code_result["vanaf"]) + code_result["tijd"] = pd.to_datetime(code_result["vanaf"]) + code_result["tot"] = pd.to_datetime(code_result["tot"]) # if len(code_result) > 0: - # code_result['tijd'] = code_result.apply(lambda x: self.tijd_at_interval(interval, x['tijd']), axis=1) + # code_result['tijd'] = code_result.apply(lambda x: self.tijd_at_interval(interval, + # x['tijd']), axis=1) code_result.index = code_result[interval] if code_result.shape[0] == 0: @@ -739,7 +891,7 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): last_moment = vanaf else: self.add_col_df(code_result, result, key) - last_moment = code_result['tot'].iloc[-1] + datetime.timedelta(hours=1) + last_moment = code_result["tot"].iloc[-1] + datetime.timedelta(hours=1) if last_moment < tot: ha_result = None if categorie["sensors"] == "calc": @@ -747,25 +899,32 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): ha_result = getattr(self, function)(result) else: for sensor in categorie["sensors"]: - ha_result = self.get_sensor_data(sensor, last_moment, tot, key, interval) - ha_result['tot'] = pd.to_datetime(ha_result['tijd']) + ha_result = self.get_sensor_data( + sensor, last_moment, tot, key, interval + ) + ha_result["tot"] = pd.to_datetime(ha_result["tijd"]) if interval == "maand": - ha_result['tijd'] = pd.to_datetime(ha_result[interval]) + ha_result["tijd"] = pd.to_datetime(ha_result[interval]) ha_result.index = pd.to_datetime(ha_result["tijd"]) result = self.add_col_df(ha_result, result, key) if ha_result is not None and len(ha_result) > 0: if categorie["sensors"] == "calc": now = datetime.datetime.now() - last_moment = max(datetime.datetime(now.year, now.month, now.day, now.hour), vanaf) + last_moment = max( + datetime.datetime(now.year, now.month, now.day, now.hour), + vanaf, + ) else: - last_moment = ha_result['tot'].iloc[-1] + datetime.timedelta(hours=1) + last_moment = ha_result["tot"].iloc[-1] + datetime.timedelta( + hours=1 + ) else: last_moment = vanaf if last_moment < last_realised_moment: last_moment = last_realised_moment if last_moment < tot: - ''' + """ if interval == "maand": sql = "SELECT concat(year(from_unixtime(t1.`time`)), LPAD(MONTH(from_unixtime(t1.`time`)),3, ' ')) AS 'maand', " \ @@ -789,7 +948,7 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): "GROUP BY dag;" else: # interval == "uur" sql = "SELECT from_unixtime(t1.`time`) AS 'uur', " \ - "from_unixtime(t1.`time`) AS 'tijd', from_unixtime(t1.`time`) AS 'tot', " \ + "from_unixtime(t1.`time`) AS 'tijd', 
from_unixtime(t1.`time`) AS 'tot'," \ "t1.`value` '" + key + "' " \ "FROM `prognoses` AS t1, `variabel`AS v1 " \ "WHERE (v1.`code` = '" + key + "') AND (v1.id = t1.variabel) AND " \ @@ -803,30 +962,37 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): "AND t1.`time` >= UNIX_TIMESTAMP('" + str(last_moment) + "') " \ "AND t1.`time` < UNIX_TIMESTAMP('" + str(tot) + "');" prog_result = self.db_da.run_select_query(sql) - ''' - prog_table = Table('prognoses', self.db_da.metadata, autoload_with=self.db_da.engine) - p1 = prog_table.alias('p1') + """ + prog_table = Table( + "prognoses", self.db_da.metadata, autoload_with=self.db_da.engine + ) + p1 = prog_table.alias("p1") # Build the SQLAlchemy query query = select( - self.db_da.from_unixtime(p1.c.time).label('tijd'), + self.db_da.from_unixtime(p1.c.time).label("tijd"), p1.c.value.label(key), - literal("expected").label('datasoort') + literal("expected").label("datasoort"), ).where( and_( p1.c.variabel == v1.c.id, v1.c.code == key, - p1.c.time >= self.db_da.unix_timestamp(last_moment.strftime('%Y-%m-%d %H:%M:%S')), - p1.c.time < self.db_da.unix_timestamp(tot.strftime('%Y-%m-%d %H:%M:%S')) + p1.c.time + >= self.db_da.unix_timestamp( + last_moment.strftime("%Y-%m-%d %H:%M:%S") + ), + p1.c.time + < self.db_da.unix_timestamp(tot.strftime("%Y-%m-%d %H:%M:%S")), ) ) with self.db_da.engine.connect() as connection: prog_result = pd.read_sql_query(query, connection) - prog_result['tijd'] = pd.to_datetime(prog_result['tijd']) - prog_result['tot'] = prog_result['tijd'] - if len(prog_result)>0: - prog_result['tijd'] = prog_result.apply(lambda x: self.tijd_at_interval(interval, x['tijd']), - axis=1) + prog_result["tijd"] = pd.to_datetime(prog_result["tijd"]) + prog_result["tot"] = prog_result["tijd"] + if len(prog_result) > 0: + prog_result["tijd"] = prog_result.apply( + lambda x: self.tijd_at_interval(interval, x["tijd"]), axis=1 + ) prog_result.index = pd.to_datetime(prog_result["tijd"]) if len(prog_result) > 0: self.add_col_df(prog_result, result, key) @@ -835,9 +1001,14 @@ def get_energy_balance_data(self, periode, _vanaf=None, _tot=None): result = getattr(self, function)(result) return result - def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | None = None, - _source: str = "all") -> pd.DataFrame: - + def get_grid_data( + self, + periode: str, + _vanaf=None, + _tot=None, + _interval: str | None = None, + _source: str = "all", + ) -> pd.DataFrame: """ Haalt de grid data: consumptie, productie, cost, profit op de drie tabellen: db_da: values tibber data @@ -845,20 +1016,26 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N db_ha: sensoren Home Assistant tot het laatste uur voor prognoses (expected): db_da: progoses - :param periode: dus een van alle gedefinieerde perioden: vandaag, gisteren enz - :param _vanaf: als != None dan geldt dit als begintijdstip en overrullt begintijdstip van periode + :param periode: dus een van alle gedefinieerde perioden: vandaag, gisteren enz. 
+ :param _vanaf: als != None dan geldt dit als begintijdstip en overrullt + begintijdstip van periode :param _tot: als != None dan hier het eindtijdstip :param _interval: als != None dan hier het gewenste interval :param _source: als != None dan hier de source all, da of ha :return: een dataframe met de gevraagde griddata """ - values_table = Table('values', self.db_da.metadata, autoload_with=self.db_da.engine) + + values_table = Table( + "values", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the values table - t1 = values_table.alias('t1') - variabel_table = Table('variabel', self.db_da.metadata, autoload_with=self.db_da.engine) + t1 = values_table.alias("t1") + variabel_table = Table( + "variabel", self.db_da.metadata, autoload_with=self.db_da.engine + ) # Aliases for the variabel table - v1 = variabel_table.alias('v1') - v2 = variabel_table.alias('v2') + v1 = variabel_table.alias("v1") + v2 = variabel_table.alias("v2") if periode == "": vanaf = _vanaf @@ -872,27 +1049,42 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N source = _source if interval == "maand": - column = self.db_da.month(t1.c.time).label('maand') + column = self.db_da.month(t1.c.time).label("maand") elif interval == "dag": - column = self.db_da.day(t1.c.time).label('dag') + column = self.db_da.day(t1.c.time).label("dag") else: # interval == "uur" - column = self.db_da.hour(t1.c.time).label('uur') + column = self.db_da.hour(t1.c.time).label("uur") result = None if source == "all" or source == "da": - for cat, label in [("cons", "consumption"), ("prod", "production"), ("cost", "cost"), ("profit", "profit")]: - query = select( - column, - func.min(self.db_da.from_unixtime(t1.c.time)).label('vanaf'), - func.max(self.db_da.from_unixtime(t1.c.time)).label('tot'), - func.sum(t1.c.value).label(label) - ).where( - and_( - t1.c.variabel == v1.c.id, - v1.c.code == cat, - t1.c.time >= self.db_da.unix_timestamp(vanaf.strftime('%Y-%m-%d %H:%M:%S')), - t1.c.time < self.db_da.unix_timestamp(tot.strftime('%Y-%m-%d %H:%M:%S')) + for cat, label in [ + ("cons", "consumption"), + ("prod", "production"), + ("cost", "cost"), + ("profit", "profit"), + ]: + query = ( + select( + column, + func.min(self.db_da.from_unixtime(t1.c.time)).label("vanaf"), + func.max(self.db_da.from_unixtime(t1.c.time)).label("tot"), + func.sum(t1.c.value).label(label), + ) + .where( + and_( + t1.c.variabel == v1.c.id, + v1.c.code == cat, + t1.c.time + >= self.db_da.unix_timestamp( + vanaf.strftime("%Y-%m-%d %H:%M:%S") + ), + t1.c.time + < self.db_da.unix_timestamp( + tot.strftime("%Y-%m-%d %H:%M:%S") + ), + ) ) - ).group_by(interval) + .group_by(interval) + ) # from sqlalchemy.dialects import postgresql, mysql # query_str = str(query.compile(engine=mysql.engine())) @@ -900,13 +1092,25 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N with self.db_da.engine.connect() as connection: result_cat = pd.read_sql_query(query, connection) - result_cat.index = result_cat[interval] # pd.to_datetime(result_cat["vanaf"]) + result_cat.index = result_cat[ + interval + ] # pd.to_datetime(result_cat["vanaf"]) if result is None: result = result_cat else: result[label] = result_cat[label] else: - result = pd.DataFrame(columns=["uur", "vanaf", "tot", "consumption", "production", "cost", "profit"]) + result = pd.DataFrame( + columns=[ + "uur", + "vanaf", + "tot", + "consumption", + "production", + "cost", + "profit", + ] + ) result.index = result["uur"] # vanaf result["datasoort"] = "recorded" 
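Note on the pattern introduced in the hunks above: the hand-written SQL strings are retired in favour of SQLAlchemy Core, i.e. the `values` and `variabel` tables are reflected, aliased, and queried through `select()` / `where()` / `group_by()`, with `func.sum` / `func.min` / `func.max` doing the per-interval aggregation. The `month()`, `day()`, `hour()`, `from_unixtime()` and `unix_timestamp()` helpers belong to the add-on's own `DBmanagerObj` and are not visible in this patch, so the stand-alone sketch below is an assumption rather than the add-on's actual code: it mirrors the `values`/`variabel` schema in a throw-away in-memory SQLite database and approximates those helpers with plain SQLite date functions, purely to show the query shape.

    # Hypothetical stand-alone sketch of the SQLAlchemy Core pattern used in get_grid_data.
    # Table/column names mirror the add-on's schema; the DBmanagerObj date helpers are
    # approximated here with SQLite's date()/datetime() functions (an assumption).
    import datetime

    import pandas as pd
    from sqlalchemy import (
        Column, Float, Integer, MetaData, String, Table,
        and_, create_engine, func, select,
    )

    engine = create_engine("sqlite:///:memory:")
    metadata = MetaData()
    variabel = Table(
        "variabel", metadata,
        Column("id", Integer, primary_key=True),
        Column("code", String),
    )
    values_t = Table(
        "values", metadata,
        Column("time", Integer),      # unix timestamp of the hour
        Column("variabel", Integer),  # references variabel.id
        Column("value", Float),
    )
    metadata.create_all(engine)

    # Two days of hourly sample consumption data (illustrative values only)
    with engine.begin() as conn:
        conn.execute(variabel.insert(), [{"id": 1, "code": "cons"}])
        base = int(datetime.datetime(2024, 11, 1, tzinfo=datetime.timezone.utc).timestamp())
        conn.execute(
            values_t.insert(),
            [{"time": base + h * 3600, "variabel": 1, "value": 0.5 + 0.1 * h}
             for h in range(48)],
        )

    t1 = values_t.alias("t1")
    v1 = variabel.alias("v1")
    # SQLite stand-ins for self.db_da.day() and self.db_da.from_unixtime()
    day = func.date(t1.c.time, "unixepoch").label("dag")
    from_unixtime = func.datetime(t1.c.time, "unixepoch")

    query = (
        select(
            day,
            func.min(from_unixtime).label("vanaf"),
            func.max(from_unixtime).label("tot"),
            func.sum(t1.c.value).label("consumption"),
        )
        .where(and_(t1.c.variabel == v1.c.id, v1.c.code == "cons"))
        .group_by(day)
    )

    with engine.connect() as conn:
        print(pd.read_sql_query(query, conn))

Running the sketch prints one aggregated row per day with the summed consumption, which is the shape that get_grid_data then copies into its result frame for each of the cons/prod/cost/profit categories; the sketch groups by the full date expression, whereas the patched code passes the interval label to group_by and lets the target dialect resolve it.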
@@ -915,29 +1119,37 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N # datetime.datetime.combine(vanaf, datetime.time(0,0)) - datetime.timedelta(hours=1) last_moment = vanaf else: - result['vanaf'] = pd.to_datetime(result['vanaf']) - result['tot'] = pd.to_datetime(result['tot']) - last_moment = result['tot'].iloc[-1] + datetime.timedelta(hours=1) + result["vanaf"] = pd.to_datetime(result["vanaf"]) + result["tot"] = pd.to_datetime(result["tot"]) + last_moment = result["tot"].iloc[-1] + datetime.timedelta(hours=1) if last_moment < tot: # get the prices: - query = select( - self.db_da.from_unixtime(t1.c.time).label('tijd'), - t1.c.value.label('price') - ).where( - and_( - v1.c.code == 'da', - t1.c.variabel == v1.c.id, - t1.c.time >= self.db_da.unix_timestamp(last_moment.strftime('%Y-%m-%d %H:%M:%S')), - t1.c.time < self.db_da.unix_timestamp(tot.strftime('%Y-%m-%d %H:%M:%S')) + query = ( + select( + self.db_da.from_unixtime(t1.c.time).label("tijd"), + t1.c.value.label("price"), ) - ).order_by(t1.c.time) + .where( + and_( + v1.c.code == "da", + t1.c.variabel == v1.c.id, + t1.c.time + >= self.db_da.unix_timestamp( + last_moment.strftime("%Y-%m-%d %H:%M:%S") + ), + t1.c.time + < self.db_da.unix_timestamp(tot.strftime("%Y-%m-%d %H:%M:%S")), + ) + ) + .order_by(t1.c.time) + ) # from sqlalchemy.dialects import postgresql # query_str = str(query.compile(engine=postgresql.engine())) with self.db_da.engine.connect() as connection: df_prices = pd.read_sql_query(query, connection) - logging.debug(f"Prijzen \n{df_prices.to_string()}/n") + logging.debug(f"Prijzen \n{df_prices.to_string()}\n") df_ha = pd.DataFrame() if source == "all" or source == "ha": @@ -945,56 +1157,76 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N count = 0 for sensor in self.report_options["entities grid consumption"]: if count == 0: - df_ha = self.get_sensor_data(sensor, last_moment, tot, "consumption", "uur") + df_ha = self.get_sensor_data( + sensor, last_moment, tot, "consumption", "uur" + ) df_ha.index = pd.to_datetime(df_ha["tijd"]) df_ha["tijd"] = pd.to_datetime(df_ha["tijd"]) else: - df_2 = self.get_sensor_data(sensor, last_moment, tot, "consumption", "uur") + df_2 = self.get_sensor_data( + sensor, last_moment, tot, "consumption", "uur" + ) df_2.index = pd.to_datetime(df_2["tijd"]) df_ha = self.add_col_df(df_2, df_ha, "consumption") # df_cons = df_cons.merge(df_2, on=['tijd']).set_index(['tijd']).sum(axis=1) - count = + 1 + if len(df_ha) > 0: + df_ha["datasoort"] = "recorded" + count = +1 count = 0 for sensor in self.report_options["entities grid production"]: - df_p = self.get_sensor_data(sensor, last_moment, tot, "production", "uur") + df_p = self.get_sensor_data( + sensor, last_moment, tot, "production", "uur" + ) df_p.index = pd.to_datetime(df_p["tijd"]) if count == 0: df_ha = self.copy_col_df(df_p, df_ha, "production") else: df_ha = self.add_col_df(df_p, df_ha, "production") - count = + 1 + count = +1 if len(df_ha) > 0: - last_moment = df_ha['tijd'].iloc[-1] + datetime.timedelta(hours=1) - df_ha['datasoort'] = "recorded" + last_moment = df_ha["tijd"].iloc[-1] + datetime.timedelta(hours=1) + df_ha["datasoort"] = "recorded" else: last_moment = vanaf if source == "all" or source == "da": if last_moment < tot: # get prognose consumption and production: - prog_table = Table('prognoses', self.db_da.metadata, autoload_with=self.db_da.engine) - p1 = prog_table.alias('p1') - p2 = prog_table.alias('p2') + prog_table = Table( + "prognoses", + self.db_da.metadata, 
+ autoload_with=self.db_da.engine, + ) + p1 = prog_table.alias("p1") + p2 = prog_table.alias("p2") # Build the SQLAlchemy query query = select( - self.db_da.from_unixtime(p1.c.time).label('tijd'), - p1.c.value.label('consumption'), - p2.c.value.label('production'), - literal("recorded").label('datasoort') + self.db_da.from_unixtime(p1.c.time).label("tijd"), + p1.c.value.label("consumption"), + p2.c.value.label("production"), + literal("expected").label("datasoort"), ).where( and_( p1.c.time == p2.c.time, p1.c.variabel == v1.c.id, - v1.c.code == 'cons', + v1.c.code == "cons", p2.c.variabel == v2.c.id, - v2.c.code == 'prod', - p1.c.time >= self.db_da.unix_timestamp(last_moment.strftime('%Y-%m-%d %H:%M:%S')), - p1.c.time < self.db_da.unix_timestamp(tot.strftime('%Y-%m-%d %H:%M:%S')) + v2.c.code == "prod", + p1.c.time + >= self.db_da.unix_timestamp( + last_moment.strftime("%Y-%m-%d %H:%M:%S") + ), + p1.c.time + < self.db_da.unix_timestamp( + tot.strftime("%Y-%m-%d %H:%M:%S") + ), ) ) from sqlalchemy.dialects import postgresql + query_str = str(query.compile(dialect=postgresql.dialect())) + logging.debug(f"query get prognose data:\n {query_str}") with self.db_da.engine.connect() as connection: df_prog = pd.read_sql_query(query, connection) @@ -1019,7 +1251,7 @@ def get_grid_data(self, periode: str, _vanaf=None, _tot=None, _interval: str | N result = pd.concat([result, df_ha]) result["netto_consumption"] = result["consumption"] - result["production"] - result["netto_const"] = result["cost"] - result["profit"] + result["netto_cost"] = result["cost"] - result["profit"] return result @@ -1031,14 +1263,22 @@ def get_last_day_month(input_dt: datetime): def calc_grid_columns(self, report_df, active_interval, active_view): from dao.prog.utils import get_value_from_dict + first_col = active_interval.capitalize() # if active_subject == "verbruik": # columns.extend(["Verbruik", "Productie", "Netto"]) # columns = [columns] # columns.append(["", "kWh", "kWh", "kWh"]) # else: #kosten - columns = [first_col, "Verbruik", "Productie", - "Netto verbr.", "Kosten", "Opbrengst", "Netto kosten"] + columns = [ + first_col, + "Verbruik", + "Productie", + "Netto verbr.", + "Kosten", + "Opbrengst", + "Netto kosten", + ] # columns.extend(ext_columns) fi_df = pd.DataFrame(columns=columns) if len(report_df.index) == 0: @@ -1080,15 +1320,19 @@ def calc_grid_columns(self, report_df, active_interval, active_view): col_2 = row.production col_3 = col_1 - col_2 if math.isnan(row.cost): - col_4 = (row.consumption * (row.price + taxes_l + ol_l)) * (1 + btw / 100) + col_4 = (row.consumption * (row.price + taxes_l + ol_l)) * ( + 1 + btw / 100 + ) else: col_4 = row.cost if math.isnan(row.profit): - col_5 = (row.production * (row.price + taxes_t + ol_t)) * (1 + btw / 100) + col_5 = (row.production * (row.price + taxes_t + ol_t)) * ( + 1 + btw / 100 + ) else: col_5 = row.profit col_6 = col_4 - col_5 - ''' + """ #col_7 = (row.price + taxes_l + ol_l) * (1 + btw / 100) if col_1: col_7 = col_4/col_1 @@ -1098,20 +1342,41 @@ def calc_grid_columns(self, report_df, active_interval, active_view): col_8 = col_5/col_2 else: col_8 = numpy.nan - ''' - fi_df.loc[fi_df.shape[0]] = [tijd_str, col_1, col_2, col_3, col_4, col_5, col_6] + """ + fi_df.loc[fi_df.shape[0]] = [ + tijd_str, + col_1, + col_2, + col_3, + col_4, + col_5, + col_6, + ] # , "Tarief verbr.", "Tarief prod." 
# , "Tarief verbr.":'mean', "Tarief prod.":"mean" # fi_df.set_index([columns[0][0]]) if active_interval != "uur": - fi_df = fi_df.groupby([first_col], as_index=False).agg({"Verbruik": 'sum', "Productie": 'sum', - "Netto verbr.": 'sum', "Kosten": 'sum', - "Opbrengst": 'sum', "Netto kosten": 'sum'}) - fi_df['Tarief verbr.'] = fi_df.apply( - lambda rw: rw.Kosten / rw.Verbruik if rw.Verbruik != 0.0 else rw.Verbruik, axis=1) - fi_df['Tarief prod.'] = fi_df.apply( - lambda rw: rw.Opbrengst / rw.Productie if rw.Productie != 0.0 else rw.Productie, axis=1) + fi_df = fi_df.groupby([first_col], as_index=False).agg( + { + "Verbruik": "sum", + "Productie": "sum", + "Netto verbr.": "sum", + "Kosten": "sum", + "Opbrengst": "sum", + "Netto kosten": "sum", + } + ) + fi_df["Tarief verbr."] = fi_df.apply( + lambda rw: rw.Kosten / rw.Verbruik if rw.Verbruik != 0.0 else rw.Verbruik, + axis=1, + ) + fi_df["Tarief prod."] = fi_df.apply( + lambda rw: ( + rw.Opbrengst / rw.Productie if rw.Productie != 0.0 else rw.Productie + ), + axis=1, + ) if active_view == "tabel": fi_df.loc["Total"] = fi_df.sum(axis=0, numeric_only=True) fi_df.at[fi_df.index[-1], first_col] = "Totaal" @@ -1129,21 +1394,24 @@ def calc_grid_columns(self, report_df, active_interval, active_view): # value = fi_df.iloc[-1][7] # fi_df.at[fi_df.index[-1], "Tarief"] = value / (len(fi_df.index)-1) - # fi_df.loc[fi_df.shape[0]] = ["Totaal", col_1_tot, col_2_tot, col_3_tot, col_4_tot, col_5_tot, col_6_tot, + # fi_df.loc[fi_df.shape[0]] = ["Totaal", col_1_tot, col_2_tot, col_3_tot, col_4_tot, + # col_5_tot, col_6_tot, # col_7_tot / count_tot] columns = fi_df.columns.values.tolist() # columns.append(["", "kWh", "kWh", "kWh", "eur", "eur", "eur", "eur/kWh", "eur/kWh"]) # columns = [columns, - fi_df.columns = [columns, ["", "kWh", "kWh", "kWh", - "eur", "eur", "eur", "eur/kWh", "eur/kWh"]] + fi_df.columns = [ + columns, + ["", "kWh", "kWh", "kWh", "eur", "eur", "eur", "eur/kWh", "eur/kWh"], + ] fi_df = fi_df.round(3) return fi_df def calc_balance_columns(self, report_df, active_interval, active_view): first_col = active_interval.capitalize() # report_df = report_df.drop('vanaf', axis=1) - report_df.style.format('{:.3f}') - report_df = report_df.drop('tijd', axis=1) + report_df.style.format("{:.3f}") + report_df = report_df.drop("tijd", axis=1) # report_df = report_df.drop('datasoort', axis=1) key_columns = report_df.columns.values.tolist()[1:] columns_1 = [first_col] @@ -1162,8 +1430,9 @@ def calc_balance_columns(self, report_df, active_interval, active_view): return report_df # ------------------------------------------------ - def get_sensor_week_data(self, sensor: str, weekday: int, vanaf: datetime.datetime, - col_name: str) -> pd.DataFrame: + def get_sensor_week_data( + self, sensor: str, weekday: int, vanaf: datetime.datetime, col_name: str + ) -> pd.DataFrame: """ Berekent de waarde van een HA-sensor over 24 uur voor een bepaalde weekdag :param sensor: @@ -1172,13 +1441,14 @@ def get_sensor_week_data(self, sensor: str, weekday: int, vanaf: datetime.dateti :param col_name: :return: """ - ''' + """ sql = "SELECT FROM_UNIXTIME(t2.`start_ts`) 'tijd', \ GREATEST(0, round(t2.state - t1.`state`,3)) '" + col_name + "', \ WEEKDAY(FROM_UNIXTIME(t2.`start_ts`)) 'weekdag', \ HOUR(FROM_UNIXTIME(t2.`start_ts`)) 'uur' \ FROM `statistics` t1,`statistics` t2, `statistics_meta` \ - WHERE statistics_meta.`id` = t1.`metadata_id` AND statistics_meta.`id` = t2.`metadata_id` \ + WHERE statistics_meta.`id` = t1.`metadata_id` + AND statistics_meta.`id` = t2.`metadata_id` \ AND 
statistics_meta.`statistic_id` = '" + sensor + "' \ AND (t2.`start_ts` = t1.`start_ts` + 3600) \ AND t1.`state` IS NOT null AND t2.`state` IS NOT null \ @@ -1186,32 +1456,41 @@ def get_sensor_week_data(self, sensor: str, weekday: int, vanaf: datetime.dateti AND WEEKDAY(FROM_UNIXTIME(t2.`start_ts`))= " + str(weekday) + " \ ORDER BY t1.`start_ts`;" df = self.db_ha.run_select_query(sql) - ''' - statistics = Table('statistics', self.db_ha.metadata, autoload_with=self.db_ha.engine) - statistics_meta = Table('statistics_meta', self.db_ha.metadata, autoload_with=self.db_ha.engine) + """ + statistics = Table( + "statistics", self.db_ha.metadata, autoload_with=self.db_ha.engine + ) + statistics_meta = Table( + "statistics_meta", self.db_ha.metadata, autoload_with=self.db_ha.engine + ) # Define aliases for the tables - t1 = statistics.alias('t1') - t2 = statistics.alias('t2') + t1 = statistics.alias("t1") + t2 = statistics.alias("t2") # Define parameters - start_ts_param1 = vanaf.strftime('%Y-%m-%d %H:%M:%S') # '2024-01-01 00:00:00' + start_ts_param1 = vanaf.strftime("%Y-%m-%d %H:%M:%S") # '2024-01-01 00:00:00' # Build the query to retrieve raw data - query = select( - t2.c.start_ts.label('tijd'), - t1.c.state.label('state_t1'), - t2.c.state.label('state_t2') - ).select_from( - t1.join(t2, t2.c.start_ts == t1.c.start_ts + 3600) - .join(statistics_meta, - (statistics_meta.c.id == t1.c.metadata_id) & - (statistics_meta.c.id == t2.c.metadata_id)) - ).where( - (statistics_meta.c.statistic_id == sensor) & - (t1.c.state.isnot(None)) & - (t2.c.state.isnot(None)) & - (t1.c.start_ts >= self.db_ha.unix_timestamp(start_ts_param1) - 3600) + query = ( + select( + t2.c.start_ts.label("tijd"), + t1.c.state.label("state_t1"), + t2.c.state.label("state_t2"), + ) + .select_from( + t1.join(t2, t2.c.start_ts == t1.c.start_ts + 3600).join( + statistics_meta, + (statistics_meta.c.id == t1.c.metadata_id) + & (statistics_meta.c.id == t2.c.metadata_id), + ) + ) + .where( + (statistics_meta.c.statistic_id == sensor) + & (t1.c.state.isnot(None)) + & (t2.c.state.isnot(None)) + & (t1.c.start_ts >= self.db_ha.unix_timestamp(start_ts_param1) - 3600) + ) ) # Execute the query and load results into a DataFrame @@ -1219,20 +1498,29 @@ def get_sensor_week_data(self, sensor: str, weekday: int, vanaf: datetime.dateti df_raw = pd.read_sql(query, connection) if len(df_raw) > 0: # Convert UNIX timestamps to datetime - df_raw['tijd'] = df_raw.apply(lambda x: datetime.datetime.fromtimestamp(x['tijd']), axis=1) + df_raw["tijd"] = df_raw.apply( + lambda x: datetime.datetime.fromtimestamp(x["tijd"]), axis=1 + ) # Calculate the value - df_raw[col_name] = df_raw.apply(lambda row: round(max(row['state_t2'] - row['state_t1'], 0), 3), axis=1) - df_raw['weekdag'] = df_raw.apply(lambda x: self.tijd_at_interval('weekdag', x['tijd']), axis=1) - df_raw['uur'] = df_raw.apply(lambda x: self.tijd_at_interval('heel_uur', x['tijd']), axis=1) + df_raw[col_name] = df_raw.apply( + lambda row: round(max(row["state_t2"] - row["state_t1"], 0), 3), axis=1 + ) + df_raw["weekdag"] = df_raw.apply( + lambda x: self.tijd_at_interval("weekdag", x["tijd"]), axis=1 + ) + df_raw["uur"] = df_raw.apply( + lambda x: self.tijd_at_interval("heel_uur", x["tijd"]), axis=1 + ) else: df_raw = pd.DataFrame(columns=["weekdag", "tijd", "tot", col_name]) df_raw.index = pd.to_datetime(df_raw["tijd"]) - df_wd = df_raw.loc[df_raw['weekdag'] == weekday] + df_wd = df_raw.loc[df_raw["weekdag"] == weekday] return df_wd - def get_sensor_week_sum(self, sensor_list: list, weekday: int, vanaf: 
datetime.datetime, - col_name: str) -> pd.DataFrame: + def get_sensor_week_sum( + self, sensor_list: list, weekday: int, vanaf: datetime.datetime, col_name: str + ) -> pd.DataFrame: counter = 0 result = None for sensor in sensor_list: @@ -1242,7 +1530,7 @@ def get_sensor_week_sum(self, sensor_list: list, weekday: int, vanaf: datetime.d else: result[col_name] = result[col_name] + df[col_name] # result = Report.add_col_df(df, result, col_name) - counter = + 1 + counter = +1 return result def calc_weekday_baseload(self, wd: int) -> list: @@ -1253,48 +1541,99 @@ def calc_weekday_baseload(self, wd: int) -> list: config = Config("../data/options.json") calc_periode = config.get(["baseload calc periode"], None, 56) - calc_start = datetime.datetime.combine((datetime.datetime.now() - datetime.timedelta(days=calc_periode)).date(), - datetime.time()) - - grid_consumption = self.get_sensor_week_sum(config.get(['report', "entities grid consumption"]), wd, - calc_start, "grid_consumption") - grid_production = self.get_sensor_week_sum(config.get(['report', "entities grid production"]), wd, calc_start, - "grid_production") - solar_production = self.get_sensor_week_sum(config.get(['report', "entities solar production ac"]), wd, - calc_start, "solar_production") - ev_consumption = self.get_sensor_week_sum(config.get(['report', "entities ev consumption"]), wd, calc_start, - "ev_consumption") - wp_consumption = self.get_sensor_week_sum(config.get(['report', "entities wp consumption"]), wd, calc_start, - "wp_consumption") - boiler_consumption = self.get_sensor_week_sum(config.get(['report', "entities boiler consumption"]), wd, - calc_start, "boiler_consumption") - battery_consumption = self.get_sensor_week_sum(config.get(['report', "entities battery consumption"]), wd, - calc_start, "battery_consumption") - battery_production = self.get_sensor_week_sum(config.get(['report', "entities battery production"]), wd, - calc_start, "battery_production") - - # baseload = grid_consumption - grid_production + solar_production - ev_consumption - wp_consumption - - # battery_consumption + battery_production - # baseload = grid_consumption - grid_consumption = grid_consumption.rename(columns={'grid_consumption': 'baseload'}) + calc_start = datetime.datetime.combine( + (datetime.datetime.now() - datetime.timedelta(days=calc_periode)).date(), + datetime.time(), + ) + + grid_consumption = self.get_sensor_week_sum( + config.get(["report", "entities grid consumption"]), + wd, + calc_start, + "grid_consumption", + ) + grid_production = self.get_sensor_week_sum( + config.get(["report", "entities grid production"]), + wd, + calc_start, + "grid_production", + ) + solar_production = self.get_sensor_week_sum( + config.get(["report", "entities solar production ac"]), + wd, + calc_start, + "solar_production", + ) + ev_consumption = self.get_sensor_week_sum( + config.get(["report", "entities ev consumption"]), + wd, + calc_start, + "ev_consumption", + ) + wp_consumption = self.get_sensor_week_sum( + config.get(["report", "entities wp consumption"]), + wd, + calc_start, + "wp_consumption", + ) + boiler_consumption = self.get_sensor_week_sum( + config.get(["report", "entities boiler consumption"]), + wd, + calc_start, + "boiler_consumption", + ) + battery_consumption = self.get_sensor_week_sum( + config.get(["report", "entities battery consumption"]), + wd, + calc_start, + "battery_consumption", + ) + battery_production = self.get_sensor_week_sum( + config.get(["report", "entities battery production"]), + wd, + calc_start, + 
"battery_production", + ) + + # baseload = grid_consumption - grid_production + solar_production - ev_consumption + # - wp_consumption - battery_consumption + battery_production + grid_consumption = grid_consumption.rename( + columns={"grid_consumption": "baseload"} + ) # baseload - grid_production - result = Report.add_col_df(grid_production, grid_consumption, "grid_production", "baseload", True) + result = Report.add_col_df( + grid_production, grid_consumption, "grid_production", "baseload", True + ) # baseload + solar_production - result = Report.add_col_df(solar_production, result, "solar_production", "baseload") + result = Report.add_col_df( + solar_production, result, "solar_production", "baseload" + ) # baseload - ev_consumption - result = Report.add_col_df(ev_consumption, result, "ev_consumption", "baseload", True) + result = Report.add_col_df( + ev_consumption, result, "ev_consumption", "baseload", True + ) # baseload - wp_consumption - result = Report.add_col_df(wp_consumption, result, "wp_consumption", "baseload", True) + result = Report.add_col_df( + wp_consumption, result, "wp_consumption", "baseload", True + ) # baseload - boiler_consumption - result = Report.add_col_df(boiler_consumption, result, "boiler_consumption", "baseload", True) + result = Report.add_col_df( + boiler_consumption, result, "boiler_consumption", "baseload", True + ) # baseload - battery_consumption - result = Report.add_col_df(battery_consumption, result, "battery_consumption", "baseload", True) + result = Report.add_col_df( + battery_consumption, result, "battery_consumption", "baseload", True + ) # baseload - battery_production - result = Report.add_col_df(battery_production, result, "battery_production", "baseload") + result = Report.add_col_df( + battery_production, result, "battery_production", "baseload" + ) - result = result.groupby("uur", as_index=False).agg({"tijd": 'min', "weekdag": 'mean', "baseload": 'mean'}) + result = result.groupby("uur", as_index=False).agg( + {"tijd": "min", "weekdag": "mean", "baseload": "mean"} + ) result.baseload = result.baseload.round(3) - result = result['baseload'].values.tolist() + result = result["baseload"].values.tolist() return result def calc_save_baseloads(self): @@ -1306,25 +1645,30 @@ def calc_save_baseloads(self): bl_str += str(x) + " " logging.info(bl_str) out_file = "../data/baseload/baseload_" + str(weekday) + ".json" - with open(out_file, 'w') as f: + with open(out_file, "w") as f: print(json.dumps(baseload, indent=2), file=f) return # ------------------------------------------------ - def get_field_data(self, field: str, periode: str): + def get_field_data(self, field: str, periode: str, tot=None): period = self.periodes[periode] if not (field in self.energy_balance_dict): result = None return result categorie = self.energy_balance_dict[field] - df = self.db_da.get_column_data('values', field, start=period["vanaf"], end=period["tot"]) + df = self.db_da.get_column_data( + "values", + field, + start=period["vanaf"], + end=period["tot"] if tot is None else tot, + ) df.index = pd.to_datetime(df["time"]) df = df.rename(columns={"value": field}) df["datasoort"] = "recorded" df_ha_result = pd.DataFrame() if len(df) > 0: - last_moment = df['time'].iloc[-1] + datetime.timedelta(hours=1) + last_moment = df["time"].iloc[-1] + datetime.timedelta(hours=1) else: last_moment = self.periodes[periode]["vanaf"] if last_moment < self.periodes[periode]["tot"]: @@ -1337,17 +1681,23 @@ def get_field_data(self, field: str, periode: str): else: df_ha_result = 
self.add_col_df(df_ha, df_ha_result, field) count += 1 - df_ha_result['datasoort'] = 'recorded' + df_ha_result["datasoort"] = "recorded" df_ha_result = df_ha_result.rename(columns={"tijd": "time"}) if len(df_ha_result) > 0: - last_moment = df_ha_result['time'].iloc[-1] + datetime.timedelta(hours=1) - df_ha_result['time'] = df_ha_result['time'].apply(lambda x: x.strftime("%Y-%m-%d %H:%M")) + last_moment = df_ha_result["time"].iloc[-1] + datetime.timedelta( + hours=1 + ) + df_ha_result["time"] = df_ha_result["time"].apply( + lambda x: x.strftime("%Y-%m-%d %H:%M") + ) if last_moment < self.periodes[periode]["tot"]: - df_prog = self.db_da.get_column_data('prognoses', field, start=last_moment, end=period["tot"]) + df_prog = self.db_da.get_column_data( + "prognoses", field, start=last_moment, end=period["tot"] + ) df_prog.index = pd.to_datetime(df_prog["time"]) df_prog = df_prog.rename(columns={"value": field}) - df_prog['datasoort'] = 'expected' + df_prog["datasoort"] = "expected" df_uur = pd.concat([df_ha_result, df_prog]) else: df_uur = df_ha_result @@ -1356,14 +1706,15 @@ def get_field_data(self, field: str, periode: str): def get_price_data(self, start, end): from dao.prog.utils import get_value_from_dict - df_da = self.db_da.get_column_data('values', "da", start=start, end=end) + + df_da = self.db_da.get_column_data("values", "da", start=start, end=end) old_dagstr = "" taxes_l = 0 taxes_t = 0 ol_l = 0 ol_t = 0 btw = 0 - columns = ["time", "da_ex", "da_cons", 'da_prod', "datasoort"] + columns = ["time", "da_ex", "da_cons", "da_prod", "datasoort"] df = pd.DataFrame(columns=columns) for row in df_da.itertuples(): if pd.isnull(row.time): @@ -1378,48 +1729,81 @@ def get_price_data(self, start, end): old_dagstr = dag_str da_cons = (row.value + taxes_l + ol_l) * (1 + btw / 100) da_prod = (row.value + taxes_t + ol_t) * (1 + btw / 100) - df.loc[df.shape[0]] = [datetime.datetime.strptime(row.time, "%Y-%m-%d %H:%M"), row.value, da_cons, - da_prod, row.datasoort] + df.loc[df.shape[0]] = [ + datetime.datetime.strptime(row.time, "%Y-%m-%d %H:%M"), + row.value, + da_cons, + da_prod, + row.datasoort, + ] return df - def get_soc_data(self, field: str, start:datetime.datetime, end:datetime.datetime)->pd.DataFrame: - df = self.db_da.get_column_data('prognoses', field, start=start, end=end) + def get_soc_data( + self, field: str, start: datetime.datetime, end: datetime.datetime + ) -> pd.DataFrame: + df = self.db_da.get_column_data("prognoses", field, start=start, end=end) return df - def get_api_data(self, field: str, periode: str, cumulate: bool = False): + def get_api_data( + self, field: str, periode: str, cumulate: bool = False, expected: bool = False + ): periode = periode.replace("_", " ") - grid_fields = ["consumption", "production", "netto_consumption", "cost", "profit", "netto_cost"] + grid_fields = [ + "consumption", + "production", + "netto_consumption", + "cost", + "profit", + "netto_cost", + ] + tot = None + if not expected: + now = datetime.datetime.now() + tot = datetime.datetime(now.year, now.month, now.day, now.hour) + else: + tot = self.periodes[periode]["tot"] df = pd.DataFrame() if field in ["grid"] + grid_fields: # grid data - df_grid = self.get_grid_data(periode) - df_grid['time'] = df_grid['vanaf'].apply(lambda x: pd.to_datetime(x).strftime("%Y-%m-%d %H:%M")) + df_grid = self.get_grid_data(periode, _tot=tot) + df_grid["time"] = df_grid["vanaf"].apply( + lambda x: pd.to_datetime(x).strftime("%Y-%m-%d %H:%M") + ) + if field in grid_fields: - df = df_grid[['time', field, 
"datasoort"]].copy() + df = df_grid[["time", field, "datasoort"]].copy() if cumulate: df[field] = df_grid[field].cumsum() - df.rename({field: 'value'}, axis=1, inplace=True) + df.rename({field: "value"}, axis=1, inplace=True) if field == "grid": - df = df_grid[['time', 'datasoort'] + grid_fields].copy() + df = df_grid[["time", "datasoort"] + grid_fields].copy() if cumulate: for field in grid_fields: df[field] = df[field].cumsum() - elif field == 'da': - df = self.get_price_data(self.periodes[periode]["vanaf"], self.periodes[periode]["tot"]) + elif field == "da": + df = self.get_price_data( + self.periodes[periode]["vanaf"], self.periodes[periode]["tot"] + ) elif field[0:3] == "soc": - df = self.get_soc_data(field, self.periodes[periode]["vanaf"], self.periodes[periode]["tot"]) + df = self.get_soc_data(field, self.periodes[periode]["vanaf"], tot) else: if not (field in self.energy_balance_dict): result = '{"message":"Failed"}' return result df = self.get_field_data(field, periode) - history_df = df[df['datasoort'] == 'recorded'] - history_df = history_df.drop('datasoort', axis=1) - history_json = history_df.to_json(orient='records') - expected_df = df[df['datasoort'] == 'expected'] - expected_df = expected_df.drop('datasoort', axis=1) - expected_json = expected_df.to_json(orient='records') - result = '{ "message":"Success", "recorded": ' + history_json + ', "expected" : ' + expected_json + ' }' + history_df = df[df["datasoort"] == "recorded"] + history_df = history_df.drop("datasoort", axis=1) + history_json = history_df.to_json(orient="records") + expected_df = df[df["datasoort"] == "expected"] + expected_df = expected_df.drop("datasoort", axis=1) + expected_json = expected_df.to_json(orient="records") + result = ( + '{ "message":"Success", "recorded": ' + + history_json + + ', "expected" : ' + + expected_json + + " }" + ) return result def make_graph(self, df, period, _options=None): @@ -1429,41 +1813,41 @@ def make_graph(self, df, period, _options=None): options = { "title": "Grafiek verbruik", "style": self.config.get(["graphics", "style"]), - "vaxis": [{ - "title": "kWh" + "vaxis": [{"title": "kWh"}, {"title": "euro"}], + "series": [ + { + "column": "Verbruik", + "title": "Verbruik", + "type": "stacked", + "color": "#00bfff", + }, + { + "column": "Productie", + "title": "Productie", + "negativ": "true", + "type": "stacked", + "color": "green", + }, + { + "column": "Kosten", + "label": "Kosten", + "type": "stacked", + "color": "red", + "vaxis": "right", + }, + { + "column": "Opbrengst", + "label": "Opbrengst", + "negativ": "true", + "type": "stacked", + "color": "#ff8000", + "vaxis": "right", }, - {"title": "euro" - } ], - "series": [{"column": "Verbruik", - "title": "Verbruik", - "type": "stacked", - "color": '#00bfff' - }, - {"column": "Productie", - "title": "Productie", - "negativ": "true", - "type": "stacked", - "color": 'green' - }, - {"column": "Kosten", - "label": "Kosten", - "type": "stacked", - "color": 'red', - "vaxis": "right" - }, - {"column": "Opbrengst", - "label": "Opbrengst", - "negativ": "true", - "type": "stacked", - "color": '#ff8000', - "vaxis": "right" - }, - ] } options["haxis"] = { "values": self.periodes[period]["interval"].capitalize(), - "title": self.periodes[period]["interval"] + " van " + period + "title": self.periodes[period]["interval"] + " van " + period, } gb = GraphBuilder() diff --git a/dao/prog/da_scheduler.py b/dao/prog/da_scheduler.py index 7e2e4b0..9a90dfc 100644 --- a/dao/prog/da_scheduler.py +++ b/dao/prog/da_scheduler.py @@ -11,8 +11,7 @@ def 
__init__(self, file_name: str = None): self.scheduler_tasks = self.config.get(["scheduler"]) self.active = True if "active" in self.scheduler_tasks: - self.active = not(self.scheduler_tasks["active"].lower() == - "false") + self.active = not (self.scheduler_tasks["active"].lower() == "false") def scheduler(self): # if not (self.notification_entity is None) and self.notification_opstarten: @@ -21,7 +20,9 @@ def scheduler(self): while True: t = datetime.datetime.now() - next_min = t - datetime.timedelta(minutes=-1, seconds=t.second, microseconds=t.microsecond) + next_min = t - datetime.timedelta( + minutes=-1, seconds=t.second, microseconds=t.microsecond + ) # wacht tot hele minuut 0% cpu time.sleep((next_min - t).total_seconds()) if not self.active: diff --git a/dao/prog/day_ahead.py b/dao/prog/day_ahead.py index 189f956..491ddc5 100644 --- a/dao/prog/day_ahead.py +++ b/dao/prog/day_ahead.py @@ -1,16 +1,23 @@ """ -Het programma Day Ahead Optimalisatie kun je je energieverbruik en -energiekosten optimaliseren als je gebruik maakt van dynamische prijzen. +Met het programma Day Ahead Optimizer kun je je energieverbruik en energiekosten optimaliseren als +je gebruik maakt van dynamische prijzen. Zie verder: DOCS.md """ + import datetime import datetime as dt import sys import math import pandas as pd from mip import Model, xsum, minimize, BINARY, CONTINUOUS -from utils import (get_value_from_dict, is_laagtarief, convert_timestr, - calc_uur_index, error_handling, calc_adjustment_heatcurve) +from utils import ( + get_value_from_dict, + is_laagtarief, + convert_timestr, + calc_uur_index, + error_handling, + calc_adjustment_heatcurve, +) import logging from da_base import DaBase @@ -29,16 +36,22 @@ def __init__(self, file_name=None): self.prices_options = self.config.get(["prices"]) self.ev_options = self.config.get(["electric vehicle"]) self.heating_options = self.config.get(["heating"]) - self.use_calc_baseload = (self.config.get(["use_calc_baseload"], None, - "false").lower() == "true") - self.heater_present = False + self.use_calc_baseload = ( + self.config.get(["use_calc_baseload"], None, "false").lower() == "true" + ) + self.hp_present = False + self.hp_enabled = False + self.hp_adjustment = None + self.hp_heat_demand = True self.boiler_present = False + self.boiler_enabled = False self.grid_max_power = self.config.get(["grid", "max_power"], None, 17) self.machines = self.config.get(["machines"], None, []) # self.start_logging() - def calc_optimum(self, _start_dt: dt.datetime | None = None, - _start_soc: float | None = None): + def calc_optimum( + self, _start_dt: dt.datetime | None = None, _start_soc: float | None = None + ): if _start_dt is not None or _start_soc is not None: self.debug = True logging.info(f"Debug = {self.debug}") @@ -49,38 +62,50 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # start_dt = dt.datetime(year=2024, month=9, day=26, hour=14, minute=0) start_ts = int(start_dt.timestamp()) modulo = start_ts % self.interval_s - if modulo > (self.interval_s-10): + if modulo > (self.interval_s - 10): start_ts = start_ts + self.interval_s - modulo start_dt = dt.datetime.fromtimestamp(start_ts) start_h = int(self.interval_s * math.floor(start_ts / self.interval_s)) fraction_first_interval = 1 - (start_ts - start_h) / self.interval_s - prog_data = self.db_da.get_prognose_data(start=start_h, end=None) + if self.interval == "hour": + prog_data = self.db_da.get_prognose_data( + start=start_h, end=None, interval=self.interval + ) + u = len(prog_data) if u <= 2: - 
logging.error(f"Er ontbreken voor een aantal uur gegevens " - f"(meteo en/of dynamische prijzen) " - f"er kan niet worden gerekend") + logging.error( + f"Er ontbreken voor een aantal uur gegevens " + f"(meteo en/of dynamische prijzen) " + f"er kan niet worden gerekend" + ) if self.notification_entity is not None: - self.set_value(self.notification_entity, - f"Er ontbreken voor een aantal uur gegevens; " - f"er kan niet worden gerekend") + self.set_value( + self.notification_entity, + f"Er ontbreken voor een aantal uur gegevens; " + f"er kan niet worden gerekend", + ) return if u <= 8: - logging.warning(f"Er ontbreken voor een aantal uur gegevens " - f"(meteo en/of dynamische prijzen)\n" - f"controleer of alle gegevens zijn opgehaald") + logging.warning( + f"Er ontbreken voor een aantal uur gegevens " + f"(meteo en/of dynamische prijzen)\n" + f"controleer of alle gegevens zijn opgehaald" + ) if self.notification_entity is not None: - self.set_value(self.notification_entity, - f"Er ontbreken voor een aantal uur gegevens") - - if (self.notification_entity is not None and - self.notification_berekening): - self.set_value(self.notification_entity, "DAO calc gestart " + - dt.datetime.now(). - strftime('%d-%m-%Y %H:%M:%S')) - logging.debug('Prognose data:\n{}'.format(prog_data.to_string())) - - ''' + self.set_value( + self.notification_entity, + f"Er ontbreken voor een aantal uur gegevens", + ) + + if self.notification_entity is not None and self.notification_berekening: + self.set_value( + self.notification_entity, + "DAO calc gestart " + dt.datetime.now().strftime("%d-%m-%Y %H:%M:%S"), + ) + logging.debug("Prognose data:\n{}".format(prog_data.to_string())) + + """ day_ahead prijs omrekenen naar twee prijzen 1. pl: prijs voor verbruik (levering) altijd met opslag voor @@ -94,7 +119,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, eb_t 0,12955 opslag leverancier ol_t (aftrek!!) 
0,0 en btw over het geheel 21% - ''' + """ taxes_l_def = self.prices_options["energy taxes delivery"] # eb + ode levering # eb_l = 0.12955 @@ -114,8 +139,8 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # prijzen van een traditionele leverancier zijn alleen indicatief; # er wordt niet mee gerekend - gc_p_low = self.prices_options['regular low'] - gc_p_high = self.prices_options['regular high'] + gc_p_low = self.prices_options["regular low"] + gc_p_high = self.prices_options["regular high"] pl = [] # prijs levering day_ahead pt = [] # prijs teruglevering day_ahead pl_avg = [] # prijs levering day_ahead gemiddeld @@ -131,10 +156,8 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, taxes_l = get_value_from_dict(dag_str, taxes_l_def) taxes_t = get_value_from_dict(dag_str, taxes_t_def) btw = get_value_from_dict(dag_str, btw_def) - price_l = round((row.da_price + taxes_l + ol_l) * - (1 + btw / 100), 5) - price_t = round((row.da_price + taxes_t + ol_t) * - (1 + btw / 100), 5) + price_l = round((row.da_price + taxes_l + ol_l) * (1 + btw / 100), 5) + price_t = round((row.da_price + taxes_t + ol_t) * (1 + btw / 100), 5) pl.append(price_l) pt.append(price_t) # tarief teruglevering zonder eb en btw @@ -216,20 +239,24 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, hour_fraction.append(1) # pv.append(pv_total) for s in range(solar_num): - prod = (self.meteo.calc_solar_rad( - self.solar[s], row.time, row.glob_rad) * pv_yield[s] * - hour_fraction[-1]) + prod = ( + self.meteo.calc_solar_rad(self.solar[s], row.time, row.glob_rad) + * pv_yield[s] + * hour_fraction[-1] + ) solar_prod[s].append(prod) pv_total += prod pv_org_ac.append(pv_total) pv_total = 0 for b in range(B): for s in range(len(self.battery_options[b]["solar"])): - prod = (self.meteo.calc_solar_rad( - self.battery_options[b]["solar"][s], row.time, - row.glob_rad) * - self.battery_options[b]["solar"][s]["yield"] * - hour_fraction[-1]) + prod = ( + self.meteo.calc_solar_rad( + self.battery_options[b]["solar"][s], row.time, row.glob_rad + ) + * self.battery_options[b]["solar"][s]["yield"] + * hour_fraction[-1] + ) pv_total += prod pv_org_dc.append(pv_total) @@ -238,8 +265,9 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, taxes_t = get_value_from_dict(dag_str, taxes_t_def) btw = get_value_from_dict(dag_str, btw_def) if is_laagtarief( - dt.datetime(dtime.year, dtime.month, dtime.day, hour), - self.config.get(["switch to low"], self.prices_options, 23)): + dt.datetime(dtime.year, dtime.month, dtime.day, hour), + self.config.get(["switch to low"], self.prices_options, 23), + ): p_grl.append((gc_p_low + taxes_l) * (1 + btw / 100)) p_grt.append((gc_p_low + taxes_t) * (1 + btw / 100)) else: @@ -251,15 +279,17 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, try: if self.log_level == logging.INFO: start_df = pd.DataFrame( - {"uur": uur, - "tijd": tijd, - 'p_l': pl, - 'p_t': pt, - 'base': b_l, - 'pv_ac': pv_org_ac, - 'pv_dc': pv_org_dc - }) - start_df.set_index('uur') + { + "uur": uur, + "tijd": tijd, + "p_l": pl, + "p_t": pt, + "base": b_l, + "pv_ac": pv_org_ac, + "pv_dc": pv_org_dc, + } + ) + start_df.set_index("uur") logging.info(f"Start waarden: \n{start_df.to_string()}") except Exception as ex: logging.warning(ex) @@ -273,34 +303,39 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, logging.info(f"pv_ac: {len(pv_org_dc)}") # volledig salderen? 
- salderen = self.prices_options['tax refund'] == "True" + salderen = self.prices_options["tax refund"] == "True" last_invoice = dt.datetime.strptime( - self.prices_options['last invoice'], "%Y-%m-%d") - cons_data_history = ( - self.db_da.get_consumption(last_invoice, dt.datetime.today())) - logging.info(f"Verbruik dit contractjaar: " - f"{cons_data_history['consumption']}") - logging.info(f"Productie dit contractjaar: " - f"{cons_data_history['production']}") + self.prices_options["last invoice"], "%Y-%m-%d" + ) + cons_data_history = self.db_da.get_consumption( + last_invoice, dt.datetime.today() + ) + logging.info( + f"Verbruik dit contractjaar: " f"{cons_data_history['consumption']}" + ) + logging.info( + f"Productie dit contractjaar: " f"{cons_data_history['production']}" + ) if not salderen: salderen = ( - cons_data_history["production"] < - cons_data_history["consumption"]) + cons_data_history["production"] < cons_data_history["consumption"] + ) if salderen: logging.info(f"All taxes refund (alles wordt gesaldeerd)") consumption_today = 0 production_today = 0 else: - consumption_today = ( - float(self.get_state("sensor.daily_grid_consumption").state)) - production_today = ( - float(self.get_state("sensor.daily_grid_production").state)) + consumption_today = float( + self.get_state("sensor.daily_grid_consumption").state + ) + production_today = float( + self.get_state("sensor.daily_grid_production").state + ) logging.info(f"consumption today: {consumption_today} kWh") logging.info(f"production today: {production_today} kWh") - logging.info(f"verschil: " - f"{consumption_today - production_today} kWh") + logging.info(f"verschil: " f"{consumption_today - production_today} kWh") model = Model() @@ -311,11 +346,16 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ############################################################## # pv ac ############################################################## - pv_ac = [[model.add_var(var_type=CONTINUOUS, lb=0, - ub=solar_prod[s][u] * 1.1) - for u in range(U)] for s in range(solar_num)] - pv_ac_on_off = [[model.add_var(var_type=BINARY) - for _ in range(U)] for _ in range(solar_num)] + pv_ac = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=solar_prod[s][u] * 1.1) + for u in range(U) + ] + for s in range(solar_num) + ] + pv_ac_on_off = [ + [model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(solar_num) + ] # constraints @@ -335,7 +375,6 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, one_soc = [] kwh_cycle_cost = [] start_soc = [] - opt_low_level = [] # pv_dc = [] # pv bruto productie per batterij per uur # pv_dc_hour_sum = [] # pv_from_dc_hour_sum = [] @@ -362,12 +401,14 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, pv_prod_ac.append([]) pv_prod_dc.append([]) # noinspection PyTypeChecker - max_charge_power.append(int( - self.battery_options[b]["charge stages"][-1]["power"])/1000) + max_charge_power.append( + int(self.battery_options[b]["charge stages"][-1]["power"]) / 1000 + ) # CS is aantal charge stages CS.append(len(self.battery_options[b]["charge stages"])) max_discharge_power.append( - self.battery_options[b]["discharge stages"][-1]["power"]/1000) + self.battery_options[b]["discharge stages"][-1]["power"] / 1000 + ) # reduced power red_hours = self.config.get(["reduced hours"], self.battery_options[b], {}) @@ -378,60 +419,74 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for key, value in red_hours.items(): reduced = True hour = int(key) - power = value/1000 + power = 
value / 1000 for u in range(U): if uur[u] == hour: red_power[u] = power reduced_power.append(red_power) if reduced: if self.log_level == logging.DEBUG: - logging.debug(f"Reduced hours for {self.battery_options[b]['name']}") + logging.debug( + f"Reduced hours for {self.battery_options[b]['name']}" + ) print(f"hour max-power(kW)") for u in range(U): print(f"{uur[u]:2.0f} {red_power[u]:6.3f}") else: - logging.info(f"Reduced hours applied for {self.battery_options[b]['name']}") + logging.info( + f"Reduced hours applied for {self.battery_options[b]['name']}" + ) else: - logging.info(f"No reduced hours applied for {self.battery_options[b]['name']}") + logging.info( + f"No reduced hours applied for {self.battery_options[b]['name']}" + ) max_dc_from_bat_power.append( - self.config.get(["bat_to_dc max power"], - self.battery_options[b], - 2000 * max_discharge_power[b])/1000) + self.config.get( + ["bat_to_dc max power"], + self.battery_options[b], + 2000 * max_discharge_power[b], + ) + / 1000 + ) max_dc_to_bat_power.append( - self.config.get(["dc_to_bat max power"], - self.battery_options[b], - 2000 * max_discharge_power[b])/1000) + self.config.get( + ["dc_to_bat max power"], + self.battery_options[b], + 2000 * max_discharge_power[b], + ) + / 1000 + ) # DS is aantal discharge stages DS.append(len(self.battery_options[b]["discharge stages"])) sum_eff = 0 for ds in range(DS[b])[1:]: - sum_eff += self.battery_options[b]["discharge stages"][ - ds]["efficiency"] - avg_eff_dc_to_ac.append(sum_eff/(DS[b]-1)) + sum_eff += self.battery_options[b]["discharge stages"][ds]["efficiency"] + avg_eff_dc_to_ac.append(sum_eff / (DS[b] - 1)) ac = float(self.battery_options[b]["capacity"]) one_soc.append(ac / 100) # 1% van 28 kWh = 0,28 kWh kwh_cycle_cost.append(self.battery_options[b]["cycle cost"]) logging.debug(f"cycle cost: {kwh_cycle_cost[b]} eur/kWh") - eff_dc_to_bat.append(float(self.battery_options[b][ - "dc_to_bat efficiency"])) + eff_dc_to_bat.append(float(self.battery_options[b]["dc_to_bat efficiency"])) # fractie van 1 - eff_bat_to_dc.append(float(self.battery_options[b][ - "bat_to_dc efficiency"])) + eff_bat_to_dc.append(float(self.battery_options[b]["bat_to_dc efficiency"])) # fractie van 1 if _start_soc is None or b > 0: - start_soc_str = self.get_state(self.battery_options[b]["entity actual level"]).state + start_soc_str = self.get_state( + self.battery_options[b]["entity actual level"] + ).state if start_soc_str.lower() == "unavailable": start_soc.append(50) else: start_soc.append(float(start_soc_str)) else: start_soc.append(_start_soc) - logging.info(f"Startwaarde SoC {self.battery_options[b]['name']}: {start_soc[b]}%") - opt_low_level.append(float(self.battery_options[b]["optimal lower level"])) + logging.info( + f"Startwaarde SoC {self.battery_options[b]['name']}: {start_soc[b]}%" + ) # pv dc mppt pv_dc_num.append(len(self.battery_options[b]["solar"])) @@ -442,13 +497,24 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, pv_yield = self.battery_options[b]["solar"][s]["yield"] for u in range(U): # pv_prod productie van batterij b van solar s in uur u - prod_dc = self.meteo.calc_solar_rad(self.battery_options[b]["solar"][s], - int(tijd[u].timestamp()), - global_rad[u]) * pv_yield + prod_dc = ( + self.meteo.calc_solar_rad( + self.battery_options[b]["solar"][s], + int(tijd[u].timestamp()), + global_rad[u], + ) + * pv_yield + ) eff = 1 for ds in range(DS[b]): - if self.battery_options[b]["discharge stages"][ds]["power"]/1000 > prod_dc: - eff = self.battery_options[b]["discharge 
stages"][ds]["efficiency"] + if ( + self.battery_options[b]["discharge stages"][ds]["power"] + / 1000 + > prod_dc + ): + eff = self.battery_options[b]["discharge stages"][ds][ + "efficiency" + ] break prod_ac = prod_dc * eff pv_prod_dc[b][s].append(prod_dc) @@ -461,14 +527,24 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # totaal elektra van ac naar de busbar, ieder uur # alle variabelen definieren alles in W tenzij aangegeven - # mppt aan/uit evt bij netto prijzen onder nul - pv_dc_on_off = [[[model.add_var(var_type=BINARY) for _ in range(U)] - for _ in range(pv_dc_num[b])] for b in range(B)] - pv_prod_dc_sum = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=2 * max_charge_power[b]) - for _ in range(U)] for b in range(B)] + # mppt aan/uit eventueel bij netto prijzen onder nul + pv_dc_on_off = [ + [ + [model.add_var(var_type=BINARY) for _ in range(U)] + for _ in range(pv_dc_num[b]) + ] + for b in range(B) + ] + pv_prod_dc_sum = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=2 * max_charge_power[b]) + for _ in range(U) + ] + for b in range(B) + ] # ac_to_dc met aan uit ############################################################# - ''' + """ #ac_to_dc: wat er gaat er vanuit ac naar de omvormer ac_to_dc = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_charge_power[b]) for u in range(U)] for b in range(B)] @@ -481,64 +557,140 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # vermogens klasse aan/uit ac_to_dc_st_on = [[[model.add_var(var_type=BINARY) for u in range(U)] for cs in range(CS[b])] for b in range(B)] - ''' + """ # met sos ################################################################### - ac_to_dc_samples = [[self.battery_options[b]["charge stages"][cs]["power"]/1000 - for cs in range(CS[b])] for b in range(B)] - dc_from_ac_samples = [[(self.battery_options[b]["charge stages"][cs]["efficiency"] * - self.battery_options[b]["charge stages"][cs]["power"] / 1000) - for cs in range(CS[b])] for b in range(B)] - ac_to_dc = [[model.add_var(var_type=CONTINUOUS, - lb=0, - ub=min(reduced_power[b][u], max_charge_power[b])) - for u in range(U)] for b in range(B)] - ac_to_dc_on = [[model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(B)] - ac_to_dc_w = [[[model.add_var(var_type=CONTINUOUS, lb=0, ub=1) - for _ in range(CS[b])] for _ in range(U)] for b in range(B)] + ac_to_dc_samples = [ + [ + self.battery_options[b]["charge stages"][cs]["power"] / 1000 + for cs in range(CS[b]) + ] + for b in range(B) + ] + dc_from_ac_samples = [ + [ + ( + self.battery_options[b]["charge stages"][cs]["efficiency"] + * self.battery_options[b]["charge stages"][cs]["power"] + / 1000 + ) + for cs in range(CS[b]) + ] + for b in range(B) + ] + ac_to_dc = [ + [ + model.add_var( + var_type=CONTINUOUS, + lb=0, + ub=min(reduced_power[b][u], max_charge_power[b]), + ) + for u in range(U) + ] + for b in range(B) + ] + ac_to_dc_on = [ + [model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(B) + ] + ac_to_dc_w = [ + [ + [model.add_var(var_type=CONTINUOUS, lb=0, ub=1) for _ in range(CS[b])] + for _ in range(U) + ] + for b in range(B) + ] # tot hier met sos # ''' - ac_from_dc = [[model.add_var(var_type=CONTINUOUS, - lb=0, - ub=min(reduced_power[b][u], max_discharge_power[b])) - for u in range(U)] for b in range(B)] - ac_from_dc_on = [[model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(B)] + ac_from_dc = [ + [ + model.add_var( + var_type=CONTINUOUS, + lb=0, + ub=min(reduced_power[b][u], max_discharge_power[b]), + ) + for u in range(U) + ] + for b in 
range(B) + ] + ac_from_dc_on = [ + [model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(B) + ] # elektra per vermogensklasse van busbar naar ac, ieder uur - ac_from_dc_st = [[[model.add_var(var_type=CONTINUOUS, lb=0, - ub=self.battery_options[b]["discharge stages"][ds]["power"]/1000) - for _ in range(U)] for ds in range(DS[b])] for b in range(B)] - ac_from_dc_st_on = [[[model.add_var(var_type=BINARY) - for _ in range(U)] for _ in range(DS[b])] for b in range(B)] + ac_from_dc_st = [ + [ + [ + model.add_var( + var_type=CONTINUOUS, + lb=0, + ub=self.battery_options[b]["discharge stages"][ds]["power"] + / 1000, + ) + for _ in range(U) + ] + for ds in range(DS[b]) + ] + for b in range(B) + ] + ac_from_dc_st_on = [ + [[model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(DS[b])] + for b in range(B) + ] # energiebalans dc - dc_from_ac = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_charge_power[b]) - for _ in range(U)] for b in range(B)] - dc_to_ac = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_discharge_power[b]) - for _ in range(U)] for b in range(B)] - dc_from_bat = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_dc_from_bat_power[b]) - for _ in range(U)] for b in range(B)] - dc_to_bat = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_dc_to_bat_power[b]) - for _ in range(U)] for b in range(B)] + dc_from_ac = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_charge_power[b]) + for _ in range(U) + ] + for b in range(B) + ] + dc_to_ac = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_discharge_power[b]) + for _ in range(U) + ] + for b in range(B) + ] + dc_from_bat = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_dc_from_bat_power[b]) + for _ in range(U) + ] + for b in range(B) + ] + dc_to_bat = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_dc_to_bat_power[b]) + for _ in range(U) + ] + for b in range(B) + ] # SoC - soc = [[model.add_var(var_type=CONTINUOUS, - lb=min(start_soc[b], float(self.battery_options[b]["lower limit"])), - ub=max(start_soc[b], float(self.battery_options[b]["upper limit"]))) - for _ in range(U + 1)] for b in range(B)] - soc_low = [[model.add_var(var_type=CONTINUOUS, - lb=min(start_soc[b], float(self.battery_options[b]["lower limit"])), - ub=opt_low_level[b]) for _ in range(U + 1)] for b in range(B)] - soc_mid = [[model.add_var(var_type=CONTINUOUS, lb=0, - ub=-opt_low_level[b] + max(start_soc[b], - float(self.battery_options[b]["upper limit"]))) - for _ in range(U + 1)] for b in range(B)] + lower_limit = float( + self.config.get(["lower limit"], self.battery_options[b], 20) + ) + upper_limit = float( + self.config.get(["upper limit"], self.battery_options[b], 100) + ) + soc = [ + [ + model.add_var( + var_type=CONTINUOUS, + lb=min(start_soc[b], lower_limit), + ub=max(start_soc[b], upper_limit), + ) + for _ in range(U + 1) + ] + for b in range(B) + ] # alle constraints for b in range(B): for u in range(U): # laden, alles uitgedrukt in vermogen kW # met aan/uit - ''' + """ for cs in range(CS[b]): model += (ac_to_dc_st[b][cs][u] <= self.battery_options[b]["charge stages"][cs]["power"] * @@ -553,77 +705,111 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, model += dc_from_ac[b][u] == xsum(ac_to_dc_st[b][cs][u] * \ self.battery_options[b]["charge stages"][cs]["efficiency"] for cs in range(CS[b])) - ''' + """ # met sos model += xsum(ac_to_dc_w[b][u][cs] for cs in range(CS[b])) == 1 - model += xsum(ac_to_dc_w[b][u][cs] * ac_to_dc_samples[b][cs] - for cs in range(CS[b])) == ac_to_dc[b][u] - model += 
xsum(ac_to_dc_w[b][u][cs] * dc_from_ac_samples[b][cs] - for cs in range(CS[b])) == dc_from_ac[b][u] - model.add_sos([(ac_to_dc_w[b][u][cs], ac_to_dc_samples[b][cs]) - for cs in range(CS[b])], 2) + model += ( + xsum( + ac_to_dc_w[b][u][cs] * ac_to_dc_samples[b][cs] + for cs in range(CS[b]) + ) + == ac_to_dc[b][u] + ) + model += ( + xsum( + ac_to_dc_w[b][u][cs] * dc_from_ac_samples[b][cs] + for cs in range(CS[b]) + ) + == dc_from_ac[b][u] + ) + model.add_sos( + [ + (ac_to_dc_w[b][u][cs], ac_to_dc_samples[b][cs]) + for cs in range(CS[b]) + ], + 2, + ) # tot hier met sos # ontladen for ds in range(DS[b]): - model += (ac_from_dc_st[b][ds][u] <= - self.battery_options[b]["discharge stages"][ds]["power"] * - ac_from_dc_st_on[b][ds][u]/1000) + model += ( + ac_from_dc_st[b][ds][u] + <= self.battery_options[b]["discharge stages"][ds]["power"] + * ac_from_dc_st_on[b][ds][u] + / 1000 + ) for ds in range(DS[b])[1:]: - model += (ac_from_dc_st[b][ds][u] >= - self.battery_options[b]["discharge stages"][ds - 1]["power"] * - ac_from_dc_st_on[b][ds][u]/1000) - - model += ac_from_dc[b][u] == xsum(ac_from_dc_st[b][ds][u] for ds in range(DS[b])) + model += ( + ac_from_dc_st[b][ds][u] + >= self.battery_options[b]["discharge stages"][ds - 1]["power"] + * ac_from_dc_st_on[b][ds][u] + / 1000 + ) + + model += ac_from_dc[b][u] == xsum( + ac_from_dc_st[b][ds][u] for ds in range(DS[b]) + ) model += (xsum(ac_from_dc_st_on[b][ds][u] for ds in range(DS[b]))) <= 1 - model += dc_to_ac[b][u] == xsum(ac_from_dc_st[b][ds][u] / self.battery_options[b] - ["discharge stages"][ds]["efficiency"] - for ds in range(DS[b])) + model += dc_to_ac[b][u] == xsum( + ac_from_dc_st[b][ds][u] + / self.battery_options[b]["discharge stages"][ds]["efficiency"] + for ds in range(DS[b]) + ) for b in range(B): - for u in range(U + 1): - model += soc[b][u] == soc_low[b][u] + soc_mid[b][u] model += soc[b][0] == start_soc[b] - entity_min_soc_end = self.config.get(["entity min soc end opt"], - self.battery_options[b], - None) + entity_min_soc_end = self.config.get( + ["entity min soc end opt"], self.battery_options[b], None + ) if entity_min_soc_end is None: min_soc_end_opt = 0 else: min_soc_end_opt = float(self.get_state(entity_min_soc_end).state) - entity_max_soc_end = self.config.get(["entity max soc end opt"], - self.battery_options[b], - None) + entity_max_soc_end = self.config.get( + ["entity max soc end opt"], self.battery_options[b], None + ) if entity_max_soc_end is None: max_soc_end_opt = 100 else: max_soc_end_opt = float(self.get_state(entity_max_soc_end).state) if max_soc_end_opt <= min_soc_end_opt: - logging.error(f"'max soc end opt' ({max_soc_end_opt}) moet groter zijn dan " - f"'min soc end opt' ({min_soc_end_opt}); " - f"het programma kan nu geen optimale oplossing berekenem") + logging.error( + f"'max soc end opt' ({max_soc_end_opt}) moet groter zijn dan " + f"'min soc end opt' ({min_soc_end_opt}); " + f"het programma kan nu geen optimale oplossing berekenem" + ) return - model += soc[b][U] >= max(opt_low_level[b] / 2, min_soc_end_opt) + model += soc[b][U] >= min_soc_end_opt model += soc[b][U] <= max_soc_end_opt for u in range(U): - model += (soc[b][u + 1] == soc[b][u] + - (dc_to_bat[b][u] * eff_dc_to_bat[b] * hour_fraction[u] / one_soc[b]) - - ((dc_from_bat[b][u] * hour_fraction[u] / eff_bat_to_dc[b]) / one_soc[b])) - model += pv_prod_dc_sum[b][u] == xsum(pv_prod_dc[b][s][u] * pv_dc_on_off[b][s][u] - for s in range(pv_dc_num[b])) + model += soc[b][u + 1] == soc[b][u] + ( + dc_to_bat[b][u] * eff_dc_to_bat[b] * hour_fraction[u] / 
one_soc[b] + ) - ( + (dc_from_bat[b][u] * hour_fraction[u] / eff_bat_to_dc[b]) + / one_soc[b] + ) + model += pv_prod_dc_sum[b][u] == xsum( + pv_prod_dc[b][s][u] * pv_dc_on_off[b][s][u] + for s in range(pv_dc_num[b]) + ) # nakijken!!! - model += (dc_from_ac[b][u] + dc_from_bat[b][u] + pv_prod_dc_sum[b][u] == - dc_to_ac[b][u] + dc_to_bat[b][u]) + model += ( + dc_from_ac[b][u] + dc_from_bat[b][u] + pv_prod_dc_sum[b][u] + == dc_to_ac[b][u] + dc_to_bat[b][u] + ) model += dc_from_ac[b][u] <= ac_to_dc_on[b][u] * max_charge_power[b] - model += ac_from_dc[b][u] <= ac_from_dc_on[b][u] * max_discharge_power[b] + model += ( + ac_from_dc[b][u] <= ac_from_dc_on[b][u] * max_discharge_power[b] + ) model += (ac_to_dc_on[b][u] + ac_from_dc_on[b][u]) <= 1 for s in range(pv_dc_num[b]): - entity_pv_switch = self.config.get(["entity pv switch"], - self.battery_options[b]["solar"][s], - None) + entity_pv_switch = self.config.get( + ["entity pv switch"], self.battery_options[b]["solar"][s], None + ) if entity_pv_switch == "": entity_pv_switch = None if entity_pv_switch is None: @@ -634,26 +820,50 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # boiler # ##################################### boiler_on = [model.add_var(var_type=BINARY) for _ in range(U)] - self.boiler_present = self.config.get(["boiler present"], self.boiler_options, - "true").lower() == "true" - if not self.boiler_present: + self.boiler_present = ( + self.config.get(["boiler present"], self.boiler_options, "true").lower() + == "true" + ) + boiler_heated_by_heatpump = False + if self.boiler_present: + entity_boiler_enabled = self.config.get( + ["entity boiler enabled"], self.boiler_options, None + ) + if entity_boiler_enabled is None: + self.boiler_enabled = True + else: + self.boiler_enabled = ( + self.get_state(entity_boiler_enabled).state == "on" + ) + else: + self.boiler_enabled = False + if not self.boiler_present or not self.boiler_enabled: # default values boiler_setpoint = 50 boiler_hysterese = 10 spec_heat_boiler = 200 * 4.2 + 100 * 0.5 # kJ/K cop_boiler = 3 # end temp boiler - boiler_temp = [model.add_var(var_type=CONTINUOUS, lb=20, ub=20) for _ in range(U + 1)] + boiler_temp = [ + model.add_var(var_type=CONTINUOUS, lb=20, ub=20) for _ in range(U + 1) + ] # consumption boiler c_b = [model.add_var(var_type=CONTINUOUS, lb=0, ub=0) for _ in range(U)] model += xsum(boiler_on[j] for j in range(U)) == 0 - logging.info(f"Geen boiler aanwezig") + logging.info( + f"Boiler niet aanwezig of staat uit, boiler wordt niet ingepland" + ) else: # 50 huidige boilertemperatuur ophalen uit ha - boiler_act_temp = ( - float(self.get_state(self.boiler_options["entity actual temp."]).state)) - boiler_setpoint = float(self.get_state(self.boiler_options["entity setpoint"]).state) - boiler_hysterese = float(self.get_state(self.boiler_options["entity hysterese"]).state) + boiler_act_temp = float( + self.get_state(self.boiler_options["entity actual temp."]).state + ) + boiler_setpoint = float( + self.get_state(self.boiler_options["entity setpoint"]).state + ) + boiler_hysterese = float( + self.get_state(self.boiler_options["entity hysterese"]).state + ) # 0.4 #K/uur instelbaar boiler_cooling = self.boiler_options["cooling rate"] # 45 # oC instelbaar daaronder kan worden verwarmd @@ -666,59 +876,102 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, spec_heat_boiler = vol * 4.2 + 200 * 0.5 # kJ/K cop_boiler = self.boiler_options["cop"] power = self.boiler_options["elec. 
power"] # W - + boiler_heated_by_heatpump = ( + self.config.get( + ["boiler heated by heatpump"], self.boiler_options, "True" + ).lower() + == "true" + ) # tijdstip index waarop boiler kan worden verwarmd - boiler_start = ( - int(max(0, min(23, int((boiler_act_temp - boiler_bovengrens) / boiler_cooling))))) + boiler_start = int( + max( + 0, + min( + 23, int((boiler_act_temp - boiler_bovengrens) / boiler_cooling) + ), + ) + ) # tijdstip index waarop boiler nog aan kan # (41-40)/0.4=2.5 - boiler_end = ( - int(min(U - 1, max(0, int((boiler_act_temp - boiler_ondergrens)/boiler_cooling))))) - boiler_temp = [model.add_var(var_type=CONTINUOUS, - lb=min(boiler_act_temp, - boiler_setpoint - boiler_hysterese - 10), - ub=boiler_setpoint + 10) - for _ in range(U + 1)] # end temp boiler + boiler_end = int( + min( + U - 1, + max(0, int((boiler_act_temp - boiler_ondergrens) / boiler_cooling)), + ) + ) + boiler_temp = [ + model.add_var( + var_type=CONTINUOUS, + lb=min(boiler_act_temp, boiler_setpoint - boiler_hysterese - 10), + ub=boiler_setpoint + 10, + ) + for _ in range(U + 1) + ] # end temp boiler if boiler_start > boiler_end: # geen boiler opwarming in deze periode - c_b = [model.add_var(var_type=CONTINUOUS, lb=0, ub=0) - for _ in range(U)] # consumption boiler - model += xsum(boiler_on[j] for j in range(U) - [boiler_start:boiler_end + 1]) == 0 - logging.debug(f"Boiler: geen opwarming") + c_b = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=0) for _ in range(U) + ] # consumption boiler + model += ( + xsum(boiler_on[j] for j in range(U)[boiler_start : boiler_end + 1]) + == 0 + ) + logging.debug(f"Boiler: er wordt geen opwarming inpland") boiler_end_temp = boiler_act_temp - boiler_cooling * U logging.debug(f"Boiler eind temperatuur: {boiler_end_temp}") for u in range(U): # opwarming in K = kWh opwarming * 3600 = kJ / spec heat boiler - 3 model += boiler_temp[u + 1] == boiler_temp[u] - boiler_cooling else: - logging.info(f"Boiler opwarmen wordt ingepland tussen: " - f"{uur[boiler_start]} en {uur[boiler_end]} uur") + logging.info( + f"Boiler opwarmen wordt ingepland tussen: " + f"{uur[boiler_start]} en {uur[boiler_end]} uur" + ) needed_elec = [0.0 for _ in range(U)] needed_time = [0 for _ in range(U)] - needed_heat = max(0.0, float(spec_heat_boiler * ( - boiler_setpoint - (boiler_act_temp - 4 - boiler_cooling * ( - boiler_end - boiler_start))) / 3600)) + needed_heat = max( + 0.0, + float( + spec_heat_boiler + * ( + boiler_setpoint + - ( + boiler_act_temp + - 4 + - boiler_cooling * (boiler_end - boiler_start) + ) + ) + / 3600 + ), + ) for u in range(boiler_start, boiler_end + 1): needed_elec[u] = needed_heat / cop_boiler # kWh needed_time[u] = needed_elec[u] * 1000 / power # hour - c_b = [model.add_var(var_type=CONTINUOUS, lb=0, ub=needed_elec[u]) - for u in range(U)] # cons. boiler + c_b = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=needed_elec[u]) + for u in range(U) + ] # cons. 
boiler for u in range(U): model += c_b[u] == boiler_on[u] * needed_elec[u] if u < boiler_start: model += boiler_on[u] == 0 elif u > boiler_end: model += boiler_on[u] == 0 - model += xsum(boiler_on[j] for j in range(U) - [boiler_start:boiler_end + 1]) == 1 + model += ( + xsum(boiler_on[j] for j in range(U)[boiler_start : boiler_end + 1]) + == 1 + ) model += boiler_temp[0] == boiler_act_temp for u in range(U): # opwarming in K = kWh opwarming * 3600 = kJ / spec heat boiler - 3 - model += (boiler_temp[u + 1] == boiler_temp[u] - boiler_cooling + - c_b[u] * cop_boiler * 3600 / spec_heat_boiler) + model += ( + boiler_temp[u + 1] + == boiler_temp[u] + - boiler_cooling + + c_b[u] * cop_boiler * 3600 / spec_heat_boiler + ) ################################################ # electric vehicles @@ -741,7 +994,10 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ev_capacity = self.ev_options[e]["capacity"] # plugged = self.get_state(self.ev_options["entity plugged in"]).state try: - plugged_in = self.get_state(self.ev_options[e]["entity plugged in"]).state == "on" + plugged_in = ( + self.get_state(self.ev_options[e]["entity plugged in"]).state + == "on" + ) except Exception as ex: logging.error(ex) plugged_in = False @@ -753,33 +1009,49 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, position = "away" ev_position.append(position) try: - soc_state = float(self.get_state(self.ev_options[e]["entity actual level"]).state) + soc_state = float( + self.get_state(self.ev_options[e]["entity actual level"]).state + ) except Exception as ex: logging.error(ex) soc_state = 100.0 - # onderstaande regel evt voor testen + # onderstaande regel eventueel voor testen # soc_state = min(soc_state, 90.0) actual_soc.append(soc_state) - wished_level.append(float(self.get_state( - self.ev_options[e]["charge scheduler"]["entity set level"]).state)) - level_margin.append(self.config.get(["level margin"], - self.ev_options[e]["charge scheduler"], - 0)) + wished_level.append( + float( + self.get_state( + self.ev_options[e]["charge scheduler"]["entity set level"] + ).state + ) + ) + level_margin.append( + self.config.get( + ["level margin"], self.ev_options[e]["charge scheduler"], 0 + ) + ) ready_str = self.get_state( - self.ev_options[e]["charge scheduler"]["entity ready datetime"]).state + self.ev_options[e]["charge scheduler"]["entity ready datetime"] + ).state if len(ready_str) > 9: # dus met datum en tijd - ready = dt.datetime.strptime(ready_str, '%Y-%m-%d %H:%M:%S') + ready = dt.datetime.strptime(ready_str, "%Y-%m-%d %H:%M:%S") else: - ready = dt.datetime.strptime(ready_str, '%H:%M:%S') - ready = dt.datetime(start_dt.year, start_dt.month, start_dt.day, - ready.hour, ready.minute) - if ((ready.hour == start_dt.hour and ready.minute < start_dt.minute) or - (ready.hour < start_dt.hour)): + ready = dt.datetime.strptime(ready_str, "%H:%M:%S") + ready = dt.datetime( + start_dt.year, + start_dt.month, + start_dt.day, + ready.hour, + ready.minute, + ) + if (ready.hour == start_dt.hour and ready.minute < start_dt.minute) or ( + ready.hour < start_dt.hour + ): ready = ready + dt.timedelta(days=1) - hours_available = (ready - start_dt).total_seconds()/3600 + hours_available = (ready - start_dt).total_seconds() / 3600 ev_stages = self.ev_options[e]["charge stages"] if ev_stages[0]["ampere"] != 0.0: ev_stages = [{"ampere": 0.0, "efficiency": 1}] + ev_stages @@ -790,31 +1062,40 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, max_ampere = float(max_ampere) except ValueError: max_ampere = 10 - 
charge_three_phase = self.config.get(["charge three phase"], - self.ev_options[e], - "true").lower() == "true" + charge_three_phase = ( + self.config.get( + ["charge three phase"], self.ev_options[e], "true" + ).lower() + == "true" + ) if charge_three_phase: ampere_f = 3 else: ampere_f = 1 ampere_factor.append(ampere_f) max_power.append(max_ampere * ampere_f * 230 / 1000) # vermogen in kW - logging.info(f"Instellingen voor laden van EV: {self.ev_options[e]['name']}") + logging.info( + f"Instellingen voor laden van EV: {self.ev_options[e]['name']}" + ) logging.info(f" Ampere Effic. Grid kW Accu kW") for cs in range(ECS[e]): if not ("efficiency" in charge_stages[e][cs]): - charge_stages[e][cs]["efficiency"] = 1.0 - charge_stages[e][cs]["power"] = (charge_stages[e][cs]["ampere"] * 230 * - ampere_factor[e]/1000) - charge_stages[e][cs]["accu_power"] = (charge_stages[e][cs]["power"] * - charge_stages[e][cs]["efficiency"]) - logging.info(f"{charge_stages[e][cs]['ampere']:>7.2f} " - f"{charge_stages[e][cs]['efficiency']:>7.2f} " - f"{charge_stages[e][cs]['power']:>7.2f} " - f"{charge_stages[e][cs]['accu_power']:>7.2f}") - - ''' + charge_stages[e][cs]["efficiency"] = 1 + charge_stages[e][cs]["power"] = ( + charge_stages[e][cs]["ampere"] * 230 * ampere_factor[e] / 1000 + ) + charge_stages[e][cs]["accu_power"] = ( + charge_stages[e][cs]["power"] * charge_stages[e][cs]["efficiency"] + ) + logging.info( + f"{charge_stages[e][cs]['ampere']:>7.2f} " + f"{charge_stages[e][cs]['efficiency']:>7.2f} " + f"{charge_stages[e][cs]['power']:>7.2f} " + f"{charge_stages[e][cs]['accu_power']:>7.2f}" + ) + + """ #test voor bug ev_plugged_in.append(True) wished_level.append(float( @@ -823,7 +1104,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, actual_soc.append(40) max_power.append(10 * 230 / 1000) #tot hier - ''' + """ logging.info(f"Capaciteit accu: {ev_capacity} kWh") logging.info(f"Maximaal laadvermogen: {max_power[e]} kW") logging.info(f"Klaar met laden op: {ready.strftime('%d-%m-%Y %H:%M:%S')}") @@ -833,16 +1114,21 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, logging.info(f"Locatie: {ev_position[e]}") logging.info(f"Ingeplugged:{ev_plugged_in[e]}") e_needed = ev_capacity * (wished_level[e] - actual_soc[e]) / 100 - e_needed = min(e_needed, max_power[e] * hours_available * - charge_stages[e][-1]["efficiency"]) + e_needed = min( + e_needed, + max_power[e] * hours_available * charge_stages[e][-1]["efficiency"], + ) energy_needed.append(e_needed) # in kWh logging.info(f"Benodigde energie: {energy_needed[e]} kWh") # uitgedrukt in aantal uren; bijvoorbeeld 1,5 - time_needed = energy_needed[e] / (max_power[e] * charge_stages[e][-1]["efficiency"]) + time_needed = energy_needed[e] / ( + max_power[e] * charge_stages[e][-1]["efficiency"] + ) logging.info(f"Tijd nodig om te laden: {time_needed} uur") old_switch_state = self.get_state(self.ev_options[e]["charge switch"]).state old_ampere_state = self.get_state( - self.ev_options[e]["entity set charging ampere"]).state + self.ev_options[e]["entity set charging ampere"] + ).state # afgerond naar boven in hele uren hours_needed.append(math.ceil(time_needed)) logging.info(f"Afgerond naar hele uren: {hours_needed[e]}") @@ -851,20 +1137,28 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ready_index = U reden = "" if (wished_level[e] - level_margin[e]) <= actual_soc[e]: - reden = (f" werkelijk niveau ({actual_soc[e]:.1f}%) hoger is of gelijk aan " - f"gewenst niveau ({wished_level[e]:.1f}% minus de marge " - 
f"{level_margin[e]}%),") + reden = ( + f" werkelijk niveau ({actual_soc[e]:.1f}%) hoger is of gelijk aan " + f"gewenst niveau ({wished_level[e]:.1f}% minus de marge " + f"{level_margin[e]}%)," + ) if not (ev_position[e] == "home"): reden = reden + " auto is niet huis," if not ev_plugged_in[e]: reden = reden + " auto is niet ingeplugd," if not (tijd[0] < ready): reden = reden + f" opgegeven tijdstip ({str(ready)}) is verouderd," - if tijd[U-1] < ready: - reden = reden + (f" opgegeven tijdstip ({str(ready)}) ligt voorbij de " - f"planningshorizon ({tijd[U - 1]}),") - if (ev_plugged_in[e] and (ev_position[e] == "home") and - (wished_level[e] - level_margin[e] > actual_soc[e]) and (tijd[0] < ready)): + if tijd[U - 1] < ready: + reden = reden + ( + f" opgegeven tijdstip ({str(ready)}) ligt voorbij de " + f"planningshorizon ({tijd[U - 1]})," + ) + if ( + ev_plugged_in[e] + and (ev_position[e] == "home") + and (wished_level[e] - level_margin[e] > actual_soc[e]) + and (tijd[0] < ready) + ): for u in range(U): if (tijd[u] + dt.timedelta(hours=1)) >= ready: ready_index = u @@ -881,17 +1175,42 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # charger_ampere = [[model.add_var(var_type=CONTINUOUS, lb=0, # ub= charge_stages[e][-1]["ampere"]) # for cs in range(ECS[e])] for e in range(EV)] - charger_power = [[[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) - for _ in range(U)] for _ in range(ECS[e])] for e in range(EV)] - charger_factor = [[[model.add_var(var_type=CONTINUOUS, lb=0, ub=1) for _ in range(U)] - for _ in range(ECS[e])] for e in range(EV)] - charger_on = [[[model.add_var(var_type=BINARY) for _ in range(U)] - for _ in range(ECS[e])] for e in range(EV)] - - c_ev = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) - for _ in range(U)] for e in range(EV)] # consumption charger - ev_accu_in = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) - for _ in range(U)] for e in range(EV)] # load battery + charger_power = [ + [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) + for _ in range(U) + ] + for _ in range(ECS[e]) + ] + for e in range(EV) + ] + charger_factor = [ + [ + [model.add_var(var_type=CONTINUOUS, lb=0, ub=1) for _ in range(U)] + for _ in range(ECS[e]) + ] + for e in range(EV) + ] + charger_on = [ + [[model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(ECS[e])] + for e in range(EV) + ] + + c_ev = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) + for _ in range(U) + ] + for e in range(EV) + ] # consumption charger + ev_accu_in = [ + [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=max_power[e]) + for _ in range(U) + ] + for e in range(EV) + ] # load battery for e in range(EV): if (energy_needed[e] > 0) and (ready_u[e] < U): @@ -899,25 +1218,40 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # laden, alles uitgedrukt in vermogen kW for cs in range(ECS[e]): # daadwerkelijk ac vermogen = vermogen van de stap x oplaadfactor (0..1) - model += (charger_power[e][cs][u] == charge_stages[e][cs]["power"] * - charger_factor[e][cs][u]) + model += ( + charger_power[e][cs][u] + == charge_stages[e][cs]["power"] * charger_factor[e][cs][u] + ) # idem met schakelaar - model += charger_power[e][cs][u] <= max_power[e] * charger_on[e][cs][u] + model += ( + charger_power[e][cs][u] + <= max_power[e] * charger_on[e][cs][u] + ) # som van alle oplaadfactoren is 1 - model += (xsum(charger_factor[e][cs][u] for cs in range(ECS[e]))) == 1 + model += ( + xsum(charger_factor[e][cs][u] for cs in range(ECS[e])) + ) == 
1 # som van alle schakelaars boven 0 A en kleiner of gelijk aan 1 - model += (xsum(charger_on[e][cs][u] for cs in range(ECS[e])[1:])) <= 1 - model += c_ev[e][u] == xsum(charger_power[e][cs][u] * hour_fraction[u] - for cs in range(ECS[e])) - model += ev_accu_in[e][u] == xsum(charge_stages[e][cs]["accu_power"] * - hour_fraction[u] * - charger_factor[e][cs][u] - for cs in range(ECS[e])) - model += energy_needed[e] == xsum(ev_accu_in[e][u] for u in range(ready_u[e] + 1)) - for u in range(U)[ready_u[e]+1:]: + model += ( + xsum(charger_on[e][cs][u] for cs in range(ECS[e])[1:]) + ) <= 1 + model += c_ev[e][u] == xsum( + charger_power[e][cs][u] * hour_fraction[u] + for cs in range(ECS[e]) + ) + model += ev_accu_in[e][u] == xsum( + charge_stages[e][cs]["accu_power"] + * hour_fraction[u] + * charger_factor[e][cs][u] + for cs in range(ECS[e]) + ) + model += energy_needed[e] == xsum( + ev_accu_in[e][u] for u in range(ready_u[e] + 1) + ) + for u in range(U)[ready_u[e] + 1 :]: model += c_ev[e][u] == 0 - ''' + """ max_beschikbaar = 0 for u in range(ready_u[e] + 1): model += c_ev[e][u] <= charger_on[e][u] * hour_fraction[u] * max_power[e] @@ -928,7 +1262,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, model += xsum(charger_on[e][j] for j in range(ready_u[e] + 1)) == hours_needed[e] model += xsum(c_ev[e][u] for u in range(ready_u[e] + 1)) == min(max_beschikbaar, energy_needed[e]) - ''' + """ else: model += xsum(c_ev[e][u] for u in range(U)) == 0 for u in range(U): @@ -950,12 +1284,19 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # minimaal 20 kW terugleveren max 20 kW leveren (3 x 25A = 17,5 kW) # instelbaar maken? # levering - c_l = [model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) for _ in range(U)] + c_l = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) + for _ in range(U) + ] # teruglevering - c_t_total = [model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) - for _ in range(U)] - c_t_w_tax = [model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) - for _ in range(U)] + c_t_total = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) + for _ in range(U) + ] + c_t_w_tax = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) + for _ in range(U) + ] c_l_on = [model.add_var(var_type=BINARY) for _ in range(U)] c_t_on = [model.add_var(var_type=BINARY) for _ in range(U)] @@ -963,14 +1304,19 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # c_t_no_tax = [model.add_var(var_type=CONTINUOUS, lb=0, ub=0) for u in range(U)] if salderen: - c_t_no_tax = [model.add_var(var_type=CONTINUOUS, lb=0, ub=0) for _ in range(U)] + c_t_no_tax = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=0) for _ in range(U) + ] else: # alles wat meer wordt teruggeleverd dan geleverd (c_t_no_tax) wordt niet gesaldeerd # (geen belasting terug): tarief pt_notax - c_t_no_tax = [model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) - for _ in range(U)] - model += (xsum(c_t_w_tax[u] for u in range(U)) + production_today) <= \ - (xsum(c_l[u] for u in range(U)) + consumption_today) + c_t_no_tax = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=self.grid_max_power) + for _ in range(U) + ] + model += (xsum(c_t_w_tax[u] for u in range(U)) + production_today) <= ( + xsum(c_l[u] for u in range(U)) + consumption_today + ) # netto per uur alleen leveren of terugleveren niet tegelijk? 
for u in range(U): model += c_t_total[u] == c_t_w_tax[u] + c_t_no_tax[u] @@ -981,56 +1327,256 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ##################################### # heatpump # ##################################### - - self.heater_present = self.heating_options["heater present"].lower() == "true" - if not self.heater_present: - c_hp = [model.add_var(var_type=CONTINUOUS, lb=0, ub=0) - for _ in range(U)] # elektriciteitsverbruik in kWh/h - p_hp = None - h_hp = None + p_hp = None + h_hp = None + c_hp = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=10) for _ in range(U) + ] # Electricity consumption per hour + hp_on = [ + model.add_var(var_type=BINARY) for _ in range(U) + ] # If on the pump will run in that hour + self.hp_present = ( + self.config.get(["heater present"], self.heating_options, "False").lower() + == "true" + ) + if self.hp_present: + entity_hp_enabled = self.config.get( + ["entity hp enabled"], self.heating_options, None + ) + self.hp_enabled = (entity_hp_enabled is None) or ( + self.get_state(entity_hp_enabled).state == "on" + ) + if not self.hp_enabled: + logging.info( + "Warmtepomp niet enabled - warmtepomp wordt niet ingepland" + ) else: - degree_days = self.meteo.calc_graaddagen() + self.hp_enabled = False + for u in range(U): + model += c_hp[u] == 0 + model += hp_on[u] == 0 + if self.hp_enabled: + # "adjustment" : keuze uit "on/off | power | heating curve", default "power" + self.hp_adjustment = self.config.get( + ["adjustment"], self.heating_options, "power" + ).lower() + + # degree days + degree_days = self.meteo.calc_graaddagen(weighted=True) if U > 24: degree_days += self.meteo.calc_graaddagen( - date=dt.datetime.combine(dt.date.today() + dt.timedelta(days=1), - dt.datetime.min.time())) - logging.info(f"Warmtepomp") - logging.info(f"Graaddagen: {degree_days:.1f}") # 3.6 heat factor kWh th / K.day - degree_days_factor = self.heating_options["degree days factor"] - heat_produced = float(self.get_state("sensor.daily_heat_production_heating").state) - heat_needed = max(0.0, degree_days * degree_days_factor - heat_produced) # heat needed - stages = self.heating_options["stages"] - S = len(stages) - c_hp = [model.add_var(var_type=CONTINUOUS, lb=0, ub=10) - for _ in range(U)] # elektriciteitsverbruik in kWh/h - # p_hp[s][u]: het gevraagde vermogen in W in dat uur - p_hp = [[model.add_var(var_type=CONTINUOUS, lb=0, ub=stages[s]["max_power"]) - for _ in range(U)] for s in range(S)] - - # schijven aan/uit, iedere schijf kan maar een keer in een uur - hp_on = [[model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(S)] - - # verbruik per uur - for u in range(U): - # verbruik in kWh is totaal vermogen in W/1000 - model += c_hp[u] == (xsum(p_hp[s][u] for s in range(S))) / 1000 - # kosten - # model += k_hp[u] == c_hp[u] * pl[u] # kosten = verbruik x tarief + date=dt.datetime.combine( + dt.date.today() + dt.timedelta(days=1), dt.datetime.min.time() + ), + weighted=True, + ) + logging.info(f"Gewogen graaddagen: {degree_days:.1f} K.day") + + # degree days factor kWh th / K.day + entity_degree_days_factor = self.config.get( + ["degree days factor"], self.heating_options, None + ) + if entity_degree_days_factor is None: + degree_days_factor = 1 + else: + try: + # if just a number is speficied use this number + degree_days_factor = float(entity_degree_days_factor) + except ValueError: + # if en entity is specified get it from HA + degree_days_factor = float( + self.get_state(entity_degree_days_factor).state + ) + logging.info(f"Degree days 
factor: {degree_days_factor:.1f} kWh/K.day") + + # heat produced + entity_heat_produced = self.config.get( + ["entity hp heat produced"], self.heating_options, None + ) + if entity_heat_produced is not None: + heat_produced = float(self.get_state(entity_heat_produced).state) + else: + heat_produced = 0 + logging.info(f"Reeds geproduceerde warmte: {heat_produced:.1f} kWh") + + # heat needed + heat_needed = max(0.0, degree_days * degree_days_factor - heat_produced) + logging.info(f"Nog benodigde warmte: {heat_needed:.1f} kWh") + + # heat demand + entity_hp_heat_demand = self.config.get( + ["entity hp heat demand"], self.heating_options, None + ) # Is er warmte vraag - zo ja, dan inplannen + self.hp_heat_demand = (entity_hp_heat_demand is None) or ( + self.get_state(entity_hp_heat_demand).state == "on" + ) + logging.info( + f"Actuele warmtevraag: {'Ja' if self.hp_heat_demand else 'Nee'}" + ) + if self.hp_adjustment == "on/off": + # vanaf hier code ronald + # hp_adjustment == "on/off" + logging.debug("Implementatie on/off warmtepomp") + min_run_length = int( + self.config.get(["min run length"], self.heating_options, 1) + ) # Minimum run lengte hp in uren - 1h als niet gedefinieerd + min_run_length = min( + max(min_run_length, 1), 5 + ) # Alleen waarde tussen 1 en 5 uur mogelijk + logging.debug(f"Warmtepomp draait minimaal {min_run_length} uren") + + if self.hp_heat_demand: + logging.info(f"On/off warmtepomp wordt ingepland") + avg_temp = self.meteo.get_avg_temperature() + if U > 24: + avg_temp += self.meteo.get_avg_temperature( + date=dt.datetime.combine( + dt.date.today() + dt.timedelta(days=1), + dt.datetime.min.time(), + ) + ) + avg_temp = avg_temp / 2 + entity_avg_temp = self.config.get( + ["entity avg outside temp"], self.heating_options, None + ) + if entity_avg_temp is None: + logging.warning( + f"Geen entity om gem. 
temperatuur te exporteren"
+                        )
+                    else:
+                        self.set_value(entity_avg_temp, round(avg_temp, 1))
 
-            # geproduceerde warmte kWh per uur
-            h_hp = [model.add_var(var_type=CONTINUOUS, lb=0, ub=10000) for _ in range(U)]
+                    logging.debug(f"Voorspelde buiten temperatuur: {avg_temp}")
 
-            # beschikbaar vermogen x aan/uit, want p_hpx[u] X hpx_on[u] kan niet
-            for u in range(U):
-                for s in range(S):
-                    model += p_hp[s][u] <= stages[s]["max_power"] * hp_on[s][u]
-                # ieder uur maar een aan
-                model += (xsum(hp_on[s][u] for s in range(S))) + boiler_on[u] == 1
-                # geproduceerde warmte = vermogen in W * COP_schijf /1000 in kWh
-                model += h_hp[u] == xsum((p_hp[s][u] * stages[s]["cop"]/1000)
-                                         for s in range(S)) * hour_fraction[u]
-            # som van alle geproduceerde warmte == benodigde warmte
-            model += xsum(h_hp[u] for u in range(U)) == heat_needed
+                    # Get COP and heatpump power from HA
+                    entity_hp_cop = self.config.get(
+                        ["entity hp cop"], self.heating_options, None
+                    )
+                    if entity_hp_cop is not None:
+                        cop = float(self.get_state(entity_hp_cop).state)
+                    else:
+                        cop = 4
+                    # Default COP if no entity from HA
+                    entity_hp_power = self.config.get(
+                        ["entity hp power"], self.heating_options, None
+                    )
+                    if entity_hp_power is not None:
+                        hp_power = float(self.get_state(entity_hp_power).state)
+                    else:
+                        hp_power = 1.5
+                    # Default power in kW if no entity from HA
+
+                    e_needed = heat_needed / cop
+                    # Electrical energy needed in kWh
+                    hp_hours = math.ceil(e_needed / hp_power)
+                    # Number of hours the heat pump still has to run
+                    if hp_hours < min_run_length:
+                        # Ensure pump runs for at least min_run_length hours
+                        hp_hours = min_run_length
+                    if (hp_hours % min_run_length) != 0:
+                        hp_hours += min_run_length - (hp_hours % min_run_length)
+                        # Ensure hp_hours is multiple of min_run_length
+                    e_needed = hp_hours * hp_power
+                    # Electrical energy to be optimized in kWh
+                    logging.info(
+                        f"Elektriciteit benodigd:{e_needed:.1f} kWh, cop: {cop:.1f}, "
+                        f"vermogen:{hp_power:.1f} kW, warmtepomp draait: {hp_hours} uren"
+                    )
+
+                    # Add the constraints
+                    for u in range(U):
+                        model += c_hp[u] == hp_power * hp_on[u]
+                        # Energy consumption per hour is equal to power if it runs in that hour
+                    model += xsum(hp_on[u] for u in range(U)) == hp_hours
+                    # Ensure pump is running for designated number of hours
+
+                    # Additional constraints to ensure the minimum run length (range 1-5 hours)
+                    for u in range(0, U, min_run_length):
+                        if u < U - min_run_length + 1:
+                            if min_run_length > 1:
+                                model += hp_on[u] == hp_on[u + 1]
+                            if min_run_length > 2:
+                                model += hp_on[u + 1] == hp_on[u + 2]
+                            if min_run_length > 3:
+                                model += hp_on[u + 2] == hp_on[u + 3]
+                            if min_run_length > 4:
+                                model += hp_on[u + 3] == hp_on[u + 4]
+                else:
+                    logging.info(f"Geen warmtevraag - warmtepomp wordt niet ingepland")
+            else:
+                # hp_adjustment == "power" or "heating curve"
+                logging.info(f"Warmtepomp met power-regeling wordt ingepland")
+                stages = self.heating_options["stages"]
+                S = len(stages)
+                c_hp = [
+                    model.add_var(var_type=CONTINUOUS, lb=0, ub=stages[-1]["max_power"])
+                    for _ in range(U)
+                ]  # elektriciteitsverbruik in kWh/h
+                # p_hp[s][u]: het gevraagde vermogen in W in dat uur
+                p_hp = [
+                    [
+                        model.add_var(
+                            var_type=CONTINUOUS, lb=0, ub=stages[s]["max_power"]
+                        )
+                        for _ in range(U)
+                    ]
+                    for s in range(S)
+                ]
+
+                # schijven aan/uit, iedere schijf kan maar een keer in een uur
+                hp_s_on = [
+                    [model.add_var(var_type=BINARY) for _ in range(U)] for _ in range(S)
+                ]
+                hp_on = [
+                    model.add_var(var_type=BINARY) for _ in range(U)
+                ]  # If on the pump will run in that hour
+
+                # 
verbruik per uur + for u in range(U): + # verbruik in kWh is totaal vermogen in W/1000 + model += ( + c_hp[u] + == (xsum(p_hp[s][u] for s in range(S))) + * hour_fraction[u] + / 1000 + ) + + # geproduceerde warmte kWh per uur + h_hp = [ + model.add_var(var_type=CONTINUOUS, lb=0, ub=10000) for _ in range(U) + ] + + # als er geen warmtevraag is eerste uur geen verbruik + if not self.hp_heat_demand: + model += c_hp[0] == 0 + + # beschikbaar vermogen x aan/uit, want p_hpx[u] X hpx_on[u] kan niet + for u in range(U): + model += hp_on[u] == xsum(hp_s_on[s][u] for s in range(S)[1:]) + for s in range(S): + model += p_hp[s][u] <= stages[s]["max_power"] * hp_s_on[s][u] + # ieder uur maar een aan + if boiler_heated_by_heatpump: + model += (xsum(hp_s_on[s][u] for s in range(S))) + boiler_on[ + u + ] == 1 + else: + model += (xsum(hp_s_on[s][u] for s in range(S))) == 1 + # geproduceerde warmte = vermogen in W * COP_schijf /1000 in kWh + model += ( + h_hp[u] + == xsum( + (p_hp[s][u] * stages[s]["cop"] / 1000) for s in range(S) + ) + * hour_fraction[u] + ) + # max heat power in kW + max_heat_power = stages[-1]["max_power"] * stages[-1]["cop"] / 1000 + # max_heat_prod = sum(max_heat_power + # een uur minder vanwege de boiler + max_heat_prod = sum(max_heat_power * hour_fraction[u] for u in range(U-1)) + # som van alle geproduceerde warmte == benodigde warmte + model += xsum(h_hp[u] for u in range(U)) == min(heat_needed, max_heat_prod) ######################################################################## # apparaten /machines @@ -1041,7 +1587,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, RL = [] # lengte van een run KW = [] # aantal kwartieren ma_uur_kw = [] # per machine een list met beschikbare kwartieren - ma_kw_dt = [] # per machine een list op welk tijdstip een kwartier begint + ma_kw_dt = [] # per machine een list op welk tijdstip een kwartier begint program_index = [] ma_name = [] ma_entity_plan_start = [] @@ -1050,65 +1596,89 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ma_planned_end_dt = [] for m in range(M): error = False - ma_name.append(self.machines[m]['name']) + ma_name.append(self.machines[m]["name"]) # entities ophalen - start_window_entity = self.config.get(["entity start window"], self.machines[m], None) - end_window_entity = self.config.get(["entity end window"], self.machines[m], None) - ma_entity_plan_start.append(self.config.get(["entity calculated start"], - self.machines[m], - None)) - ma_entity_plan_end.append(self.config.get(["entity calculated end"], - self.machines[m], - None)) - entity_machine_program = self.config.get(["entity selected program"], - self.machines[m], - None) + start_window_entity = self.config.get( + ["entity start window"], self.machines[m], None + ) + end_window_entity = self.config.get( + ["entity end window"], self.machines[m], None + ) + ma_entity_plan_start.append( + self.config.get(["entity calculated start"], self.machines[m], None) + ) + ma_entity_plan_end.append( + self.config.get(["entity calculated end"], self.machines[m], None) + ) + entity_machine_program = self.config.get( + ["entity selected program"], self.machines[m], None + ) if entity_machine_program: try: - program_selected.append(self.get_state(entity_machine_program).state) + program_selected.append( + self.get_state(entity_machine_program).state + ) except Exception as ex: logging.error(ex) - p = next((i for i, item in enumerate(self.machines[m]["programs"]) if item["name"] == - program_selected[m]), 0) + p = next( + ( + i + for i, item in 
enumerate(self.machines[m]["programs"]) + if item["name"] == program_selected[m] + ), + 0, + ) program_index.append(p) RL.append(len(self.machines[m]["programs"][p]["power"])) # aantal stages # initialize yesterday - planned_start_dt = ( - dt.datetime(start_dt.year, start_dt.month, start_dt.day) - dt.timedelta(days=1)) + planned_start_dt = dt.datetime( + start_dt.year, start_dt.month, start_dt.day + ) - dt.timedelta(days=1) planned_end_dt = planned_start_dt if ma_entity_plan_start[m] is None: if ma_entity_plan_end is None: error = True logging.error( f"Er zijn geen entities voor doorgeven van de planning gedefinieerd " - f"bij de instellingen van {ma_name[m]}.") + f"bij de instellingen van {ma_name[m]}." + ) else: planned_end_str = self.get_state(ma_entity_plan_end[m]).state - planned_end_dt = dt.datetime.strptime(planned_end_str, '%Y-%m-%d %H:%M:%S') + planned_end_dt = dt.datetime.strptime( + planned_end_str, "%Y-%m-%d %H:%M:%S" + ) planned_start_dt = planned_end_dt - dt.timedelta(minutes=RL[m] * 15) else: planned_start_str = self.get_state(ma_entity_plan_start[m]).state - planned_start_dt = dt.datetime.strptime(planned_start_str, '%Y-%m-%d %H:%M:%S') + planned_start_dt = dt.datetime.strptime( + planned_start_str, "%Y-%m-%d %H:%M:%S" + ) if ma_entity_plan_end is not None: planned_end_str = self.get_state(ma_entity_plan_end[m]).state - planned_end_dt = dt.datetime.strptime(planned_end_str, '%Y-%m-%d %H:%M:%S') + planned_end_dt = dt.datetime.strptime( + planned_end_str, "%Y-%m-%d %H:%M:%S" + ) else: planned_end_dt = planned_start_dt + dt.timedelta(minutes=RL[m] * 15) ma_planned_start_dt.append(planned_start_dt) ma_planned_end_dt.append(planned_end_dt) start_ma_dt = start_dt - ready_ma_dt = uur[U-1] + ready_ma_dt = uur[U - 1] if start_window_entity is None: - logging.error(f"De 'entity start window' is niet gedefinieerd bij de instellingen " - f"van {ma_name[m]}.") + logging.error( + f"De 'entity start window' is niet gedefinieerd bij de instellingen " + f"van {ma_name[m]}." + ) logging.error(f"Apparaat {ma_name[m]} wordt niet ingepland.") error = True else: start_hm = self.get_state(start_window_entity).state start_ma_dt = convert_timestr(start_hm, start_dt) if end_window_entity is None: - logging.error(f"De 'entity end window' is niet gedefinieerd bij de instellingen " - f"van {ma_name[m]}.") + logging.error( + f"De 'entity end window' is niet gedefinieerd bij de instellingen " + f"van {ma_name[m]}." 
+ ) if not error: logging.error(f"Apparaat {ma_name[m]} wordt niet ingepland.") error = True @@ -1117,10 +1687,12 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, ready_ma_dt = convert_timestr(ready, start_dt) if ready_ma_dt <= start_ma_dt: ready_ma_dt += dt.timedelta(days=1) - if (start_dt > ready_ma_dt) or (start_dt+dt.timedelta(minutes=RL[m]*15) > ready_ma_dt): + if (start_dt > ready_ma_dt) or ( + start_dt + dt.timedelta(minutes=RL[m] * 15) > ready_ma_dt + ): start_ma_dt += dt.timedelta(days=1) ready_ma_dt += dt.timedelta(days=1) - ''' + """ if not error and start_ma_dt > ready_ma_dt: if ready_ma_dt > start_ma_dt: logging.info(f"Apparaat {ma_name[m]} wordt nog niet ingepland: de " @@ -1128,31 +1700,40 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, error = True else: ready_ma_dt = ready_ma_dt + dt.timedelta(days=1) - ''' - if ready_ma_dt > tijd[U-1]: - logging.info(f"Machine {ma_name[m]} wordt niet ingepland, want {ready_ma_dt} " - f"ligt voorbij de planningshorizon {uur[U-1]}") + """ + if ready_ma_dt > tijd[U - 1]: + logging.info( + f"Machine {ma_name[m]} wordt niet ingepland, want {ready_ma_dt} " + f"ligt voorbij de planningshorizon {uur[U-1]}" + ) error = True elif start_dt >= ready_ma_dt: - logging.info(f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " - f"ligt voorbij de einde planningswindow {ready_ma_dt}") + logging.info( + f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " + f"ligt voorbij de einde planningswindow {ready_ma_dt}" + ) error = True - elif start_ma_dt < planned_start_dt <= start_dt : + elif start_ma_dt < planned_start_dt <= start_dt: if start_dt <= planned_end_dt: - logging.info(f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " - f"ligt voorbij begin vorige planning(1): {planned_start_dt}") + logging.info( + f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " + f"ligt voorbij begin vorige planning(1): {planned_start_dt}" + ) error = True elif start_dt <= ready_ma_dt: logging.info( f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " - f"ligt voor einde huidige planning-window: {ready_ma_dt}") + f"ligt voor einde huidige planning-window: {ready_ma_dt}" + ) error = True elif ready_ma_dt + dt.timedelta(days=1) <= tijd[U - 1]: start_ma_dt += dt.timedelta(days=1) ready_ma_dt += dt.timedelta(days=1) else: - logging.info(f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " - f"ligt voorbij begin vorige planning(2): {planned_start_dt}") + logging.info( + f"Machine {ma_name[m]} wordt niet ingepland, want {start_dt} " + f"ligt voorbij begin vorige planning(2): {planned_start_dt}" + ) error = True if error: kw_num = 0 @@ -1161,14 +1742,20 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, kw_num = math.ceil(delta.seconds / 900) KW.append(kw_num) if RL[m] == 0: - logging.info(f"Machine {ma_name[m]} wordt niet ingepland, " - f"want er is gekozen voor {program_selected[m]}") + logging.info( + f"Machine {ma_name[m]} wordt niet ingepland, " + f"want er is gekozen voor {program_selected[m]}" + ) else: if kw_num > 0: - logging.info(f"Apparaat {ma_name[m]} met programma '{program_selected[m]}' " - f"wordt ingepland tussen {start_ma_dt.strftime('%Y-%m-%d %H:%M')} " - f"en {ready_ma_dt.strftime('%Y-%m-%d %H:%M')}.") - start_ma_dt = dt.datetime.fromtimestamp(900 * math.ceil(max(start_ma_dt, start_dt).timestamp()/900)) + logging.info( + f"Apparaat {ma_name[m]} met programma '{program_selected[m]}' " + f"wordt ingepland tussen {start_ma_dt.strftime('%Y-%m-%d %H:%M')} " + f"en 
{ready_ma_dt.strftime('%Y-%m-%d %H:%M')}." + ) + start_ma_dt = dt.datetime.fromtimestamp( + 900 * math.ceil(max(start_ma_dt, start_dt).timestamp() / 900) + ) uur_kw = [] kw_dt = [] kwartier_dt = start_ma_dt @@ -1186,19 +1773,36 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, R.append(min(KW[m], KW[m] - RL[m] + 1)) # ma_start : wanneer machine start = 1 anders = 0 - ma_start = [[model.add_var(var_type=BINARY) for _ in range(KW[m])] for m in range(M)] + ma_start = [ + [model.add_var(var_type=BINARY) for _ in range(KW[m])] for m in range(M) + ] # machine aan per kwartier per run # ma_on = [[[model.add_var(var_type=BINARY) for kw in range(KW[m])] # for r in range(R[m])] for m in range(M)] # consumption per kwartier - c_ma_kw = [[model.add_var(var_type=CONTINUOUS, lb=0, - ub=math.ceil(max(self.machines[m]["programs"][program_index[m]] - ["power"], default=0))) - for _ in range(KW[m])] for m in range(M)] - - c_ma_u = [[model.add_var(var_type=CONTINUOUS, lb=0) for _ in range(U)] for _ in range(M)] + c_ma_kw = [ + [ + model.add_var( + var_type=CONTINUOUS, + lb=0, + ub=math.ceil( + max( + self.machines[m]["programs"][program_index[m]]["power"], + default=0, + ) + ), + ) + for _ in range(KW[m]) + ] + for m in range(M) + ] + + c_ma_u = [ + [model.add_var(var_type=CONTINUOUS, lb=0) for _ in range(U)] + for _ in range(M) + ] # kosten per uur # k_ma = [[model.add_var(var_type=CONTINUOUS) for _ in range(U)] for _ in range(M)] @@ -1214,76 +1818,124 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, model += xsum(ma_start[m][kw] for kw in range(KW[m])) == 1 # kan niet starten als je de run niet kan afmaken - for kw in range(KW[m])[KW[m]-RL[m]:]: + for kw in range(KW[m])[KW[m] - RL[m] :]: model += ma_start[m][kw] == 0 if self.log_level == logging.DEBUG: logging.debug(f"Per kwartier welke run en met welk vermogen") for kw in range(KW[m]): - print(f"kw: {kw} tijd: {ma_kw_dt[m][kw].strftime('%H:%M')} " - f"range r: {max(0, kw - RL[m]+1)} <-> {min(kw, R[m])+1} r:", end=" ") - for r in range(R[m])[max(0, kw - RL[m]+1): min(kw, R[m])+1]: - print(f"{r} power: " - f"{self.machines[m]['programs'][program_index[m]]['power'][kw-r]}", - end=" ") + print( + f"kw: {kw} tijd: {ma_kw_dt[m][kw].strftime('%H:%M')} " + f"range r: {max(0, kw - RL[m]+1)} <-> {min(kw, R[m])+1} r:", + end=" ", + ) + for r in range(R[m])[max(0, kw - RL[m] + 1) : min(kw, R[m]) + 1]: + print( + f"{r} power: " + f"{self.machines[m]['programs'][program_index[m]]['power'][kw-r]}", + end=" ", + ) print() for kw in range(KW[m]): - model += (c_ma_kw[m][kw] == - xsum(self.machines[m]["programs"][program_index[m]]["power"][kw-r] * - ma_start[m][r]/4000 - for r in range(R[m])[max(0, kw - RL[m]+1): min(kw, R[m])+1])) + model += c_ma_kw[m][kw] == xsum( + self.machines[m]["programs"][program_index[m]]["power"][kw - r] + * ma_start[m][r] + / 4000 + for r in range(R[m])[max(0, kw - RL[m] + 1) : min(kw, R[m]) + 1] + ) for u in range(U): if len(ma_uur_kw[m][u]) == 0: - if (ma_planned_start_dt[m] < (tijd[u]+dt.timedelta(hours=1)) and - ma_planned_end_dt[m] > tijd[u]): + if ( + ma_planned_start_dt[m] < (tijd[u] + dt.timedelta(hours=1)) + and ma_planned_end_dt[m] > tijd[u] + ): c_ma_sum = 0 for kw in range(RL[m]): - gepland_moment = ma_planned_start_dt[m] + dt.timedelta(minutes=kw * 15) - if max(start_dt, tijd[u]) <= gepland_moment <= ( - tijd[u] + dt.timedelta(hours=1)): + gepland_moment = ma_planned_start_dt[m] + dt.timedelta( + minutes=kw * 15 + ) + if ( + max(start_dt, tijd[u]) + <= gepland_moment + <= (tijd[u] + dt.timedelta(hours=1)) 
+                            ):
                                 verschil = gepland_moment - start_dt
                                 if start_dt > tijd[u] and verschil.seconds < 900:
                                     fraction = verschil.seconds / 900
                                 else:
                                     fraction = 1
-                                c_ma_sum += self.machines[m]["programs"][program_index[m]]["power"][
-                                    kw] * fraction / 4000
+                                c_ma_sum += (
+                                    self.machines[m]["programs"][program_index[m]][
+                                        "power"
+                                    ][kw]
+                                    * fraction
+                                    / 4000
+                                )
                         model += c_ma_u[m][u] == c_ma_sum
                     else:
                         model += c_ma_u[m][u] == 0
                 else:
-                    model += c_ma_u[m][u] == xsum(c_ma_kw[m][kw] for kw in ma_uur_kw[m][u])
+                    model += c_ma_u[m][u] == xsum(
+                        c_ma_kw[m][kw] for kw in ma_uur_kw[m][u]
+                    )
 
         #####################################################
         # alle verbruiken in de totaal balans in kWh
         #####################################################
         for u in range(U):
-            model += (c_l[u] == c_t_total[u] + b_l[u] * hour_fraction[u] +
-                      xsum(ac_to_dc[b][u] - ac_from_dc[b][u] for b in range(B)) * hour_fraction[u] +
-                      # xsum(ac_to_dc[b][u] - ac_from_dc[b][u] for b in range(B)) +
-                      c_b[u] + xsum(c_ev[e][u] for e in range(EV)) +
-                      c_hp[u] * hour_fraction[u] + xsum(c_ma_u[m][u] for m in range(M)) -
-                      xsum(pv_ac[s][u] for s in range(solar_num)))
+            model += (
+                c_l[u]
+                == c_t_total[u]
+                + b_l[u] * hour_fraction[u]
+                + xsum(ac_to_dc[b][u] - ac_from_dc[b][u] for b in range(B))
+                * hour_fraction[u]
+                +
+                # xsum(ac_to_dc[b][u] - ac_from_dc[b][u] for b in range(B)) +
+                c_b[u]
+                + xsum(c_ev[e][u] for e in range(EV))
+                + c_hp[u]
+                + xsum(c_ma_u[m][u] for m in range(M))
+                - xsum(pv_ac[s][u] for s in range(solar_num))
+            )
 
         # cost variabele
         cost = model.add_var(var_type=CONTINUOUS, lb=-1000, ub=1000)
         delivery = model.add_var(var_type=CONTINUOUS, lb=0, ub=1000)
         model += delivery == xsum(c_l[u] for u in range(U))
 
+        # cycle cost per batterij
+        cycle_cost = [model.add_var(var_type=CONTINUOUS, lb=0) for _ in range(B)]
+        for b in range(B):
+            model += cycle_cost[b] == xsum(
+                (dc_to_bat[b][u] + dc_from_bat[b][u])
+                * kwh_cycle_cost[b]
+                * hour_fraction[u]
+                for u in range(U)
+            )
+
         if salderen:
             p_bat = p_avg
         else:
-            p_bat = sum(pt_notax)/U
+            p_bat = sum(pt_notax) / U
 
         # alles in kWh * prijs = kosten in euro
-        model += cost == xsum(c_l[u] * pl[u] - c_t_w_tax[u] * pt[u] - c_t_no_tax[u] * pt_notax[u]
-                              for u in range(U)) + \
-                 xsum(xsum((dc_to_bat[b][u] + dc_from_bat[b][u]) * kwh_cycle_cost[b] +
-                           (opt_low_level[b] - soc_low[b][u]) * 0.0025 for u in range(U))
-                      for b in range(B)) + \
-                 xsum((soc_mid[b][0] - soc_mid[b][U]) * one_soc[b] * eff_bat_to_dc[b]
-                      * avg_eff_dc_to_ac[b] * p_bat for b in range(B))  # waarde opslag accu
+        model += cost == (
+            xsum(
+                c_l[u] * pl[u] - c_t_w_tax[u] * pt[u] - c_t_no_tax[u] * pt_notax[u]
+                for u in range(U)
+            )
+            + xsum(cycle_cost[b] for b in range(B))
+            + xsum(
+                (soc[b][0] - soc[b][U])
+                * one_soc[b]
+                * eff_bat_to_dc[b]
+                * avg_eff_dc_to_ac[b]
+                * p_bat
+                for b in range(B)
+            )
+        )
+        # waarde opslag accu
         # +(boiler_temp[U] - boiler_ondergrens) * (spec_heat_boiler/(3600 * cop_boiler)) *
         # p_avg
         # waarde energie boiler
@@ -1300,14 +1952,14 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None,
         # kosten optimalisering
         strategy = self.strategy.lower()
         if strategy == "minimize cost":
-            strategie = 'minimale kosten'
+            strategie = "minimale kosten"
             model.objective = minimize(cost)
             model.optimize()
             if model.num_solutions == 0:
                 logging.warning(f"Geen oplossing voor: {strategy}")
                 return
         elif strategy == "minimize consumption":
-            strategie = 'minimale levering'
+            strategie = "minimale levering"
             model.objective = minimize(delivery)
             model.optimize()
             if model.num_solutions == 0:
@@ -1317,7 +1969,7 @@ def 
calc_optimum(self, _start_dt: dt.datetime | None = None, logging.info("Eerste berekening") logging.info(f"Kosten (euro): {cost.x:<6.2f}") logging.info(f"Levering (kWh): {delivery.x:<6.2f}") - model += (delivery <= min_delivery) + model += delivery <= min_delivery model.objective = minimize(cost) model.optimize() if model.num_solutions == 0: @@ -1334,10 +1986,13 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, # Suppress FutureWarning messages import warnings - warnings.simplefilter(action='ignore', category=FutureWarning) + + warnings.simplefilter(action="ignore", category=FutureWarning) if model.num_solutions == 0: - logging.error(f"Er is helaas geen oplossing gevonden, kijk naar je instellingen.") + logging.error( + f"Er is helaas geen oplossing gevonden, kijk naar je instellingen." + ) return # er is een oplossing @@ -1382,8 +2037,15 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for s in range(solar_num): solar_hour_sum_org[u] += solar_prod[s][u] # pv_ac[s][u].x solar_hour_sum_opt[u] += pv_ac[s][u].x - netto = b_l[u] + c_b[u].x + c_hp[u].x + c_ev_sum[u] + c_ma_sum[u] \ - - solar_hour_sum_org[u] - pv_ac_hour_sum[u] + netto = ( + b_l[u] + + c_b[u].x + + c_hp[u].x + + c_ev_sum[u] + + c_ma_sum[u] + - solar_hour_sum_org[u] + - pv_ac_hour_sum[u] + ) sum_old_cons += netto if netto >= 0: old_cost_gc += netto * p_grl[u] @@ -1400,51 +2062,87 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, dag_str = dt.datetime.now().strftime("%Y-%m-%d") taxes_l = get_value_from_dict(dag_str, taxes_l_def) btw = get_value_from_dict(dag_str, btw_def) - saldeer_corr_gc = -sum_old_cons * \ - (sum(p_grt) / len(p_grt) - 0.11) + saldeer_corr_gc = -sum_old_cons * (sum(p_grt) / len(p_grt) - 0.11) saldeer_corr_da = -sum_old_cons * taxes_l * (1 + btw) old_cost_gc += saldeer_corr_gc old_cost_da += saldeer_corr_da logging.info(f"Saldeercorrectie: {sum_old_cons:<6.2f} kWh") - logging.info(f"Saldeercorrectie niet geoptimaliseerd reg. tarieven: {saldeer_corr_gc:<6.2f} euro") - logging.info(f"Saldeercorrectie niet geoptimaliseerd day ahead tarieven: {saldeer_corr_da:<6.2f} euro") + logging.info( + f"Saldeercorrectie niet geoptimaliseerd reg. 
" + f"tarieven: {saldeer_corr_gc:<6.2f} euro" + ) + logging.info( + f"Saldeercorrectie niet geoptimaliseerd day ahead " + f"tarieven: {saldeer_corr_da:<6.2f} euro" + ) else: logging.info(f"Geen saldeer correctie") - logging.info(f"Niet geoptimaliseerd, kosten met reguliere tarieven: {old_cost_gc:<6.2f}") - logging.info(f"Niet geoptimaliseerd, kosten met day ahead tarieven: {old_cost_da:<6.2f}") + logging.info( + f"Niet geoptimaliseerd, kosten met reguliere tarieven: {old_cost_gc:<6.2f}" + ) + logging.info( + f"Niet geoptimaliseerd, kosten met day ahead tarieven: {old_cost_da:<6.2f}" + ) logging.info(f"Geoptimaliseerd, kosten met day ahead tarieven: {cost.x:<6.2f}") logging.info(f"Levering: {delivery.x:<6.2f} (kWh)") if self.boiler_present: - boiler_at_23 = ((boiler_temp[U].x - (boiler_setpoint - boiler_hysterese)) * - (spec_heat_boiler / (3600 * cop_boiler))) + boiler_at_23 = (boiler_temp[U].x - (boiler_setpoint - boiler_hysterese)) * ( + spec_heat_boiler / (3600 * cop_boiler) + ) logging.info(f"Waarde boiler om 23 uur: {boiler_at_23:<0.2f} kWh") - if self.heater_present: + if self.hp_present and self.hp_enabled: logging.info("\nInzet warmtepomp") - # df_hp = pd.DataFrame(columns=["u", "tar", "p0", "p1", "p2", p3 p4 p5 p6 p7 - # heat cons"]) - logging.info(f"u tar p0 p1 p2 p3 p4 p5 p6 p7 heat cons") - for u in range(U): - logging.info(f"{uur[u]:2.0f} {pl[u]:6.4f} {p_hp[0][u].x:6.0f} {p_hp[1][u].x:6.0f} " - f"{p_hp[2][u].x:6.0f} {p_hp[3][u].x:6.0f} {p_hp[4][u].x:6.0f} " - f"{p_hp[5][u].x:6.0f} {p_hp[6][u].x:6.0f} {p_hp[7][u].x:6.0f} " - f"{h_hp[u].x:6.2f} {c_hp[u].x:6.2f}") - + if self.hp_adjustment == "on/off": + if self.hp_heat_demand: + logging.info(f"u tar cons") + for u in range(U): + logging.info(f"{uur[u]:2.0f} {pl[u]:6.4f} {c_hp[u].x:6.2f}") + else: + logging.info( + f"u tar p0 p1 p2 p3 p4 p5 p6 p7 " + f"heat cons" + ) + for u in range(U): + logging.info( + f"{uur[u]:2.0f} {pl[u]:6.4f} {p_hp[0][u].x:6.0f} {p_hp[1][u].x:6.0f} " + f"{p_hp[2][u].x:6.0f} {p_hp[3][u].x:6.0f} {p_hp[4][u].x:6.0f} " + f"{p_hp[5][u].x:6.0f} {p_hp[6][u].x:6.0f} {p_hp[7][u].x:6.0f} " + f"{h_hp[u].x:6.2f} {c_hp[u].x:6.2f}" + ) # overzicht per ac-accu: - pd.options.display.float_format = '{:6.2f}'.format + pd.options.display.float_format = "{:6.2f}".format df_accu = [] for b in range(B): - cols = [['uur', 'ac->', 'eff', '->dc', 'pv->dc', 'dc->', 'eff', '->bat', 'o_eff', 'SoC'], - ["", "kWh", "%", "kWh", "kWh", "kWh", "%", "kWh", "%", "%"]] + cols = [ + [ + "uur", + "ac->", + "eff", + "->dc", + "pv->dc", + "dc->", + "eff", + "->bat", + "o_eff", + "SoC", + ], + ["", "kWh", "%", "kWh", "kWh", "kWh", "%", "kWh", "%", "%"], + ] df_accu.append(pd.DataFrame(columns=cols)) for u in range(U): """ for cs in range(CS[b]): if ac_to_dc_st_on[b][cs][u].x == 1: c_stage = cs - ac_to_dc_eff = self.battery_options[b]["charge stages"][cs]["efficiency"] * 100.0 + ac_to_dc_eff = + self.battery_options[b]["charge stages"][cs]["efficiency"] * 100.0 """ - ac_to_dc_netto = (ac_to_dc[b][u].x - ac_from_dc[b][u].x) * hour_fraction[u] - dc_from_ac_netto = (dc_from_ac[b][u].x - dc_to_ac[b][u].x) * hour_fraction[u] + ac_to_dc_netto = ( + ac_to_dc[b][u].x - ac_from_dc[b][u].x + ) * hour_fraction[u] + dc_from_ac_netto = ( + dc_from_ac[b][u].x - dc_to_ac[b][u].x + ) * hour_fraction[u] if ac_to_dc_netto > 0: ac_to_dc_eff = dc_from_ac_netto * 100.0 / ac_to_dc_netto elif ac_to_dc_netto < 0: @@ -1452,11 +2150,15 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, else: ac_to_dc_eff = "--" - dc_to_bat_netto = (dc_to_bat[b][u].x - 
dc_from_bat[b][u].x) * hour_fraction[u] - bat_from_dc_netto = ((dc_to_bat[b][u].x * eff_dc_to_bat[b] - dc_from_bat[b][u].x / eff_bat_to_dc[b]) - * hour_fraction[u]) + dc_to_bat_netto = ( + dc_to_bat[b][u].x - dc_from_bat[b][u].x + ) * hour_fraction[u] + bat_from_dc_netto = ( + dc_to_bat[b][u].x * eff_dc_to_bat[b] + - dc_from_bat[b][u].x / eff_bat_to_dc[b] + ) * hour_fraction[u] if dc_to_bat_netto > 0: - dc_to_bat_eff = bat_from_dc_netto * 100.0/dc_to_bat_netto + dc_to_bat_eff = bat_from_dc_netto * 100.0 / dc_to_bat_netto elif dc_to_bat_netto < 0: dc_to_bat_eff = dc_to_bat_netto * 100.0 / bat_from_dc_netto else: @@ -1473,14 +2175,27 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for ds in range(DS[b]): if ac_from_dc_st_on[b][ds][u].x == 1: d_stage = ds - dc_to_ac_eff = self.battery_options[b]["discharge stages"][ds]["efficiency"] * 100.0 + dc_to_ac_eff = + self.battery_options[b]["discharge stages"][ds]["efficiency"] * 100.0 """ pv_prod = 0 for s in range(pv_dc_num[b]): - pv_prod += pv_dc_on_off[b][s][u].x * pv_prod_dc[b][s][u] * hour_fraction[u] - row = [str(uur[u]), ac_to_dc_netto, ac_to_dc_eff, dc_from_ac_netto, pv_prod, - dc_to_bat_netto, dc_to_bat_eff, bat_from_dc_netto, overall_eff, soc[b][u + 1].x] + pv_prod += ( + pv_dc_on_off[b][s][u].x * pv_prod_dc[b][s][u] * hour_fraction[u] + ) + row = [ + str(uur[u]), + ac_to_dc_netto, + ac_to_dc_eff, + dc_from_ac_netto, + pv_prod, + dc_to_bat_netto, + dc_to_bat_eff, + bat_from_dc_netto, + overall_eff, + soc[b][u + 1].x, + ] df_accu[b].loc[df_accu[b].shape[0]] = row # df_accu[b].loc['total'] = df_accu[b].select_dtypes(numpy.number).sum() @@ -1492,7 +2207,10 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, totals = True except Exception as ex: logging.info(ex) - logging.info(f"Totals of accu {self.battery_options[b]['name']} cannot be calculated") + logging.info( + f"Totals of accu {self.battery_options[b]['name']} " + f"cannot be calculated" + ) totals = False if totals: @@ -1500,9 +2218,11 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, df_accu[b].at[df_accu[b].index[-1], "eff"] = "--" df_accu[b].at[df_accu[b].index[-1], "o_eff"] = "--" df_accu[b].at[df_accu[b].index[-1], "SoC"] = "" - logging.info(f"In- en uitgaande energie per uur batterij " - f"{self.battery_options[b]['name']}" - f"\n{df_accu[b].to_string(index=False)}") + logging.info( + f"In- en uitgaande energie per uur batterij " + f"{self.battery_options[b]['name']}" + f"\n{df_accu[b].to_string(index=False)}" + ) # soc dataframe maken df_soc = pd.DataFrame(columns=["tijd", "soc"]) @@ -1511,8 +2231,9 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, tijd_soc.append(tijd_soc[U - 1] + datetime.timedelta(hours=1)) if B > 0: for b in range(B): - df_soc["soc_"+str(b)] = None - for u in range(U+1): + df_soc["soc_" + str(b)] = None + for u in range(U + 1): + row_soc = [] for b in range(B): soc_value = soc[b][u].x if b == 0: @@ -1528,38 +2249,55 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for row in df_soc.itertuples(): sum_soc = 0 for b in range(B): - sum_soc += one_soc[b] * row[b+3] - df_soc.at[row[0], "soc"] = round(100 * sum_soc/sum_cap, 1) + sum_soc += one_soc[b] * row[b + 3] + df_soc.at[row[0], "soc"] = round(100 * sum_soc / sum_cap, 1) if not self.debug: - self.save_df(tablename='prognoses', tijd=tijd_soc, df=df_soc) - + self.save_df(tablename="prognoses", tijd=tijd_soc, df=df_soc) # totaal overzicht # pd.options.display.float_format = '{:,.3f}'.format - cols = ['uur', 'bat_in', 'bat_out'] - cols = cols + 
['cons', 'prod', 'base', 'boil', 'wp', 'ev', 'pv_ac', 'cost', 'profit', - 'b_tem'] + cols = ["uur", "bat_in", "bat_out"] + cols = cols + [ + "cons", + "prod", + "base", + "boil", + "wp", + "ev", + "pv_ac", + "cost", + "profit", + "b_tem", + ] if M > 0: cols = cols + ["mach"] d_f = pd.DataFrame(columns=cols) for u in range(U): row = [uur[u], accu_in_sum[u], accu_out_sum[u]] - row = row + [c_l[u].x, c_t_total[u].x, b_l[u], - c_b[u].x, c_hp[u].x, c_ev_sum[u], solar_hour_sum_opt[u], c_l[u].x * pl[u], - -c_t_w_tax[u].x * pt[u] - c_t_no_tax[u].x * pt_notax[u], - boiler_temp[u + 1].x] + row = row + [ + c_l[u].x, + c_t_total[u].x, + b_l[u], + c_b[u].x, + c_hp[u].x, + c_ev_sum[u], + solar_hour_sum_opt[u], + c_l[u].x * pl[u], + -c_t_w_tax[u].x * pt[u] - c_t_no_tax[u].x * pt_notax[u], + boiler_temp[u + 1].x, + ] if M > 0: row = row + [c_ma_sum[u]] d_f.loc[d_f.shape[0]] = row if not self.debug: d_f_save = d_f.drop(["b_tem"], axis=1) - self.save_df(tablename='prognoses', tijd=tijd, df=d_f_save) + self.save_df(tablename="prognoses", tijd=tijd, df=d_f_save) else: logging.info("Berekende prognoses zijn niet opgeslagen.") d_f = d_f.astype({"uur": int}) - d_f.loc['total'] = d_f.iloc[:, 1:].sum() + d_f.loc["total"] = d_f.iloc[:, 1:].sum() # d_f.loc['total'] = d_f.loc['total'].astype(object) d_f.at[d_f.index[-1], "uur"] = "Totaal" @@ -1567,7 +2305,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, logging.info(f"Berekende prognoses: \n{d_f.to_string(index=False)}") # , formatters={'uur':'{:03d}'.format})) - logging.info(f"Winst: {old_cost_da - cost.x:<0.2f} €") + logging.info(f"Winst: € {old_cost_da - cost.x:<0.2f}") # doorzetten van alle settings naar HA if not self.debug: @@ -1575,12 +2313,12 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, else: logging.info("Onderstaande settings worden NIET doorgezet naar HA") - ''' + """ set helpers output home assistant boiler c_b[0].x >0 trigger boiler ev c_ev[0].x > 0 start laden auto, ==0 stop laden auto battery multiplus feedin from grid = accu_in[0].x - accu_out[0].x - ''' + """ ############################################# # boiler @@ -1591,8 +2329,10 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, if self.debug: logging.info("Boiler opwarmen zou zijn geactiveerd") else: - self.call_service(self.boiler_options["activate service"], - entity_id=self.boiler_options["activate entity"]) + self.call_service( + self.boiler_options["activate service"], + entity_id=self.boiler_options["activate entity"], + ) # "input_button.hw_trigger") logging.info("Boiler opwarmen geactiveerd") else: @@ -1604,7 +2344,9 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for e in range(EV): if ready_u[e] < U: if self.log_level <= logging.INFO: - logging.info(f"Inzet-factor laden {self.ev_options[e]['name']} per stap") + logging.info( + f"Inzet-factor laden {self.ev_options[e]['name']} per stap" + ) print("uur", end=" ") for cs in range(ECS[0]): print(f" {charge_stages[e][cs]['ampere']:4.1f}A", end=" ") @@ -1612,11 +2354,17 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for u in range(ready_u[e] + 1): print(f"{uur[u]:2d}", end=" ") for cs in range(ECS[0]): - print(f"{abs(charger_factor[0][cs][u].x):.2f}", end=" ") + print( + f"{abs(charger_factor[0][cs][u].x):.2f}", end=" " + ) print() entity_charge_switch = self.ev_options[e]["charge switch"] - entity_charging_ampere = self.ev_options[e]["entity set charging ampere"] - entity_stop_laden = self.config.get(["entity stop charging"], self.ev_options[e], None) + 
entity_charging_ampere = self.ev_options[e][ + "entity set charging ampere" + ] + entity_stop_laden = self.config.get( + ["entity stop charging"], self.ev_options[e], None + ) old_switch_state = self.get_state(entity_charge_switch).state old_ampere_state = self.get_state(entity_charging_ampere).state new_ampere_state = 0 @@ -1632,16 +2380,26 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, new_ampere_state = charge_stages[e][cs]["ampere"] if new_ampere_state > 0: new_switch_state = "on" - if (charger_factor[e][cs][0].x < 1) and (energy_needed[e] > (ev_accu_in[e][0].x + 0.01)): - new_ts = start_dt.timestamp() + charger_factor[e][cs][0].x * 3600 + if (charger_factor[e][cs][0].x < 1) and ( + energy_needed[e] > (ev_accu_in[e][0].x + 0.01) + ): + new_ts = ( + start_dt.timestamp() + charger_factor[e][cs][0].x * 3600 + ) stop_laden = dt.datetime.fromtimestamp(int(new_ts)) - new_state_stop_laden = stop_laden.strftime('%Y-%m-%d %H:%M') + new_state_stop_laden = stop_laden.strftime("%Y-%m-%d %H:%M") break ev_name = self.ev_options[e]["name"] logging.info(f"Berekeningsuitkomst voor opladen van {ev_name}:") - logging.info(f"- aantal ampere {new_ampere_state}A (was {old_ampere_state}A)") - logging.info(f"- stand schakelaar '{new_switch_state}' (was '{old_switch_state}')") - if not (entity_stop_laden is None) and not (new_state_stop_laden is None): + logging.info( + f"- aantal ampere {new_ampere_state}A (was {old_ampere_state}A)" + ) + logging.info( + f"- stand schakelaar '{new_switch_state}' (was '{old_switch_state}')" + ) + if not (entity_stop_laden is None) and not ( + new_state_stop_laden is None + ): logging.info(f"- stop laden op {new_state_stop_laden}") logging.info(f"- positie: {ev_position[e]}") logging.info(f"- ingeplugd: {ev_plugged_in[e]}") @@ -1650,24 +2408,46 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, if float(new_ampere_state) > 0.0: if old_switch_state == "off": if self.debug: - logging.info(f"Laden van {ev_name} zou zijn aangezet met {new_ampere_state} ampere") + logging.info( + f"Laden van {ev_name} zou zijn aangezet " + f"met {new_ampere_state} ampere" + ) else: - logging.info(f"Laden van {ev_name} aangezet met {new_ampere_state} ampere via " - f"'{entity_charging_ampere}'") + logging.info( + f"Laden van {ev_name} aangezet " + f"met {new_ampere_state} ampere via " + f"'{entity_charging_ampere}'" + ) self.set_value(entity_charging_ampere, new_ampere_state) self.turn_on(entity_charge_switch) - if not (entity_stop_laden is None) and not (new_state_stop_laden is None): - self.call_service("set_datetime", entity_id=entity_stop_laden, - datetime=new_state_stop_laden) + if not (entity_stop_laden is None) and not ( + new_state_stop_laden is None + ): + self.call_service( + "set_datetime", + entity_id=entity_stop_laden, + datetime=new_state_stop_laden, + ) if old_switch_state == "on": if self.debug: - logging.info(f"Laden van {ev_name} zou zijn doorgegaan met {new_ampere_state} A") + logging.info( + f"Laden van {ev_name} zou zijn doorgegaan " + f"met {new_ampere_state} A" + ) else: - logging.info(f"Laden van {ev_name} is doorgegaan met {new_ampere_state} A") + logging.info( + f"Laden van {ev_name} is doorgegaan " + f"met {new_ampere_state} A" + ) self.set_value(entity_charging_ampere, new_ampere_state) - if not (entity_stop_laden is None) and not (new_state_stop_laden is None): - self.call_service("set_datetime", entity_id=entity_stop_laden, - datetime=new_state_stop_laden) + if not (entity_stop_laden is None) and not ( + new_state_stop_laden is None + ): + 
self.call_service( + "set_datetime", + entity_id=entity_stop_laden, + datetime=new_state_stop_laden, + ) else: if old_switch_state == "on": if self.debug: @@ -1676,16 +2456,27 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, self.set_value(entity_charging_ampere, 0) self.turn_off(entity_charge_switch) logging.info(f"Laden van {ev_name} uitgezet") - if not (entity_stop_laden is None) and not (new_state_stop_laden is None): - self.call_service("set_datetime", entity_id=entity_stop_laden, - datetime=new_state_stop_laden) + if not (entity_stop_laden is None) and not ( + new_state_stop_laden is None + ): + self.call_service( + "set_datetime", + entity_id=entity_stop_laden, + datetime=new_state_stop_laden, + ) else: logging.info(f"{ev_name} is niet thuis of niet ingeplugd") - logging.info(f"Evaluatie status laden {ev_name} op " - f"" - f"{dt.datetime.now().strftime('%Y-%m-%d %H:%M')}") - logging.info(f"- schakelaar laden: {self.get_state(entity_charge_switch).state}") - logging.info(f"- aantal ampere: {self.get_state(entity_charging_ampere).state}") + logging.info( + f"Evaluatie status laden {ev_name} op " + f"" + f"{dt.datetime.now().strftime('%Y-%m-%d %H:%M')}" + ) + logging.info( + f"- schakelaar laden: {self.get_state(entity_charge_switch).state}" + ) + logging.info( + f"- aantal ampere: {self.get_state(entity_charging_ampere).state}" + ) ####################################### # solar @@ -1730,7 +2521,10 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, elif abs(netto_vermogen) < minimum_power: new_state = "Aan" balance = False - new_ts = start_dt.timestamp() + (abs(netto_vermogen) / minimum_power) * 3600 + new_ts = ( + start_dt.timestamp() + + (abs(netto_vermogen) / minimum_power) * 3600 + ) stop_omvormer = dt.datetime.fromtimestamp(int(new_ts)) if netto_vermogen > 0: netto_vermogen = minimum_power @@ -1743,47 +2537,90 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, if stop_omvormer is None: stop_str = "2000-01-01 00:00:00" else: - stop_str = stop_omvormer.strftime('%Y-%m-%d %H:%M') + stop_str = stop_omvormer.strftime("%Y-%m-%d %H:%M") first_row = df_accu[b].iloc[0] from_battery = int(-first_row["dc->"] * 1000 / fraction_first_interval) from_pv = int(first_row["pv->dc"] * 1000 / fraction_first_interval) from_ac = int(first_row["->dc"] * 1000 / fraction_first_interval) calculated_soc = round(soc[b][1].x, 1) - + grid_set_point = round( + 1000 * (c_l[0].x - c_t_total[0].x) / hour_fraction[0], 0 + ) + logging.info(f"Grid set point: {grid_set_point} W") + logging.info(f"Cycle cost {bat_name}: {cycle_cost[b].x:<0.2f} euro") if self.debug: - logging.info(f"Netto vermogen naar(+)/uit(-) batterij {bat_name} zou zijn: {netto_vermogen} W") + logging.info( + f"Netto vermogen naar(+)/uit(-) batterij {bat_name} " + f"zou zijn: {netto_vermogen} W" + ) if stop_omvormer: logging.info(f"tot: {stop_str}") logging.info(f"Balanceren zou zijn: {balance}") else: - self.set_entity_value("entity set power feedin", self.battery_options[b], netto_vermogen) - self.set_entity_option("entity set operating mode", self.battery_options[b], new_state) + # export the ess grid setpoint in W + self.set_entity_value( + "entity ess grid setpoint", + self.battery_options[b], + grid_set_point, + ) + self.set_entity_value( + "entity set power feedin", + self.battery_options[b], + netto_vermogen, + ) + self.set_entity_option( + "entity set operating mode", self.battery_options[b], new_state + ) balance_state = "on" if balance else "off" - self.set_entity_state("entity balance switch", 
self.battery_options[b], balance_state) - logging.info(f"Netto vermogen naar(+)/uit(-) omvormer {bat_name}: {netto_vermogen} W" - f"{' tot: '+stop_str if stop_omvormer else ''}") - logging.info(f"Balanceren: {balance}{' tot: '+stop_str if stop_omvormer else ''}") - helper_id = self.config.get(["entity stop victron"], self.battery_options[b], None) + self.set_entity_state( + "entity balance switch", self.battery_options[b], balance_state + ) + logging.info( + f"Netto vermogen naar(+)/uit(-) omvormer {bat_name}: " + f"{netto_vermogen} W" + f"{' tot: '+stop_str if stop_omvormer else ''}" + ) + logging.info( + f"Balanceren: {balance}" + f"{' tot: '+stop_str if stop_omvormer else ''}" + ) + helper_id = self.config.get( + ["entity stop victron"], self.battery_options[b], None + ) if helper_id is not None: - logging.warning(f"The name 'entity stop victron' is deprecated, " - f"please change to 'entity stop inverter'.") + logging.warning( + f"The name 'entity stop victron' is deprecated, " + f"please change to 'entity stop inverter'." + ) if helper_id is None: - helper_id = self.config.get(["entity stop inverter"], self.battery_options[b], None) + helper_id = self.config.get( + ["entity stop inverter"], self.battery_options[b], None + ) if helper_id is not None: - self.call_service("set_datetime", entity_id=helper_id, datetime=stop_str) - self.set_entity_value("entity from battery", self.battery_options[b], from_battery) + self.call_service( + "set_datetime", entity_id=helper_id, datetime=stop_str + ) + self.set_entity_value( + "entity from battery", self.battery_options[b], from_battery + ) logging.info(f"Vermogen uit batterij: {from_battery}W") - self.set_entity_value("entity from pv", self.battery_options[b], from_pv) + self.set_entity_value( + "entity from pv", self.battery_options[b], from_pv + ) logging.info(f"Vermogen dat binnenkomt van pv: {from_pv}W") - self.set_entity_value("entity from ac", self.battery_options[b], from_ac) + self.set_entity_value( + "entity from ac", self.battery_options[b], from_ac + ) logging.info(f"Vermogen dat binnenkomt van ac: {from_ac}W") - self.set_entity_value("entity calculated soc", self.battery_options[b], calculated_soc) + self.set_entity_value( + "entity calculated soc", self.battery_options[b], calculated_soc + ) logging.info(f"Waarde SoC na eerste uur: {calculated_soc}%") for s in range(pv_dc_num[b]): - entity_pv_switch = self.config.get(["entity pv switch"], - self.battery_options[b]["solar"][s], - None) + entity_pv_switch = self.config.get( + ["entity pv switch"], self.battery_options[b]["solar"][s], None + ) if entity_pv_switch == "": entity_pv_switch = None if entity_pv_switch is not None: @@ -1806,21 +2643,76 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, self.turn_on(entity_pv_switch) ################################################## - # heating + # heatpump ################################################## - if self.heater_present: - entity_curve_adjustment = self.heating_options["entity adjust heating curve"] - old_adjustment = float(self.get_state( - entity_curve_adjustment).state) - # adjustment factor (K/%) bijv 0.4 K/10% = 0.04 - adjustment_factor = self.heating_options["adjustment factor"] - adjustment = calc_adjustment_heatcurve( - pl[0], p_avg, adjustment_factor, old_adjustment) - if self.debug: - logging.info(f"Aanpassing stooklijn zou zijn: {adjustment:<0.2f}") + if self.hp_present and self.hp_enabled: + # als aan/uit entity er is altijd schakelen + entity_hp_switch = self.config.get( + ["entity hp switch"], 
self.heating_options, None + ) + if entity_hp_switch is None: + if self.hp_adjustment == "on/off": + logging.warning( + f"Geen entity om warmtepomp in/uit te schakelen" + ) else: - logging.info(f"Aanpassing stooklijn: {adjustment:<0.2f}") - self.set_value(entity_curve_adjustment, adjustment) + logging.debug(f"Warmtepomp entity: {entity_hp_switch}") + switch_state = self.get_state(entity_hp_switch).state + if hp_on[0].x == 1: + if switch_state == "off": + if self.debug: + logging.info(f"Warmtepomp zou zijn ingeschakeld") + else: + logging.info(f"Warmtepomp ingeschakeld") + self.turn_on(entity_hp_switch) + else: + if switch_state == "on": + if self.debug: + logging.info(f"Warmtepomp zou zijn uitgeschakeld") + else: + logging.info(f"Warmtepomp uitgeschakeld") + self.turn_off(entity_hp_switch) + # power, als entity er is altijd doorzetten + entity_hp_power = self.config.get( + ["entity hp power"], self.heating_options, None + ) + if entity_hp_power is not None and self.hp_adjustment != "on/off": + # elektrisch vermogen in W + hp_power = 1000 * c_hp[0].x / hour_fraction[0] + if self.debug: + logging.info( + f"Elektrisch vermogen warmtepomp zou zijn ingesteld " + f"op {hp_power:<0.0f} W" + ) + else: + self.set_value(entity_hp_power, hp_power) + logging.info( + f"Elektrisch vermogen warmtepomp ingesteld " + f"op {hp_power:<0.0f} W" + ) + + # curve adjustment + entity_curve_adjustment = self.config.get( + ["entity adjust heating curve"], self.heating_options, None + ) + if entity_curve_adjustment is not None: + old_adjustment = float( + self.get_state(entity_curve_adjustment).state + ) + # adjustment factor (K/%) bijv 0.4 K/10% = 0.04 + adjustment_factor = self.config.get( + ["adjustment factor"], self.heating_options, 0.0 + ) + adjustment = calc_adjustment_heatcurve( + pl[0], p_avg, adjustment_factor, old_adjustment + ) + if self.debug: + logging.info( + f"Aanpassing stooklijn zou zijn: {adjustment:<0.2f}" + ) + else: + logging.info(f"Aanpassing stooklijn: {adjustment:<0.2f}") + self.set_value(entity_curve_adjustment, adjustment) ######################################################################## # apparaten /machines @@ -1832,33 +2724,54 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, for r in range(R[m]): if ma_start[m][r].x == 1: # print(f"ma_start: run {r} start {ma_start[m][r].x}") - start_machine_str = ma_kw_dt[m][r].strftime('%Y-%m-%d %H:%M') + start_machine_str = ma_kw_dt[m][r].strftime( + "%Y-%m-%d %H:%M" + ) if not (ma_entity_plan_start[m] is None): if self.debug: - logging.info(f"Zou zijn gestart op {start_machine_str}") + logging.info( + f"Zou zijn gestart op {start_machine_str}" + ) else: - self.call_service("set_datetime", entity_id=ma_entity_plan_start[m], - datetime=start_machine_str) + self.call_service( + "set_datetime", + entity_id=ma_entity_plan_start[m], + datetime=start_machine_str, + ) logging.info(f"Start op {start_machine_str}") - end_machine_str = ma_kw_dt[m][r + RL[m]].strftime('%Y-%m-%d %H:%M') + end_machine_str = ma_kw_dt[m][r + RL[m]].strftime( + "%Y-%m-%d %H:%M" + ) if not (ma_entity_plan_end[m] is None): if self.debug: logging.info(f"Zou klaar zijn op {end_machine_str}") else: - self.call_service("set_datetime", entity_id=ma_entity_plan_end[m], - datetime=end_machine_str) + self.call_service( + "set_datetime", + entity_id=ma_entity_plan_end[m], + datetime=end_machine_str, + ) logging.info(f"Is klaar op {end_machine_str}") if self.log_level == logging.DEBUG: - logging.debug(f"Per kwartier het berekende verbruik, en het bijbehorende tarief") + 
logging.debug( + f"Per kwartier het berekende verbruik en het bijbehorende tarief" + ) for kw in range(KW[m]): - print(f"kwartier {kw:>2} tijd: {ma_kw_dt[m][kw].strftime('%H:%M')} " - f"consumption: {c_ma_kw[m][kw].x:>7.3f} " - f"uur: {math.floor(kw / 4)} tarief: {pl[math.floor(kw / 4)]:.4f}") - logging.debug(f"Per uur het berekende verbruik, het bijbehorende tarief en de kosten") + print( + f"kwartier {kw:>2} tijd: {ma_kw_dt[m][kw].strftime('%H:%M')} " + f"consumption: {c_ma_kw[m][kw].x:>7.3f} " + f"uur: {math.floor(kw / 4)} tarief: {pl[math.floor(kw / 4)]:.4f}" + ) + logging.debug( + f"Per uur het berekende verbruik, " + f"het bijbehorende tarief en de kosten" + ) for u in range(U): - print(f"uur {u:>2} tijdstip {tijd[u].strftime('%H:%M')} " - f"consumption: {c_ma_u[m][u].x:>7.3f} tarief: {pl[u]:.4f}") + print( + f"uur {u:>2} tijdstip {tijd[u].strftime('%H:%M')} " + f"consumption: {c_ma_u[m][u].x:>7.3f} tarief: {pl[u]:.4f}" + ) except Exception as ex: error_handling(ex) @@ -1876,7 +2789,6 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, mach_n = [] ev_n = [] c_l_p = [] - soc = [] soc_b = [] pv_p_org = [] pv_p_opt = [] @@ -1886,7 +2798,7 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, c_t_n.append(-c_t_total[u].x) c_l_p.append(c_l[u].x) base_n.append(-b_l[u]) - boiler_n.append(- c_b[u].x) + boiler_n.append(-c_b[u].x) heatpump_n.append(-c_hp[u].x) ev_n.append(-c_ev_sum[u]) mach_n.append(-c_ma_sum[u]) @@ -1900,23 +2812,34 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, accu_out_sum += ac_from_dc[b][u].x accu_in_n.append(-accu_in_sum * hour_fraction[u]) accu_out_p.append(accu_out_sum * hour_fraction[u]) - max_y = max(max_y, (c_l_p[u] + pv_p_org[u] + pv_ac_p[u]), abs( - c_t_total[u].x) + b_l[u] + c_b[u].x + c_hp[u].x + c_ev_sum[u] + c_ma_sum[u] + accu_in_sum) - if B >0: - soc_t = list(df_soc["soc"]) - for b in range(B): - soc_b.append(list(df_soc["soc_"+str(b)])) - ''' + max_y = max( + max_y, + (c_l_p[u] + pv_p_org[u] + pv_ac_p[u]), + abs(c_t_total[u].x) + + b_l[u] + + c_b[u].x + + c_hp[u].x + + c_ev_sum[u] + + c_ma_sum[u] + + accu_in_sum, + ) + soc_t = [] + if B > 0: + soc_t = list(df_soc["soc"]) + for b in range(B): + soc_b.append(list(df_soc["soc_" + str(b)])) + """ if u == 0: soc_p.append([]) soc_p[b].append(soc[b][u].x) for b in range(B): soc_p[b].append(soc[b][U].x) - ''' + """ # grafiek 1 import numpy as np from dao.prog.da_graph import GraphBuilder + gr1_df = pd.DataFrame() gr1_df["index"] = np.arange(U) gr1_df["uur"] = uur[0:U] @@ -1931,71 +2854,65 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, gr1_df["pv_dc"] = pv_ac_p gr1_df["accu_in"] = accu_in_n gr1_df["accu_out"] = accu_out_p - style = self.config.get(['graphics', 'style']) + style = self.config.get(["graphics", "style"]) gr1_options = { - "title": "Prognose berekend op: " + start_dt.strftime('%Y-%m-%d %H:%M'), + "title": "Prognose berekend op: " + start_dt.strftime("%Y-%m-%d %H:%M"), "style": style, - "haxis": { - "values": "uur", - "title": "uren van de dag" - }, - "vaxis": [{ - "title": "kWh" - } + "haxis": {"values": "uur", "title": "uren van de dag"}, + "vaxis": [{"title": "kWh"}], + "series": [ + {"column": "verbruik", "type": "stacked", "color": "#00bfff"}, + { + "column": "pv_ac", + "title": "PV-AC", + "type": "stacked", + "color": "green", + }, + { + "column": "accu_out", + "title": "Accu out", + "type": "stacked", + "color": "red", + }, + { + "column": "baseload", + "title": "Overig verbr.", + "type": "stacked", + "color": "#f1a603", + }, + {"column": 
"boiler", "type": "stacked", "color": "#e39ff6"}, + { + "column": "heatpump", + "title": "WP", + "type": "stacked", + "color": "#a32cc4", + }, + {"column": "ev", "title": "EV", "type": "stacked", "color": "yellow"}, + { + "column": "mach", + "title": "App.", + "type": "stacked", + "color": "brown", + }, + { + "column": "productie", + "title": "Teruglev.", + "type": "stacked", + "color": "#0080ff", + }, + { + "column": "accu_in", + "title": "Accu in", + "type": "stacked", + "color": "#ff8000", + }, ], - "series": [{"column": "verbruik", - "type": "stacked", - "color": '#00bfff' - }, - {"column": "pv_ac", - "title": "PV-AC", - "type": "stacked", - "color": 'green' - }, - {"column": "accu_out", - "title": "Accu out", - "type": "stacked", - "color": 'red' - }, - {"column": "baseload", - "title": "Overig verbr.", - "type": "stacked", - "color": "#f1a603" - }, - {"column": "boiler", - "type": "stacked", - "color": '#e39ff6' - }, - {"column": "heatpump", - "title": "WP", - "type": "stacked", - "color": '#a32cc4' - }, - {"column": "ev", - "title": "EV", - "type": "stacked", - "color": 'yellow' - }, - {"column": "mach", - "title": "App.", - "type": "stacked", - "color": 'brown' - }, - {"column": "productie", - "title": "Teruglev.", - "type": "stacked", - "color": '#0080ff' - }, - {"column": "accu_in", - "title": "Accu in", - "type": "stacked", - "color": '#ff8000' - }, - ] } backend = self.config.get(["graphical backend"], None, "") gb = GraphBuilder(backend) - show_graph = self.config.get(['graphics', 'show'], None, "False").lower() == 'true' + show_graph = ( + self.config.get(["graphics", "show"], None, "False").lower() == "true" + ) if show_graph: gb.build(gr1_df, gr1_options) @@ -2011,97 +2928,209 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, grid0_df["mach"] = mach_n grid0_df["pv_ac"] = pv_ac_p grid0_df["pv_dc"] = pv_p_org - style = self.config.get(['graphics', 'style'], None, "default") + style = self.config.get(["graphics", "style"], None, "default") import matplotlib.pyplot as plt import matplotlib.ticker as ticker import matplotlib.lines as mlines - plt.set_loglevel(level='warning') - pil_logger = logging.getLogger('PIL') + plt.set_loglevel(level="warning") + pil_logger = logging.getLogger("PIL") # override the logger logging level to INFO pil_logger.setLevel(max(logging.INFO, self.log_level)) - show_battery_balance = self.config.get(["graphics", "battery balance"], - None, - "true").lower() == "true" + show_battery_balance = ( + self.config.get(["graphics", "battery balance"], None, "true").lower() + == "true" + ) plt.style.use(style) nrows = 3 - if show_battery_balance and B>0: + if show_battery_balance and B > 0: nrows += B fig, axis = plt.subplots(figsize=(8, 3 * nrows), nrows=nrows) ind = np.arange(U) - axis[0].bar(ind, np.array(org_l), label='Levering', color='#00bfff', align="edge") + axis[0].bar( + ind, np.array(org_l), label="Levering", color="#00bfff", align="edge" + ) if sum(pv_p_org) > 0: - axis[0].bar(ind, np.array(pv_p_org), bottom=np.array( - org_l), label='PV AC', color='green', align="edge") + axis[0].bar( + ind, + np.array(pv_p_org), + bottom=np.array(org_l), + label="PV AC", + color="green", + align="edge", + ) if sum(pv_ac_p) > 0: - axis[0].bar(ind, np.array(pv_ac_p), bottom=np.array( - org_l) + np.array(pv_p_org), label='PV DC', color='lime', align="edge") - axis[0].bar(ind, np.array(base_n), - label="Overig verbr.", color='#f1a603', align="edge") + axis[0].bar( + ind, + np.array(pv_ac_p), + bottom=np.array(org_l) + np.array(pv_p_org), + label="PV 
DC", + color="lime", + align="edge", + ) + axis[0].bar( + ind, np.array(base_n), label="Overig verbr.", color="#f1a603", align="edge" + ) if self.boiler_present: - axis[0].bar(ind, np.array(boiler_n), bottom=np.array( - base_n), label="Boiler", color='#e39ff6', align="edge") - if self.heater_present: - axis[0].bar(ind, np.array(heatpump_n), bottom=np.array( - base_n), label="WP", color='#a32cc4', align="edge") - axis[0].bar(ind, np.array(ev_n), bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n), - label="EV laden", color='yellow', align="edge") + axis[0].bar( + ind, + np.array(boiler_n), + bottom=np.array(base_n), + label="Boiler", + color="#e39ff6", + align="edge", + ) + if self.hp_present: + axis[0].bar( + ind, + np.array(heatpump_n), + bottom=np.array(base_n), + label="WP", + color="#a32cc4", + align="edge", + ) + axis[0].bar( + ind, + np.array(ev_n), + bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n), + label="EV laden", + color="yellow", + align="edge", + ) if M > 0: - axis[0].bar(ind, np.array(mach_n), - bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n) + np.array(ev_n), - label="Apparatuur", color='brown', align="edge") - axis[0].bar(ind, np.array(org_t), - bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n) + np.array(ev_n) + - np.array(mach_n), - label="Teruglev.", color='#0080ff', align="edge") - axis[0].legend(loc='best', bbox_to_anchor=(1.05, 1.00)) - axis[0].set_ylabel('kWh') + axis[0].bar( + ind, + np.array(mach_n), + bottom=np.array(base_n) + + np.array(boiler_n) + + np.array(heatpump_n) + + np.array(ev_n), + label="Apparatuur", + color="brown", + align="edge", + ) + axis[0].bar( + ind, + np.array(org_t), + bottom=np.array(base_n) + + np.array(boiler_n) + + np.array(heatpump_n) + + np.array(ev_n) + + np.array(mach_n), + label="Teruglev.", + color="#0080ff", + align="edge", + ) + axis[0].legend(loc="best", bbox_to_anchor=(1.05, 1.00)) + axis[0].set_ylabel("kWh") ylim = math.ceil(max_y) axis[0].set_ylim([-ylim, ylim]) axis[0].set_xticks(ind, labels=uur) axis[0].xaxis.set_major_locator(ticker.MultipleLocator(2)) axis[0].xaxis.set_minor_locator(ticker.MultipleLocator(1)) - axis[0].set_title(f"Berekend op: {start_dt.strftime('%d-%m-%Y %H:%M')}\nNiet geoptimaliseerd") - - axis[1].bar(ind, np.array(c_l_p), - label='Levering', color='#00bfff', align="edge") - axis[1].bar(ind, np.array(pv_p_opt), bottom=np.array( - c_l_p), label='PV AC', color='green', align="edge") - axis[1].bar(ind, np.array(accu_out_p), bottom=np.array(c_l_p) + np.array(pv_p_opt), label='Accu uit', - color='red', align="edge") + axis[0].set_title( + f"Berekend op: {start_dt.strftime('%d-%m-%Y %H:%M')}\n" + f"Niet geoptimaliseerd" + ) + + axis[1].bar( + ind, np.array(c_l_p), label="Levering", color="#00bfff", align="edge" + ) + axis[1].bar( + ind, + np.array(pv_p_opt), + bottom=np.array(c_l_p), + label="PV AC", + color="green", + align="edge", + ) + axis[1].bar( + ind, + np.array(accu_out_p), + bottom=np.array(c_l_p) + np.array(pv_p_opt), + label="Accu uit", + color="red", + align="edge", + ) # axis[1].bar(ind, np.array(cons_n), label="Verbruik", color='yellow') - axis[1].bar(ind, np.array(base_n), - label="Overig verbr.", color='#f1a603', align="edge") + axis[1].bar( + ind, np.array(base_n), label="Overig verbr.", color="#f1a603", align="edge" + ) if self.boiler_present: - axis[1].bar(ind, np.array(boiler_n), bottom=np.array( - base_n), label="Boiler", color='#e39ff6', align="edge") - if self.heater_present: - axis[1].bar(ind, 
np.array(heatpump_n), bottom=np.array( - base_n), label="WP", color='#a32cc4', align="edge") - axis[1].bar(ind, np.array(ev_n), bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n), - label="EV laden", color='yellow', align="edge") + axis[1].bar( + ind, + np.array(boiler_n), + bottom=np.array(base_n), + label="Boiler", + color="#e39ff6", + align="edge", + ) + if self.hp_present: + axis[1].bar( + ind, + np.array(heatpump_n), + bottom=np.array(base_n), + label="WP", + color="#a32cc4", + align="edge", + ) + axis[1].bar( + ind, + np.array(ev_n), + bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n), + label="EV laden", + color="yellow", + align="edge", + ) if M > 0: - axis[1].bar(ind, np.array(mach_n), - bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n) + np.array(ev_n), - label="Apparatuur", color='brown', align="edge") - axis[1].bar(ind, np.array(c_t_n), - bottom=np.array(base_n) + np.array(boiler_n) + np.array(heatpump_n) + - np.array(ev_n) + np.array(mach_n), - label="Teruglev.", color='#0080ff', align="edge") - axis[1].bar(ind, np.array(accu_in_n), - bottom=np.array(base_n) + np.array(boiler_n) + - np.array(heatpump_n) + np.array(ev_n) + np.array(mach_n) + np.array(c_t_n), - label='Accu in', color='#ff8000', align="edge") - axis[1].legend(loc='best', bbox_to_anchor=(1.05, 1.00)) - axis[1].set_ylabel('kWh') + axis[1].bar( + ind, + np.array(mach_n), + bottom=np.array(base_n) + + np.array(boiler_n) + + np.array(heatpump_n) + + np.array(ev_n), + label="Apparatuur", + color="brown", + align="edge", + ) + axis[1].bar( + ind, + np.array(c_t_n), + bottom=np.array(base_n) + + np.array(boiler_n) + + np.array(heatpump_n) + + np.array(ev_n) + + np.array(mach_n), + label="Teruglev.", + color="#0080ff", + align="edge", + ) + axis[1].bar( + ind, + np.array(accu_in_n), + bottom=np.array(base_n) + + np.array(boiler_n) + + np.array(heatpump_n) + + np.array(ev_n) + + np.array(mach_n) + + np.array(c_t_n), + label="Accu in", + color="#ff8000", + align="edge", + ) + axis[1].legend(loc="best", bbox_to_anchor=(1.05, 1.00)) + axis[1].set_ylabel("kWh") axis[1].set_ylim([-ylim, ylim]) axis[1].set_xticks(ind, labels=uur) axis[1].xaxis.set_major_locator(ticker.MultipleLocator(2)) axis[1].xaxis.set_minor_locator(ticker.MultipleLocator(1)) - axis[1].set_title(f"Day Ahead geoptimaliseerd\nStrategie: {strategie}" - f" winst € {(old_cost_da - cost.x):0.2f}") + axis[1].set_title( + f"Day Ahead geoptimaliseerd\nStrategie: {strategie}" + f" winst € {(old_cost_da - cost.x):0.2f}" + ) axis[1].sharex(axis[0]) gr_no = 1 @@ -2133,54 +3162,78 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, pv_p.append(0) bat_p.append(0) bat_n.append(0) - legs = [] - leg1 = axis[gr_no].bar(ind, np.array(ac_p), label='AC<->', - color='red', align="edge") - leg2 = axis[gr_no].bar(ind, np.array(bat_p), label='BAT<->', - bottom=np.array(ac_p), - color='blue', align="edge") + leg1 = axis[gr_no].bar( + ind, np.array(ac_p), label="AC<->", color="red", align="edge" + ) + leg2 = axis[gr_no].bar( + ind, + np.array(bat_p), + label="BAT<->", + bottom=np.array(ac_p), + color="blue", + align="edge", + ) if pv_dc_num[b] > 0: - leg3 = axis[gr_no].bar(ind, np.array(pv_p), label='PV->', - bottom=np.array(ac_p) + np.array(bat_p), - color='lime', align="edge") + leg3 = axis[gr_no].bar( + ind, + np.array(pv_p), + label="PV->", + bottom=np.array(ac_p) + np.array(bat_p), + color="lime", + align="edge", + ) else: leg3 = None - axis[gr_no].bar(ind, np.array(ac_n), color='red', align="edge") - 
axis[gr_no].bar(ind, np.array(bat_n), - bottom=np.array(ac_n), - color='blue', align="edge") + axis[gr_no].bar(ind, np.array(ac_n), color="red", align="edge") + axis[gr_no].bar( + ind, + np.array(bat_n), + bottom=np.array(ac_n), + color="blue", + align="edge", + ) # axis[gr_no].legend(loc='best', bbox_to_anchor=(1.30, 1.00)) - axis[gr_no].set_ylabel('kWh') + axis[gr_no].set_ylabel("kWh") axis[gr_no].set_ylim([-ylim, ylim]) axis[gr_no].set_xticks(ind, labels=uur) axis[gr_no].xaxis.set_major_locator(ticker.MultipleLocator(2)) axis[gr_no].xaxis.set_minor_locator(ticker.MultipleLocator(1)) - axis[gr_no].set_title(f"Energiebalans per uur voor {self.battery_options[b]['name']}") + axis[gr_no].set_title( + f"Energiebalans per uur voor " f"{self.battery_options[b]['name']}" + ) axis[gr_no].sharex(axis[0]) axis_20 = axis[gr_no].twinx() - leg4 = axis_20.plot(ind, soc_b[b], label='% SoC', linestyle="solid", color='olive') - axis_20.set_ylabel('% SoC') + leg4 = axis_20.plot( + ind, soc_b[b], label="% SoC", linestyle="solid", color="olive" + ) + axis_20.set_ylabel("% SoC") axis_20.set_ylim([0, 100]) - soc_line = mlines.Line2D([], [], color='olive', label='SoC %') + soc_line = mlines.Line2D([], [], color="olive", label="SoC %") if pv_dc_num[b] > 0: - labels = ["AC<->", 'BAT<->', "PV->", '% SoC'] - handles =[leg1, leg2, leg3, soc_line] + labels = ["AC<->", "BAT<->", "PV->", "% SoC"] + handles = [leg1, leg2, leg3, soc_line] else: - labels = ["AC<->", 'BAT<->', '% SoC'] - handles =[leg1, leg2, soc_line] - axis[gr_no].legend(handles=handles, labels=labels, loc='best', bbox_to_anchor=(1.35, 1.00)) - + labels = ["AC<->", "BAT<->", "% SoC"] + handles = [leg1, leg2, soc_line] + axis[gr_no].legend( + handles=handles, + labels=labels, + loc="best", + bbox_to_anchor=(1.35, 1.00), + ) gr_no += 1 ln1 = None line_styles = ["solid", "dashed", "dotted"] - ind = np.arange(U+1) - if len(uur) < U+1: + ind = np.arange(U + 1) + if len(uur) < U + 1: uur.append(24) if B > 0: - ln1 = axis[gr_no].plot(ind, soc_t, label='SoC', linestyle=line_styles[0], color='olive') + ln1 = axis[gr_no].plot( + ind, soc_t, label="SoC", linestyle=line_styles[0], color="olive" + ) axis[gr_no].set_xticks(ind, labels=uur) - axis[gr_no].set_ylabel('% SoC') + axis[gr_no].set_ylabel("% SoC") axis[gr_no].set_xlabel("uren van de dag") axis[gr_no].xaxis.set_major_locator(ticker.MultipleLocator(2)) axis[gr_no].xaxis.set_minor_locator(ticker.MultipleLocator(1)) @@ -2189,25 +3242,50 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, axis[gr_no].sharex(axis[0]) axis22 = axis[gr_no].twinx() - if self.config.get(["graphics", "prices delivery"], None, "true").lower() == "true": + if ( + self.config.get(["graphics", "prices delivery"], None, "true").lower() + == "true" + ): pl.append(pl[-1]) - ln2 = axis22.step(ind, np.array(pl), label='Tarief\nlevering', color='#00bfff', where='post') + ln2 = axis22.step( + ind, + np.array(pl), + label="Tarief\nlevering", + color="#00bfff", + where="post", + ) else: ln2 = None - if self.config.get(["graphics", "prices redelivery"], None, "true").lower() == "true": + if ( + self.config.get(["graphics", "prices redelivery"], None, "true").lower() + == "true" + ): pt_notax.append(pt_notax[-1]) - ln3 = axis22.step(ind, np.array(pt_notax), label="Tarief terug\nno tax", color='#0080ff', where='post') + ln3 = axis22.step( + ind, + np.array(pt_notax), + label="Tarief terug\nno tax", + color="#0080ff", + where="post", + ) else: ln3 = None - if self.config.get(["graphics", "average delivery"], None, "true").lower() == "true": 
+ if ( + self.config.get(["graphics", "average delivery"], None, "true").lower() + == "true" + ): pl_avg.append(pl_avg[-1]) - ln4 = axis22.plot(ind, np.array(pl_avg), label="Tarief lev.\ngemid.", linestyle="dashed", - color='#00bfff') + ln4 = axis22.plot( + ind, + np.array(pl_avg), + label="Tarief lev.\ngemid.", + linestyle="dashed", + color="#00bfff", + ) else: ln4 = None axis22.set_ylabel("euro/kWh") - axis22.yaxis.set_major_formatter( - ticker.FormatStrFormatter('% 1.2f')) + axis22.yaxis.set_major_formatter(ticker.FormatStrFormatter("% 1.2f")) bottom, top = axis22.get_ylim() if bottom > 0: axis22.set_ylim([0, top]) @@ -2221,14 +3299,16 @@ def calc_optimum(self, _start_dt: dt.datetime | None = None, if ln4: lns += ln4 labels = [line.get_label() for line in lns] - axis22.legend(lns, labels, loc='best', bbox_to_anchor=(1.40, 1.00)) + axis22.legend(lns, labels, loc="best", bbox_to_anchor=(1.40, 1.00)) plt.subplots_adjust(right=0.75) fig.tight_layout() - plt.savefig("../data/images/calc_" + start_dt.strftime("%Y-%m-%d__%H-%M") + ".png") + plt.savefig( + "../data/images/calc_" + start_dt.strftime("%Y-%m-%d__%H-%M") + ".png" + ) if show_graph: plt.show() - plt.close('all') + plt.close("all") def calc_optimum_debug(self): self.debug = True diff --git a/dao/prog/db_manager.py b/dao/prog/db_manager.py index e87b505..813c627 100644 --- a/dao/prog/db_manager.py +++ b/dao/prog/db_manager.py @@ -1,12 +1,22 @@ import pandas as pd import numpy as np import datetime -from sqlalchemy import create_engine, Table, MetaData, select, insert, update, func, and_, text, TIMESTAMP +from sqlalchemy import ( + create_engine, + Table, + MetaData, + select, + insert, + update, + func, + and_, + text, + TIMESTAMP, +) import sqlalchemy_utils -from sqlalchemy.sql import sqltypes -import pytz import os import logging + # import utils as utils @@ -15,9 +25,17 @@ class DBmanagerObj(object): Database manager class. 
""" - def __init__(self, db_dialect: str, db_name: str, - db_server=None, db_user=None, db_password=None, - db_port=None, db_path=None, db_time_zone: str = 'Europe/Amsterdam'): + def __init__( + self, + db_dialect: str, + db_name: str, + db_server=None, + db_user=None, + db_password=None, + db_port=None, + db_path=None, + db_time_zone: str = "Europe/Amsterdam", + ): """ Initializes a DBManager object Args: @@ -40,11 +58,17 @@ def __init__(self, db_dialect: str, db_name: str, self.db_path = db_path self.TARGET_TIMEZONE = db_time_zone if self.db_dialect == "mysql": - self.engine = create_engine(f"mysql+pymysql://{self.user}:{self.password}@{self.server}/{self.db_name}", - pool_recycle=3600, pool_pre_ping=True) + self.engine = create_engine( + f"mysql+pymysql://{self.user}:{self.password}@" + f"{self.server}/{self.db_name}", + pool_recycle=3600, + pool_pre_ping=True, + ) elif self.db_dialect == "postgresql": - self.engine = create_engine(f"postgresql+psycopg2://{self.user}:{self.password}@" - f"{self.server}/{self.db_name}") + self.engine = create_engine( + f"postgresql+psycopg2://{self.user}:{self.password}@" + f"{self.server}/{self.db_name}" + ) with self.engine.connect() as connection: connection.execute(text(f"SET timezone = '{self.TARGET_TIMEZONE}';")) else: # sqlite3 @@ -53,13 +77,15 @@ def __init__(self, db_dialect: str, db_name: str, # abs_db_path = os.path.abspath(self.db_path) # ../data # self.dbname = "home-assistant_v2.db" # self.engine = create_engine(f'sqlite:////{abs_db_path}/{self.db_name}') - self.engine = create_engine(f'sqlite:///{self.db_path}/{self.db_name}') + self.engine = create_engine(f"sqlite:///{self.db_path}/{self.db_name}") if self.db_dialect == "sqlite": logging.debug( - f"Dialect: {self.db_dialect}, database: {self.db_name}, db_path: {self.db_path}") + f"Dialect: {self.db_dialect}, database: {self.db_name}, db_path: {self.db_path}" + ) else: logging.debug( - f"Dialect: {self.db_dialect}, database: {self.db_name}, server: {self.server}") + f"Dialect: {self.db_dialect}, database: {self.db_name}, server: {self.server}" + ) db_url = self.db_url( db_dialect=self.db_dialect, db_name=self.db_name, @@ -67,24 +93,37 @@ def __init__(self, db_dialect: str, db_name: str, db_user=self.user, db_password=self.password, db_port=self.port, - db_path=self.db_path) + db_path=self.db_path, + ) if not sqlalchemy_utils.database_exists(db_url): raise ConnectionAbortedError self.metadata = MetaData() @staticmethod - def db_url(db_dialect: str, db_name: str, db_server=None, db_user=None, db_password=None, - db_port=0, db_path=None) -> str: + def db_url( + db_dialect: str, + db_name: str, + db_server=None, + db_user=None, + db_password=None, + db_port=0, + db_path=None, + ) -> str: if db_dialect == "mysql": if db_port == 0: - result = f"mysql+pymysql://{db_user}:{db_password}@{db_server}/{db_name}" + result = ( + f"mysql+pymysql://{db_user}:{db_password}@{db_server}/{db_name}" + ) else: result = f"mysql+pymysql://{db_user}:{db_password}@{db_server}:{db_port}/{db_name}" elif db_dialect == "postgresql": if db_port == 0: result = f"postgresql+psycopg2://{db_user}:{db_password}@{db_server}/{db_name}" else: - result = f"postgresql+psycopg2://{db_user}:{db_password}@{db_server}:{db_port}/{db_name}" + result = ( + f"postgresql+psycopg2://{db_user}:{db_password}@{db_server}:" + f"{db_port}/{db_name}" + ) else: # sqlite3 if db_path is None: db_path = "../data" @@ -95,52 +134,69 @@ def db_url(db_dialect: str, db_name: str, db_server=None, db_user=None, db_passw def log_pool_status(self): from 
inspect import currentframe, getframeinfo + cf = currentframe() cf = cf.f_back filename = getframeinfo(cf).filename lineno = getframeinfo(cf).lineno - logging.debug(f"Connection status {self.engine.pool.status()} at line {lineno} in {filename}") + logging.debug( + f"Connection status {self.engine.pool.status()} at line " + f"{lineno} in {filename}" + ) # Custom function to handle from_unixtime def from_unixtime(self, column): - if self.db_dialect == 'sqlite': - return func.datetime(column, 'unixepoch', 'localtime') - elif self.db_dialect == 'postgresql': - return func.to_char(func.to_timestamp(column), 'YYYY-MM-DD HH24:MI:SS') - else: # mysql/mariadb + if self.db_dialect == "sqlite": + return func.datetime(column, "unixepoch", "localtime") + elif self.db_dialect == "postgresql": + return func.to_char(func.to_timestamp(column), "YYYY-MM-DD HH24:MI:SS") + else: # mysql/mariadb return func.from_unixtime(column) # Custom function to handle UNIX_TIMESTAMP def unix_timestamp(self, date_str): - if self.db_dialect == 'sqlite': - return func.strftime('%s', date_str, 'utc') - elif self.db_dialect == 'postgresql': - return func.extract('epoch', func.timezone(self.TARGET_TIMEZONE, func.cast(date_str, TIMESTAMP))) - else: # mysql/mariadb + if self.db_dialect == "sqlite": + return func.strftime("%s", date_str, "utc") + elif self.db_dialect == "postgresql": + return func.extract( + "epoch", + func.timezone(self.TARGET_TIMEZONE, func.cast(date_str, TIMESTAMP)), + ) + else: # mysql/mariadb return func.unix_timestamp(date_str) def month(self, column) -> func: - if self.db_dialect == 'sqlite': - return func.strftime("%Y-%m", func.datetime(column, 'unixepoch', 'localtime')) - elif self.db_dialect == 'postgresql': - return func.to_char(func.to_timestamp(column), 'YYYY-MM') - else: # mysql/mariadb - return func.concat(func.year(func.from_unixtime(column)), '-', func.lpad(func.month(func.from_unixtime(column)), 2, '0')) + if self.db_dialect == "sqlite": + return func.strftime( + "%Y-%m", func.datetime(column, "unixepoch", "localtime") + ) + elif self.db_dialect == "postgresql": + return func.to_char(func.to_timestamp(column), "YYYY-MM") + else: # mysql/mariadb + return func.concat( + func.year(func.from_unixtime(column)), + "-", + func.lpad(func.month(func.from_unixtime(column)), 2, "0"), + ) def day(self, column) -> func: - if self.db_dialect == 'sqlite': - return func.strftime("%Y-%m-%d", func.datetime(column, 'unixepoch', 'localtime')) - elif self.db_dialect == 'postgresql': - return func.to_char(func.to_timestamp(column), 'YYYY-MM-DD') - else: # mysql/mariadb + if self.db_dialect == "sqlite": + return func.strftime( + "%Y-%m-%d", func.datetime(column, "unixepoch", "localtime") + ) + elif self.db_dialect == "postgresql": + return func.to_char(func.to_timestamp(column), "YYYY-MM-DD") + else: # mysql/mariadb return func.date(func.from_unixtime(column)) def hour(self, column) -> func: - if self.db_dialect == 'sqlite': - return func.strftime("%H:%M", func.datetime(column, 'unixepoch', 'localtime')) - elif self.db_dialect == 'postgresql': - return func.to_char(func.to_timestamp(column), 'HH24:MI') - else: # mysql/mariadb + if self.db_dialect == "sqlite": + return func.strftime( + "%H:%M", func.datetime(column, "unixepoch", "localtime") + ) + elif self.db_dialect == "postgresql": + return func.to_char(func.to_timestamp(column), "HH24:MI") + else: # mysql/mariadb return func.time_format(func.time(func.from_unixtime(column)), "%H:%i") def savedata(self, df: pd.DataFrame, tablename: str = "values"): @@ -149,7 +205,7 
@@ def savedata(self, df: pd.DataFrame, tablename: str = "values"): if id exist then update else insert Args: df: Dataframe that we wish to save in table tablename - columns + columns code string calculated datetime, 0 if realised time timestamp in sec @@ -165,21 +221,30 @@ def savedata(self, df: pd.DataFrame, tablename: str = "values"): self.log_pool_status() # Reflect existing tables from the database values_table = Table(tablename, self.metadata, autoload_with=self.engine) - variabel_table = Table('variabel', self.metadata, autoload_with=self.engine) + variabel_table = Table("variabel", self.metadata, autoload_with=self.engine) df = df.reset_index() # make sure indexes pair with number of rows - df["tijd"] = df['time'].apply(lambda x: datetime.datetime.fromtimestamp(int(float(x))).strftime("%Y-%m-%d %H:%M")) + df["tijd"] = df["time"].apply( + lambda x: datetime.datetime.fromtimestamp(int(float(x))).strftime( + "%Y-%m-%d %H:%M" + ) + ) for index, dfrow in df.iterrows(): - logging.debug(f"Save record: {dfrow['tijd']} {dfrow['code']} {dfrow['time']} {dfrow['value']}") - code = dfrow['code'] - time = dfrow['time'] - value = dfrow['value'] + logging.debug( + f"Save record: {dfrow['tijd']} {dfrow['code']} " + f"{dfrow['time']} {dfrow['value']}" + ) + code = dfrow["code"] + time = dfrow["time"] + value = dfrow["value"] if not isinstance(value, (int, float)): continue - if value == float('inf'): + if value == float("inf"): continue # Get the variabel_id - select_variabel = select(variabel_table.c.id).where(variabel_table.c.code == code) + select_variabel = select(variabel_table.c.id).where( + variabel_table.c.code == code + ) variabel_result = connection.execute(select_variabel).first() if variabel_result: variabel_id = variabel_result[0] @@ -189,18 +254,24 @@ def savedata(self, df: pd.DataFrame, tablename: str = "values"): # Query to check if the record exists select_value = select(values_table.c.id).where( - (values_table.c.variabel == variabel_id) & - (values_table.c.time == time) + (values_table.c.variabel == variabel_id) + & (values_table.c.time == time) ) value_result = connection.execute(select_value).first() if value_result: # Update existing record value_id = value_result[0] - update_value = update(values_table).values(value=value).where(values_table.c.id == value_id) + update_value = ( + update(values_table) + .values(value=value) + .where(values_table.c.id == value_id) + ) connection.execute(update_value) else: # Record does not exist, perform insert - insert_value = insert(values_table).values(variabel=variabel_id, time=time, value=value) + insert_value = insert(values_table).values( + variabel=variabel_id, time=time, value=value + ) connection.execute(insert_value) connection.commit() finally: @@ -208,25 +279,28 @@ def savedata(self, df: pd.DataFrame, tablename: str = "values"): connection.close() self.log_pool_status() - def get_prognose_field(self, field:str, start, end=None, interval="hour"): - values_table = Table('values', self.metadata, autoload_with=self.engine) - t1 = values_table.alias('t1') - variabel_table = Table('variabel', self.metadata, autoload_with=self.engine) - v1 = variabel_table.alias('v1') + def get_prognose_field(self, field: str, start, end=None, interval="hour"): + values_table = Table("values", self.metadata, autoload_with=self.engine) + t1 = values_table.alias("t1") + variabel_table = Table("variabel", self.metadata, autoload_with=self.engine) + v1 = variabel_table.alias("v1") # Build the SQLAlchemy query query = select( - t1.c.time.label('time'), - 
self.from_unixtime(t1.c.time).label('tijd'), + t1.c.time.label("time"), + self.from_unixtime(t1.c.time).label("tijd"), t1.c.value.label(field), ).where( and_( t1.c.variabel == v1.c.id, v1.c.code == field, - t1.c.time >= start # self.unix_timestamp(start.strftime('%Y-%m-%d %H:%M:%S')) + t1.c.time + >= start, # self.unix_timestamp(start.strftime('%Y-%m-%d %H:%M:%S')) ) ) if end is not None: - query = query.where(t1.c.time < self.unix_timestamp(end.strftime('%Y-%m-%d %H:%M:%S'))) + query = query.where( + t1.c.time < self.unix_timestamp(end.strftime("%Y-%m-%d %H:%M:%S")) + ) else: start_dt = datetime.datetime.fromtimestamp(start) if start_dt.hour < 13: @@ -236,7 +310,9 @@ def get_prognose_field(self, field:str, start, end=None, interval="hour"): end_dt = start_dt + datetime.timedelta(days=num_days) end_dt = datetime.datetime(end_dt.year, end_dt.month, end_dt.day) end_ts = end_dt.timestamp() - query = query.where(t1.c.time < self.unix_timestamp(end_dt.strftime('%Y-%m-%d %H:%M:%S'))) + query = query.where( + t1.c.time < self.unix_timestamp(end_dt.strftime("%Y-%m-%d %H:%M:%S")) + ) query = query.order_by(t1.c.time) @@ -248,68 +324,80 @@ def get_prognose_field(self, field:str, start, end=None, interval="hour"): return df def get_prognose_data(self, start, end=None, interval="hour"): - values_table = Table('values', self.metadata, autoload_with=self.engine) - # Aliases for the values table - t1 = values_table.alias('t1') - t2 = values_table.alias('t2') - t3 = values_table.alias('t3') - t0 = values_table.alias('t0') + values_table = Table("values", self.metadata, autoload_with=self.engine) + variabel_table = Table("variabel", self.metadata, autoload_with=self.engine) + if interval == "hour": + # Aliases for the values table + t1 = values_table.alias("t1") + t2 = values_table.alias("t2") + t3 = values_table.alias("t3") + t0 = values_table.alias("t0") - variabel_table = Table('variabel', self.metadata, autoload_with=self.engine) - # Aliases for the variabel table - v1 = variabel_table.alias('v1') - v2 = variabel_table.alias('v2') - v3 = variabel_table.alias('v3') - v0 = variabel_table.alias('v0') + # Aliases for the variabel table + v1 = variabel_table.alias("v1") + v2 = variabel_table.alias("v2") + v3 = variabel_table.alias("v3") + v0 = variabel_table.alias("v0") - # Build the SQLAlchemy query - query = select( - t1.c.time.label('time'), - self.from_unixtime(t1.c.time).label('tijd'), - t0.c.value.label('temp'), - t1.c.value.label('glob_rad'), - t2.c.value.label('pv_rad'), - t3.c.value.label('da_price') - ).where( - and_( - t1.c.time == t2.c.time, - t1.c.time == t3.c.time, - t1.c.time == t0.c.time, - t1.c.variabel == v1.c.id, - v1.c.code == 'gr', - t2.c.variabel == v2.c.id, - v2.c.code == 'solar_rad', - t3.c.variabel == v3.c.id, - v3.c.code == 'da', - t0.c.variabel == v0.c.id, - v0.c.code == 'temp', - t1.c.time >= start # self.unix_timestamp(start.strftime('%Y-%m-%d %H:%M:%S')) + # Build the SQLAlchemy query + query = select( + t1.c.time.label("time"), + self.from_unixtime(t1.c.time).label("tijd"), + t0.c.value.label("temp"), + t1.c.value.label("glob_rad"), + t2.c.value.label("pv_rad"), + t3.c.value.label("da_price"), + ).where( + and_( + t1.c.time == t2.c.time, + t1.c.time == t3.c.time, + t1.c.time == t0.c.time, + t1.c.variabel == v1.c.id, + v1.c.code == "gr", + t2.c.variabel == v2.c.id, + v2.c.code == "solar_rad", + t3.c.variabel == v3.c.id, + v3.c.code == "da", + t0.c.variabel == v0.c.id, + v0.c.code == "temp", + t1.c.time + >= start, # self.unix_timestamp(start.strftime('%Y-%m-%d %H:%M:%S')) + ) ) 
- ) - if end is not None: - query = query.where(t1.c.time < self.unix_timestamp(end.strftime('%Y-%m-%d %H:%M:%S'))) - else: - start_dt = datetime.datetime.fromtimestamp(start) - if start_dt.hour < 13: - num_days = 1 + if end is not None: + query = query.where( + t1.c.time < end # self.unix_timestamp(end.strftime("%Y-%m-%d %H:%M:%S")) + ) else: - num_days = 2 - end_dt = start_dt + datetime.timedelta(days=num_days) - end_dt = datetime.datetime(end_dt.year, end_dt.month, end_dt.day) - end_ts = end_dt.timestamp() - query = query.where(t1.c.time < self.unix_timestamp(end_dt.strftime('%Y-%m-%d %H:%M:%S'))) + start_dt = datetime.datetime.fromtimestamp(start) + if start_dt.hour < 13: + num_days = 1 + else: + num_days = 2 + end_dt = start_dt + datetime.timedelta(days=num_days) + end_dt = datetime.datetime(end_dt.year, end_dt.month, end_dt.day) + end_ts = end_dt.timestamp() + query = query.where( + t1.c.time < self.unix_timestamp(end_dt.strftime("%Y-%m-%d %H:%M:%S")) + ) - query = query.order_by(t1.c.time) + query = query.order_by(t1.c.time) - # Execute the query and fetch the result into a pandas DataFrame - with self.engine.connect() as connection: - result = connection.execute(query) - df = pd.DataFrame(result.fetchall(), columns=result.keys()) - df["tijd"] = pd.to_datetime(df["tijd"]) - return df + # Execute the query and fetch the result into a pandas DataFrame + with self.engine.connect() as connection: + result = connection.execute(query) + df = pd.DataFrame(result.fetchall(), columns=result.keys()) + df["tijd"] = pd.to_datetime(df["tijd"]) + return df + # else: # interval == "kwartier" - def get_column_data(self, tablename: str, column_name: str, - start: datetime.datetime = None, end: datetime.datetime = None): + def get_column_data( + self, + tablename: str, + column_name: str, + start: datetime.datetime = None, + end: datetime.datetime = None, + ): """ Retourneert een dataframe :param tablename: de naam van de tabel "prognoses" of "values" @@ -323,7 +411,7 @@ def get_column_data(self, tablename: str, column_name: str, start = start.strftime("%Y-%m-%d %H:%M") if end is not None: end = end.strftime("%Y-%m-%d %H:%M") - ''' + """ # old style sql query sqlQuery = ( "SELECT `time`, `value` " \ @@ -336,17 +424,14 @@ def get_column_data(self, tablename: str, column_name: str, sqlQuery += "AND `time` < UNIX_TIMESTAMP('" + end + "') " sqlQuery += "ORDER BY `time`;" # print (sqlQuery) - ''' - variabel_table = Table('variabel', self.metadata, autoload_with=self.engine) + """ + variabel_table = Table("variabel", self.metadata, autoload_with=self.engine) values_table = Table(tablename, self.metadata, autoload_with=self.engine) - query = select( - values_table.c.time, - values_table.c.value - ).where( + query = select(values_table.c.time, values_table.c.value).where( and_( variabel_table.c.code == column_name, values_table.c.variabel == variabel_table.c.id, - values_table.c.time >= self.unix_timestamp(start) + values_table.c.time >= self.unix_timestamp(start), ) ) if end is not None: @@ -357,8 +442,10 @@ def get_column_data(self, tablename: str, column_name: str, result = connection.execute(query) df = pd.DataFrame(result.fetchall(), columns=result.keys()) now_ts = datetime.datetime.now().timestamp() - df["datasoort"] = np.where(df['time'] <= now_ts, "recorded", "expected") - df['time'] = df['time'].apply(lambda x: datetime.datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M")) + df["datasoort"] = np.where(df["time"] <= now_ts, "recorded", "expected") + df["time"] = df["time"].apply( + lambda x: 
datetime.datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M") + ) return df def get_consumption(self, start: datetime.datetime, end=datetime.datetime.now()): @@ -368,29 +455,29 @@ def get_consumption(self, start: datetime.datetime, end=datetime.datetime.now()) :param end: eindmoment , default nu :return: dataframe """ - values_table = Table('values', self.metadata, autoload_with=self.engine) + values_table = Table("values", self.metadata, autoload_with=self.engine) # Aliases for the values table - t1 = values_table.alias('t1') - t2 = values_table.alias('t2') + t1 = values_table.alias("t1") + t2 = values_table.alias("t2") - variabel_table = Table('variabel', self.metadata, autoload_with=self.engine) + variabel_table = Table("variabel", self.metadata, autoload_with=self.engine) # Aliases for the variabel table - v1 = variabel_table.alias('v1') - v2 = variabel_table.alias('v2') + v1 = variabel_table.alias("v1") + v2 = variabel_table.alias("v2") # Build the SQLAlchemy query query = select( - func.sum(t1.c.value).label('consumed'), - func.sum(t2.c.value).label('produced') + func.sum(t1.c.value).label("consumed"), + func.sum(t2.c.value).label("produced"), ).where( and_( t1.c.time == t2.c.time, t1.c.variabel == v1.c.id, - v1.c.code == 'cons', + v1.c.code == "cons", t2.c.variabel == v2.c.id, - v2.c.code == 'prod', - t1.c.time >= self.unix_timestamp(start.strftime('%Y-%m-%d %H:%M:%S')), - t1.c.time < self.unix_timestamp(end.strftime('%Y-%m-%d %H:%M:%S')) + v2.c.code == "prod", + t1.c.time >= self.unix_timestamp(start.strftime("%Y-%m-%d %H:%M:%S")), + t1.c.time < self.unix_timestamp(end.strftime("%Y-%m-%d %H:%M:%S")), ) ) @@ -399,8 +486,8 @@ def get_consumption(self, start: datetime.datetime, end=datetime.datetime.now()) data = pd.DataFrame(result.fetchall(), columns=result.keys()) if len(data.index) == 1: - consumption = data['consumed'][0] - production = data['produced'][0] + consumption = data["consumed"][0] + production = data["produced"][0] else: consumption = 0 production = 0 diff --git a/dao/prog/graphs.py b/dao/prog/graphs.py index 2b0a73c..4dd1028 100644 --- a/dao/prog/graphs.py +++ b/dao/prog/graphs.py @@ -1,4 +1,3 @@ -import pandas as pd import matplotlib.pyplot as plt import numpy as np @@ -6,27 +5,27 @@ def make_graph_meteo(df, file=None, show=False): - fig = plt.figure(figsize=(15, 10)) - df['gr'] = df['gr'].astype(float) - x_axis = np.arange(len(df['tijd_nl'].values)) - plt.bar(x_axis-0.1, df['gr'].values, width=0.2, label="global rad") - plt.bar(x_axis+0.1, df['solar_rad'].values, width=0.2, label="netto rad") - plt.xticks(x_axis+0.1, df['tijd_nl'].values, rotation=45) + plt.figure(figsize=(15, 10)) + df["gr"] = df["gr"].astype(float) + x_axis = np.arange(len(df["tijd_nl"].values)) + plt.bar(x_axis - 0.1, df["gr"].values, width=0.2, label="global rad") + plt.bar(x_axis + 0.1, df["solar_rad"].values, width=0.2, label="netto rad") + plt.xticks(x_axis + 0.1, df["tijd_nl"].values, rotation=45) if file is not None: plt.savefig(file) if show: plt.show() - plt.close('all') + plt.close("all") return def make_graph_entsoe(df): - fig = plt.figure(figsize=(15, 10)) - df['gr'] = df['gr'].astype(float) - x_axis = np.arange(len(df['tijd_nl'].values)) - plt.bar(x_axis-0.1, df['gr'].values, width=0.2, label="global rad") - plt.bar(x_axis+0.1, df['solar_rad'].values, width=0.2, label="netto rad") - plt.xticks(x_axis, df['tijd_nl'].values, rotation=45) + plt.figure(figsize=(15, 10)) + df["gr"] = df["gr"].astype(float) + x_axis = np.arange(len(df["tijd_nl"].values)) + plt.bar(x_axis - 0.1, 
df["gr"].values, width=0.2, label="global rad") + plt.bar(x_axis + 0.1, df["solar_rad"].values, width=0.2, label="netto rad") + plt.xticks(x_axis, df["tijd_nl"].values, rotation=45) plt.show() - plt.close('all') + plt.close("all") return diff --git a/dao/prog/utils.py b/dao/prog/utils.py index f9423d5..04d7b23 100644 --- a/dao/prog/utils.py +++ b/dao/prog/utils.py @@ -1,3 +1,5 @@ +from calendar import month + from dateutil import easter import datetime import bisect @@ -10,6 +12,7 @@ import logging import traceback from sqlalchemy import Table, select, and_ +from dao.prog.version import __version__ def make_data_path(): @@ -26,8 +29,12 @@ def is_laagtarief(dtime, switch_hour): return True if (dtime.hour < 7) or (dtime.hour >= switch_hour): # door de week van 7 tot 21/23 return True - feestdagen = [datetime.datetime(jaar, 1, 1), datetime.datetime(jaar, 4, 27), datetime.datetime(jaar, 12, 25), - datetime.datetime(jaar, 12, 26)] + feestdagen = [ + datetime.datetime(jaar, 1, 1), + datetime.datetime(jaar, 4, 27), + datetime.datetime(jaar, 12, 25), + datetime.datetime(jaar, 12, 26), + ] pasen = easter.easter(jaar) feestdagen.append(pasen + datetime.timedelta(days=1)) # 2e paasdag feestdagen.append(pasen + datetime.timedelta(days=39)) # hemelvaart @@ -39,7 +46,9 @@ def is_laagtarief(dtime, switch_hour): return False -def calc_adjustment_heatcurve(price_act: float, price_avg: float, adjustment_factor, old_adjustment: float) -> float: +def calc_adjustment_heatcurve( + price_act: float, price_avg: float, adjustment_factor, old_adjustment: float +) -> float: """ Calculate the adjustment of the heatcurve formule: -0,5*(price-price_avg)*10/price_avg @@ -52,7 +61,9 @@ def calc_adjustment_heatcurve(price_act: float, price_avg: float, adjustment_fac if price_avg == 0: adjustment = 0 else: - adjustment = round(- adjustment_factor * (price_act - price_avg) * 100 / price_avg, 1) + adjustment = round( + -adjustment_factor * (price_act - price_avg) * 100 / price_avg, 1 + ) # toename en afname maximeren op 10 x adjustment factor if adjustment >= old_adjustment: adjustment = min(adjustment, old_adjustment + adjustment_factor * 10) @@ -69,14 +80,15 @@ def get_value_from_dict(dag: str, options: dict) -> float: :return: de correcte value """ o_list = list(options.keys()) - result = options.get( - dag, options[o_list[bisect.bisect_left(o_list, dag) - 1]]) + result = options.get(dag, options[o_list[bisect.bisect_left(o_list, dag) - 1]]) return result def convert_timestr(time_str: str, now_dt: datetime.datetime) -> datetime.datetime: - result_hm = datetime.datetime.strptime(time_str, '%H:%M:%S') - result = datetime.datetime(now_dt.year, now_dt.month, now_dt.day, result_hm.hour, result_hm.minute) + result_hm = datetime.datetime.strptime(time_str, "%H:%M:%S") + result = datetime.datetime( + now_dt.year, now_dt.month, now_dt.day, result_hm.hour, result_hm.minute + ) return result @@ -100,20 +112,7 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: config = Config("../data/options.json") tibber_options = config.get(["tibber"]) url = config.get(["api url"], tibber_options, "https://api.tibber.com/v1-beta/gql") - db_da_engine = config.get(['database da', "engine"], None, "mysql") - db_da_server = config.get(['database da', "server"], None, "core-mariadb") - db_da_port = int(config.get(['database da', "port"], None, 0)) - if db_da_engine == "sqlite": - db_da_name = config.get(['database da', "database"], None, "day_ahead.db") - else: - db_da_name = config.get(['database da', "database"], None, 
"day_ahead") - db_da_user = config.get(['database da', "username"], None, "day_ahead") - db_da_password = config.get(['database da', "password"]) - db_da_path = config.get(['database da', "db_path"], None, "../data") - db_time_zone = config.get(["time_zone"]) - db_da = DBmanagerObj(db_dialect=db_da_engine, db_name=db_da_name, db_server=db_da_server, - db_port=db_da_port, db_user=db_da_user, db_password=db_da_password, - db_path=db_da_path, db_time_zone=db_time_zone) + db_da = config.get_db_da() prices_options = config.get(["prices"]) headers = { "Authorization": "Bearer " + tibber_options["api_token"], @@ -135,11 +134,13 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: # no starttime if (len(sys.argv) <= 2) or (start_ts is None): # search first missing - start_ts = datetime.datetime.strptime(prices_options["last invoice"], "%Y-%m-%d").timestamp() + start_ts = datetime.datetime.strptime( + prices_options["last invoice"], "%Y-%m-%d" + ).timestamp() timestamps = generate_hourly_timestamps(start_ts, now_ts) - values_table = Table('values', db_da.metadata, autoload_with=db_da.engine) - variabel_table = Table('variabel', db_da.metadata, autoload_with=db_da.engine) - for code in ['cons', 'prod']: + values_table = Table("values", db_da.metadata, autoload_with=db_da.engine) + variabel_table = Table("variabel", db_da.metadata, autoload_with=db_da.engine) + for code in ["cons", "prod"]: # Query the existing timestamps from the values table query = select(values_table.c.time).where( and_( @@ -152,51 +153,64 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: existing_timestamps = {row[0] for row in connection.execute(query)} # Find missing timestamps by comparing the generated list with the existing timestamps - missing_timestamps = [ts for ts in timestamps if ts not in existing_timestamps] + missing_timestamps = [ + ts for ts in timestamps if ts not in existing_timestamps + ] if len(missing_timestamps) == 0: latest = start_ts else: latest = missing_timestamps[0] latest_ts = min(latest_ts, latest) - count = math.ceil((now_ts - latest_ts)/3600) - logging.info(f"Tibber data present tot en met: {str(datetime.datetime.fromtimestamp(latest_ts - 3600))}") + count = math.ceil((now_ts - latest_ts) / 3600) + logging.info( + f"Tibber data present tot en met: " + f"{str(datetime.datetime.fromtimestamp(latest_ts - 3600))}" + ) if count < 24: logging.info("Er worden geen data opgehaald.") return - query = '{ ' \ - '"query": ' \ - ' "{ ' \ - ' viewer { ' \ - ' homes { ' \ - ' production(resolution: HOURLY, last: '+str(count)+') { ' \ - ' nodes { ' \ - ' from ' \ - ' profit ' \ - ' production ' \ - ' } ' \ - ' } ' \ - ' consumption(resolution: HOURLY, last: '+str(count)+') { ' \ - ' nodes { ' \ - ' from ' \ - ' cost ' \ - ' consumption ' \ - ' } ' \ - ' } ' \ - ' } ' \ - ' } ' \ - '}" ' \ - '}' + + query = ( + "{ " + '"query": ' + ' "{ ' + " viewer { " + " homes { " + " production(resolution: HOURLY, last: " + str(count) + ") { " + " nodes { " + " from " + " profit " + " production " + " } " + " } " + " consumption(resolution: HOURLY, last: " + str(count) + ") { " + " nodes { " + " from " + " cost " + " consumption " + " } " + " } " + " } " + " } " + '}" ' + "}" + ) now = datetime.datetime.now() - today_ts = datetime.datetime(year=now.year,month=now.month, day=now.day).timestamp() + today_ts = datetime.datetime( + year=now.year, month=now.month, day=now.day + ).timestamp() logging.debug(query) resp = post(url, headers=headers, data=query) tibber_dict = 
json.loads(resp.text) - production_nodes = tibber_dict['data']['viewer']['homes'][0]['production']['nodes'] - consumption_nodes = tibber_dict['data']['viewer']['homes'][0]['consumption']['nodes'] - tibber_df = pd.DataFrame(columns=['time', 'code', 'value']) + production_nodes = tibber_dict["data"]["viewer"]["homes"][0]["production"]["nodes"] + consumption_nodes = tibber_dict["data"]["viewer"]["homes"][0]["consumption"][ + "nodes" + ] + tibber_df = pd.DataFrame(columns=["time", "code", "value"]) for node in production_nodes: + timestamp = int(get_datetime_from_str(node["from"]).timestamp()) timestamp = int(get_datetime_from_str(node['from']).timestamp()) if timestamp < today_ts: time_stamp = str(timestamp) @@ -206,12 +220,12 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: logging.info(f"{node} {time_stamp} {value}") tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] if not (node["profit"] is None): - code = 'profit' + code = "profit" value = float(node["profit"]) logging.info(f"{node} {time_stamp} {value}") tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] for node in consumption_nodes: - timestamp = int(get_datetime_from_str(node['from']).timestamp()) + timestamp = int(get_datetime_from_str(node["from"]).timestamp()) if timestamp < today_ts: time_stamp = str(timestamp) if not (node["consumption"] is None): @@ -224,8 +238,10 @@ def generate_hourly_timestamps(start_gen: float, end_gen: float) -> list: value = float(node["cost"]) logging.info(f"{node} {time_stamp} {value}") tibber_df.loc[tibber_df.shape[0]] = [time_stamp, code, value] - logging.info(f"Opgehaalde data bij Tibber (database records):" - f"\n{tibber_df.to_string(index=False)}") + logging.info( + f"Opgehaalde data bij Tibber (database records):" + f"\n{tibber_df.to_string(index=False)}" + ) db_da.savedata(tibber_df) @@ -246,42 +262,15 @@ def calc_uur_index(dt: datetime, tijd: list) -> int: return result_index -''' -def calc_heatpump_usage - (pl : [], needed : float) ->[]: - """ - berekent inzet van de wp per uur - :param pl: een list van de inkoop prijzen - :param needed: benodige Wh aan energie - :return: een list van Wh in de betreffende uren - """ - U = len(pl) # aantal uur - pl_min = min(pl) - sum_cost = 0 - max_low = U * 250 - usage = [] - if max_low >= needed: - #alleen de goedkopere uren inzetten - else: - #alle uren minimum inzetten plus nog wat extra - for u in range(U): - sum_cost += pl[u]-pl_min - extra_energy = needed - max_low - energy_cost = sum_cost/extra_energy - for u in range(U): - usage.append(250+ (pl[u]-pl_min) * energy_cost) -''' - - def get_version(): return __version__ def version_number(version_str: str) -> int: - lst = [x for x in version_str.split('.')] + lst = [x for x in version_str.split(".")] lst = lst[:3] lst.reverse() - result = sum(int(x) * (100 ** i) for i, x in enumerate(lst)) + result = sum(int(x) * (100**i) for i, x in enumerate(lst)) return result @@ -302,7 +291,10 @@ def log_exc_plus(): stack.reverse() traceback.print_exc() for frame in stack: - logging.error(f"File: {frame.f_code.co_filename}, line {frame.f_lineno}, in {frame.f_code.co_name}") + logging.error( + f"File: {frame.f_code.co_filename}, line {frame.f_lineno}, " + f"in {frame.f_code.co_name}" + ) def error_handling(ex): @@ -311,51 +303,69 @@ def error_handling(ex): else: log_exc_plus() -def interpolate(org_x: list[datetime.datetime], org_y:list[float], - start_x:datetime.datetime, end_x:datetime.datetime, - interval:int) -> tuple: - new_y =[] + +def prnt_xy(x: list, y: list): + 
for i in range(len(x)): + print(f"{i} {x[i]} {y[i]}") + print() + + +def interpolate( + org_x: list[datetime.datetime], + org_y: list[float], + start_x: datetime.datetime, + end_x: datetime.datetime, + interval: int, +) -> tuple: + new_y = [] new_x = [] calc_x = start_x while calc_x <= end_x: new_x.append(calc_x) calc_x += datetime.timedelta(minutes=interval) + # print(f"new x:\n {'\n'.join(new_x)}") for i in range(len(new_x)): x = new_x[i] - for j in range(len(org_x)-1): - if (j==0 and x < org_x[j]) or org_x[j] <= x < org_x[j+1]: + j = 0 + for j in range(len(org_x) - 1): + if (j == 0 and x < org_x[j]) or org_x[j] <= x < org_x[j + 1]: break - delta_x = (org_x[j+1] - org_x[j]).seconds/60 # in minuten + delta_x = (org_x[j + 1] - org_x[j]).seconds / 60 # in minuten delta_y = org_y[j + 1] - org_y[j] - offset_y = -delta_y/3 - slope = delta_y / delta_x + # b = org_y[j] - a * 90 + a = delta_y / delta_x # a = value/minuut + b = -a * 90 / 4 if x >= org_x[j]: - y = org_y[j] + (x - org_x[j]).seconds * slope / 60 + offset_y + y = org_y[j] + (x - org_x[j]).seconds * a / 60 + b else: - y = org_y[j] - (org_x[j] - x).seconds * slope /60 + offset_y + y = org_y[j] - (org_x[j] - x).seconds * a / 60 + b new_y.append(y) + print(x, y, a, b) return new_x, new_y -def prnt_xy(x:list, y:list): - for i in range(len(x)): - print (f"{i} {x[i]} {y[i]}") - print() - def tst_interpolate(): x = [datetime.datetime(year=2024, month=10, day=19, hour=hour) for hour in range(4)] - y = [1 + 1*i*i for i in range(4)] - prnt_xy(x,y) + y = [1 + 1 * i * i for i in range(4)] + prnt_xy(x, y) start_x = datetime.datetime(year=2024, month=10, day=19, hour=0) end_x = datetime.datetime(year=2024, month=10, day=19, hour=4) interval = 15 new_x, new_y = interpolate(x, y, start_x, end_x, interval) prnt_xy(new_x, new_y) - df_tst = pd.DataFrame(new_y,new_x, columns=["x", "y"]) - print(df_tst.to_string()) - +def interpolate_prognose_data(): + from da_config import Config + from db_manager import DBmanagerObj + config = Config("../data/options.json") + db_da = config.get_db_da() + start_ts = datetime.datetime(year=2024, month=11, day=12).timestamp() + end_ts = datetime.datetime(year=2024, month=11, day=14).timestamp() + prognose_data = db_da.get_prognose_data(start=start_ts, end=end_ts) + print(prognose_data.to_string()) +# tst_interpolate() +# interpolate_prognose_data() diff --git a/dao/prog/version.py b/dao/prog/version.py index 7c1cbbd..6b97c22 100644 --- a/dao/prog/version.py +++ b/dao/prog/version.py @@ -1 +1 @@ -__version__ = '2024.10.1' +__version__ = "2024.10.1" diff --git a/dao/requirements.txt b/dao/requirements.txt index f8bb4fb..d750f33 100644 --- a/dao/requirements.txt +++ b/dao/requirements.txt @@ -8,10 +8,10 @@ mysql-connector-python~=9.0.0 hassapi~=0.2.1 mip~=1.16rc0 python-dateutil~=2.9.0.post0 -nordpool~=0.4.1 -entsoe-py~=0.6.11 +nordpool~=0.4.3 +entsoe-py~=0.6.16 beautifulsoup4 -sqlalchemy +sqlalchemy~=2.0.35 SQLAlchemy-Utils~=0.41.2 psycopg2-binary pymysql~=1.1.1 diff --git a/dao/run/run.sh b/dao/run/run.sh index cdb5bb4..ac276d8 100644 --- a/dao/run/run.sh +++ b/dao/run/run.sh @@ -40,6 +40,7 @@ if [ -d "$dir" ]; then export PMIP_CBC_LIBRARY="/root/dao/prog/miplib/lib/libCbc.so" fi +export PYTHONPATH="/root:/root/dao:/root/dao/prog" cd /root/dao/prog python3 check_db.py diff --git a/dao/tests/data/options_sqlite.json b/dao/tests/data/options_sqlite.json index 66f775a..5cb514d 100644 --- a/dao/tests/data/options_sqlite.json +++ b/dao/tests/data/options_sqlite.json @@ -10,7 +10,7 @@ }, "database ha": { "engine": "sqlite", - 
"database": "homea-ssistant_v2.db", + "database": "home-assistant_v2.db", "db_path": "../data" }, diff --git a/dao/tests/prog/test_dao.py b/dao/tests/prog/test_dao.py index 4d07696..b54c5b0 100644 --- a/dao/tests/prog/test_dao.py +++ b/dao/tests/prog/test_dao.py @@ -7,26 +7,32 @@ import dao.prog.da_report import dao.prog.day_ahead + def test_get_grid_data_sqlite(): report = dao.prog.da_report.Report(file_name="../data/options_sqlite.json") for day in [datetime.datetime(2024, 7, 9), datetime.datetime(2024, 7, 10)]: vanaf = day # datetime.datetime(2024, 7, 9) tot = day + datetime.timedelta(days=1) # datetime.datetime(2024, 7, 10) - df_ha = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval="uur", _source="ha") + df_ha = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval="uur", + _source="ha") df_ha = report.calc_grid_columns(df_ha, "uur", "tabel") print(f"Eigen meterstanden op {day.strftime('%Y-%m-%d')}:\n{df_ha.to_string(index=False)}") - df_da = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval="uur", _source="da") + df_da = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval="uur", + _source="da") df_da = report.calc_grid_columns(df_da, "uur", "tabel") - print(f"Verbruiken gecorrigeerd door Tibber op {day.strftime('%Y-%m-%d')}:\n{df_da.to_string(index=False)}") + print(f"Verbruiken gecorrigeerd door Tibber op {day.strftime('%Y-%m-%d')}:\n" + f"{df_da.to_string(index=False)}") # print(df_ha.equals(df_da)) + def start_logging(): logging.basicConfig(level=logging.INFO, - format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%Y-%m-%d %H:%M:%S') + format='%(asctime)s %(levelname)s: %(message)s', + datefmt='%Y-%m-%d %H:%M:%S') logging.info(f"Testen Day Ahead Optimalisatie gestart: " - f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')}") + f"{datetime.datetime.now().strftime('%d-%m-%Y %H:%M:%S')}") + def test_da_calc(): start_logging() @@ -39,18 +45,20 @@ def test_da_calc(): def get_grid_data(engine: str, source: str, vanaf: datetime.datetime, tot: datetime.datetime = None, - interval: str = "uur") -> tuple: + interval: str = "uur") -> tuple: file_name = "../data/options_" + engine + ".json" report = dao.prog.da_report.Report(file_name) if tot is None: tot = vanaf + datetime.timedelta(days=1) - df = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval=interval, _source=source) + df = report.get_grid_data(periode='', _vanaf=vanaf, _tot=tot, _interval=interval, + _source=source) df = report.calc_grid_columns(df, interval, "tabel") row = df.iloc[-1] netto_consumption = row.Verbruik[0] - row.Productie[0] netto_kosten = row.Kosten[0] - row.Opbrengst[0] return df, netto_consumption, netto_kosten + def test_grid_reporting(): engines = ["mysql", "sqlite", "postgresql"] sources = ["da", "ha"] @@ -65,8 +73,10 @@ def test_grid_reporting(): print(f"Result from DA:\n{result[0].to_string(index=False)}") print(f"Result from HA:\n{result[1].to_string(index=False)}") + def test_main(): test_get_grid_data_sqlite() + if __name__ == '__main__': test_main() diff --git a/dao/webserver/app/__init__.py b/dao/webserver/app/__init__.py index 14209b9..f692f80 100644 --- a/dao/webserver/app/__init__.py +++ b/dao/webserver/app/__init__.py @@ -1,14 +1,15 @@ from flask import Flask import sys -sys.path.append("../") + +# sys.path.append("../") app = Flask(__name__) from dao.webserver.app.routes import * -#if __name__ == '__main__': -# app.run() -#app.run(port=5000, host='0.0.0.0') -#if __name__ == '__main__': -# app.run(port=5000, 
host='0.0.0.0') +# if __name__ == '__main__': +# app.run() +# app.run(port=5000, host='0.0.0.0') +# if __name__ == '__main__': +# app.run(port=5000, host='0.0.0.0') diff --git a/dao/webserver/app/routes.py b/dao/webserver/app/routes.py index 2dd6e00..5852a80 100644 --- a/dao/webserver/app/routes.py +++ b/dao/webserver/app/routes.py @@ -1,4 +1,7 @@ import datetime + +from sqlalchemy.sql.coercions import expect_col_expression_collection + from dao.webserver.app import app from flask import render_template, request import fnmatch @@ -16,11 +19,14 @@ try: config = Config(app_datapath + "options.json") except ValueError as ex: + logging.error(app_datapath) + logging.error(ex) config = None logname = "dashboard.log" handler = TimedRotatingFileHandler("../data/log/" + logname, when="midnight", - backupCount=1 if config is None else config.get(["history", "save days"])) + backupCount=1 if config is None else + config.get(["history", "save days"])) handler.suffix = "%Y%m%d" handler.setLevel(logging.INFO) logging.basicConfig(level=logging.DEBUG, handlers=[handler], @@ -195,8 +201,11 @@ def home(): return render_template('home.html', title='Optimization', active_menu="home", subjects=subjects, views=views, - active_subject=active_subject, active_view=active_view, image=image, tabel=tabel, - active_time=active_time, version=__version__) + active_subject=active_subject, + active_view=active_view, + image=image, tabel=tabel, + active_time=active_time, + version=__version__) @app.route('/run', methods=['POST', 'GET']) @@ -242,14 +251,16 @@ def run_process(): break return render_template('run.html', title='Run', active_menu="run", - bewerkingen=bewerkingen, bewerking=bewerking, current_bewerking=current_bewerking, + bewerkingen=bewerkingen, bewerking=bewerking, + current_bewerking=current_bewerking, parameters=parameters, - log_content=log_content, version=__version__) + log_content=log_content, + version=__version__) @app.route('/reports', methods=['POST', 'GET']) def reports(): - report = dao.prog.da_report.Report() + report = dao.prog.da_report.Report(app_datapath+"/options.json") subjects = ["grid", "balans"] active_subject = "grid" views = ["grafiek", "tabel"] @@ -275,7 +286,9 @@ def reports(): if "met_prognose" in lst: met_prognose = lst["met_prognose"][0] tot = None - if (active_period == "vandaag" or active_period == "deze week" or active_period == "deze maand" or + if (active_period == "vandaag" or + active_period == "deze week" or + active_period == "deze maand" or active_period == "dit contractjaar"): if not met_prognose: now = datetime.datetime.now() @@ -291,18 +304,22 @@ def reports(): filtered_df = report.calc_balance_columns(report_df, active_interval, active_view) filtered_df.round(3) if active_view == "tabel": - report_data = [filtered_df.to_html(index=False, justify="right", decimal=",", classes="data", border=0, + report_data = [filtered_df.to_html(index=False, justify="right", decimal=",", + classes="data", border=0, float_format='{:.3f}'.format)] else: if active_subject == "grid": report_data = report.make_graph(filtered_df, active_period) else: - report_data = report.make_graph(filtered_df, active_period, report.balance_graph_options) + report_data = report.make_graph(filtered_df, active_period, + report.balance_graph_options) return render_template('report.html', title='Rapportage', active_menu="reports", subjects=subjects, views=views, periode_options=periode_options, active_period=active_period, met_prognose=met_prognose, - active_subject=active_subject, active_view=active_view, 
report_data=report_data, + active_subject=active_subject, + active_view=active_view, + report_data=report_data, version=__version__) @@ -378,15 +395,28 @@ def api_report(fld: str, periode: str): :return: de gevraagde data in json formaat """ cumulate = request.args.get('cumulate') - report = dao.prog.da_report.Report() + expected = request.args.get('expected') + report = dao.prog.da_report.Report(app_datapath+"/options.json") # start = request.args.get('start') # end = request.args.get('end') - try: - cumulate = int(cumulate) - cumulate = cumulate == 1 - except Exception: + if cumulate is None: cumulate = False - result = report.get_api_data(fld, periode, cumulate=cumulate) + else: + try: + cumulate = int(cumulate) + cumulate = cumulate == 1 + except ValueError: + cumulate = False + + if expected is None: + expected = False + else: + try: + expected = int(expected) + expected = expected == 1 + except ValueError: + expected = False + result = report.get_api_data(fld, periode, cumulate=cumulate, expected=expected) return result diff --git a/dao/webserver/app/templates/home.html b/dao/webserver/app/templates/home.html index c20db67..53a2686 100644 --- a/dao/webserver/app/templates/home.html +++ b/dao/webserver/app/templates/home.html @@ -74,8 +74,8 @@ {% block content %} {% if active_view == "grafiek" %} -
-        [image element: markup not preserved]
+        [image element: markup not preserved]
{% else %} diff --git a/dao/webserver/app/templates/report.html b/dao/webserver/app/templates/report.html index 1b85f05..be39b04 100644 --- a/dao/webserver/app/templates/report.html +++ b/dao/webserver/app/templates/report.html @@ -50,7 +50,8 @@ {% if active_period == "vandaag" or active_period == "deze week" or active_period == "deze maand" or active_period == "dit contractjaar" %} - {% endif %}
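
Note on the database changes above: dao/prog/utils.py now resolves an engine-specific database name and a db_path before constructing DBmanagerObj, and the later hunk replaces that block with a config.get_db_da() helper whose body is not part of this excerpt. The sketch below shows the intended wiring for the default sqlite setup using only the defaults visible in the diff ("sqlite", "day_ahead.db", "../data"); the direct constructor call is illustrative, not the patch's own code path.

    # Sketch only: default sqlite wiring as implied by the diffs above.
    # The defaults ("day_ahead.db", "../data", "Europe/Amsterdam") come from the
    # patch; calling the constructor directly is an illustration of what
    # config.get_db_da() is assumed to set up.
    from dao.prog.db_manager import DBmanagerObj

    db_da = DBmanagerObj(
        db_dialect="sqlite",              # config ["database da"]["engine"]
        db_name="day_ahead.db",           # sqlite default for ["database da"]["database"]
        db_path="../data",                # ["database da"]["db_path"] option
        db_time_zone="Europe/Amsterdam",  # default time zone
    )
    # __init__ then creates the engine from "sqlite:///../data/day_ahead.db" and
    # raises ConnectionAbortedError when sqlalchemy_utils reports that the
    # database does not exist.

The Tibber import in utils.py then hands its dataframe to this object via db_da.savedata(tibber_df), exactly as in the hunk above.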
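
The reformatted calc_adjustment_heatcurve keeps the formula from its docstring, adjustment = -adjustment_factor * (price_act - price_avg) * 100 / price_avg, and the min() branch shown in the hunk caps an increase at 10 times the adjustment factor per call. A short worked example with made-up prices:

    # Illustrative numbers only; they are not taken from the patch.
    price_act = 0.15          # actual hour price in euro/kWh
    price_avg = 0.25          # average day price in euro/kWh
    adjustment_factor = 0.4
    old_adjustment = 0.0

    raw = round(-adjustment_factor * (price_act - price_avg) * 100 / price_avg, 1)
    # raw = round(-0.4 * (-0.10) * 100 / 0.25, 1) = 16.0  (cheap hour -> raise the curve)
    capped = min(raw, old_adjustment + adjustment_factor * 10)
    # capped = min(16.0, 4.0) = 4.0  -> the step per run is limited to 10 x the factor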
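
version_number() in the same utils.py turns a dotted version string into a sortable integer by weighting the first three fields (any further component, such as a dev suffix, is dropped by lst[:3]). For example, with the version string from dao/prog/version.py:

    # version_number("2024.10.1"):
    #   ["2024", "10", "1"] -> keep first three, reverse -> ["1", "10", "2024"]
    #   1 * 100**0 + 10 * 100**1 + 2024 * 100**2 = 1 + 1000 + 20240000 = 20241001
    parts = "2024.10.1".split(".")[:3]
    parts.reverse()
    assert sum(int(x) * (100**i) for i, x in enumerate(parts)) == 20241001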