Skip to content

Commit

Permalink
Merge pull request #219 from kilowatts-io/preview
Browse files Browse the repository at this point in the history
Preview
  • Loading branch information
BenjaminWatts committed Feb 24, 2024
2 parents 285b4f9 + 2fa88b4 commit 5930d56
Show file tree
Hide file tree
Showing 14 changed files with 1,035 additions and 3 deletions.
9 changes: 9 additions & 0 deletions .github/workflows/expo-preview.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,13 @@ jobs:
name: EAS Update
runs-on: ubuntu-latest
steps:
- name: Check for CDN_DOMAIN_NAME_DEV
run: |
if [ -z "${{ secrets.CDN_DOMAIN_NAME_DEV }}" ]; then
echo "You must provide a CDN_DOMAIN_NAME_DEV secret linked to this project's Expo account in this repo's secrets. Learn more: https://docs.expo.dev/eas-update/github-actions"
exit 1
fi
- name: Check for EXPO_TOKEN
run: |
if [ -z "${{ secrets.EXPO_TOKEN }}" ]; then
Expand Down Expand Up @@ -36,3 +43,5 @@ jobs:

- name: Publish update
run: eas update --auto --channel preview
env:
EXPO_PUBLIC_CDN_DOMAIN_NAME: ${{ secrets.CDN_DOMAIN_NAME_DEV }}
12 changes: 11 additions & 1 deletion .github/workflows/expo-prod.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,14 @@ jobs:
name: EAS Update
runs-on: ubuntu-latest
steps:

- name: Check for CDN_DOMAIN_NAME
run: |
if [ -z "${{ secrets.CDN_DOMAIN_NAME }}" ]; then
echo "You must provide a CDN_DOMAIN_NAME secret linked to this project's Expo account in this repo's secrets. Learn more: https://docs.expo.dev/eas-update/github-actions"
exit 1
fi
- name: Check for EXPO_TOKEN
run: |
if [ -z "${{ secrets.EXPO_TOKEN }}" ]; then
Expand All @@ -36,4 +44,6 @@ jobs:
run: yarn install

- name: Publish update
run: eas update --auto --channel production # Here we specify the production channel for the update.
run: eas update --auto --channel production
env:
EXPO_PUBLIC_CDN_DOMAIN_NAME: ${{ secrets.CDN_DOMAIN_NAME }}
24 changes: 23 additions & 1 deletion backend/lambda/gb_snapshot/gb_snapshot.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
import logging
from typing import List
from .ng_dist.ng_dist import NationalGridDistributionNetworks
from .s3 import write_summary_output
from .ptypes import (
BalancingTotals,
EmbeddedSnapshot,
ForeignMarketSnapshot,
NationalGridGspSnapshot,
SummaryOutput,
TotalsSnapshot,
)
Expand All @@ -14,12 +17,22 @@
from .bm import Bm


def combine_totals(gen_totals: List[TotalsSnapshot], fm_totals: ForeignMarketSnapshot):
    """Merge generator totals with the foreign-market total into one ranking.

    Returns a new list containing every generator total plus the foreign
    market total, sorted by actual output (``ac``), largest first.
    """
    return sorted(gen_totals + [fm_totals], key=lambda snapshot: snapshot.ac, reverse=True)


def combine_gen_unit_groups(
    bm_unit_groups: List[EmbeddedSnapshot], ngdnos: List[NationalGridGspSnapshot]
):
    """Append actively-generating DNO snapshots to the BM unit groups.

    DNO entries with zero or negative actual output are dropped, then the
    combined list is re-sorted by actual output (``ac``), largest first.
    """
    generating_dnos = [dno for dno in ngdnos if dno.ac > 0]
    return sorted(bm_unit_groups + generating_dnos, key=lambda group: group.ac, reverse=True)


def gb_snapshot():
request_params = get_request_params()
print(f"preparing for {request_params.dt}...")
Expand All @@ -37,7 +50,16 @@ def gb_snapshot():
fms, fm_totals = ForeignMarketTotals(bm=bm.copy()).run()
logging.info(f"combining totals for generators and foreign markets")
totals = combine_totals(gen_totals, fm_totals)
logging.info(f"if possible, adding national grid embeddded to totals")
ngdnos = NationalGridDistributionNetworks().run()
if ngdnos is not None:
gen_unit_groups = combine_gen_unit_groups(gen_unit_groups, ngdnos)
else:
logging.info(
f"no national grid distribution networks response returned, skipping..."
)
logging.info(f"writing summary output")

write_summary_output(
SummaryOutput(
dt=request_params.dt,
Expand Down
7 changes: 7 additions & 0 deletions backend/lambda/gb_snapshot/ng_dist/Readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# National Grid DNO Areas

Gets latest data from National Grid DNO Areas

From time to time, the capacity needs to be updated by running the capacity.py module

If, for any reason, the GB Snapshot is unable to call any of the National Grid Distribution Networks for data, all of them will be excluded from the live data.
Empty file.
66 changes: 66 additions & 0 deletions backend/lambda/gb_snapshot/ng_dist/capacity.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# gets the latest capacity totals for the national grid distribution networks for wind and solar

import pandas as pd, requests, os

# write to current module directory
# Output CSV lives next to this module so ng_dist.py can load it at runtime.
CAPACITY_TOTALS_CSV_FP = os.path.join(os.path.dirname(__file__), "capacity_totals.csv")
BASE_API = "https://connecteddata.nationalgrid.co.uk"
# CKAN datastore search endpoint for the embedded capacity register resource.
INITIAL_URL = f"{BASE_API}/api/3/action/datastore_search?resource_id=0baf38d3-8f7a-41e5-ad17-8d5785877f8c"


# Source column names in the embedded capacity register.
CAPACITY_COLUMN = "Connected Maximum Export Capacity (MW)"
TECHNOLOGY_COLUMN = "Energy Source 1"
DNO_REGION_COLUMN = "Licence Area"

COLUMNS_TO_KEEP = [
    DNO_REGION_COLUMN,
    TECHNOLOGY_COLUMN,
    CAPACITY_COLUMN,
]

# Built from the column constants (rather than repeating the literals) so a
# renamed source column only needs to be updated in one place.
SIMPLER_COLUMN_NAMES = {
    CAPACITY_COLUMN: "cp",
    TECHNOLOGY_COLUMN: "technology",
    DNO_REGION_COLUMN: "region",
}

SOLAR_TECHNOLOGY_NAME = "Solar"
WIND_TECHNOLOGY_NAME = "Wind"

TECHNOLOGIES_TO_KEEP = [SOLAR_TECHNOLOGY_NAME, WIND_TECHNOLOGY_NAME]


def get_capacity_register():
    """Download the full embedded capacity register and write wind/solar
    capacity totals per DNO region to ``CAPACITY_TOTALS_CSV_FP``.

    Pages through the CKAN datastore API until an empty page is returned,
    then keeps only wind/solar rows and sums capacity per (region, technology).
    """
    records = []
    url = INITIAL_URL
    while True:
        response = requests.get(url)
        response.raise_for_status()
        payload = response.json()
        page = payload["result"]["records"]
        if not page:
            break
        records.extend(page)
        # Follow the API's own pagination link for the next page.
        url = f'{BASE_API}{payload["result"]["_links"]["next"]}'
        print(url)

    df = pd.DataFrame(records)[COLUMNS_TO_KEEP]
    df = df[df[TECHNOLOGY_COLUMN].isin(TECHNOLOGIES_TO_KEEP)]
    df = df.rename(columns=SIMPLER_COLUMN_NAMES)
    df["technology"] = df["technology"].str.lower()
    df = df.groupby(["region", "technology"]).sum()
    df.to_csv(CAPACITY_TOTALS_CSV_FP)


if __name__ == "__main__":
    # Manual refresh entry point. get_capacity_register() writes the CSV as a
    # side effect and returns None, so capturing the result (as the old
    # `resp = ...` did) is pointless.
    get_capacity_register()
9 changes: 9 additions & 0 deletions backend/lambda/gb_snapshot/ng_dist/capacity_totals.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
region,technology,cp
Western Power Distribution (East Midlands) Plc,solar,1235.0
Western Power Distribution (East Midlands) Plc,wind,459.0
Western Power Distribution (South Wales) Plc,solar,607.0
Western Power Distribution (South Wales) Plc,wind,503.0
Western Power Distribution (South West) Plc,solar,1139.0
Western Power Distribution (South West) Plc,wind,242.0
Western Power Distribution (West Midlands) Plc,solar,491.0
Western Power Distribution (West Midlands) Plc,wind,38.0
203 changes: 203 additions & 0 deletions backend/lambda/gb_snapshot/ng_dist/ng_dist.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,203 @@
from typing import List, Union
from pydantic import BaseModel, Field
import pandas as pd, requests
from ..ptypes import NationalGridGspSnapshot
from .capacity import CAPACITY_TOTALS_CSV_FP
import logging


# Static registry of the four National Grid DNO licence areas polled for live
# embedded wind/solar output. Each entry carries:
#   id     - licence area id, used when generating unit codes
#   name   - display name of the area
#   coords - representative lat/lng per technology (town noted in comments)
#   url    - live output CSV feed on connecteddata.nationalgrid.co.uk
#   embedded_capacity_register_name - region key into capacity_totals.csv
NATIONAL_GRID_DISTRIBUTION_NETWORKS = [
    {
        "id": 11,
        "name": "East Midlands",
        "coords": {
            "solar": {
                # leicester
                "lat": 52.6369,
                "lng": -1.1398,
            },
            "wind": {
                # skegness
                "lat": 53.144,
                "lng": 0.336,
            },
        },
        "url": "https://connecteddata.nationalgrid.co.uk/dataset/7235194a-884d-47ff-bd03-a0ad38eedcb3/resource/92d3431c-15d7-4aa6-ad34-2335596a026c/download/eastmidlands.csv",
        "embedded_capacity_register_name": "Western Power Distribution (East Midlands) Plc",
    },
    {
        "id": 14,
        "name": "West Midlands",
        "coords": {
            "solar": {
                # telford
                "lat": 52.6784,
                "lng": -2.4453,
            },
            "wind": {
                # birmingham
                "lat": 52.4862,
                "lng": -1.8904,
            },
        },
        "url": "https://connecteddata.nationalgrid.co.uk/dataset/7235194a-884d-47ff-bd03-a0ad38eedcb3/resource/1c3447df-37d7-4fb4-9f99-0e2a0d691dbe/download/westmidlands.csv",
        "embedded_capacity_register_name": "Western Power Distribution (West Midlands) Plc",
    },
    {
        "id": 21,
        "name": "South Wales",
        "coords": {
            "solar": {
                # cardiff
                "lat": 51.4816,
                "lng": -3.1791,
            },
            "wind": {
                # llandovery
                "lat": 51.995,
                "lng": -3.802,
            },
        },
        "url": "https://connecteddata.nationalgrid.co.uk/dataset/7235194a-884d-47ff-bd03-a0ad38eedcb3/resource/38b81427-a2df-42f2-befa-4d6fe9b54c98/download/southwales.csv",
        "embedded_capacity_register_name": "Western Power Distribution (South Wales) Plc",
    },
    {
        "id": 22,
        "name": "South West",
        "coords": {
            "wind": {
                # truro
                "lat": 50.2632,
                "lng": -5.051,
            },
            "solar": {
                # yeovil
                "lat": 50.9421,
                "lng": -2.6336,
            },
        },
        "url": "https://connecteddata.nationalgrid.co.uk/dataset/7235194a-884d-47ff-bd03-a0ad38eedcb3/resource/85aaa199-15df-40ec-845f-6c61cbedc20f/download/southwest.csv",
        "embedded_capacity_register_name": "Western Power Distribution (South West) Plc",
    },
]


class InterpolatedValue(BaseModel):
    """A live reading for one technology: actual output plus rate of change."""

    # ac: latest actual output reading (presumably MW — matches the capacity
    # register units; confirm against the feed).
    ac: float
    # dl: change per minute, derived from the two most recent readings.
    dl: float


class NationalGridDistributionNetworkResponse(BaseModel):
    """Latest solar and wind readings for a single DNO licence area."""

    solar: InterpolatedValue
    wind: InterpolatedValue


class NationalGridDistributionNetwork(BaseModel):
    """One DNO licence area's live output feed.

    ``run`` downloads the area's CSV feed, validates freshness, and returns
    the most recent solar/wind readings with per-minute deltas.
    """

    id: int
    name: str
    # mapping of technology -> representative {"lat": ..., "lng": ...}
    coords: dict
    # live output CSV feed URL
    url: str

    def run(self) -> "NationalGridDistributionNetworkResponse":
        """Fetch and parse the live feed.

        Raises:
            requests.HTTPError: if the feed request returns an error status.
            ValueError: if the most recent reading is 10+ minutes old.
        """
        logging.debug(f"running NationalGridDistributionNetwork for {self.name}...")

        # Bound the request so a stalled feed cannot hang the whole snapshot,
        # and fail fast on HTTP errors instead of parsing an error page.
        resp = requests.get(self.url, timeout=30)
        resp.raise_for_status()

        # The feed is a small CRLF-delimited CSV; the final element after the
        # split is the empty string following the trailing newline, hence [1:-1].
        lines = resp.text.split("\r\n")
        columns = lines[0].split(",")
        rows = [line.split(",") for line in lines[1:-1]]
        df = pd.DataFrame([dict(zip(columns, row)) for row in rows])
        df.set_index("Timestamp", inplace=True)
        # all value columns are numeric readings
        df = df.astype(float)
        df.index = pd.to_datetime(df.index, utc=True)
        df.sort_index(inplace=True)

        most_recent = df.iloc[-1]
        # Explicit raise rather than `assert` (asserts are stripped under
        # `python -O`), so stale data reliably aborts this network and the
        # caller's best-effort handling kicks in.
        if (pd.Timestamp.now(tz="UTC") - most_recent.name) >= pd.Timedelta(
            "10 minutes"
        ):
            raise ValueError(
                f"{self.name}: latest reading at {most_recent.name} is stale"
            )

        previous = df.iloc[-2]
        # minutes between the two latest readings, used to express deltas
        # as per-minute rates
        time_diff_minutes = (most_recent.name - previous.name).seconds / 60

        return NationalGridDistributionNetworkResponse(
            solar=InterpolatedValue(
                ac=most_recent["Solar"],
                dl=(most_recent["Solar"] - previous["Solar"]) / time_diff_minutes,
            ),
            wind=InterpolatedValue(
                ac=most_recent["Wind"],
                dl=(most_recent["Wind"] - previous["Wind"]) / time_diff_minutes,
            ),
        )


class NationalGridDistributionNetworks:
    """Best-effort collector of live embedded wind/solar output per DNO area.

    If any individual network fails (feed down, stale data, schema change),
    the whole set is skipped by returning None so the snapshot can proceed
    without embedded DNO data.
    """

    def run(self) -> Union[None, List[NationalGridGspSnapshot]]:
        """Return two snapshots (solar + wind) per DNO area, or None on any failure."""
        logging.info("running NationalGridDistributionNetworks...")
        capacities = self._read_capacity_totals()
        try:
            results = []
            for network in NATIONAL_GRID_DISTRIBUTION_NETWORKS:
                response = NationalGridDistributionNetwork(**network).run()
                region = network["embedded_capacity_register_name"]
                # DNO feeds carry no balancing actions, so bids/offers are zero.
                shared_kwargs = {
                    "bids": 0,
                    "offers": 0,
                }
                # one snapshot per technology, solar first then wind
                for technology, reading in (
                    ("solar", response.solar),
                    ("wind", response.wind),
                ):
                    results.append(
                        NationalGridGspSnapshot(
                            name=network["name"],
                            code=f'NGDNO-{network["id"]}-{technology}',
                            fuel_type=technology,
                            coords=network["coords"][technology],
                            ac=reading.ac,
                            dl=reading.dl,
                            cp=capacities.loc[(region, technology)]["cp"],
                            **shared_kwargs,
                        )
                    )
            logging.info("NationalGridDistributionNetworks completed.")
            return results

        except Exception:
            # Deliberate best-effort: log the full traceback (the previous
            # print(e) lost it) and signal "no embedded data" to the caller
            # rather than failing the whole snapshot.
            logging.exception("NationalGridDistributionNetworks failed; skipping")
            return None

    def _read_capacity_totals(self):
        """Load capacity per (region, technology) from the bundled CSV."""
        df = pd.read_csv(CAPACITY_TOTALS_CSV_FP)
        df.set_index(["region", "technology"], inplace=True)
        return df


if __name__ == "__main__":
    # Ad-hoc manual check: poll the live feeds and print the resulting
    # snapshots (or None if any network failed).
    results = NationalGridDistributionNetworks().run()
    print(results)
Loading

0 comments on commit 5930d56

Please sign in to comment.