From a20464fa7a81308b6f6b8afaa0109ec6043ed729 Mon Sep 17 00:00:00 2001 From: Arne Tarara Date: Sun, 19 May 2024 14:54:12 +0200 Subject: [PATCH 1/5] Clear comms which value is reported --- api/main.py | 4 ++-- frontend/ci.html | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/api/main.py b/api/main.py index 32c3b1228..6214dd99b 100644 --- a/api/main.py +++ b/api/main.py @@ -1427,8 +1427,8 @@ class EnergyData(BaseModel): machine: UUID project: Optional[str] = None tags: Optional[str] = None - time_stamp: str # is expected to be in microseconds - energy_value: str # is expected to be in mJ + time_stamp: str # value is in microseconds + energy_value: str # value is in Joules @field_validator('company', 'project', 'tags') @classmethod diff --git a/frontend/ci.html b/frontend/ci.html index 149efff93..066efa757 100644 --- a/frontend/ci.html +++ b/frontend/ci.html @@ -151,7 +151,7 @@

From 3e8ba39a21e00c142d2a65d4f74d8194c5a201f4 Mon Sep 17 00:00:00 2001 From: Arne Tarara Date: Wed, 22 May 2024 12:30:55 +0200 Subject: [PATCH 2/5] Added migration [skip ci] --- migrations/2024_05_19_carbondb.sql | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 migrations/2024_05_19_carbondb.sql diff --git a/migrations/2024_05_19_carbondb.sql b/migrations/2024_05_19_carbondb.sql new file mode 100644 index 000000000..1ac77f8b7 --- /dev/null +++ b/migrations/2024_05_19_carbondb.sql @@ -0,0 +1,2 @@ +ALTER TABLE carbondb_energy_data_day DROP CONSTRAINT unique_machine_project_date; +CREATE UNIQUE INDEX idx_carbondb_unique_entry ON carbondb_energy_data_day(type, company, machine, project, tags, date ); \ No newline at end of file From e36025f0e68c69c9b95d68dbc1f6b94f898fbb10 Mon Sep 17 00:00:00 2001 From: Arne Tarara Date: Fri, 4 Oct 2024 23:55:06 +0200 Subject: [PATCH 3/5] Fix: Authentication token was set to null --- api/main.py | 2 +- frontend/js/helpers/main.js | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/api/main.py b/api/main.py index 6214dd99b..e0e2368ae 100644 --- a/api/main.py +++ b/api/main.py @@ -142,7 +142,7 @@ def authenticate(authentication_token=Depends(header_scheme), request: Request = parsed_url = urlparse(str(request.url)) try: - if not authentication_token: # Note that if no token is supplied this will authenticate as the DEFAULT user, which in FOSS systems has full capabilities + if not authentication_token or authentication_token.strip() == '': # Note that if no token is supplied this will authenticate as the DEFAULT user, which in FOSS systems has full capabilities authentication_token = 'DEFAULT' user = User.authenticate(SecureVariable(authentication_token)) diff --git a/frontend/js/helpers/main.js b/frontend/js/helpers/main.js index 7685c8dc8..bafe2e3c4 100644 --- a/frontend/js/helpers/main.js +++ b/frontend/js/helpers/main.js @@ -156,7 +156,14 @@ async function makeAPICall(path, values=null, 
force_authentication_token=null) { var options = { method: 'GET', headers: {} } } - options.headers['X-Authentication'] = (force_authentication_token == null) ? localStorage.getItem('authentication_token'): force_authentication_token; + if (force_authentication_token != null && force_authentication_token != '') { + options.headers['X-Authentication'] = force_authentication_token; + } else { + const authentication_token = localStorage.getItem('authentication_token'); + if (authentication_token != null && authentication_token != '') { + options.headers['X-Authentication'] = authentication_token; + } + } let json_response = null; if(localStorage.getItem('remove_idle') == 'true') path += "?remove_idle=true" From 68afffd799359847dd57981f6287a4a16beac909 Mon Sep 17 00:00:00 2001 From: Arne Tarara Date: Fri, 4 Oct 2024 23:56:39 +0200 Subject: [PATCH 4/5] Fix: Memory Used rename in optimization provider --- optimization_providers/resources/utilization.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/optimization_providers/resources/utilization.py b/optimization_providers/resources/utilization.py index ab4f8b3ca..c9d75f585 100644 --- a/optimization_providers/resources/utilization.py +++ b/optimization_providers/resources/utilization.py @@ -30,7 +30,7 @@ def memory_to_bytes(memory_str): raise ValueError(f"Unrecognized memory unit: {unit}") # pylint: disable=unused-argument -@register_reporter('container_memory_utilization', Criticality.INFO, REPORTER_NAME, REPORTER_ICON, req_providers =['MemoryTotalCgroupContainerProvider']) +@register_reporter('container_memory_utilization', Criticality.INFO, REPORTER_NAME, REPORTER_ICON, req_providers =['MemoryUsedCgroupContainerProvider']) def container_memory_utilization(self, run, measurements, repo_path, network, notes, phases): mem = {} @@ -38,7 +38,7 @@ def container_memory_utilization(self, run, measurements, repo_path, network, no if x := d.get('deploy', {}).get('resources', 
{}).get('limits', {}).get('memory', None): mem[s] = memory_to_bytes(x) - for service, measurement_stats in phases.get('data').get('[RUNTIME]').get('memory_total_cgroup_container').get('data').items(): + for service, measurement_stats in phases.get('data').get('[RUNTIME]').get('memory_used_cgroup_container').get('data').items(): if not service in mem: self.add_optimization( f"You are not using Memory limits definitions on {service}", From c809d0e4b44992c565430249b5b8556063352a12 Mon Sep 17 00:00:00 2001 From: Arne Tarara Date: Sat, 5 Oct 2024 00:03:06 +0200 Subject: [PATCH 5/5] Normalized to new SI Units for Bytes --- config.yml.example | 1 + lib/system_checks.py | 4 ++-- metric_providers/memory/used/procfs/system/source.c | 1 + .../network/io/cgroup/container/provider.py | 1 + optimization_providers/resources/utilization.py | 12 ++++++------ 5 files changed, 11 insertions(+), 8 deletions(-) diff --git a/config.yml.example b/config.yml.example index bcf75f09b..8f7f8c5ea 100644 --- a/config.yml.example +++ b/config.yml.example @@ -162,6 +162,7 @@ measurement: # CPUCores: 4 # CPUThreads: 4 # TDP: 65 +######### The value for memory must be in GB not in GiB # HW_MemAmountGB: 16 # Hardware_Availability_Year: 2011 #--- END diff --git a/lib/system_checks.py b/lib/system_checks.py index b0d23f774..04ceaaf16 100644 --- a/lib/system_checks.py +++ b/lib/system_checks.py @@ -93,8 +93,8 @@ def check_utf_encoding(): (check_one_energy_and_scope_machine_provider, Status.ERROR, 'single energy scope machine provider', 'Please only select one provider with energy and scope machine'), (check_tmpfs_mount, Status.INFO, 'tmpfs mount', 'We recommend to mount tmp on tmpfs'), (check_cpu_utilization, Status.WARN, '< 5% CPU utilization', 'Your system seems to be busy. Utilization is above 5%. 
Consider terminating some processes for a more stable measurement.'), - (check_free_disk, Status.ERROR, '1GB free hdd space', 'We recommend to free up some disk space'), - (check_free_memory, Status.ERROR, 'free memory', 'No free memory! Please kill some programs'), + (check_free_disk, Status.ERROR, '1 GiB free hdd space', 'We recommend to free up some disk space (< 1GiB available)'), + (check_free_memory, Status.ERROR, '1 GiB free memory', 'No free memory! Please kill some programs (< 1GiB available)'), (check_docker_daemon, Status.ERROR, 'docker daemon', 'The docker daemon could not be reached. Are you running in rootless mode or have added yourself to the docker group? See installation: [See https://docs.green-coding.io/docs/installation/]'), (check_containers_running, Status.WARN, 'running containers', 'You have other containers running on the system. This is usually what you want in local development, but for undisturbed measurements consider going for a measurement cluster [See https://docs.green-coding.io/docs/installation/installation-cluster/].'), (check_utf_encoding, Status.ERROR, 'utf file encoding', 'Your system encoding is not set to utf-8. 
This is needed as we need to parse console output.'), diff --git a/metric_providers/memory/used/procfs/system/source.c b/metric_providers/memory/used/procfs/system/source.c index 6d4aedd4c..cd11ac3b4 100644 --- a/metric_providers/memory/used/procfs/system/source.c +++ b/metric_providers/memory/used/procfs/system/source.c @@ -48,6 +48,7 @@ static unsigned long long int get_memory_procfs() { exit(1); } + // note that here we need to use 1024 instead of 1000 as we are already coming from kiB and not kB mem_used = (mem_total - mem_available) * 1024; // outputted value is in Bytes then fclose(fd); diff --git a/metric_providers/network/io/cgroup/container/provider.py b/metric_providers/network/io/cgroup/container/provider.py index 12302cfc6..a3e398a5e 100644 --- a/metric_providers/network/io/cgroup/container/provider.py +++ b/metric_providers/network/io/cgroup/container/provider.py @@ -18,6 +18,7 @@ def __init__(self, resolution, rootless=False, skip_check=False): def read_metrics(self, run_id, containers=None): df = super().read_metrics(run_id, containers) + if df.empty: return df diff --git a/optimization_providers/resources/utilization.py b/optimization_providers/resources/utilization.py index c9d75f585..5811abad6 100644 --- a/optimization_providers/resources/utilization.py +++ b/optimization_providers/resources/utilization.py @@ -13,10 +13,10 @@ def memory_to_bytes(memory_str): """Convert memory string with units (e.g., '50M', '2G') to bytes.""" unit_multipliers = { - 'K': 1024, # Kilobyte - 'M': 1024**2, # Megabyte - 'G': 1024**3, # Gigabyte - 'T': 1024**4 # Terabyte + 'K': 1_000, # Kilobyte + 'M': 1_000_000, # Megabyte + 'G': 1_000_000_000, # Gigabyte + 'T': 1_000_000_000_000, # Terabyte } if isinstance(memory_str, int) or memory_str[-1].isdigit(): @@ -55,8 +55,8 @@ def container_memory_utilization(self, run, measurements, repo_path, network, no if (actual_mem_max/mem[service]*100) < MIN_MEM_UTIL: self.add_optimization(f"Memory utilization is low in {service}", f''' -
The service {service} has the memory set to: {mem[service]} bytes but the max - usage was {actual_mem_max} bytes. The mean was {data[first_item].get('mean', None)} bytes. + The service {service} has the memory set to: {mem[service]}bytes but the max + usage was {actual_mem_max}bytes. The mean was {data[first_item].get('mean', None)}bytes. Which is a usage of {data[first_item].get('mean', 0)/mem[service]*100}%. Either you should reserve less memory ressources for the container or increase utilization through caching more data in memory and thus in turn reducing cpu calculations or network traffic if possible.