From f86df7717479501f05504321640c82f1569534ac Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Mon, 6 Jul 2020 22:11:56 +0200 Subject: [PATCH 001/140] [OpenWhisk] Initialize OpenWhisk feature --- requirements.txt | 2 + sebs/openwhisk/__init__.py | 0 sebs/openwhisk/config.py | 48 +++++++++++++++++++ sebs/openwhisk/function.py | 9 ++++ sebs/openwhisk/openwhisk.py | 96 +++++++++++++++++++++++++++++++++++++ templates/mycluster.yaml | 12 +++++ 6 files changed, 167 insertions(+) create mode 100644 sebs/openwhisk/__init__.py create mode 100644 sebs/openwhisk/config.py create mode 100644 sebs/openwhisk/function.py create mode 100644 sebs/openwhisk/openwhisk.py create mode 100644 templates/mycluster.yaml diff --git a/requirements.txt b/requirements.txt index 6a6c96ba..2a4696ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,4 +12,6 @@ flake8-black==0.1.2 mypy==0.761 mypy-boto3==1.13.24.2 boto3-stubs[lambda,s3,apigateway,sts,logs] +jinja2==2.11.2 +pyyaml==5.3.1 diff --git a/sebs/openwhisk/__init__.py b/sebs/openwhisk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py new file mode 100644 index 00000000..9cc715aa --- /dev/null +++ b/sebs/openwhisk/config.py @@ -0,0 +1,48 @@ +from sebs.cache import Cache +from sebs.faas.config import Credentials, Resources, Config + + +class OpenWhiskCredentials(Credentials): + def __init__(self): + pass + + @staticmethod + def initialize(config: dict, cache: Cache) -> Credentials: + return OpenWhiskCredentials() + + def serialize(self) -> dict: + pass + + +class OpenWhiskResources(Resources): + + @staticmethod + def initialize(config: dict, cache: Cache) -> Resources: + return OpenWhiskResources() + + def serialize(self) -> dict: + return {"": ""} + + +class OpenWhiskConfig(Config): + name: str + cache: Cache + + def __init__(self, config: dict, cache: Cache): + self.name = config['name'] + self.cache = cache + + @property + def credentials(self) -> 
Credentials: + pass + + @property + def resources(self) -> Resources: + pass + + @staticmethod + def initialize(config: dict, cache: Cache) -> Config: + return OpenWhiskConfig(config, cache) + + def serialize(self) -> dict: + pass diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py new file mode 100644 index 00000000..2cafe50f --- /dev/null +++ b/sebs/openwhisk/function.py @@ -0,0 +1,9 @@ +from sebs.faas.function import Function, ExecutionResult + + +class OpenWhiskFunction(Function): + def sync_invoke(self, payload: dict) -> ExecutionResult: + pass + + def async_invoke(self, payload: dict) -> ExecutionResult: + pass diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py new file mode 100644 index 00000000..4bb7b4b3 --- /dev/null +++ b/sebs/openwhisk/openwhisk.py @@ -0,0 +1,96 @@ +import sebs.benchmark +from sebs.faas import System, PersistentStorage +from sebs.faas.config import Config +from sebs.faas.function import Function +from .config import OpenWhiskConfig +import subprocess +import logging + + +class OpenWhisk(System): + _config: OpenWhiskConfig + + @property + def config(self) -> Config: + return self._config + + def get_storage(self, replace_existing: bool) -> PersistentStorage: + pass + + def get_function(self, code_package: sebs.benchmark.Benchmark) -> Function: + pass + + def shutdown(self) -> None: + pass + + @staticmethod + def __run_check_process__(cmd: str) -> None: + subprocess.run( + cmd.split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + @staticmethod + def __check_installation__(app: str, cmd: str) -> None: + try: + logging.info('Checking {} installation...'.format(app)) + OpenWhisk.__run_check_process__(cmd) + logging.info('Check successful, proceeding...') + except subprocess.CalledProcessError: + logging.error('Cannot find {}, aborting'.format(app)) + exit(1) + + @staticmethod + def install_kind() -> None: + try: + logging.info('Installing kind...') + 
OpenWhisk.__run_check_process__('GO111MODULE="on" go get sigs.k8s.io/kind@v0.8.1') + logging.info('Kind has been installed') + except subprocess.CalledProcessError as e: + logging.error('Cannot install kind, reason: {}'.format(e.output)) + exit(1) + + @staticmethod + def check_kind_installation() -> None: + try: + OpenWhisk.__run_check_process__('kind --version') + except subprocess.CalledProcessError: + logging.error('Cannot find kind executable, installing...') + OpenWhisk.install_kind() + + @staticmethod + def check_kubectl_installation() -> None: + OpenWhisk.__check_installation__("kubectl", "kubectl version") + + @staticmethod + def check_helm_installation() -> None: + OpenWhisk.__check_installation__("helm", "helm version") + + @staticmethod + def check_openwhisk_installation(namespace: str) -> None: + try: + logging.info('Checking openwhisk installation.') + namespaces = subprocess.run( + "kubectl get namespaces".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + subprocess.run( + ["grep", namespace], + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + input=namespaces.stdout, + ) + logging.info("Openwhisk installed!") + except subprocess.CalledProcessError: + logging.info("Openwhisk is not installed, proceeding with installation...") + subprocess.run( + "helm install owdev " + ) + + @staticmethod + def name() -> str: + return "openwhisk" diff --git a/templates/mycluster.yaml b/templates/mycluster.yaml new file mode 100644 index 00000000..24ca450a --- /dev/null +++ b/templates/mycluster.yaml @@ -0,0 +1,12 @@ +whisk: + ingress: + type: NodePort + apiHostName: {{ apiHost.name }} + apiHostPort: {{ apiHost.port }} + +invoker: + containerFactory: + impl: "kubernetes" + +nginx: + httpsNodePort: {{ apiHost.port }} From b2f141e70431074822744b2b18f94fae5b1aa84e Mon Sep 17 00:00:00 2001 From: root Date: Thu, 9 Jul 2020 23:29:51 +0200 Subject: [PATCH 002/140] Add basis of OpenwhiskFunction --- sebs/openwhisk/function.py | 27 
+++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 2cafe50f..40d43dc2 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -2,8 +2,27 @@ class OpenWhiskFunction(Function): - def sync_invoke(self, payload: dict) -> ExecutionResult: - pass + def sync_invoke(self, payload: dict): + url = "http://localhost:5051/benchmark" + readyPayload = json.dumps(payload) + headers = {"content-type": "application/json"} + begin = datetime.datetime.now() + logging.info(f"Function {self.name} invoking...") + response = requests.request("POST", url, data=readyPayload, headers=headers) + end = datetime.datetime.now() + logging.info( + f"Function {self.name} returned response with code: {response.status_code}" + ) + openwhiskResult = ExecutionResult(begin, end) + if response.status_code != 200: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) - def async_invoke(self, payload: dict) -> ExecutionResult: - pass + openwhiskResult.stats.failure = True + return openwhiskResult + returnContent = json.loads(json.loads(response.content)) + openwhiskResult.parse_benchmark_output(returnContent) + return openwhiskResult + + def async_invoke(self, payload: dict): + raise Exception("Non-trigger invoke not supported!") \ No newline at end of file From 542167e20ef56333c999adc11b165a1e2551eeb0 Mon Sep 17 00:00:00 2001 From: root Date: Fri, 10 Jul 2020 02:50:53 +0200 Subject: [PATCH 003/140] Add basic request --- sebs/openwhisk/OpenwhiskFunction.py | 38 +++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 sebs/openwhisk/OpenwhiskFunction.py diff --git a/sebs/openwhisk/OpenwhiskFunction.py b/sebs/openwhisk/OpenwhiskFunction.py new file mode 100644 index 00000000..2989ca1c --- /dev/null +++ b/sebs/openwhisk/OpenwhiskFunction.py @@ -0,0 +1,38 @@ +from sebs.faas.function import Function, 
ExecutionResult +import json +import datetime +import requests +import logging + + +class OpenwhiskFunction(Function): + def __init__(self, name: str, namespace: str = "guest"): + super().__init__(name) + self.namespace = namespace + + def sync_invoke(self, payload: dict): + url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" + print(url) + readyPayload = json.dumps(payload) + headers = {"content-type": "application/json", + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + begin = datetime.datetime.now() + logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") + response = requests.request("GET", url, data=readyPayload, headers=headers) + end = datetime.datetime.now() + print( + f"Function {self.name} returned response with code: {response.status_code}" + ) + openwhiskResult = ExecutionResult(begin, end) + if response.status_code != 200: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) + + openwhiskResult.stats.failure = True + return openwhiskResult + returnContent = json.loads(response.content) + openwhiskResult.parse_benchmark_output(returnContent) + return openwhiskResult + + def async_invoke(self, payload: dict): + raise Exception("Non-trigger invoke not supported!") \ No newline at end of file From c01cd3a3ba8a4067d0d951b1ee44070fa7c15125 Mon Sep 17 00:00:00 2001 From: root Date: Fri, 10 Jul 2020 03:05:46 +0200 Subject: [PATCH 004/140] fix file name --- sebs/openwhisk/OpenwhiskFunction.py | 38 ---------------------------- sebs/openwhisk/function.py | 39 +++++++++++++++++++++++++---- 2 files changed, 34 insertions(+), 43 deletions(-) delete mode 100644 sebs/openwhisk/OpenwhiskFunction.py diff --git a/sebs/openwhisk/OpenwhiskFunction.py b/sebs/openwhisk/OpenwhiskFunction.py deleted file mode 100644 
index 2989ca1c..00000000 --- a/sebs/openwhisk/OpenwhiskFunction.py +++ /dev/null @@ -1,38 +0,0 @@ -from sebs.faas.function import Function, ExecutionResult -import json -import datetime -import requests -import logging - - -class OpenwhiskFunction(Function): - def __init__(self, name: str, namespace: str = "guest"): - super().__init__(name) - self.namespace = namespace - - def sync_invoke(self, payload: dict): - url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" - print(url) - readyPayload = json.dumps(payload) - headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - begin = datetime.datetime.now() - logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("GET", url, data=readyPayload, headers=headers) - end = datetime.datetime.now() - print( - f"Function {self.name} returned response with code: {response.status_code}" - ) - openwhiskResult = ExecutionResult(begin, end) - if response.status_code != 200: - logging.error("Invocation of {} failed!".format(self.name)) - logging.error("Input: {}".format(readyPayload)) - - openwhiskResult.stats.failure = True - return openwhiskResult - returnContent = json.loads(response.content) - openwhiskResult.parse_benchmark_output(returnContent) - return openwhiskResult - - def async_invoke(self, payload: dict): - raise Exception("Non-trigger invoke not supported!") \ No newline at end of file diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 2cafe50f..2989ca1c 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -1,9 +1,38 @@ from sebs.faas.function import Function, ExecutionResult +import json +import datetime +import requests +import logging -class OpenWhiskFunction(Function): - def sync_invoke(self, payload: 
dict) -> ExecutionResult: - pass +class OpenwhiskFunction(Function): + def __init__(self, name: str, namespace: str = "guest"): + super().__init__(name) + self.namespace = namespace - def async_invoke(self, payload: dict) -> ExecutionResult: - pass + def sync_invoke(self, payload: dict): + url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" + print(url) + readyPayload = json.dumps(payload) + headers = {"content-type": "application/json", + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + begin = datetime.datetime.now() + logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") + response = requests.request("GET", url, data=readyPayload, headers=headers) + end = datetime.datetime.now() + print( + f"Function {self.name} returned response with code: {response.status_code}" + ) + openwhiskResult = ExecutionResult(begin, end) + if response.status_code != 200: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) + + openwhiskResult.stats.failure = True + return openwhiskResult + returnContent = json.loads(response.content) + openwhiskResult.parse_benchmark_output(returnContent) + return openwhiskResult + + def async_invoke(self, payload: dict): + raise Exception("Non-trigger invoke not supported!") \ No newline at end of file From 8a3f16bdcd3650cd008782ed1a1c45aaf62a7456 Mon Sep 17 00:00:00 2001 From: sborkows Date: Fri, 10 Jul 2020 00:14:20 +0200 Subject: [PATCH 005/140] Added package_code --- sebs/openwhisk/openwhisk.py | 51 +++++++++++++++++++++++++++++++++++-- 1 file changed, 49 insertions(+), 2 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 4bb7b4b3..bb27f9a7 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -1,10 +1,16 @@ +import subprocess 
+import logging +import shutil +import json +import os +from typing import Tuple + import sebs.benchmark from sebs.faas import System, PersistentStorage from sebs.faas.config import Config from sebs.faas.function import Function from .config import OpenWhiskConfig -import subprocess -import logging + class OpenWhisk(System): @@ -94,3 +100,44 @@ def check_openwhisk_installation(namespace: str) -> None: @staticmethod def name() -> str: return "openwhisk" + + def package_code(self, benchmark: sebs.Benchmark) -> Tuple[str, int]: + + benchmark.build() + node = 'nodejs' + node_handler = 'handler.js' + CONFIG_FILES = { + 'python': ['virtualenv', '__main__.py'], + node: [node_handler, 'package.json', 'node_modules'] + } + directory = benchmark.code_location + package_config = CONFIG_FILES[benchmark.language_name] + function_dir = os.path.join(directory, "function") + os.makedirs(function_dir) + + # openwhisk needs main function to be named ina packaged.json + if benchmark.language_name == node: + filename = 'package.json' + with open(filename, 'r') as f: + data = json.load(f) + data['main'] = node_handler + + os.remove(filename) + with open(filename, 'w') as f: + json.dump(data, f, indent=4) + + for file in os.listdir(directory): + if file not in package_config: + file = os.path.join(directory, file) + shutil.move(file, function_dir) + os.chdir(directory) + subprocess.run( + "zip -r {}.zip ./".format(benchmark.benchmark).split(), + stdout=subprocess.DEVNULL, + ) + benchmark_archive = "{}.zip".format( + os.path.join(directory, benchmark.benchmark) + ) + logging.info("Created {} archive".format(benchmark_archive)) + bytes_size = os.path.getsize(benchmark_archive) + return benchmark_archive, bytes_size From 65625b5bf9bb2041c8c29d416deb0d190052b40b Mon Sep 17 00:00:00 2001 From: sborkows Date: Fri, 10 Jul 2020 03:17:09 +0200 Subject: [PATCH 006/140] get_function in progress --- sebs/openwhisk/openwhisk.py | 98 ++++++++++++++++++++++++++++- sebs/openwhisk/openwhiskFunction.py | 
38 +++++++++++ 2 files changed, 133 insertions(+), 3 deletions(-) create mode 100644 sebs/openwhisk/openwhiskFunction.py diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index bb27f9a7..e8da2cdd 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -5,14 +5,14 @@ import os from typing import Tuple -import sebs.benchmark +from sebs import Benchmark from sebs.faas import System, PersistentStorage from sebs.faas.config import Config from sebs.faas.function import Function +from sebs.openwhisk.openwhiskFunction import OpenwhiskFunction from .config import OpenWhiskConfig - class OpenWhisk(System): _config: OpenWhiskConfig @@ -101,7 +101,7 @@ def check_openwhisk_installation(namespace: str) -> None: def name() -> str: return "openwhisk" - def package_code(self, benchmark: sebs.Benchmark) -> Tuple[str, int]: + def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: benchmark.build() node = 'nodejs' @@ -141,3 +141,95 @@ def package_code(self, benchmark: sebs.Benchmark) -> Tuple[str, int]: logging.info("Created {} archive".format(benchmark_archive)) bytes_size = os.path.getsize(benchmark_archive) return benchmark_archive, bytes_size + + def get_function(self, code_package: Benchmark) -> Function: + + if ( + code_package.language_version + not in self.system_config.supported_language_versions(self.name(), code_package.language_name) + ): + raise Exception( + "Unsupported {language} version {version} in Openwhisk!".format( + language=code_package.language_name, + version=code_package.language_version, + ) + ) + + benchmark = code_package.benchmark + func_name = code_package.cached_config["name"] + code_location = code_package.code_location + + if code_package.is_cached and code_package.is_cached_valid: + logging.info( + "Using cached function {fname} in {loc}".format( + fname=func_name, loc=code_location + ) + ) + return OpenwhiskFunction(func_name) + elif code_package.is_cached: + + timeout = 
code_package.benchmark_config.timeout + memory = code_package.benchmark_config.memory + + # Run Openwhisk-specific part of building code. + package, code_size = self.package_code(code_package) + + self.update_function( + benchmark, func_name, package, code_size, timeout, memory + ) + + cached_cfg = code_package.cached_config + cached_cfg["code_size"] = code_size + cached_cfg["timeout"] = timeout + cached_cfg["memory"] = memory + cached_cfg["hash"] = code_package.hash + self.cache_client.update_function( + self.name(), benchmark, code_package.language_name, package, cached_cfg + ) + # FIXME: fix after dissociating code package and benchmark + code_package.query_cache() + + logging.info( + "Updating cached function {fname} in {loc}".format( + fname=func_name, loc=code_location + ) + ) + + return OpenwhiskFunction(func_name) + # no cached instance, create package and upload code + else: + + language = code_package.language_name + language_runtime = code_package.language_version + timeout = code_package.benchmark_config.timeout + memory = code_package.benchmark_config.memory + + # Create function name, validation regexp if needed: \A([\w]|[\w][\w@ .-]*[\w@.-]+)\z + func_name = "{}-{}-{}".format(benchmark, language, memory) + + package, code_size = self.package_code(code_package) + # todo: check if function exists, if so delte otherwise create + + self.cache_client.add_function( + deployment=self.name(), + benchmark=benchmark, + language=language, + code_package=package, + language_config={ + "name": func_name, + "code_size": code_size, + "runtime": language_runtime, + "memory": memory, + "timeout": timeout, + "hash": code_package.hash, + }, + storage_config={ + "buckets": { + "input": self.storage.input_buckets, + "output": self.storage.output_buckets, + } + }, + ) + # FIXME: fix after dissociating code package and benchmark + code_package.query_cache() + return OpenwhiskFunction(func_name) diff --git a/sebs/openwhisk/openwhiskFunction.py 
b/sebs/openwhisk/openwhiskFunction.py new file mode 100644 index 00000000..2989ca1c --- /dev/null +++ b/sebs/openwhisk/openwhiskFunction.py @@ -0,0 +1,38 @@ +from sebs.faas.function import Function, ExecutionResult +import json +import datetime +import requests +import logging + + +class OpenwhiskFunction(Function): + def __init__(self, name: str, namespace: str = "guest"): + super().__init__(name) + self.namespace = namespace + + def sync_invoke(self, payload: dict): + url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" + print(url) + readyPayload = json.dumps(payload) + headers = {"content-type": "application/json", + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + begin = datetime.datetime.now() + logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") + response = requests.request("GET", url, data=readyPayload, headers=headers) + end = datetime.datetime.now() + print( + f"Function {self.name} returned response with code: {response.status_code}" + ) + openwhiskResult = ExecutionResult(begin, end) + if response.status_code != 200: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) + + openwhiskResult.stats.failure = True + return openwhiskResult + returnContent = json.loads(response.content) + openwhiskResult.parse_benchmark_output(returnContent) + return openwhiskResult + + def async_invoke(self, payload: dict): + raise Exception("Non-trigger invoke not supported!") \ No newline at end of file From 24a8f345a94a8036a7f85bc787b774ce66d73300 Mon Sep 17 00:00:00 2001 From: root Date: Fri, 10 Jul 2020 03:53:03 +0200 Subject: [PATCH 007/140] Fix small bug --- sebs/openwhisk/function.py | 3 +-- sebs/openwhisk/openwhiskFunction.py | 38 ----------------------------- 2 files changed, 1 insertion(+), 40 
deletions(-) delete mode 100644 sebs/openwhisk/openwhiskFunction.py diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 2989ca1c..5917f579 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -12,13 +12,12 @@ def __init__(self, name: str, namespace: str = "guest"): def sync_invoke(self, payload: dict): url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" - print(url) readyPayload = json.dumps(payload) headers = {"content-type": "application/json", "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} begin = datetime.datetime.now() logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("GET", url, data=readyPayload, headers=headers) + response = requests.request("POST", url, data=readyPayload, headers=headers) end = datetime.datetime.now() print( f"Function {self.name} returned response with code: {response.status_code}" diff --git a/sebs/openwhisk/openwhiskFunction.py b/sebs/openwhisk/openwhiskFunction.py deleted file mode 100644 index 2989ca1c..00000000 --- a/sebs/openwhisk/openwhiskFunction.py +++ /dev/null @@ -1,38 +0,0 @@ -from sebs.faas.function import Function, ExecutionResult -import json -import datetime -import requests -import logging - - -class OpenwhiskFunction(Function): - def __init__(self, name: str, namespace: str = "guest"): - super().__init__(name) - self.namespace = namespace - - def sync_invoke(self, payload: dict): - url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" - print(url) - readyPayload = json.dumps(payload) - headers = {"content-type": "application/json", - "Authorization": "Basic 
Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - begin = datetime.datetime.now() - logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("GET", url, data=readyPayload, headers=headers) - end = datetime.datetime.now() - print( - f"Function {self.name} returned response with code: {response.status_code}" - ) - openwhiskResult = ExecutionResult(begin, end) - if response.status_code != 200: - logging.error("Invocation of {} failed!".format(self.name)) - logging.error("Input: {}".format(readyPayload)) - - openwhiskResult.stats.failure = True - return openwhiskResult - returnContent = json.loads(response.content) - openwhiskResult.parse_benchmark_output(returnContent) - return openwhiskResult - - def async_invoke(self, payload: dict): - raise Exception("Non-trigger invoke not supported!") \ No newline at end of file From 30273e2bca7aca147177cd7d46aaeb61c26bf36b Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Fri, 10 Jul 2020 05:17:27 +0200 Subject: [PATCH 008/140] [OpenWhisk] Implement automatic openwhisk deployment on kind cluster --- openwhisk/mycluster_template.yaml | 12 +++ sebs/openwhisk/__init__.py | 1 + sebs/openwhisk/openwhisk.py | 170 ++++++++++++++++++++++++++++-- 3 files changed, 174 insertions(+), 9 deletions(-) create mode 100644 openwhisk/mycluster_template.yaml diff --git a/openwhisk/mycluster_template.yaml b/openwhisk/mycluster_template.yaml new file mode 100644 index 00000000..42585620 --- /dev/null +++ b/openwhisk/mycluster_template.yaml @@ -0,0 +1,12 @@ +whisk: + ingress: + type: NodePort + apiHostName: + apiHostPort: + +invoker: + containerFactory: + impl: "kubernetes" + +nginx: + httpsNodePort: \ No newline at end of file diff --git a/sebs/openwhisk/__init__.py b/sebs/openwhisk/__init__.py index e69de29b..07176434 100644 --- a/sebs/openwhisk/__init__.py +++ b/sebs/openwhisk/__init__.py @@ -0,0 +1 @@ 
+from .openwhisk import OpenWhisk # noqa diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index e8da2cdd..23948d6b 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -9,8 +9,10 @@ from sebs.faas import System, PersistentStorage from sebs.faas.config import Config from sebs.faas.function import Function -from sebs.openwhisk.openwhiskFunction import OpenwhiskFunction +from .function import OpenwhiskFunction from .config import OpenWhiskConfig +import yaml +import time class OpenWhisk(System): @@ -23,7 +25,7 @@ def config(self) -> Config: def get_storage(self, replace_existing: bool) -> PersistentStorage: pass - def get_function(self, code_package: sebs.benchmark.Benchmark) -> Function: + def get_function(self, code_package: Benchmark) -> Function: pass def shutdown(self) -> None: @@ -44,8 +46,8 @@ def __check_installation__(app: str, cmd: str) -> None: logging.info('Checking {} installation...'.format(app)) OpenWhisk.__run_check_process__(cmd) logging.info('Check successful, proceeding...') - except subprocess.CalledProcessError: - logging.error('Cannot find {}, aborting'.format(app)) + except subprocess.CalledProcessError as e: + logging.error('Cannot find {}, aborting, reason: {}'.format(app, e.output)) exit(1) @staticmethod @@ -68,12 +70,124 @@ def check_kind_installation() -> None: @staticmethod def check_kubectl_installation() -> None: - OpenWhisk.__check_installation__("kubectl", "kubectl version") + OpenWhisk.__check_installation__("kubectl", "kubectl version --client=true") @staticmethod def check_helm_installation() -> None: OpenWhisk.__check_installation__("helm", "helm version") + @staticmethod + def check_kind_cluster() -> None: + try: + kind_clusters_process = subprocess.run( + "kind get clusters".split(), + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + kind_clusters = set(kind_clusters_process.stdout.decode('utf-8').split()) + if "kind" not in kind_clusters: + 
logging.info("Creating kind cluster...") + OpenWhisk.create_kind_cluster() + except subprocess.CalledProcessError as e: + logging.error("Cannot check kind cluster, reason: {}".format(e.output)) + + @staticmethod + def create_kind_cluster() -> None: + try: + subprocess.run( + "kind create cluster --config openwhisk/kind-cluster.yaml".split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + while True: + nodes = subprocess.run( + "kubectl get nodes".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + node_grep = subprocess.run( + "grep kind".split(), + input=nodes.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + awk = subprocess.run( + ["awk", r'{print $2}'], + check=True, + input=node_grep.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + node_statuses = awk.stdout.decode('utf-8').split() + if all(node_status == 'Ready' for node_status in node_statuses): + break + time.sleep(1) + except subprocess.CalledProcessError as e: + logging.error("Cannot create kind cluster. 
reason: {}".format(e.output)) + + @staticmethod + def get_worker_ip() -> str: + try: + logging.info('Attempting to find worker IP...') + kind_worker_description = subprocess.run( + "kubectl describe node kind-worker".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + grep_internal_ip = subprocess.run( + "grep InternalIP".split(), + check=True, + input=kind_worker_description.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + return grep_internal_ip.stdout.decode("utf-8").split()[1] + except subprocess.CalledProcessError as e: + logging.error("Error during finding worker IP: {}".format(e.output)) + + @staticmethod + def label_nodes() -> None: + def label_node(node: str, role: str) -> None: + subprocess.run( + "kubectl label node {} openwhisk-role={}".format(node, role).split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + try: + logging.info('Labelling nodes') + label_node('kind-worker', 'core') + label_node('kind-worker2', 'invoker') + except subprocess.CalledProcessError as e: + logging.error('Cannot label nodes, reason: {}'.format(e.output)) + + @staticmethod + def clone_openwhisk_chart() -> None: + try: + subprocess.run( + "git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube".split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except subprocess.CalledProcessError as e: + logging.error("Cannot clone openwhisk chart, reason: {}".format(e.output)) + + @staticmethod + def prepare_openwhisk_config() -> None: + worker_ip = OpenWhisk.get_worker_ip() + with open('openwhisk/mycluster_template.yaml', 'r') as openwhisk_config_template: + data = yaml.unsafe_load(openwhisk_config_template) + data['whisk']['ingress']['apiHostName'] = worker_ip + data['whisk']['ingress']['apiHostPort'] = 31001 + data['nginx']['httpsNodePort'] = 31001 + if not os.path.exists('/tmp/openwhisk-deploy-kube/mycluster.yaml'): + with 
open('/tmp/openwhisk-deploy-kube/mycluster.yaml', 'a+') as openwhisk_config: + openwhisk_config.write(yaml.dump(data, default_flow_style=False)) + @staticmethod def check_openwhisk_installation(namespace: str) -> None: try: @@ -91,11 +205,49 @@ def check_openwhisk_installation(namespace: str) -> None: input=namespaces.stdout, ) logging.info("Openwhisk installed!") - except subprocess.CalledProcessError: + except subprocess.CalledProcessError as e: logging.info("Openwhisk is not installed, proceeding with installation...") + OpenWhisk.helm_install() + + @staticmethod + def helm_install() -> None: + try: subprocess.run( - "helm install owdev " + "helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n openwhisk --create-namespace -f " + "/tmp/openwhisk-deploy-kube/mycluster.yaml".split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, ) + while True: + pods = subprocess.run( + "kubectl get pods -n openwhisk".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + install_packages_grep = subprocess.run( + "grep install-packages".split(), + input=pods.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + install_packages_status = install_packages_grep.stdout.decode('utf-8').split()[2] + if install_packages_status == 'Completed': + break + time.sleep(1) + except subprocess.CalledProcessError as e: + logging.error("Cannot install openwhisk, reason: {}".format(e.output)) + + @staticmethod + def install_openwhisk() -> None: + OpenWhisk.check_kind_installation() + OpenWhisk.check_kubectl_installation() + OpenWhisk.check_helm_installation() + OpenWhisk.check_kind_cluster() + OpenWhisk.label_nodes() + OpenWhisk.clone_openwhisk_chart() + OpenWhisk.prepare_openwhisk_config() + OpenWhisk.check_openwhisk_installation('openwhisk') @staticmethod def name() -> str: @@ -145,8 +297,8 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: def get_function(self, code_package: Benchmark) -> Function: if ( - 
code_package.language_version - not in self.system_config.supported_language_versions(self.name(), code_package.language_name) + code_package.language_version + not in self.system_config.supported_language_versions(self.name(), code_package.language_name) ): raise Exception( "Unsupported {language} version {version} in Openwhisk!".format( From 892edcbd145e1aa51b815c4f2b8807e8f9e65b42 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Fri, 10 Jul 2020 10:33:53 +0200 Subject: [PATCH 009/140] [OpenWhisk] Add missing kind-cluster.yaml --- openwhisk/kind-cluster.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 openwhisk/kind-cluster.yaml diff --git a/openwhisk/kind-cluster.yaml b/openwhisk/kind-cluster.yaml new file mode 100644 index 00000000..74b3227c --- /dev/null +++ b/openwhisk/kind-cluster.yaml @@ -0,0 +1,9 @@ +kind: Cluster +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: +- role: control-plane +- role: worker + extraPortMappings: + - hostPort: 31001 + containerPort: 31001 +- role: worker \ No newline at end of file From ec434469ab0875fd69b42cfd0d9e6b99e12aa1d8 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Sat, 11 Jul 2020 21:14:59 +0200 Subject: [PATCH 010/140] [OpenWhisk] Catch FileNotFoundError during cluster initialization --- sebs/openwhisk/openwhisk.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 23948d6b..d7271475 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -46,7 +46,7 @@ def __check_installation__(app: str, cmd: str) -> None: logging.info('Checking {} installation...'.format(app)) OpenWhisk.__run_check_process__(cmd) logging.info('Check successful, proceeding...') - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error('Cannot find {}, aborting, reason: {}'.format(app, e.output)) exit(1) @@ -56,7 +56,7 @@ def install_kind() -> 
None: logging.info('Installing kind...') OpenWhisk.__run_check_process__('GO111MODULE="on" go get sigs.k8s.io/kind@v0.8.1') logging.info('Kind has been installed') - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error('Cannot install kind, reason: {}'.format(e.output)) exit(1) @@ -64,7 +64,7 @@ def install_kind() -> None: def check_kind_installation() -> None: try: OpenWhisk.__run_check_process__('kind --version') - except subprocess.CalledProcessError: + except (subprocess.CalledProcessError, FileNotFoundError): logging.error('Cannot find kind executable, installing...') OpenWhisk.install_kind() @@ -89,7 +89,7 @@ def check_kind_cluster() -> None: if "kind" not in kind_clusters: logging.info("Creating kind cluster...") OpenWhisk.create_kind_cluster() - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot check kind cluster, reason: {}".format(e.output)) @staticmethod @@ -124,7 +124,7 @@ def create_kind_cluster() -> None: if all(node_status == 'Ready' for node_status in node_statuses): break time.sleep(1) - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot create kind cluster. 
reason: {}".format(e.output)) @staticmethod @@ -144,7 +144,7 @@ def get_worker_ip() -> str: stderr=subprocess.DEVNULL, ) return grep_internal_ip.stdout.decode("utf-8").split()[1] - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Error during finding worker IP: {}".format(e.output)) @staticmethod @@ -161,7 +161,7 @@ def label_node(node: str, role: str) -> None: logging.info('Labelling nodes') label_node('kind-worker', 'core') label_node('kind-worker2', 'invoker') - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error('Cannot label nodes, reason: {}'.format(e.output)) @staticmethod @@ -173,7 +173,7 @@ def clone_openwhisk_chart() -> None: stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot clone openwhisk chart, reason: {}".format(e.output)) @staticmethod @@ -205,7 +205,7 @@ def check_openwhisk_installation(namespace: str) -> None: input=namespaces.stdout, ) logging.info("Openwhisk installed!") - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError): logging.info("Openwhisk is not installed, proceeding with installation...") OpenWhisk.helm_install() @@ -235,7 +235,7 @@ def helm_install() -> None: if install_packages_status == 'Completed': break time.sleep(1) - except subprocess.CalledProcessError as e: + except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot install openwhisk, reason: {}".format(e.output)) @staticmethod From bcef1f360cca7f63868d27a346a03b8fee111ec3 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Sat, 11 Jul 2020 21:56:36 +0200 Subject: [PATCH 011/140] [OpenWhisk] Fix wrong logging in error catching, expose Couch DB locally --- openwhisk/couchdb-service.yaml | 16 ++++++++++++++++ 
openwhisk/kind-cluster.yaml | 2 ++ sebs/openwhisk/openwhisk.py | 28 ++++++++++++++++++++-------- 3 files changed, 38 insertions(+), 8 deletions(-) create mode 100644 openwhisk/couchdb-service.yaml diff --git a/openwhisk/couchdb-service.yaml b/openwhisk/couchdb-service.yaml new file mode 100644 index 00000000..27abac77 --- /dev/null +++ b/openwhisk/couchdb-service.yaml @@ -0,0 +1,16 @@ +apiVersion: v1 +kind: Service +metadata: + name: access-couchdb + namespace: openwhisk +spec: + ports: + - name: access-couchdb + nodePort: 31201 + port: 5984 + protocol: TCP + targetPort: 5984 + selector: + name: owdev-couchdb + type: NodePort + diff --git a/openwhisk/kind-cluster.yaml b/openwhisk/kind-cluster.yaml index 74b3227c..8a8bb8f7 100644 --- a/openwhisk/kind-cluster.yaml +++ b/openwhisk/kind-cluster.yaml @@ -4,6 +4,8 @@ nodes: - role: control-plane - role: worker extraPortMappings: + - hostPort: 31201 + containerPort: 31201 - hostPort: 31001 containerPort: 31001 - role: worker \ No newline at end of file diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index d7271475..69a7ce3b 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -47,7 +47,7 @@ def __check_installation__(app: str, cmd: str) -> None: OpenWhisk.__run_check_process__(cmd) logging.info('Check successful, proceeding...') except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot find {}, aborting, reason: {}'.format(app, e.output)) + logging.error('Cannot find {}, aborting, reason: {}'.format(app, e)) exit(1) @staticmethod @@ -57,7 +57,7 @@ def install_kind() -> None: OpenWhisk.__run_check_process__('GO111MODULE="on" go get sigs.k8s.io/kind@v0.8.1') logging.info('Kind has been installed') except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install kind, reason: {}'.format(e.output)) + logging.error('Cannot install kind, reason: {}'.format(e)) exit(1) @staticmethod @@ -90,7 +90,7 @@ def 
check_kind_cluster() -> None: logging.info("Creating kind cluster...") OpenWhisk.create_kind_cluster() except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot check kind cluster, reason: {}".format(e.output)) + logging.error("Cannot check kind cluster, reason: {}".format(e)) @staticmethod def create_kind_cluster() -> None: @@ -125,7 +125,7 @@ def create_kind_cluster() -> None: break time.sleep(1) except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot create kind cluster. reason: {}".format(e.output)) + logging.error("Cannot create kind cluster. reason: {}".format(e)) @staticmethod def get_worker_ip() -> str: @@ -145,7 +145,7 @@ def get_worker_ip() -> str: ) return grep_internal_ip.stdout.decode("utf-8").split()[1] except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Error during finding worker IP: {}".format(e.output)) + logging.error("Error during finding worker IP: {}".format(e)) @staticmethod def label_nodes() -> None: @@ -162,7 +162,7 @@ def label_node(node: str, role: str) -> None: label_node('kind-worker', 'core') label_node('kind-worker2', 'invoker') except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot label nodes, reason: {}'.format(e.output)) + logging.error('Cannot label nodes, reason: {}'.format(e)) @staticmethod def clone_openwhisk_chart() -> None: @@ -174,7 +174,7 @@ def clone_openwhisk_chart() -> None: stderr=subprocess.DEVNULL, ) except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot clone openwhisk chart, reason: {}".format(e.output)) + logging.error("Cannot clone openwhisk chart, reason: {}".format(e)) @staticmethod def prepare_openwhisk_config() -> None: @@ -236,7 +236,19 @@ def helm_install() -> None: break time.sleep(1) except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot install openwhisk, reason: {}".format(e.output)) + logging.error("Cannot install 
openwhisk, reason: {}".format(e)) + + @staticmethod + def expose_couchdb(): + try: + subprocess.run( + "kubectl apply -f openwhisk/couchdb-service.yaml", + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot expose Couch DB, reason: {}".format(e)) @staticmethod def install_openwhisk() -> None: From 52cee648306922c99b863b251d3185f7a9a8b223 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Sat, 11 Jul 2020 23:39:56 +0200 Subject: [PATCH 012/140] [OpenWhisk] Fix install_kind method --- sebs/openwhisk/openwhisk.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 69a7ce3b..047a9323 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -54,7 +54,15 @@ def __check_installation__(app: str, cmd: str) -> None: def install_kind() -> None: try: logging.info('Installing kind...') - OpenWhisk.__run_check_process__('GO111MODULE="on" go get sigs.k8s.io/kind@v0.8.1') + env = os.environ.copy() + env['GO111MODULE'] = 'on' + subprocess.run( + 'go get sigs.k8s.io/kind@v0.8.1'.split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + ) logging.info('Kind has been installed') except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error('Cannot install kind, reason: {}'.format(e)) @@ -242,7 +250,7 @@ def helm_install() -> None: def expose_couchdb(): try: subprocess.run( - "kubectl apply -f openwhisk/couchdb-service.yaml", + "kubectl apply -f openwhisk/couchdb-service.yaml".split(), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True, @@ -260,6 +268,7 @@ def install_openwhisk() -> None: OpenWhisk.clone_openwhisk_chart() OpenWhisk.prepare_openwhisk_config() OpenWhisk.check_openwhisk_installation('openwhisk') + OpenWhisk.expose_couchdb() @staticmethod def name() -> str: From 
acfcb1b8d71a8dde25d65eab8bccf07a068d9a45 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Sun, 12 Jul 2020 12:13:39 +0200 Subject: [PATCH 013/140] [OpenWhisk] Ignore IntelliJ files, add kubectl installation scripts --- .gitignore | 4 ++ sebs/openwhisk/openwhisk.py | 119 +++++++++++++++++++++++------------- 2 files changed, 80 insertions(+), 43 deletions(-) diff --git a/.gitignore b/.gitignore index e707ac85..4caca42c 100644 --- a/.gitignore +++ b/.gitignore @@ -170,3 +170,7 @@ dmypy.json sebs-* # cache cache + +# IntelliJ IDEA files +.idea +*.iml \ No newline at end of file diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 047a9323..d7d8a044 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -32,12 +32,16 @@ def shutdown(self) -> None: pass @staticmethod - def __run_check_process__(cmd: str) -> None: + def __run_check_process__(cmd: str, **kwargs) -> None: + env = os.environ.copy() + env = {**env, **kwargs} + subprocess.run( cmd.split(), check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, + env=env, ) @staticmethod @@ -54,15 +58,7 @@ def __check_installation__(app: str, cmd: str) -> None: def install_kind() -> None: try: logging.info('Installing kind...') - env = os.environ.copy() - env['GO111MODULE'] = 'on' - subprocess.run( - 'go get sigs.k8s.io/kind@v0.8.1'.split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - env=env, - ) + OpenWhisk.__run_check_process__('go get sigs.k8s.io/kind@v0.8.1', GO111MODULE='on') logging.info('Kind has been installed') except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error('Cannot install kind, reason: {}'.format(e)) @@ -78,12 +74,54 @@ def check_kind_installation() -> None: @staticmethod def check_kubectl_installation() -> None: - OpenWhisk.__check_installation__("kubectl", "kubectl version --client=true") + try: + logging.info("Checking kubectl installation...") + OpenWhisk.__run_check_process__('kubectl version 
--clien=true') + logging.info("Kubectl is installed") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.info("Kubectl is not installed, proceeding to install...") + OpenWhisk.install_kubectl() + + @staticmethod + def install_kubectl() -> None: + try: + logging.info('Installing kubectl...') + home_path = os.environ['HOME'] + kubectl_path = '{}/.local/bin/kubectl'.format(home_path) + OpenWhisk.__run_check_process__("curl -L -o {} " + "https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin" + "/linux/amd64/kubectl".format(kubectl_path)) + OpenWhisk.__run_check_process__("chmod +x {}".format(kubectl_path)) + logging.info('Kubectl has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install kubectl, reason: {}'.format(e)) + exit(1) @staticmethod def check_helm_installation() -> None: OpenWhisk.__check_installation__("helm", "helm version") + @staticmethod + def install_helm() -> None: + try: + logging.info('Installing helm...') + helm_package = subprocess.run( + "curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + subprocess.run( + "sh -".split(), + input=helm_package.stdout, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + logging.info('Helm has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install helm, reason: {}'.format(e)) + exit(1) + @staticmethod def check_kind_cluster() -> None: try: @@ -103,12 +141,7 @@ def check_kind_cluster() -> None: @staticmethod def create_kind_cluster() -> None: try: - subprocess.run( - "kind create cluster --config openwhisk/kind-cluster.yaml".split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + OpenWhisk.__run_check_process__("kind create cluster --config openwhisk/kind-cluster.yaml") while True: nodes = subprocess.run( 
"kubectl get nodes".split(), @@ -158,12 +191,7 @@ def get_worker_ip() -> str: @staticmethod def label_nodes() -> None: def label_node(node: str, role: str) -> None: - subprocess.run( - "kubectl label node {} openwhisk-role={}".format(node, role).split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + OpenWhisk.__run_check_process__("kubectl label node {} openwhisk-role={}".format(node, role)) try: logging.info('Labelling nodes') @@ -175,12 +203,8 @@ def label_node(node: str, role: str) -> None: @staticmethod def clone_openwhisk_chart() -> None: try: - subprocess.run( - "git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube".split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + OpenWhisk.__run_check_process__( + "git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube") except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot clone openwhisk chart, reason: {}".format(e)) @@ -220,13 +244,8 @@ def check_openwhisk_installation(namespace: str) -> None: @staticmethod def helm_install() -> None: try: - subprocess.run( - "helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n openwhisk --create-namespace -f " - "/tmp/openwhisk-deploy-kube/mycluster.yaml".split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + OpenWhisk.__run_check_process__("helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n " + "openwhisk --create-namespace -f /tmp/openwhisk-deploy-kube/mycluster.yaml") while True: pods = subprocess.run( "kubectl get pods -n openwhisk".split(), @@ -247,14 +266,9 @@ def helm_install() -> None: logging.error("Cannot install openwhisk, reason: {}".format(e)) @staticmethod - def expose_couchdb(): + def expose_couchdb() -> None: try: - subprocess.run( - "kubectl apply -f openwhisk/couchdb-service.yaml".split(), - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - 
check=True, - ) + OpenWhisk.__run_check_process__("kubectl apply -f openwhisk/couchdb-service.yaml") except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot expose Couch DB, reason: {}".format(e)) @@ -274,6 +288,25 @@ def install_openwhisk() -> None: def name() -> str: return "openwhisk" + @staticmethod + def get_openwhisk_url() -> str: + ip = OpenWhisk.get_worker_ip() + return '{}:{}'.format(ip, 31001) + + @staticmethod + def get_couchdb_url() -> str: + ip = OpenWhisk.get_worker_ip() + return '{}:{}'.format(ip, 31201) + + @staticmethod + def delete_cluster(): + try: + logging.info('Deleting KinD cluster...') + OpenWhisk.__run_check_process__('kind delete cluster') + logging.info('KinD cluster deleted...') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot delete cluster, reason: {}".format(e)) + def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: benchmark.build() From 1917c66c5917b12aa9ff29933ed19070e2e02637 Mon Sep 17 00:00:00 2001 From: maciekak Date: Sun, 12 Jul 2020 14:09:35 +0200 Subject: [PATCH 014/140] Add sync_invoke and async_invoke --- sebs/openwhisk/function.py | 74 +++++++++++++++++++++++++++++++++++--- 1 file changed, 70 insertions(+), 4 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 6c719f04..19203d17 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -6,18 +6,21 @@ class OpenwhiskFunction(Function): - def __init__(self, name: str, namespace: str = "guest"): + def __init__(self, name: str, namespace: str = "_"): super().__init__(name) self.namespace = namespace def sync_invoke(self, payload: dict): - url = f"http://172.17.0.1:3233/api/v1/namespaces/{self.namespace}/actions/{self.name}?blocking=true&result=true" + from sebs.openwhisk.openwhisk import OpenWhisk + ip = OpenWhisk.get_openwhisk_url() + url = f"https://{ip}/api/v1/namespaces/{self.namespace}/actions/{self.name}?result=true&blocking=true" 
readyPayload = json.dumps(payload) + headers = {"content-type": "application/json", "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} begin = datetime.datetime.now() logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("POST", url, data=readyPayload, headers=headers) + response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) end = datetime.datetime.now() print( f"Function {self.name} returned response with code: {response.status_code}" @@ -26,12 +29,75 @@ def sync_invoke(self, payload: dict): if response.status_code != 200: logging.error("Invocation of {} failed!".format(self.name)) logging.error("Input: {}".format(readyPayload)) + logging.error("Input: {}".format(response.content)) openwhiskResult.stats.failure = True return openwhiskResult returnContent = json.loads(response.content) + openwhiskResult.parse_benchmark_output(returnContent) return openwhiskResult def async_invoke(self, payload: dict): - raise Exception("Non-trigger invoke not supported!") \ No newline at end of file + from sebs.openwhisk.openwhisk import OpenWhisk + import time + import datetime + ip = OpenWhisk.get_openwhisk_url() + url = f"https://{ip}/api/v1/namespaces/{self.namespace}/actions/{self.name}?result=true" + readyPayload = json.dumps(payload) + print(url) + headers = {"content-type": "application/json", + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + begin = datetime.datetime.now() + logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") + response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) + end = datetime.datetime.now() + print( + f"Function {self.name} returned response with code: 
{response.status_code}" + ) + openwhiskResult = ExecutionResult(begin, end) + if response.status_code != 202: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) + logging.error("Input: {}".format(response.content)) + + openwhiskResult.stats.failure = True + return openwhiskResult + activationId = json.loads(response.content)['activationId'] + + url = f"https://{ip}/api/v1/namespaces/_/activations/{activationId}" + readyPayload = json.dumps(payload) + headers = {"content-type": "application/json", + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + begin = datetime.datetime.now() + attempt = 1 + while(True): + print(f"Function {self.name} of namespace getting result. Attempt: {attempt}") + + response = requests.request("GET", url, data=readyPayload, headers=headers, verify=False) + if response.status_code == 404: + time.sleep(0.05) + attempt += 1 + continue + break + + print( + f"Function {self.name} returned response with code: {response.status_code}" + ) + result = json.loads(response.content) + print(result) + if response.status_code != 200: + logging.error("Invocation of {} failed!".format(self.name)) + logging.error("Input: {}".format(readyPayload)) + logging.error("Input: {}".format(response.content)) + + openwhiskResult.stats.failure = True + return openwhiskResult + + + begin = datetime.datetime.fromtimestamp(result['start'] / 1e3) + end = datetime.datetime.fromtimestamp(result['end'] / 1e3) + returnContent = result['response']['result'] + openwhiskResult = ExecutionResult(begin, end) + openwhiskResult.parse_benchmark_output(returnContent) + return openwhiskResult \ No newline at end of file From bde021a13b0de463ddf0acdb47b261c0f05e93b5 Mon Sep 17 00:00:00 2001 From: maciekak Date: Sun, 12 Jul 2020 14:16:52 +0200 Subject: [PATCH 015/140] Remove unncessery prints --- 
sebs/openwhisk/function.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 19203d17..3da2ac8b 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -48,14 +48,13 @@ def async_invoke(self, payload: dict): print(url) headers = {"content-type": "application/json", "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - begin = datetime.datetime.now() + logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) - end = datetime.datetime.now() + print( f"Function {self.name} returned response with code: {response.status_code}" ) - openwhiskResult = ExecutionResult(begin, end) if response.status_code != 202: logging.error("Invocation of {} failed!".format(self.name)) logging.error("Input: {}".format(readyPayload)) @@ -85,7 +84,7 @@ def async_invoke(self, payload: dict): f"Function {self.name} returned response with code: {response.status_code}" ) result = json.loads(response.content) - print(result) + if response.status_code != 200: logging.error("Invocation of {} failed!".format(self.name)) logging.error("Input: {}".format(readyPayload)) From 9913da8b6576fe2d1c3df88a373546f5bc3a30f6 Mon Sep 17 00:00:00 2001 From: sborkows Date: Fri, 17 Jul 2020 23:08:19 +0200 Subject: [PATCH 016/140] Added build configuration --- config/systems.json | 33 ++++++++++++++++++++++++ docker/Dockerfile.build.openwhisk.nodejs | 10 +++++++ docker/Dockerfile.build.openwhisk.python | 16 ++++++++++++ sebs/openwhisk/openwhisk.py | 2 +- 4 files changed, 60 insertions(+), 1 deletion(-) create mode 100755 docker/Dockerfile.build.openwhisk.nodejs create mode 100755 docker/Dockerfile.build.openwhisk.python diff --git a/config/systems.json b/config/systems.json index 
a4d21ae5..da77c9f5 100644 --- a/config/systems.json +++ b/config/systems.json @@ -100,5 +100,38 @@ "username": "docker_user" } } + }, + "openwhisk": { + "languages": { + "python": { + "base_images": { + "3.6": "openwhisk/python3action:1.14.0", + "2.7": "openwhisk/python2action" + }, + "versions": ["3.6", "2.7"], + "images": ["build"], + "username": "docker_user", + "deployment": { + "files": [ "__main__.py", "storage.py"], + "packages": [] + } + }, + "nodejs": { + "base_images": { + "8" : "openwhisk/action-nodejs-v8", + "10" : "openwhisk/action-nodejs-v10", + "12" : "openwhisk/action-nodejs-v12" + }, + "versions": [ "8", "10", "12"], + "images": ["build"], + "username": "docker_user", + "deployment": { + "files": [ "handler.js", "storage.js"], + "packages": { + "uuid": "3.4.0" + } + } + } + } } } diff --git a/docker/Dockerfile.build.openwhisk.nodejs b/docker/Dockerfile.build.openwhisk.nodejs new file mode 100755 index 00000000..d25b49e2 --- /dev/null +++ b/docker/Dockerfile.build.openwhisk.nodejs @@ -0,0 +1,10 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG USER +ENV HOME=/home/${USER} + +RUN useradd --uid 1000 -m ${USER} +WORKDIR ${HOME} +USER ${USER}:${USER} + +CMD cd /mnt/function && npm install && rm -rf package-lock.json diff --git a/docker/Dockerfile.build.openwhisk.python b/docker/Dockerfile.build.openwhisk.python new file mode 100755 index 00000000..35577721 --- /dev/null +++ b/docker/Dockerfile.build.openwhisk.python @@ -0,0 +1,16 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG USER +ARG VERSION +ENV HOME=/home/${USER} +ENV PYTHON_VERSION=${VERSION} + +RUN useradd --uid 1000 ${USER} +WORKDIR ${HOME} + + +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD cd /mnt/function\ + && virtualenv virtualenv && source virtualenv/bin/activate && + && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip3 -q install -r requirements.txt -r requirements.txt.${PYTHON_VERSION}; else pip3 -q install -r requirements.txt ; fi\ + && if test -f "${SCRIPT_FILE}"; then /bin/bash 
${SCRIPT_FILE} .virtualenv/lib/python3.6/site-packages ; fi diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index d7d8a044..0d27c885 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -76,7 +76,7 @@ def check_kind_installation() -> None: def check_kubectl_installation() -> None: try: logging.info("Checking kubectl installation...") - OpenWhisk.__run_check_process__('kubectl version --clien=true') + OpenWhisk.__run_check_process__('kubectl version --client=true') logging.info("Kubectl is installed") except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.info("Kubectl is not installed, proceeding to install...") From 9041d74483e5c967e6d2230755f17cc5f5c9c6ee Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Thu, 3 Sep 2020 18:34:52 +0200 Subject: [PATCH 017/140] [OpenWhisk] Happy-case is handled, tests are being run, still need to fix wrong implementation of async_invoke and add automatic installation of wsk tool --- .../python/requirements.txt | 0 benchmarks/wrappers/openwhisk/nodejs/index.js | 28 ++ .../wrappers/openwhisk/nodejs/storage.js | 63 +++ .../wrappers/openwhisk/python/__main__.py | 33 ++ .../wrappers/openwhisk/python/storage.py | 61 +++ config/openwhisk.json | 16 + config/systems.json | 12 +- sebs/config.py | 7 + sebs/openwhisk/__init__.py | 2 + sebs/openwhisk/function.py | 57 +-- sebs/openwhisk/minio.py | 153 +++++++ sebs/openwhisk/openwhisk.py | 388 +++++------------- sebs/sebs.py | 6 +- tools/openwhisk_preparation.py | 281 +++++++++++++ 14 files changed, 783 insertions(+), 324 deletions(-) create mode 100644 benchmarks/200.multimedia/220.video-processing/python/requirements.txt create mode 100644 benchmarks/wrappers/openwhisk/nodejs/index.js create mode 100644 benchmarks/wrappers/openwhisk/nodejs/storage.js create mode 100644 benchmarks/wrappers/openwhisk/python/__main__.py create mode 100644 benchmarks/wrappers/openwhisk/python/storage.py create mode 100644 config/openwhisk.json 
create mode 100644 sebs/openwhisk/minio.py create mode 100644 tools/openwhisk_preparation.py diff --git a/benchmarks/200.multimedia/220.video-processing/python/requirements.txt b/benchmarks/200.multimedia/220.video-processing/python/requirements.txt new file mode 100644 index 00000000..e69de29b diff --git a/benchmarks/wrappers/openwhisk/nodejs/index.js b/benchmarks/wrappers/openwhisk/nodejs/index.js new file mode 100644 index 00000000..c9499c4c --- /dev/null +++ b/benchmarks/wrappers/openwhisk/nodejs/index.js @@ -0,0 +1,28 @@ +const path = require('path'), fs = require('fs'); + +async function main(args) { + var func = require('./function/function'); + var begin = Date.now() / 1000; + var start = process.hrtime(); + var ret = await func.handler(args); + var elapsed = process.hrtime(start); + var end = Date.now() / 1000; + var micro = elapsed[1] / 1e3 + elapsed[0] * 1e6; + var is_cold = false; + var fname = path.join('/tmp', 'cold_run'); + if (!fs.existsSync(fname)) { + is_cold = true; + fs.closeSync(fs.openSync(fname, 'w')); + } + + return { + begin: begin, + end: end, + compute_time: micro, + results_time: 0, + result: ret, + is_cold: is_cold, + }; +} + +exports.main = main; \ No newline at end of file diff --git a/benchmarks/wrappers/openwhisk/nodejs/storage.js b/benchmarks/wrappers/openwhisk/nodejs/storage.js new file mode 100644 index 00000000..e078af75 --- /dev/null +++ b/benchmarks/wrappers/openwhisk/nodejs/storage.js @@ -0,0 +1,63 @@ + +const minio = require('minio'), + uuid = require('uuid'), + util = require('util'), + stream = require('stream'), + fs = require('fs'); + +class minio_storage { + + constructor() { + let minioConfig = JSON.parse(fs.readFileSync('minioConfig.json')); + let address = minioConfig["url"]; + let access_key = minioConfig["access_key"]; + let secret_key = minioConfig["secret_key"]; + + this.client = new minio.Client( + { + endPoint: address.split(':')[0], + port: parseInt(address.split(':')[1], 10), + accessKey: access_key, + 
secretKey: secret_key, + useSSL: false + } + ); + } + + unique_name(file) { + let [name, extension] = file.split('.'); + let uuid_name = uuid.v4().split('-')[0]; + return util.format('%s.%s.%s', name, uuid_name, extension); + } + + upload(bucket, file, filepath) { + let uniqueName = this.unique_name(file); + return [uniqueName, this.client.fPutObject(bucket, uniqueName, filepath)]; + }; + + download(bucket, file, filepath) { + return this.client.fGetObject(bucket, file, filepath); + }; + + uploadStream(bucket, file) { + var write_stream = new stream.PassThrough(); + let uniqueName = this.unique_name(file); + let promise = this.client.putObject(bucket, uniqueName, write_stream, write_stream.size); + return [write_stream, promise, uniqueName]; + }; + + downloadStream(bucket, file) { + var read_stream = new stream.PassThrough(); + return this.client.getObject(bucket, file); + }; + + static get_instance() { + if(!this.instance) { + this.instance = new storage(); + } + return this.instance; + } + + +}; +exports.storage = minio_storage; \ No newline at end of file diff --git a/benchmarks/wrappers/openwhisk/python/__main__.py b/benchmarks/wrappers/openwhisk/python/__main__.py new file mode 100644 index 00000000..e6ce67ca --- /dev/null +++ b/benchmarks/wrappers/openwhisk/python/__main__.py @@ -0,0 +1,33 @@ +import logging +import datetime +import os + + +def main(args): + logging.getLogger().setLevel(logging.INFO) + begin = datetime.datetime.now() + + from function import function + ret = function.handler(args) + + end = datetime.datetime.now() + logging.info("Function result: {}".format(ret)) + log_data = {"result": ret["result"]} + if "measurement" in ret: + log_data["measurement"] = ret["measurement"] + + results_time = (end - begin) / datetime.timedelta(microseconds=1) + + is_cold = False + fname = "cold_run" + if not os.path.exists(fname): + is_cold = True + open(fname, "a").close() + + return { + "begin": begin.strftime("%s.%f"), + "end": end.strftime("%s.%f"), + 
"results_time": results_time, + "is_cold": is_cold, + "result": log_data, + } diff --git a/benchmarks/wrappers/openwhisk/python/storage.py b/benchmarks/wrappers/openwhisk/python/storage.py new file mode 100644 index 00000000..d41cede2 --- /dev/null +++ b/benchmarks/wrappers/openwhisk/python/storage.py @@ -0,0 +1,61 @@ +import os +import uuid +import json +import minio +import logging + + +class storage: + instance = None + client = None + + def __init__(self): + file = open(os.path.join(os.path.dirname(__file__), "minioConfig.json"), "r") + minioConfig = json.load(file) + try: + self.client = minio.Minio( + minioConfig["url"], + access_key=minioConfig["access_key"], + secret_key=minioConfig["secret_key"], + secure=False, + ) + except Exception as e: + logging.info(e) + + @staticmethod + def unique_name(name): + name, extension = name.split(".") + return "{name}.{random}.{extension}".format( + name=name, extension=extension, random=str(uuid.uuid4()).split("-")[0] + ) + + def upload(self, bucket, file, filepath): + key_name = storage.unique_name(file) + self.client.fput_object(bucket, key_name, filepath) + return key_name + + def download(self, bucket, file, filepath): + self.client.fget_object(bucket, file, filepath) + + def download_directory(self, bucket, prefix, path): + objects = self.client.list_objects_v2(bucket, prefix, recursive=True) + for obj in objects: + file_name = obj.object_name + self.download(bucket, file_name, os.path.join(path, file_name)) + + def upload_stream(self, bucket, file, bytes_data): + key_name = storage.unique_name(file) + self.client.put_object( + bucket, key_name, bytes_data, bytes_data.getbuffer().nbytes + ) + return key_name + + def download_stream(self, bucket, file): + data = self.client.get_object(bucket, file) + return data.read() + + @staticmethod + def get_instance(): + if storage.instance is None: + storage.instance = storage() + return storage.instance diff --git a/config/openwhisk.json b/config/openwhisk.json new file mode 
100644 index 00000000..67992612 --- /dev/null +++ b/config/openwhisk.json @@ -0,0 +1,16 @@ +{ + "experiments": { + "update_code": false, + "update_storage": false, + "download_results": false, + "deployment": "openwhisk", + "runtime": { + "language": "python", + "version": "3.6" + } + }, + "deployment": { + "name": "openwhisk", + "shouldShutdown": false + } +} diff --git a/config/systems.json b/config/systems.json index da77c9f5..d681ef40 100644 --- a/config/systems.json +++ b/config/systems.json @@ -109,11 +109,13 @@ "2.7": "openwhisk/python2action" }, "versions": ["3.6", "2.7"], - "images": ["build"], + "images": [], "username": "docker_user", "deployment": { "files": [ "__main__.py", "storage.py"], - "packages": [] + "packages": { + "minio": "^5.0.10" + } } }, "nodejs": { @@ -123,12 +125,12 @@ "12" : "openwhisk/action-nodejs-v12" }, "versions": [ "8", "10", "12"], - "images": ["build"], + "images": [], "username": "docker_user", "deployment": { - "files": [ "handler.js", "storage.js"], + "files": [ "index.js", "storage.js"], "packages": { - "uuid": "3.4.0" + "minio": "^7.0.16" } } } diff --git a/sebs/config.py b/sebs/config.py index 8b0351d1..331c8de7 100644 --- a/sebs/config.py +++ b/sebs/config.py @@ -35,3 +35,10 @@ def supported_language_versions( return self._system_config[deployment_name]["languages"][language_name][ "base_images" ].keys() + + def benchmark_base_images( + self, deployment_name: str, language_name: str + ) -> Dict[str, str]: + return self._system_config[deployment_name]["languages"][language_name][ + "base_images" + ] diff --git a/sebs/openwhisk/__init__.py b/sebs/openwhisk/__init__.py index 07176434..875ddd59 100644 --- a/sebs/openwhisk/__init__.py +++ b/sebs/openwhisk/__init__.py @@ -1 +1,3 @@ from .openwhisk import OpenWhisk # noqa +from .config import OpenWhiskConfig # noqa +from .minio import Minio # noqa diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 3da2ac8b..04d7799c 100644 --- a/sebs/openwhisk/function.py 
+++ b/sebs/openwhisk/function.py @@ -1,3 +1,5 @@ +import subprocess + from sebs.faas.function import Function, ExecutionResult import json import datetime @@ -11,29 +13,37 @@ def __init__(self, name: str, namespace: str = "_"): self.namespace = namespace def sync_invoke(self, payload: dict): - from sebs.openwhisk.openwhisk import OpenWhisk - ip = OpenWhisk.get_openwhisk_url() - url = f"https://{ip}/api/v1/namespaces/{self.namespace}/actions/{self.name}?result=true&blocking=true" - readyPayload = json.dumps(payload) + from tools.openwhisk_preparation import get_openwhisk_url + ip = get_openwhisk_url() + command = f"wsk -i action invoke --result {self.name}" + for key, value in payload.items(): + command = command + f" --param {key} {value}" - headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - begin = datetime.datetime.now() logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) - end = datetime.datetime.now() - print( - f"Function {self.name} returned response with code: {response.status_code}" - ) + error = None + try: + begin = datetime.datetime.now() + response = subprocess.run( + command.split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + end = datetime.datetime.now() + response = response.stdout.decode("utf-8") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + end = datetime.datetime.now() + logging.error(f"Cannot synchronously invoke action {self.name}, reason: {e}") + error = e + openwhiskResult = ExecutionResult(begin, end) - if response.status_code != 200: + if error is not None: logging.error("Invocation of {} failed!".format(self.name)) - logging.error("Input: {}".format(readyPayload)) - logging.error("Input: 
{}".format(response.content)) - openwhiskResult.stats.failure = True return openwhiskResult - returnContent = json.loads(response.content) + + returnContent = json.loads(response) + logging.info(f"{returnContent}") openwhiskResult.parse_benchmark_output(returnContent) return openwhiskResult @@ -45,9 +55,9 @@ def async_invoke(self, payload: dict): ip = OpenWhisk.get_openwhisk_url() url = f"https://{ip}/api/v1/namespaces/{self.namespace}/actions/{self.name}?result=true" readyPayload = json.dumps(payload) - print(url) + logging.info("OpenWhisk url: {}".format(url)) headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) @@ -67,12 +77,12 @@ def async_invoke(self, payload: dict): url = f"https://{ip}/api/v1/namespaces/_/activations/{activationId}" readyPayload = json.dumps(payload) headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} + "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} begin = datetime.datetime.now() attempt = 1 - while(True): + while True: print(f"Function {self.name} of namespace getting result. 
Attempt: {attempt}") - + response = requests.request("GET", url, data=readyPayload, headers=headers, verify=False) if response.status_code == 404: time.sleep(0.05) @@ -93,10 +103,9 @@ def async_invoke(self, payload: dict): openwhiskResult.stats.failure = True return openwhiskResult - begin = datetime.datetime.fromtimestamp(result['start'] / 1e3) end = datetime.datetime.fromtimestamp(result['end'] / 1e3) returnContent = result['response']['result'] openwhiskResult = ExecutionResult(begin, end) openwhiskResult.parse_benchmark_output(returnContent) - return openwhiskResult \ No newline at end of file + return openwhiskResult diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py new file mode 100644 index 00000000..f398b4c5 --- /dev/null +++ b/sebs/openwhisk/minio.py @@ -0,0 +1,153 @@ +from sebs.faas.storage import PersistentStorage +from typing import List, Tuple, Any +import logging +from time import sleep +import minio +import secrets +import os + + +class Minio(PersistentStorage): + + storage_container: Any + input_buckets: List[str] = [] + output_buckets: List[str] = [] + input_index = 0 + output_index = 0 + access_key: str = "" + secret_key: str = "" + port = 9000 + location = "fissionBenchmark" + connection: Any + docker_client = None + + def __init__(self, docker_client): + self.docker_client = docker_client + self.start() + sleep(10) + self.connection = self.get_connection() + + def start(self): + self.startMinio() + + def startMinio(self): + minioVersion = "minio/minio:latest" + self.access_key = secrets.token_urlsafe(32) + self.secret_key = secrets.token_hex(32) + logging.info("Minio container starting") + logging.info("ACCESS_KEY={}".format(self.access_key)) + logging.info("SECRET_KEY={}".format(self.secret_key)) + self.storage_container = self.docker_client.containers.run( + minioVersion, + command="server /data", + ports={str(self.port): self.port}, + environment={ + "MINIO_ACCESS_KEY": self.access_key, + "MINIO_SECRET_KEY": self.secret_key, + }, 
+ remove=True, + stdout=True, + stderr=True, + detach=True, + ) + self.storage_container.reload() + networks = self.storage_container.attrs["NetworkSettings"]["Networks"] + self.url = "{IPAddress}:{Port}".format( + IPAddress=networks["bridge"]["IPAddress"], Port=self.port + ) + logging.info("Started minio instance at {}".format(self.url)) + + def get_connection(self): + return minio.Minio( + self.url, + access_key=self.access_key, + secret_key=self.secret_key, + secure=False, + ) + + def input(self) -> List[str]: + return self.input_buckets + + def add_input_bucket(self, name: str, cache: bool = True) -> Tuple[str, int]: + input_index = self.input_index + bucket_name = "{}-{}-input".format(name, input_index) + exist = self.connection.bucket_exists(bucket_name) + try: + if cache: + self.input_index += 1 + if exist: + return (bucket_name, input_index) + else: + self.connection.make_bucket(bucket_name, location=self.location) + self.input_buckets.append(bucket_name) + return (bucket_name, input_index) + if exist: + return (bucket_name, input_index) + self.connection.make_bucket(bucket_name, location=self.location) + self.input_buckets.append(bucket_name) + return (bucket_name, input_index) + except ( + minio.error.BucketAlreadyOwnedByYou, + minio.error.BucketAlreadyExists, + minio.error.ResponseError, + ) as err: + logging.error("Bucket creation failed!") + raise err + + def add_output_bucket( + self, name: str, suffix: str = "output", cache: bool = True + ) -> Tuple[str, int]: + output_index = self.output_index + bucket_name = "{}-{}-{}".format(name, output_index, suffix) + exist = self.connection.bucket_exists(bucket_name) + try: + if cache: + self.output_index += 1 + if exist: + return (bucket_name, output_index) + else: + self.connection.make_bucket(bucket_name, location=self.location) + self.output_buckets.append(bucket_name) + return (bucket_name, output_index) + if exist: + return (bucket_name, output_index) + self.connection.make_bucket(bucket_name, 
location=self.location) + self.output_buckets.append(bucket_name) + return (bucket_name, output_index) + except ( + minio.error.BucketAlreadyOwnedByYou, + minio.error.BucketAlreadyExists, + minio.error.ResponseError, + ) as err: + logging.error("Bucket creation failed!") + raise err + + def output(self) -> List[str]: + return self.output_buckets + + def download(self, bucket_name: str, key: str, filepath: str) -> None: + objects = self.connection.list_objects_v2(bucket_name) + objects = [obj.object_name for obj in objects] + for obj in objects: + self.connection.fget_object(bucket_name, obj, os.path.join(filepath, obj)) + + def upload(self, bucket_name: str, filepath: str, key: str): + self.connection.fput_object(bucket_name, key, filepath) + + def list_bucket(self, bucket_name: str) -> List[str]: + buckets = [] + for bucket in self.connection.list_buckets(): + if bucket.name == bucket_name: + buckets.append(bucket.name) + return buckets + + def allocate_buckets(self, benchmark: str, buckets: Tuple[int, int]): + input_number = buckets[0] + output_number = buckets[1] + for i in range(input_number): + self.add_input_bucket(benchmark) + for i in range(output_number): + self.add_output_bucket(benchmark) + + def uploader_func(self, bucket_idx: int, file: str, filepath: str) -> None: + pass diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 0d27c885..6bbf8c83 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -1,29 +1,39 @@ -import subprocess -import logging -import shutil import json +import logging import os +import shutil +import subprocess from typing import Tuple -from sebs import Benchmark +import docker + +from sebs.benchmark import Benchmark +from sebs.cache import Cache from sebs.faas import System, PersistentStorage from sebs.faas.config import Config from sebs.faas.function import Function -from .function import OpenwhiskFunction +from sebs.openwhisk.minio import Minio from .config import OpenWhiskConfig -import
yaml -import time +from .function import OpenwhiskFunction +from ..config import SeBSConfig class OpenWhisk(System): _config: OpenWhiskConfig + storage: Minio + + def __init__(self, system_config: SeBSConfig, config: OpenWhiskConfig, cache_client: Cache, + docker_client: docker.client): + super().__init__(system_config, cache_client, docker_client) + self._config = config @property def config(self) -> Config: return self._config - def get_storage(self, replace_existing: bool) -> PersistentStorage: - pass + def get_storage(self, replace_existing: bool = False) -> PersistentStorage: + self.storage = Minio(self.docker_client) + return self.storage def get_function(self, code_package: Benchmark) -> Function: pass @@ -31,289 +41,16 @@ def get_function(self, code_package: Benchmark) -> Function: def shutdown(self) -> None: pass - @staticmethod - def __run_check_process__(cmd: str, **kwargs) -> None: - env = os.environ.copy() - env = {**env, **kwargs} - - subprocess.run( - cmd.split(), - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - env=env, - ) - - @staticmethod - def __check_installation__(app: str, cmd: str) -> None: - try: - logging.info('Checking {} installation...'.format(app)) - OpenWhisk.__run_check_process__(cmd) - logging.info('Check successful, proceeding...') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot find {}, aborting, reason: {}'.format(app, e)) - exit(1) - - @staticmethod - def install_kind() -> None: - try: - logging.info('Installing kind...') - OpenWhisk.__run_check_process__('go get sigs.k8s.io/kind@v0.8.1', GO111MODULE='on') - logging.info('Kind has been installed') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install kind, reason: {}'.format(e)) - exit(1) - - @staticmethod - def check_kind_installation() -> None: - try: - OpenWhisk.__run_check_process__('kind --version') - except (subprocess.CalledProcessError, FileNotFoundError): - 
logging.error('Cannot find kind executable, installing...') - OpenWhisk.install_kind() - - @staticmethod - def check_kubectl_installation() -> None: - try: - logging.info("Checking kubectl installation...") - OpenWhisk.__run_check_process__('kubectl version --client=true') - logging.info("Kubectl is installed") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.info("Kubectl is not installed, proceeding to install...") - OpenWhisk.install_kubectl() - - @staticmethod - def install_kubectl() -> None: - try: - logging.info('Installing kubectl...') - home_path = os.environ['HOME'] - kubectl_path = '{}/.local/bin/kubectl'.format(home_path) - OpenWhisk.__run_check_process__("curl -L -o {} " - "https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin" - "/linux/amd64/kubectl".format(kubectl_path)) - OpenWhisk.__run_check_process__("chmod +x {}".format(kubectl_path)) - logging.info('Kubectl has been installed') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install kubectl, reason: {}'.format(e)) - exit(1) - - @staticmethod - def check_helm_installation() -> None: - OpenWhisk.__check_installation__("helm", "helm version") - - @staticmethod - def install_helm() -> None: - try: - logging.info('Installing helm...') - helm_package = subprocess.run( - "curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3".split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - subprocess.run( - "sh -".split(), - input=helm_package.stdout, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - check=True, - ) - logging.info('Helm has been installed') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install helm, reason: {}'.format(e)) - exit(1) - - @staticmethod - def check_kind_cluster() -> None: - try: - kind_clusters_process = subprocess.run( - "kind get clusters".split(), - check=True, - stdout=subprocess.PIPE, - 
stderr=subprocess.DEVNULL, - ) - kind_clusters = set(kind_clusters_process.stdout.decode('utf-8').split()) - if "kind" not in kind_clusters: - logging.info("Creating kind cluster...") - OpenWhisk.create_kind_cluster() - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot check kind cluster, reason: {}".format(e)) - - @staticmethod - def create_kind_cluster() -> None: - try: - OpenWhisk.__run_check_process__("kind create cluster --config openwhisk/kind-cluster.yaml") - while True: - nodes = subprocess.run( - "kubectl get nodes".split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - node_grep = subprocess.run( - "grep kind".split(), - input=nodes.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - awk = subprocess.run( - ["awk", r'{print $2}'], - check=True, - input=node_grep.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - node_statuses = awk.stdout.decode('utf-8').split() - if all(node_status == 'Ready' for node_status in node_statuses): - break - time.sleep(1) - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot create kind cluster. 
reason: {}".format(e)) - - @staticmethod - def get_worker_ip() -> str: - try: - logging.info('Attempting to find worker IP...') - kind_worker_description = subprocess.run( - "kubectl describe node kind-worker".split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - grep_internal_ip = subprocess.run( - "grep InternalIP".split(), - check=True, - input=kind_worker_description.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - return grep_internal_ip.stdout.decode("utf-8").split()[1] - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Error during finding worker IP: {}".format(e)) - - @staticmethod - def label_nodes() -> None: - def label_node(node: str, role: str) -> None: - OpenWhisk.__run_check_process__("kubectl label node {} openwhisk-role={}".format(node, role)) - - try: - logging.info('Labelling nodes') - label_node('kind-worker', 'core') - label_node('kind-worker2', 'invoker') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot label nodes, reason: {}'.format(e)) - - @staticmethod - def clone_openwhisk_chart() -> None: - try: - OpenWhisk.__run_check_process__( - "git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot clone openwhisk chart, reason: {}".format(e)) - - @staticmethod - def prepare_openwhisk_config() -> None: - worker_ip = OpenWhisk.get_worker_ip() - with open('openwhisk/mycluster_template.yaml', 'r') as openwhisk_config_template: - data = yaml.unsafe_load(openwhisk_config_template) - data['whisk']['ingress']['apiHostName'] = worker_ip - data['whisk']['ingress']['apiHostPort'] = 31001 - data['nginx']['httpsNodePort'] = 31001 - if not os.path.exists('/tmp/openwhisk-deploy-kube/mycluster.yaml'): - with open('/tmp/openwhisk-deploy-kube/mycluster.yaml', 'a+') as openwhisk_config: - openwhisk_config.write(yaml.dump(data, 
default_flow_style=False)) - - @staticmethod - def check_openwhisk_installation(namespace: str) -> None: - try: - logging.info('Checking openwhisk installation.') - namespaces = subprocess.run( - "kubectl get namespaces".split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - subprocess.run( - ["grep", namespace], - check=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - input=namespaces.stdout, - ) - logging.info("Openwhisk installed!") - except (subprocess.CalledProcessError, FileNotFoundError): - logging.info("Openwhisk is not installed, proceeding with installation...") - OpenWhisk.helm_install() - - @staticmethod - def helm_install() -> None: - try: - OpenWhisk.__run_check_process__("helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n " - "openwhisk --create-namespace -f /tmp/openwhisk-deploy-kube/mycluster.yaml") - while True: - pods = subprocess.run( - "kubectl get pods -n openwhisk".split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - install_packages_grep = subprocess.run( - "grep install-packages".split(), - input=pods.stdout, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - ) - install_packages_status = install_packages_grep.stdout.decode('utf-8').split()[2] - if install_packages_status == 'Completed': - break - time.sleep(1) - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot install openwhisk, reason: {}".format(e)) - - @staticmethod - def expose_couchdb() -> None: - try: - OpenWhisk.__run_check_process__("kubectl apply -f openwhisk/couchdb-service.yaml") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot expose Couch DB, reason: {}".format(e)) - - @staticmethod - def install_openwhisk() -> None: - OpenWhisk.check_kind_installation() - OpenWhisk.check_kubectl_installation() - OpenWhisk.check_helm_installation() - OpenWhisk.check_kind_cluster() - OpenWhisk.label_nodes() - OpenWhisk.clone_openwhisk_chart() - 
OpenWhisk.prepare_openwhisk_config() - OpenWhisk.check_openwhisk_installation('openwhisk') - OpenWhisk.expose_couchdb() - @staticmethod def name() -> str: return "openwhisk" - @staticmethod - def get_openwhisk_url() -> str: - ip = OpenWhisk.get_worker_ip() - return '{}:{}'.format(ip, 31001) - - @staticmethod - def get_couchdb_url() -> str: - ip = OpenWhisk.get_worker_ip() - return '{}:{}'.format(ip, 31201) - - @staticmethod - def delete_cluster(): - try: - logging.info('Deleting KinD cluster...') - OpenWhisk.__run_check_process__('kind delete cluster') - logging.info('KinD cluster deleted...') - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error("Cannot delete cluster, reason: {}".format(e)) - def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: - benchmark.build() node = 'nodejs' - node_handler = 'handler.js' + node_handler = 'index.js' CONFIG_FILES = { - 'python': ['virtualenv', '__main__.py'], + 'python': ['virtualenv', '__main__.py', 'requirements.txt'], node: [node_handler, 'package.json', 'node_modules'] } directory = benchmark.code_location @@ -321,9 +58,10 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: function_dir = os.path.join(directory, "function") os.makedirs(function_dir) - # openwhisk needs main function to be named ina packaged.json + # openwhisk needs main function to be named in a package.json + if benchmark.language_name == node: - filename = 'package.json' + filename = 'code/package.json' with open(filename, 'r') as f: data = json.load(f) data['main'] = node_handler @@ -336,6 +74,35 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: if file not in package_config: file = os.path.join(directory, file) shutil.move(file, function_dir) + builder_image = self.system_config.benchmark_base_images(self.name(), benchmark.language_name)[ + benchmark.language_version + ] + if benchmark.language_name == node: + build_command = 'npm install' + volumes = { + directory: {'bind': 
'/nodejsAction'} + } + else: + build_command = 'cd tmp && virtualenv virtualenv && source virtualenv/bin/activate && ' \ + 'pip install -r requirements.txt' + volumes = { + directory: {'bind': '/tmp'} + } + + command = 'bash -c "{}"'.format(build_command) + + self.docker_client.containers.run( + builder_image, + command=command, + volumes=volumes, + remove=True, + stdout=True, + stderr=True, + user='1000:1000', + network_mode="bridge", + privileged=True, + tty=True + ) os.chdir(directory) subprocess.run( "zip -r {}.zip ./".format(benchmark.benchmark).split(), @@ -344,12 +111,45 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: benchmark_archive = "{}.zip".format( os.path.join(directory, benchmark.benchmark) ) - logging.info("Created {} archive".format(benchmark_archive)) + logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) return benchmark_archive, bytes_size - def get_function(self, code_package: Benchmark) -> Function: + def create_function(self, benchmark: Benchmark, function_name: str, zip_path: str) -> None: + logging.info("Creating action on openwhisk") + try: + actions = subprocess.run( + "wsk action list".split(), + stderr=subprocess.DEVNULL, + stdout=subprocess.PIPE, + ) + subprocess.run( + f"grep {function_name}".split(), + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + input=actions.stdout, + check=True, + ) + logging.info(f"Function {function_name} already exist") + + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error(f"ERROR: {e}") + try: + language_version = benchmark.language_version + if benchmark.language_name == "python": + language_version = language_version[0] + subprocess.run( + f"wsk action -i create {function_name} --kind {benchmark.language_name}:{language_version} " + f"{zip_path}".split(), + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) + except (subprocess.CalledProcessError, FileNotFoundError) as e: + 
logging.error(f"Cannot create action {function_name}, reason: {e}") + exit(1) + def get_function(self, code_package: Benchmark) -> Function: if ( code_package.language_version not in self.system_config.supported_language_versions(self.name(), code_package.language_name) @@ -360,12 +160,11 @@ def get_function(self, code_package: Benchmark) -> Function: version=code_package.language_version, ) ) - benchmark = code_package.benchmark - func_name = code_package.cached_config["name"] code_location = code_package.code_location if code_package.is_cached and code_package.is_cached_valid: + func_name = code_package.cached_config["name"] logging.info( "Using cached function {fname} in {loc}".format( fname=func_name, loc=code_location @@ -373,15 +172,15 @@ def get_function(self, code_package: Benchmark) -> Function: ) return OpenwhiskFunction(func_name) elif code_package.is_cached: - + func_name = code_package.cached_config["name"] timeout = code_package.benchmark_config.timeout memory = code_package.benchmark_config.memory # Run Openwhisk-specific part of building code. 
package, code_size = self.package_code(code_package) - self.update_function( - benchmark, func_name, package, code_size, timeout, memory + self.create_function( + code_package, func_name, package, ) cached_cfg = code_package.cached_config @@ -436,6 +235,9 @@ def get_function(self, code_package: Benchmark) -> Function: } }, ) + + self.create_function(code_package, func_name, package) + # FIXME: fix after dissociating code package and benchmark code_package.query_cache() return OpenwhiskFunction(func_name) diff --git a/sebs/sebs.py b/sebs/sebs.py index 18c6f993..484b9ec1 100644 --- a/sebs/sebs.py +++ b/sebs/sebs.py @@ -6,6 +6,8 @@ from sebs.benchmark import Benchmark from sebs.faas.system import System as FaasSystem from sebs.experiments.config import Config as ExperimentConfig +from sebs.openwhisk import OpenWhisk +from sebs.openwhisk.config import OpenWhiskConfig class SeBS: @@ -24,8 +26,8 @@ def __init__(self, cache_dir: str): def get_deployment(self, config: dict) -> FaasSystem: - implementations = {"aws": AWS} - configs = {"aws": AWSConfig.initialize} + implementations = {"aws": AWS, "openwhisk": OpenWhisk} + configs = {"aws": AWSConfig.initialize, "openwhisk": OpenWhiskConfig.initialize} name = config["name"] if name not in implementations: raise RuntimeError("Deployment {name} not supported!".format(**config)) diff --git a/tools/openwhisk_preparation.py b/tools/openwhisk_preparation.py new file mode 100644 index 00000000..30446b0c --- /dev/null +++ b/tools/openwhisk_preparation.py @@ -0,0 +1,281 @@ +import logging +import os +import subprocess +import time +import yaml + + +# Common utils + +def run_check_process(cmd: str, **kwargs) -> None: + env = os.environ.copy() + env = {**env, **kwargs} + + subprocess.run( + cmd.split(), + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + ) + + +# helm utils + +def install_helm() -> None: + try: + logging.info('Installing helm...') + helm_package = subprocess.run( + "curl 
https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + subprocess.run( + "sh -".split(), + input=helm_package.stdout, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + logging.info('Helm has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install helm, reason: {}'.format(e)) + exit(1) + + +def check_helm_installation() -> None: + try: + logging.info("Checking helm installation...") + run_check_process('helm version') + logging.info("helm is installed") + except (subprocess.CalledProcessError, FileNotFoundError): + logging.error('helm is not installed, attempting to install...') + install_helm() + + +# kubectl utils + +def install_kubectl(kubectl_version: str = "v1.18.0") -> None: + try: + logging.info('Installing kubectl...') + home_path = os.environ['HOME'] + kubectl_path = '{}/.local/bin/kubectl'.format(home_path) + run_check_process("curl -L -o {} " + "https://storage.googleapis.com/kubernetes-release/release/{}/bin" + "/linux/amd64/kubectl".format(kubectl_path, kubectl_version)) + run_check_process("chmod +x {}".format(kubectl_path)) + logging.info('Kubectl has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install kubectl, reason: {}'.format(e)) + exit(1) + + +def check_kubectl_installation() -> None: + try: + logging.info("Checking kubectl installation...") + run_check_process('kubectl version --client=true') + logging.info("kubectl is installed") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Kubectl is not installed, attempting to install...') + install_kubectl() + + +# kind utils + +def install_kind(kind_version: str = "v0.8.1") -> None: + try: + logging.info('Installing kind...') + env = os.environ.copy() + env['GO111MODULE'] = "on" + subprocess.run( + "go get 
sigs.k8s.io/kind@{}".format(kind_version).split(), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + env=env, + ) + logging.info('Kind has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install kind, reason: {}'.format(e)) + exit(1) + + +def check_kind_installation() -> None: + try: + logging.info("Checking go installation...") + run_check_process('go version') + logging.info("go is installed") + try: + logging.info("Checking kind installation...") + run_check_process('kind version') + logging.info('kind is installed') + except (subprocess.CalledProcessError, FileNotFoundError): + logging.warning('Cannot find kind, proceeding with installation') + install_kind() + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot find go, reason: {}'.format(e)) + exit(1) + + +def label_nodes() -> None: + def label_node(node: str, role: str) -> None: + run_check_process("kubectl label node {} openwhisk-role={}".format(node, role)) + + try: + logging.info('Labelling nodes') + label_node('kind-worker', 'core') + label_node('kind-worker2', 'invoker') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot label nodes, reason: {}'.format(e)) + exit(1) + + +def get_worker_ip(worker_node_name: str = "kind-worker") -> str: + try: + logging.info("Retrieving worker IP...") + internal_ip_proc = subprocess.run( + ["kubectl", "get", "node", worker_node_name, "-o", + "go-template='{{ (index .status.addresses 0).address }}'"], + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + return internal_ip_proc.stdout.decode("utf-8").replace("'", "") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot retrieve node IP, reason: {}".format(e)) + exit(1) + + +def create_kind_cluster() -> None: + try: + run_check_process("kind create cluster --config openwhisk/kind-cluster.yaml") + while True: + nodes = 
subprocess.run( + "kubectl get nodes".split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + node_grep = subprocess.run( + "grep kind".split(), + input=nodes.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + awk = subprocess.run( + ["awk", r'{print $2}'], + check=True, + input=node_grep.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + node_statuses = awk.stdout.decode('utf-8').split() + if all(node_status == 'Ready' for node_status in node_statuses): + break + time.sleep(1) + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot create kind cluster. reason: {}".format(e)) + exit(1) + + +def check_kind_cluster() -> None: + try: + kind_clusters_process = subprocess.run( + "kind get clusters".split(), + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + kind_clusters = set(kind_clusters_process.stdout.decode('utf-8').split()) + if "kind" not in kind_clusters: + logging.info("Creating kind cluster...") + create_kind_cluster() + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot check kind cluster, reason: {}".format(e)) + + +def delete_cluster(): + try: + logging.info('Deleting KinD cluster...') + run_check_process('kind delete cluster') + logging.info('KinD cluster deleted...') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot delete cluster, reason: {}".format(e)) + + +# openwhisk deployment utils + +def expose_couchdb() -> None: + try: + run_check_process("kubectl apply -f openwhisk/couchdb-service.yaml") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot expose Couch DB, reason: {}".format(e)) + + +def clone_openwhisk_chart() -> None: + try: + run_check_process("git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot clone 
openwhisk chart, reason: {}".format(e)) + + +def prepare_openwhisk_config() -> None: + worker_ip = get_worker_ip() + with open('openwhisk/mycluster_template.yaml', 'r') as openwhisk_config_template: + data = yaml.unsafe_load(openwhisk_config_template) + data['whisk']['ingress']['apiHostName'] = worker_ip + data['whisk']['ingress']['apiHostPort'] = 31001 + data['nginx']['httpsNodePort'] = 31001 + if not os.path.exists('/tmp/openwhisk-deploy-kube/mycluster.yaml'): + with open('/tmp/openwhisk-deploy-kube/mycluster.yaml', 'a+') as openwhisk_config: + openwhisk_config.write(yaml.dump(data, default_flow_style=False)) + + +def deploy_openwhisk_on_k8s(namespace: str = "openwhisk") -> None: + try: + run_check_process( + "helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n {} --create-namespace -f " + "/tmp/openwhisk-deploy-kube/mycluster.yaml".format(namespace) + ) + while True: + pods = subprocess.run( + "kubectl get pods -n {}".format(namespace).split(), + stderr=subprocess.DEVNULL, + stdout=subprocess.PIPE, + ) + check_result = subprocess.run( + "grep install-packages".split(), + input=pods.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + install_packages_status = check_result.stdout.decode('utf-8').split()[2] + if install_packages_status == 'Completed': + break + + time.sleep(1) + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error("Cannot install openwhisk, reason: {}".format(e)) + exit(1) + + +def get_openwhisk_url() -> str: + ip = get_worker_ip() + return '{}:{}'.format(ip, 31001) + + +def get_couchdb_url() -> str: + ip = get_worker_ip() + return '{}:{}'.format(ip, 31201) + + +# mixup + +def initiate_all(): + check_kubectl_installation() + check_helm_installation() + check_kind_installation() + check_kind_cluster() + label_nodes() + clone_openwhisk_chart() + prepare_openwhisk_config() + deploy_openwhisk_on_k8s() + expose_couchdb() From d57586b6d54667fd326809c44c5cc3565e9600f7 Mon Sep 17 00:00:00 2001 From: 
Mateusz Szarek Date: Thu, 3 Sep 2020 23:58:32 +0200 Subject: [PATCH 018/140] [OpenWhisk] Add methods preparing WSK Client to work without cert --- tools/openwhisk_preparation.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tools/openwhisk_preparation.py b/tools/openwhisk_preparation.py index 30446b0c..92896282 100644 --- a/tools/openwhisk_preparation.py +++ b/tools/openwhisk_preparation.py @@ -203,6 +203,30 @@ def delete_cluster(): # openwhisk deployment utils +def check_wsk_installation() -> None: + try: + logging.info("Checking wsk installation...") + run_check_process('wsk') + logging.info("wsk is installed") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error(f'Cannot find wsk, reason: {e}') + exit(1) + + +def prepare_wsk() -> None: + try: + ip = get_worker_ip() + auth = "23bc46b1-71f6-4ed5-8c54-816aa4f8c502:123zO3xZCLrMN6v2BKK1dXYFpXlPkccOFqm12CdAsMgRU4VrNZ9lyGVCGuMDGIwP" + subprocess.run( + f"wsk property set --apihost {ip} --auth {auth}", + check=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error(f"Cannot find wsk on system, reason: {e}") + exit(1) + def expose_couchdb() -> None: try: run_check_process("kubectl apply -f openwhisk/couchdb-service.yaml") From c0e8b21ae8852a9344c3829b48cd54f4692c5b6d Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Fri, 4 Sep 2020 14:23:40 +0200 Subject: [PATCH 019/140] [OpenWhisk] Add async_invoke implementation --- sebs/openwhisk/function.py | 105 +++++++++++++++++++----------------- sebs/openwhisk/openwhisk.py | 4 +- 2 files changed, 57 insertions(+), 52 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 04d7799c..7f61c25b 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -3,8 +3,9 @@ from sebs.faas.function import Function, ExecutionResult import json import datetime -import requests import logging 
+import re +from typing import List class OpenwhiskFunction(Function): @@ -12,19 +13,21 @@ def __init__(self, name: str, namespace: str = "_"): super().__init__(name) self.namespace = namespace - def sync_invoke(self, payload: dict): - from tools.openwhisk_preparation import get_openwhisk_url - ip = get_openwhisk_url() - command = f"wsk -i action invoke --result {self.name}" + def __add_params__(self, command: List[str], payload: dict) -> List[str]: for key, value in payload.items(): - command = command + f" --param {key} {value}" + command.append("--param") + command.append(key) + command.append(str(value)) + return command + def sync_invoke(self, payload: dict): logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") + command = self.__add_params__(f"wsk -i action invoke --result {self.name}".split(), payload) error = None try: begin = datetime.datetime.now() response = subprocess.run( - command.split(), + command, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=True, @@ -49,63 +52,65 @@ def sync_invoke(self, payload: dict): return openwhiskResult def async_invoke(self, payload: dict): - from sebs.openwhisk.openwhisk import OpenWhisk import time import datetime - ip = OpenWhisk.get_openwhisk_url() - url = f"https://{ip}/api/v1/namespaces/{self.namespace}/actions/{self.name}?result=true" - readyPayload = json.dumps(payload) - logging.info("OpenWhisk url: {}".format(url)) - headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - response = requests.request("POST", url, data=readyPayload, headers=headers, verify=False) - - print( - f"Function {self.name} returned response with code: {response.status_code}" - ) - if response.status_code != 202: - logging.error("Invocation of {} 
failed!".format(self.name)) - logging.error("Input: {}".format(readyPayload)) - logging.error("Input: {}".format(response.content)) - + command = self.__add_params__(f"wsk -i action invoke --result {self.name}".split(), payload) + error = None + try: + begin = datetime.datetime.now() + response = subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + end = datetime.datetime.now() + response = response.stdout.decode("utf-8") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + end = datetime.datetime.now() + logging.error(f"Cannot asynchronously invoke action {self.name}, reason: {e}") + error = e + openwhiskResult = ExecutionResult(begin, end) + if error is not None: + logging.error(f"Invocation of {self.name} failed!") + openwhiskResult.stats.failure = True + return openwhiskResult + id_pattern = re.compile(r"with id ([a-zA-Z0-9]+)$") + id_match = id_pattern.search(response).group(1) + if id_match is None: + logging.error("Cannot parse activation id") openwhiskResult.stats.failure = True return openwhiskResult - activationId = json.loads(response.content)['activationId'] - - url = f"https://{ip}/api/v1/namespaces/_/activations/{activationId}" - readyPayload = json.dumps(payload) - headers = {"content-type": "application/json", - "Authorization": "Basic Nzg5YzQ2YjEtNzFmNi00ZWQ1LThjNTQtODE2YWE0ZjhjNTAyOmFiY3pPM3haQ0xyTU42djJCS0sxZFhZRnBYbFBrY2NPRnFtMTJDZEFzTWdSVTRWck5aOWx5R1ZDR3VNREdJd1A="} - begin = datetime.datetime.now() attempt = 1 while True: - print(f"Function {self.name} of namespace getting result. Attempt: {attempt}") - - response = requests.request("GET", url, data=readyPayload, headers=headers, verify=False) - if response.status_code == 404: + logging.info(f"Function {self.name} of namespace getting result. 
Attempt: {attempt}") + command = f"wsk -i activation result {id_match}" + try: + response = subprocess.run( + command.split(), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + response = response.stdout.decode("utf-8") + end = datetime.datetime.now() + error = None + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.info("No result yet, proceeding...") time.sleep(0.05) attempt += 1 + error = e continue break - - print( - f"Function {self.name} returned response with code: {response.status_code}" - ) - result = json.loads(response.content) - - if response.status_code != 200: - logging.error("Invocation of {} failed!".format(self.name)) - logging.error("Input: {}".format(readyPayload)) - logging.error("Input: {}".format(response.content)) - + if error is not None: + logging.error(f"Function {self.name} with id {id_match} finished unsuccessfully") openwhiskResult.stats.failure = True return openwhiskResult - begin = datetime.datetime.fromtimestamp(result['start'] / 1e3) - end = datetime.datetime.fromtimestamp(result['end'] / 1e3) - returnContent = result['response']['result'] + logging.info(f"Function {self.name} with id {id_match} finished successfully") + + returnContent = response openwhiskResult = ExecutionResult(begin, end) openwhiskResult.parse_benchmark_output(returnContent) return openwhiskResult diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 6bbf8c83..73195140 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -119,7 +119,7 @@ def create_function(self, benchmark: Benchmark, function_name: str, zip_path: st logging.info("Creating action on openwhisk") try: actions = subprocess.run( - "wsk action list".split(), + "wsk -i action list".split(), stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, ) @@ -139,7 +139,7 @@ def create_function(self, benchmark: Benchmark, function_name: str, zip_path: st if benchmark.language_name == "python": language_version = 
language_version[0] subprocess.run( - f"wsk action -i create {function_name} --kind {benchmark.language_name}:{language_version} " + f"wsk -i action create {function_name} --kind {benchmark.language_name}:{language_version} " f"{zip_path}".split(), stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, From c4ca6003b4c652ad5b9defe77ce78e7875c0ec57 Mon Sep 17 00:00:00 2001 From: sborkows Date: Fri, 4 Sep 2020 16:36:55 +0200 Subject: [PATCH 020/140] Added wsk installation --- tools/openwhisk_preparation.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tools/openwhisk_preparation.py b/tools/openwhisk_preparation.py index 92896282..4201f09a 100644 --- a/tools/openwhisk_preparation.py +++ b/tools/openwhisk_preparation.py @@ -291,10 +291,43 @@ def get_couchdb_url() -> str: return '{}:{}'.format(ip, 31201) +def install_wsk() -> None: + try: + logging.info('Installing wsk...') + home_path = os.environ['HOME'] + wsk_path = '{}/.local/bin/wsk'.format(home_path) + subprocess.run("go get github.com/apache/openwhisk-cli".split()) + run_check_process("go get -u github.com/jteeuwen/go-bindata/...") + instalation_dir = "{}/src/github.com/apache/openwhisk-cli".format(os.environ['GOPATH']) + + def custom_subproces(comand): + subprocess.run(comand.split(), + cwd=instalation_dir, + check=True) + custom_subproces("go-bindata -pkg wski18n -o wski18n/i18n_resources.go wski18n/resources") + custom_subproces("go build -o wsk") + run_check_process("ln -sf {}/wsk {}".format(instalation_dir, wsk_path)) + run_check_process("chmod +x {}".format(wsk_path)) + logging.info('Wsk has been installed') + except (subprocess.CalledProcessError, FileNotFoundError) as e: + logging.error('Cannot install wsk, reason: {}'.format(e)) + exit(1) + + +def check_wsk_installation() -> None: + try: + logging.info("Checking wsk installation...") + run_check_process("wsk") + logging.info("Wsk is installed") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + 
logging.info("Wsk is not installed, proceeding to install...") + install_wsk() # mixup + def initiate_all(): check_kubectl_installation() + check_wsk_installation() check_helm_installation() check_kind_installation() check_kind_cluster() From eb6de2cc02f644cc0409417e830362d2c74152b1 Mon Sep 17 00:00:00 2001 From: Mateusz Szarek Date: Fri, 4 Sep 2020 17:02:33 +0200 Subject: [PATCH 021/140] [OpenWhisk] Fix issue with minio container causing benchmark failure; fix async_invoke result parse --- config/openwhisk.json | 3 ++- sebs/openwhisk/config.py | 3 +++ sebs/openwhisk/function.py | 2 +- sebs/openwhisk/minio.py | 47 ++++++++++++++++++++-------------- sebs/openwhisk/openwhisk.py | 20 ++++++++++----- tools/openwhisk_preparation.py | 1 + 6 files changed, 49 insertions(+), 27 deletions(-) diff --git a/config/openwhisk.json b/config/openwhisk.json index 67992612..b58698e9 100644 --- a/config/openwhisk.json +++ b/config/openwhisk.json @@ -11,6 +11,7 @@ }, "deployment": { "name": "openwhisk", - "shouldShutdown": false + "shutdownStorage": false, + "removeCluster": false } } diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 9cc715aa..c906d55e 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -26,10 +26,13 @@ def serialize(self) -> dict: class OpenWhiskConfig(Config): name: str + shutdownStorage: bool cache: Cache def __init__(self, config: dict, cache: Cache): self.name = config['name'] + self.shutdownStorage = config['shutdownStorage'] + self.removeCluster = config['removeCluster'] self.cache = cache @property diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 7f61c25b..4e0b67c4 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -110,7 +110,7 @@ def async_invoke(self, payload: dict): logging.info(f"Function {self.name} with id {id_match} finished successfully") - returnContent = response + returnContent = json.loads(response) openwhiskResult = ExecutionResult(begin, end) 
openwhiskResult.parse_benchmark_output(returnContent) return openwhiskResult diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index f398b4c5..534b2589 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -5,10 +5,10 @@ import minio import secrets import os +import docker.errors class Minio(PersistentStorage): - storage_container: Any input_buckets: List[str] = [] output_buckets: List[str] = [] @@ -17,7 +17,7 @@ class Minio(PersistentStorage): access_key: str = "" secret_key: str = "" port = 9000 - location = "fissionBenchmark" + location = "openwhiskBenchmark" connection: Any docker_client = None @@ -32,30 +32,39 @@ def start(self): def startMinio(self): minioVersion = "minio/minio:latest" - self.access_key = secrets.token_urlsafe(32) - self.secret_key = secrets.token_hex(32) - logging.info("Minio container starting") + try: + self.storage_container = self.docker_client.containers.get("minio") + logging.info("Minio container already exists") + envs = self.storage_container.attrs["Config"]["Env"] + self.access_key = envs['MINIO_ACCESS_KEY'] + self.secret_key = envs['MINIO_SECRET_KEY'] + except docker.errors.NotFound: + logging.info("Minio container does not exists, starting") + self.access_key = secrets.token_urlsafe(32) + self.secret_key = secrets.token_hex(32) + self.storage_container = self.docker_client.containers.run( + minioVersion, + command="server /data", + ports={str(self.port): self.port}, + environment={ + "MINIO_ACCESS_KEY": self.access_key, + "MINIO_SECRET_KEY": self.secret_key, + }, + remove=True, + stdout=True, + stderr=True, + detach=True, + name="minio" + ) + logging.info("ACCESS_KEY={}".format(self.access_key)) logging.info("SECRET_KEY={}".format(self.secret_key)) - self.storage_container = self.docker_client.containers.run( - minioVersion, - command="server /data", - ports={str(self.port): self.port}, - environment={ - "MINIO_ACCESS_KEY": self.access_key, - "MINIO_SECRET_KEY": self.secret_key, - }, - remove=True, - 
stdout=True, - stderr=True, - detach=True, - ) self.storage_container.reload() networks = self.storage_container.attrs["NetworkSettings"]["Networks"] self.url = "{IPAddress}:{Port}".format( IPAddress=networks["bridge"]["IPAddress"], Port=self.port ) - logging.info("Started minio instance at {}".format(self.url)) + logging.info("Minio runs at {}".format(self.url)) def get_connection(self): return minio.Minio( diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 73195140..3f66ade2 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -10,7 +10,6 @@ from sebs.benchmark import Benchmark from sebs.cache import Cache from sebs.faas import System, PersistentStorage -from sebs.faas.config import Config from sebs.faas.function import Function from sebs.openwhisk.minio import Minio from .config import OpenWhiskConfig @@ -28,18 +27,19 @@ def __init__(self, system_config: SeBSConfig, config: OpenWhiskConfig, cache_cli self._config = config @property - def config(self) -> Config: + def config(self) -> OpenWhiskConfig: return self._config def get_storage(self, replace_existing: bool = False) -> PersistentStorage: self.storage = Minio(self.docker_client) return self.storage - def get_function(self, code_package: Benchmark) -> Function: - pass - def shutdown(self) -> None: - pass + if self.config.shutdownStorage: + self.storage.storage_container.kill() + if self.config.removeCluster: + from tools.openwhisk_preparation import delete_cluster + delete_cluster() @staticmethod def name() -> str: @@ -58,6 +58,14 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: function_dir = os.path.join(directory, "function") os.makedirs(function_dir) + with open('./code/minioConfig.json', 'w+') as minio_config: + minio_config_json = { + 'access_key': self.storage.access_key, + 'secret_key': self.storage.secret_key, + 'url': self.storage.url, + } + minio_config.write(json.dumps(minio_config_json)) + # openwhisk needs main function to be 
named in a package.json if benchmark.language_name == node: diff --git a/tools/openwhisk_preparation.py b/tools/openwhisk_preparation.py index 4201f09a..73c8da8a 100644 --- a/tools/openwhisk_preparation.py +++ b/tools/openwhisk_preparation.py @@ -227,6 +227,7 @@ def prepare_wsk() -> None: logging.error(f"Cannot find wsk on system, reason: {e}") exit(1) + def expose_couchdb() -> None: try: run_check_process("kubectl apply -f openwhisk/couchdb-service.yaml") From bd935336a85222b4b4415ccdffd191cc893ed248 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Wed, 16 Jun 2021 09:34:47 +0000 Subject: [PATCH 022/140] Use Python openwhisk/actionloop-python-v3.6-ai image --- config/systems.json | 2 +- sebs/openwhisk/minio.py | 2 ++ sebs/openwhisk/openwhisk.py | 5 +++-- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/config/systems.json b/config/systems.json index d681ef40..c853b3db 100644 --- a/config/systems.json +++ b/config/systems.json @@ -105,7 +105,7 @@ "languages": { "python": { "base_images": { - "3.6": "openwhisk/python3action:1.14.0", + "3.6": "openwhisk/actionloop-python-v3.6-ai", "2.7": "openwhisk/python2action" }, "versions": ["3.6", "2.7"], diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index 534b2589..ed9b18fa 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -36,6 +36,8 @@ def startMinio(self): self.storage_container = self.docker_client.containers.get("minio") logging.info("Minio container already exists") envs = self.storage_container.attrs["Config"]["Env"] + if isinstance(envs, (tuple, list)): + envs = dict([i.split('=', 1) for i in envs]) self.access_key = envs['MINIO_ACCESS_KEY'] self.secret_key = envs['MINIO_SECRET_KEY'] except docker.errors.NotFound: diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 3f66ade2..43a3ae95 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -91,16 +91,17 @@ def package_code(self, benchmark: Benchmark) -> 
Tuple[str, int]: directory: {'bind': '/nodejsAction'} } else: - build_command = 'cd tmp && virtualenv virtualenv && source virtualenv/bin/activate && ' \ + build_command = 'cd /tmp && virtualenv virtualenv && source virtualenv/bin/activate && ' \ 'pip install -r requirements.txt' volumes = { directory: {'bind': '/tmp'} } - command = 'bash -c "{}"'.format(build_command) + command = '-c "{}"'.format(build_command) self.docker_client.containers.run( builder_image, + entrypoint="bash", command=command, volumes=volumes, remove=True, From d0055dbe7f07f309ea4499197a3ba156df96bc3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Wed, 16 Jun 2021 10:00:05 +0000 Subject: [PATCH 023/140] Try to publish function even if cache exists --- sebs/openwhisk/openwhisk.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 43a3ae95..de94878c 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -174,6 +174,7 @@ def get_function(self, code_package: Benchmark) -> Function: if code_package.is_cached and code_package.is_cached_valid: func_name = code_package.cached_config["name"] + self.create_function(code_package, func_name, code_location) logging.info( "Using cached function {fname} in {loc}".format( fname=func_name, loc=code_location From 772266758c6934f533e2b935c36561287fc35f4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Wed, 16 Jun 2021 10:41:32 +0000 Subject: [PATCH 024/140] Remove port as we connect directly to the container --- sebs/openwhisk/minio.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index ed9b18fa..225add18 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -47,7 +47,6 @@ def startMinio(self): self.storage_container = self.docker_client.containers.run( minioVersion, command="server /data", - ports={str(self.port): self.port}, environment={ "MINIO_ACCESS_KEY": 
self.access_key, "MINIO_SECRET_KEY": self.secret_key, From 72dbf4a40b38bd0d4d415e3757bb8e25a3fca4e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Wed, 16 Jun 2021 12:14:54 +0000 Subject: [PATCH 025/140] Fix test data upload --- sebs/openwhisk/minio.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index 225add18..e32914fa 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -82,6 +82,7 @@ def add_input_bucket(self, name: str, cache: bool = True) -> Tuple[str, int]: input_index = self.input_index bucket_name = "{}-{}-input".format(name, input_index) exist = self.connection.bucket_exists(bucket_name) + self.input_buckets.append(bucket_name) try: if cache: self.input_index += 1 @@ -89,12 +90,10 @@ def add_input_bucket(self, name: str, cache: bool = True) -> Tuple[str, int]: return (bucket_name, input_index) else: self.connection.make_bucket(bucket_name, location=self.location) - self.input_buckets.append(bucket_name) return (bucket_name, input_index) if exist: return (bucket_name, input_index) self.connection.make_bucket(bucket_name, location=self.location) - self.input_buckets.append(bucket_name) return (bucket_name, input_index) except ( minio.error.BucketAlreadyOwnedByYou, @@ -110,6 +109,7 @@ def add_output_bucket( output_index = self.output_index bucket_name = "{}-{}-{}".format(name, output_index, suffix) exist = self.connection.bucket_exists(bucket_name) + self.output_buckets.append(bucket_name) try: if cache: self.output_index += 1 @@ -117,12 +117,10 @@ def add_output_bucket( return (bucket_name, output_index) else: self.connection.make_bucket(bucket_name, location=self.location) - self.output_buckets.append(bucket_name) return (bucket_name, output_index) if exist: return (bucket_name, output_index) self.connection.make_bucket(bucket_name, location=self.location) - self.output_buckets.append(bucket_name) return (bucket_name, 
output_index) except ( minio.error.BucketAlreadyOwnedByYou, @@ -141,8 +139,8 @@ def download(self, bucket_name: str, key: str, filepath: str) -> None: for obj in objects: self.connection.fget_object(bucket_name, obj, os.path.join(filepath, obj)) - def upload(self, bucket_name: str, filepath: str, key: str): - self.connection.put_object(bucket_name, filepath) + def upload(self, bucket_name: str, key: str, filepath: str): + self.connection.fput_object(bucket_name, key, filepath) def list_bucket(self, bucket_name: str) -> List[str]: buckets = [] @@ -159,5 +157,6 @@ def allocate_buckets(self, benchmark: str, buckets: Tuple[int, int]): for i in range(output_number): self.add_output_bucket(benchmark) - def uploader_func(self, bucket_idx: int, file: str, filepath: str) -> None: - pass + def uploader_func(self, bucket_idx: int, key: str, filepath: str) -> None: + bucket_name = self.input_buckets[bucket_idx] + self.upload(bucket_name, key, filepath) From 18c3addd9c5d8d2bff3afdacc7debdeb807e6033 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Tue, 22 Jun 2021 12:37:18 +0000 Subject: [PATCH 026/140] Fix Minio access --- benchmarks/wrappers/openwhisk/python/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/wrappers/openwhisk/python/storage.py b/benchmarks/wrappers/openwhisk/python/storage.py index d41cede2..b94c5fc3 100644 --- a/benchmarks/wrappers/openwhisk/python/storage.py +++ b/benchmarks/wrappers/openwhisk/python/storage.py @@ -38,7 +38,7 @@ def download(self, bucket, file, filepath): self.client.fget_object(bucket, file, filepath) def download_directory(self, bucket, prefix, path): - objects = self.client.list_objects_v2(bucket, prefix, recursive=True) + objects = self.client.list_objects(bucket, prefix, recursive=True) for obj in objects: file_name = obj.object_name self.download(bucket, file_name, os.path.join(path, file_name)) From f4eacb3b43ef452c3e5a13fdd045a5c1d9ce722e Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Tue, 3 Aug 2021 10:57:02 +0000 Subject: [PATCH 027/140] Refactor OpenWhisk support to match current API --- .../220.video-processing/init.sh | 1 + .../wrappers/openwhisk/python/__main__.py | 2 + docker/Dockerfile.run.openwhisk.node | 4 + docker/Dockerfile.run.openwhisk.python | 4 + sebs.py | 2 +- sebs/aws/aws.py | 2 +- sebs/azure/azure.py | 2 +- sebs/benchmark.py | 4 +- sebs/faas/config.py | 2 + sebs/faas/system.py | 2 +- sebs/gcp/gcp.py | 2 +- sebs/local/local.py | 2 +- sebs/openwhisk/config.py | 37 ++- sebs/openwhisk/function.py | 142 ++------- sebs/openwhisk/minio.py | 149 ++-------- sebs/openwhisk/openwhisk.py | 277 ++++++++---------- sebs/openwhisk/triggers.py | 67 +++++ 17 files changed, 296 insertions(+), 405 deletions(-) create mode 100644 docker/Dockerfile.run.openwhisk.node create mode 100644 docker/Dockerfile.run.openwhisk.python create mode 100644 sebs/openwhisk/triggers.py diff --git a/benchmarks/200.multimedia/220.video-processing/init.sh b/benchmarks/200.multimedia/220.video-processing/init.sh index aa1d8243..688bb178 100755 --- a/benchmarks/200.multimedia/220.video-processing/init.sh +++ b/benchmarks/200.multimedia/220.video-processing/init.sh @@ -8,6 +8,7 @@ pushd ${DIR} > /dev/null tar -xf ffmpeg-release-amd64-static.tar.xz rm *.tar.xz mv ffmpeg-* ffmpeg +rm ffmpeg/ffprobe popd > /dev/null # copy watermark diff --git a/benchmarks/wrappers/openwhisk/python/__main__.py b/benchmarks/wrappers/openwhisk/python/__main__.py index e6ce67ca..fdfc76bb 100644 --- a/benchmarks/wrappers/openwhisk/python/__main__.py +++ b/benchmarks/wrappers/openwhisk/python/__main__.py @@ -6,6 +6,8 @@ def main(args): logging.getLogger().setLevel(logging.INFO) begin = datetime.datetime.now() + args['request-id'] = os.getenv('__OW_ACTIVATION_ID') + args['income-timestamp'] = begin.timestamp() from function import function ret = function.handler(args) diff --git a/docker/Dockerfile.run.openwhisk.node b/docker/Dockerfile.run.openwhisk.node 
new file mode 100644 index 00000000..cbe123b2 --- /dev/null +++ b/docker/Dockerfile.run.openwhisk.node @@ -0,0 +1,4 @@ +ARG BASE_IMAGE +FROM $BASE_IMAGE +COPY package.json / +RUN npm install -g diff --git a/docker/Dockerfile.run.openwhisk.python b/docker/Dockerfile.run.openwhisk.python new file mode 100644 index 00000000..2be06d42 --- /dev/null +++ b/docker/Dockerfile.run.openwhisk.python @@ -0,0 +1,4 @@ +ARG BASE_IMAGE +FROM $BASE_IMAGE +COPY requirements.txt / +RUN pip install --no-cache-dir -r /requirements.txt diff --git a/sebs.py b/sebs.py index ff72179a..c01846db 100755 --- a/sebs.py +++ b/sebs.py @@ -89,7 +89,7 @@ def common_params(func): @click.option( "--deployment", default=None, - type=click.Choice(["azure", "aws", "gcp", "local"]), + type=click.Choice(["azure", "aws", "gcp", "local", "openwhisk"]), help="Cloud deployment to use.", ) @simplified_common_params diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 055c4779..9c644666 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -122,7 +122,7 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: benchmark: benchmark name """ - def package_code(self, directory: str, language_name: str, benchmark: str) -> Tuple[str, int]: + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", "requirements.txt", ".python_packages"], diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index a12289e4..9b5b5a5e 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -114,7 +114,7 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: # - function.json # host.json # requirements.txt/package.json - def package_code(self, directory: str, language_name: str, benchmark: str) -> Tuple[str, int]: + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: # In previous step we ran a Docker container which 
installed packages # Python packages are in .python_packages because this is expected by Azure diff --git a/sebs/benchmark.py b/sebs/benchmark.py index a631f2d8..b00270cc 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -447,7 +447,7 @@ def recalculate_code_size(self): return self._code_size def build( - self, deployment_build_step: Callable[[str, str, str], Tuple[str, int]] + self, deployment_build_step: Callable[[str, str, str, str], Tuple[str, int]] ) -> Tuple[bool, str]: # Skip build if files are up to date and user didn't enforce rebuild @@ -477,7 +477,7 @@ def build( self.add_deployment_package(self._output_dir) self.install_dependencies(self._output_dir) self._code_location, self._code_size = deployment_build_step( - os.path.abspath(self._output_dir), self.language_name, self.benchmark + os.path.abspath(self._output_dir), self.language_name, self.language_version, self.benchmark ) self.logging.info( ( diff --git a/sebs/faas/config.py b/sebs/faas/config.py index 3d4a7296..1f6db2f4 100644 --- a/sebs/faas/config.py +++ b/sebs/faas/config.py @@ -106,6 +106,7 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> "Confi from sebs.azure.config import AzureConfig from sebs.gcp.config import GCPConfig from sebs.local.config import LocalConfig + from sebs.openwhisk.config import OpenWhiskConfig name = config["name"] func = { @@ -113,6 +114,7 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> "Confi "azure": AzureConfig.deserialize, "gcp": GCPConfig.deserialize, "local": LocalConfig.deserialize, + "openwhisk": OpenWhiskConfig.deserialize, }.get(name) assert func, "Unknown config type!" 
return func(config[name] if name in config else config, cache, handlers) diff --git a/sebs/faas/system.py b/sebs/faas/system.py index cdc3a656..01574692 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -104,7 +104,7 @@ def get_storage(self, replace_existing: bool) -> PersistentStorage: """ @abstractmethod - def package_code(self, directory: str, language_name: str, benchmark: str) -> Tuple[str, int]: + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: pass @abstractmethod diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index 36c3b8ca..6fd95e0e 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -130,7 +130,7 @@ def format_function_name(func_name: str) -> str: :return: path to packaged code and its size """ - def package_code(self, directory: str, language_name: str, benchmark: str) -> Tuple[str, int]: + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", ".python_packages"], diff --git a/sebs/local/local.py b/sebs/local/local.py index 216f0d41..b93cbaee 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -115,7 +115,7 @@ def shutdown(self): benchmark: benchmark name """ - def package_code(self, directory: str, language_name: str, benchmark: str) -> Tuple[str, int]: + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", "requirements.txt", ".python_packages"], diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index c906d55e..e8d673e1 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -1,27 +1,26 @@ from sebs.cache import Cache from sebs.faas.config import Credentials, Resources, Config +from sebs.utils import LoggingHandlers class OpenWhiskCredentials(Credentials): - def __init__(self): - pass @staticmethod - def 
initialize(config: dict, cache: Cache) -> Credentials: + def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Credentials: return OpenWhiskCredentials() def serialize(self) -> dict: - pass + return {} class OpenWhiskResources(Resources): @staticmethod - def initialize(config: dict, cache: Cache) -> Resources: + def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resources: return OpenWhiskResources() def serialize(self) -> dict: - return {"": ""} + return {} class OpenWhiskConfig(Config): @@ -30,6 +29,9 @@ class OpenWhiskConfig(Config): cache: Cache def __init__(self, config: dict, cache: Cache): + super().__init__() + self._credentials = OpenWhiskCredentials() + self._resources = OpenWhiskResources() self.name = config['name'] self.shutdownStorage = config['shutdownStorage'] self.removeCluster = config['removeCluster'] @@ -37,15 +39,30 @@ def __init__(self, config: dict, cache: Cache): @property def credentials(self) -> Credentials: - pass + return self._credentials @property def resources(self) -> Resources: - pass + return self._resources @staticmethod - def initialize(config: dict, cache: Cache) -> Config: - return OpenWhiskConfig(config, cache) + def initialize(cfg: Config, dct: dict): + pass def serialize(self) -> dict: + return { + "name": "openwhisk", + "shutdownStorage": self.shutdownStorage, + "removeCluster": self.removeCluster, + "credentials": self._credentials.serialize(), + "resources": self._resources.serialize(), + } + + @staticmethod + def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Config: + res = OpenWhiskConfig(config, cache) + res.logging_handlers = handlers + return res + + def update_cache(self, cache: Cache): pass diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 4e0b67c4..ed4e7f18 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -1,116 +1,38 @@ -import subprocess - -from sebs.faas.function import Function, 
ExecutionResult -import json -import datetime -import logging -import re -from typing import List +from sebs.faas.function import Function +from typing import cast class OpenwhiskFunction(Function): - def __init__(self, name: str, namespace: str = "_"): - super().__init__(name) + def __init__(self, name: str, benchmark: str, code_package_hash: str, namespace: str = "_"): + super().__init__(benchmark, name, code_package_hash) self.namespace = namespace - def __add_params__(self, command: List[str], payload: dict) -> List[str]: - for key, value in payload.items(): - command.append("--param") - command.append(key) - command.append(str(value)) - return command - - def sync_invoke(self, payload: dict): - logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - command = self.__add_params__(f"wsk -i action invoke --result {self.name}".split(), payload) - error = None - try: - begin = datetime.datetime.now() - response = subprocess.run( - command, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - check=True, + @staticmethod + def typename() -> str: + return "OpenWhisk.Function" + + def serialize(self) -> dict: + return { + **super().serialize(), + "namespace": self.namespace, + } + + @staticmethod + def deserialize(cached_config: dict) -> "OpenwhiskFunction": + from sebs.faas.function import Trigger + from sebs.openwhisk.triggers import LibraryTrigger + + ret = OpenwhiskFunction( + cached_config["name"], + cached_config["benchmark"], + cached_config["hash"], + cached_config["namespace"], + ) + for trigger in cached_config["triggers"]: + trigger_type = cast( + Trigger, + {"Library": LibraryTrigger}.get(trigger["type"]), ) - end = datetime.datetime.now() - response = response.stdout.decode("utf-8") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - end = datetime.datetime.now() - logging.error(f"Cannot synchronously invoke action {self.name}, reason: {e}") - error = e - - openwhiskResult = ExecutionResult(begin, end) - if 
error is not None: - logging.error("Invocation of {} failed!".format(self.name)) - openwhiskResult.stats.failure = True - return openwhiskResult - - returnContent = json.loads(response) - logging.info(f"{returnContent}") - - openwhiskResult.parse_benchmark_output(returnContent) - return openwhiskResult - - def async_invoke(self, payload: dict): - import time - import datetime - logging.info(f"Function {self.name} of namespace {self.namespace} invoking...") - command = self.__add_params__(f"wsk -i action invoke --result {self.name}".split(), payload) - error = None - try: - begin = datetime.datetime.now() - response = subprocess.run( - command, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - check=True, - ) - end = datetime.datetime.now() - response = response.stdout.decode("utf-8") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - end = datetime.datetime.now() - logging.error(f"Cannot asynchronously invoke action {self.name}, reason: {e}") - error = e - openwhiskResult = ExecutionResult(begin, end) - if error is not None: - logging.error(f"Invocation of {self.name} failed!") - openwhiskResult.stats.failure = True - return openwhiskResult - id_pattern = re.compile(r"with id ([a-zA-Z0-9]+)$") - id_match = id_pattern.search(response).group(1) - if id_match is None: - logging.error("Cannot parse activation id") - openwhiskResult.stats.failure = True - return openwhiskResult - attempt = 1 - while True: - logging.info(f"Function {self.name} of namespace getting result. 
Attempt: {attempt}") - command = f"wsk -i activation result {id_match}" - try: - response = subprocess.run( - command.split(), - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - check=True, - ) - response = response.stdout.decode("utf-8") - end = datetime.datetime.now() - error = None - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.info("No result yet, proceeding...") - time.sleep(0.05) - attempt += 1 - error = e - continue - break - if error is not None: - logging.error(f"Function {self.name} with id {id_match} finished unsuccessfully") - openwhiskResult.stats.failure = True - return openwhiskResult - - logging.info(f"Function {self.name} with id {id_match} finished successfully") - - returnContent = json.loads(response) - openwhiskResult = ExecutionResult(begin, end) - openwhiskResult.parse_benchmark_output(returnContent) - return openwhiskResult + assert trigger_type, "Unknown trigger type {}".format(trigger["type"]) + ret.add_trigger(trigger_type.deserialize(trigger)) + return ret diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index e32914fa..93124626 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -1,15 +1,16 @@ -from sebs.faas.storage import PersistentStorage +import sebs.local.storage from typing import List, Tuple, Any -import logging -from time import sleep -import minio import secrets -import os import docker.errors +from sebs.cache import Cache -class Minio(PersistentStorage): - storage_container: Any +class Minio(sebs.local.storage.Minio): + + @staticmethod + def deployment_name() -> str: + return "openwhisk" + input_buckets: List[str] = [] output_buckets: List[str] = [] input_index = 0 @@ -19,12 +20,10 @@ class Minio(PersistentStorage): port = 9000 location = "openwhiskBenchmark" connection: Any - docker_client = None - def __init__(self, docker_client): - self.docker_client = docker_client + def __init__(self,docker_client: docker.client, cache_client: Cache, replace_existing: 
bool): + super(Minio, self).__init__(docker_client, cache_client, replace_existing) self.start() - sleep(10) self.connection = self.get_connection() def start(self): @@ -33,23 +32,23 @@ def start(self): def startMinio(self): minioVersion = "minio/minio:latest" try: - self.storage_container = self.docker_client.containers.get("minio") - logging.info("Minio container already exists") - envs = self.storage_container.attrs["Config"]["Env"] + self._storage_container = self._docker_client.containers.get("minio") + self.logging.info("Minio container already exists") + envs = self._storage_container.attrs["Config"]["Env"] if isinstance(envs, (tuple, list)): envs = dict([i.split('=', 1) for i in envs]) - self.access_key = envs['MINIO_ACCESS_KEY'] - self.secret_key = envs['MINIO_SECRET_KEY'] + self._access_key = envs['MINIO_ACCESS_KEY'] + self._secret_key = envs['MINIO_SECRET_KEY'] except docker.errors.NotFound: - logging.info("Minio container does not exists, starting") - self.access_key = secrets.token_urlsafe(32) - self.secret_key = secrets.token_hex(32) - self.storage_container = self.docker_client.containers.run( + self.logging.info("Minio container does not exists, starting") + self._access_key = secrets.token_urlsafe(32) + self._secret_key = secrets.token_hex(32) + self._storage_container = self._docker_client.containers.run( minioVersion, command="server /data", environment={ - "MINIO_ACCESS_KEY": self.access_key, - "MINIO_SECRET_KEY": self.secret_key, + "MINIO_ACCESS_KEY": self._access_key, + "MINIO_SECRET_KEY": self._secret_key, }, remove=True, stdout=True, @@ -58,105 +57,11 @@ def startMinio(self): name="minio" ) - logging.info("ACCESS_KEY={}".format(self.access_key)) - logging.info("SECRET_KEY={}".format(self.secret_key)) - self.storage_container.reload() - networks = self.storage_container.attrs["NetworkSettings"]["Networks"] - self.url = "{IPAddress}:{Port}".format( + self.logging.info("ACCESS_KEY={}".format(self._access_key)) + 
self.logging.info("SECRET_KEY={}".format(self._secret_key)) + self._storage_container.reload() + networks = self._storage_container.attrs["NetworkSettings"]["Networks"] + self._url = "{IPAddress}:{Port}".format( IPAddress=networks["bridge"]["IPAddress"], Port=self.port ) - logging.info("Minio runs at {}".format(self.url)) - - def get_connection(self): - return minio.Minio( - self.url, - access_key=self.access_key, - secret_key=self.secret_key, - secure=False, - ) - - def input(self) -> List[str]: - return self.input_buckets - - def add_input_bucket(self, name: str, cache: bool = True) -> Tuple[str, int]: - input_index = self.input_index - bucket_name = "{}-{}-input".format(name, input_index) - exist = self.connection.bucket_exists(bucket_name) - self.input_buckets.append(bucket_name) - try: - if cache: - self.input_index += 1 - if exist: - return (bucket_name, input_index) - else: - self.connection.make_bucket(bucket_name, location=self.location) - return (bucket_name, input_index) - if exist: - return (bucket_name, input_index) - self.connection.make_bucket(bucket_name, location=self.location) - return (bucket_name, input_index) - except ( - minio.error.BucketAlreadyOwnedByYou, - minio.error.BucketAlreadyExists, - minio.error.ResponseError, - ) as err: - logging.error("Bucket creation failed!") - raise err - - def add_output_bucket( - self, name: str, suffix: str = "output", cache: bool = True - ) -> Tuple[str, int]: - output_index = self.output_index - bucket_name = "{}-{}-{}".format(name, output_index, suffix) - exist = self.connection.bucket_exists(bucket_name) - self.output_buckets.append(bucket_name) - try: - if cache: - self.output_index += 1 - if exist: - return (bucket_name, output_index) - else: - self.connection.make_bucket(bucket_name, location=self.location) - return (bucket_name, output_index) - if exist: - return (bucket_name, output_index) - self.connection.make_bucket(bucket_name, location=self.location) - return (bucket_name, output_index) - 
except ( - minio.error.BucketAlreadyOwnedByYou, - minio.error.BucketAlreadyExists, - minio.error.ResponseError, - ) as err: - logging.error("Bucket creation failed!") - raise err - - def output(self) -> List[str]: - return self.output_buckets - - def download(self, bucket_name: str, key: str, filepath: str) -> None: - objects = self.connection.list_objects_v2(bucket_name) - objects = [obj.object_name for obj in objects] - for obj in objects: - self.connection.fget_object(bucket_name, obj, os.path.join(filepath, obj)) - - def upload(self, bucket_name: str, key: str, filepath: str): - self.connection.fput_object(bucket_name, key, filepath) - - def list_bucket(self, bucket_name: str) -> List[str]: - buckets = [] - for bucket in self.connection.list_buckets(): - if bucket.name == bucket_name: - buckets.append(bucket.name) - return buckets - - def allocate_buckets(self, benchmark: str, buckets: Tuple[int, int]): - input_number = buckets[0] - output_number = buckets[1] - for i in range(input_number): - self.add_input_bucket(benchmark) - for i in range(output_number): - self.add_output_bucket(benchmark) - - def uploader_func(self, bucket_idx: int, key: str, filepath: str) -> None: - bucket_name = self.input_buckets[bucket_idx] - self.upload(bucket_name, key, filepath) + self.logging.info("Minio runs at {}".format(self._url)) \ No newline at end of file diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index de94878c..a48f05ca 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -1,17 +1,18 @@ import json -import logging import os import shutil import subprocess -from typing import Tuple +from typing import Dict, List, Tuple import docker from sebs.benchmark import Benchmark from sebs.cache import Cache from sebs.faas import System, PersistentStorage -from sebs.faas.function import Function -from sebs.openwhisk.minio import Minio +from sebs.faas.function import Function, ExecutionResult, Trigger +from .minio import Minio +from 
sebs.openwhisk.triggers import LibraryTrigger +from sebs.utils import PROJECT_DIR, LoggingHandlers from .config import OpenWhiskConfig from .function import OpenwhiskFunction from ..config import SeBSConfig @@ -22,21 +23,27 @@ class OpenWhisk(System): storage: Minio def __init__(self, system_config: SeBSConfig, config: OpenWhiskConfig, cache_client: Cache, - docker_client: docker.client): + docker_client: docker.client, logger_handlers: LoggingHandlers): super().__init__(system_config, cache_client, docker_client) self._config = config + self.logging_handlers = logger_handlers @property def config(self) -> OpenWhiskConfig: return self._config def get_storage(self, replace_existing: bool = False) -> PersistentStorage: - self.storage = Minio(self.docker_client) + if not hasattr(self, "storage"): + self.storage = Minio(self.docker_client, self.cache_client, replace_existing) + self.storage.logging_handlers = self.logging_handlers + self.storage.start() + else: + self.storage.replace_existing = replace_existing return self.storage def shutdown(self) -> None: - if self.config.shutdownStorage: - self.storage.storage_container.kill() + if self.storage and self.config.shutdown_storage: + self.storage.stop() if self.config.removeCluster: from tools.openwhisk_preparation import delete_cluster delete_cluster() @@ -45,30 +52,63 @@ def shutdown(self) -> None: def name() -> str: return "openwhisk" - def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: - benchmark.build() + @staticmethod + def typename(): + return "OpenWhisk" + + @staticmethod + def function_type() -> "Type[Function]": + return OpenwhiskFunction + + def benchmark_base_image(self, benchmark: str, language_name: str, language_version: str): + return f'spcleth/serverless-benchmarks:{self.name()}-{benchmark}-{language_name}-{language_version}' + + def build_base_image(self, directory: str, language_name: str, language_version: str, benchmark: str): + build_dir = os.path.join(directory, 'docker') + 
os.makedirs(build_dir) + shutil.copy( + os.path.join(PROJECT_DIR, 'docker', f'Dockerfile.run.{self.name()}.{language_name}'), + os.path.join(build_dir, 'Dockerfile')) + + for fn in ('requirements.txt', 'package.json'): + path = os.path.join(directory, fn) + if os.path.exists(path): + shutil.move(path, build_dir) + + builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[language_version] + tag = self.benchmark_base_image(benchmark, language_name, language_version) + image, _ = self.docker_client.images.build( + tag=tag, + path=build_dir, + buildargs={ + 'BASE_IMAGE': builder_image, + }) + + shutil.rmtree(build_dir) + + def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: node = 'nodejs' node_handler = 'index.js' CONFIG_FILES = { 'python': ['virtualenv', '__main__.py', 'requirements.txt'], node: [node_handler, 'package.json', 'node_modules'] } - directory = benchmark.code_location - package_config = CONFIG_FILES[benchmark.language_name] + package_config = CONFIG_FILES[language_name] function_dir = os.path.join(directory, "function") os.makedirs(function_dir) - with open('./code/minioConfig.json', 'w+') as minio_config: + with open(os.path.join(directory, 'minioConfig.json'), 'w+') as minio_config: + storage = self.get_storage() minio_config_json = { - 'access_key': self.storage.access_key, - 'secret_key': self.storage.secret_key, - 'url': self.storage.url, + 'access_key': storage._access_key, + 'secret_key': storage._secret_key, + 'url': storage._url, } minio_config.write(json.dumps(minio_config_json)) # openwhisk needs main function to be named in a package.json - if benchmark.language_name == node: + if language_name == node: filename = 'code/package.json' with open(filename, 'r') as f: data = json.load(f) @@ -82,50 +122,22 @@ def package_code(self, benchmark: Benchmark) -> Tuple[str, int]: if file not in package_config: file = os.path.join(directory, file) 
shutil.move(file, function_dir) - builder_image = self.system_config.benchmark_base_images(self.name(), benchmark.language_name)[ - benchmark.language_version - ] - if benchmark.language_name == node: - build_command = 'npm install' - volumes = { - directory: {'bind': '/nodejsAction'} - } - else: - build_command = 'cd /tmp && virtualenv virtualenv && source virtualenv/bin/activate && ' \ - 'pip install -r requirements.txt' - volumes = { - directory: {'bind': '/tmp'} - } - command = '-c "{}"'.format(build_command) - - self.docker_client.containers.run( - builder_image, - entrypoint="bash", - command=command, - volumes=volumes, - remove=True, - stdout=True, - stderr=True, - user='1000:1000', - network_mode="bridge", - privileged=True, - tty=True - ) + self.build_base_image(directory, language_name, language_version, benchmark) os.chdir(directory) subprocess.run( - "zip -r {}.zip ./".format(benchmark.benchmark).split(), + "zip -r {}.zip ./".format(benchmark).split(), stdout=subprocess.DEVNULL, ) benchmark_archive = "{}.zip".format( - os.path.join(directory, benchmark.benchmark) + os.path.join(directory, benchmark) ) - logging.info(f"Created {benchmark_archive} archive") + self.logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) return benchmark_archive, bytes_size - def create_function(self, benchmark: Benchmark, function_name: str, zip_path: str) -> None: - logging.info("Creating action on openwhisk") + def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": + self.logging.info("Creating action on openwhisk") try: actions = subprocess.run( "wsk -i action list".split(), @@ -133,121 +145,76 @@ def create_function(self, benchmark: Benchmark, function_name: str, zip_path: st stdout=subprocess.PIPE, ) subprocess.run( - f"grep {function_name}".split(), + f"grep {func_name}".split(), stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, input=actions.stdout, check=True, ) - 
logging.info(f"Function {function_name} already exist") + self.logging.info(f"Function {func_name} already exist") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error(f"ERROR: {e}") + self.logging.error(f"ERROR: {e}") try: - language_version = benchmark.language_version - if benchmark.language_name == "python": - language_version = language_version[0] - subprocess.run( - f"wsk -i action create {function_name} --kind {benchmark.language_name}:{language_version} " - f"{zip_path}".split(), - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - check=True, - ) + docker_image = self.benchmark_base_image(code_package.benchmark, code_package.language_name, + code_package.language_version) + subprocess.run(['wsk', '-i', 'action', 'create', func_name, + '--docker', docker_image, + '--memory', str(code_package.benchmark_config.memory), + code_package.code_location + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error(f"Cannot create action {function_name}, reason: {e}") + self.logging.error(f"Cannot create action {func_name}, reason: {e}") exit(1) - def get_function(self, code_package: Benchmark) -> Function: - if ( - code_package.language_version - not in self.system_config.supported_language_versions(self.name(), code_package.language_name) - ): - raise Exception( - "Unsupported {language} version {version} in Openwhisk!".format( - language=code_package.language_name, - version=code_package.language_version, - ) - ) - benchmark = code_package.benchmark - code_location = code_package.code_location - - if code_package.is_cached and code_package.is_cached_valid: - func_name = code_package.cached_config["name"] - self.create_function(code_package, func_name, code_location) - logging.info( - "Using cached function {fname} in {loc}".format( - fname=func_name, loc=code_location - ) - ) - return OpenwhiskFunction(func_name) - elif 
code_package.is_cached: - func_name = code_package.cached_config["name"] - timeout = code_package.benchmark_config.timeout - memory = code_package.benchmark_config.memory - - # Run Openwhisk-specific part of building code. - package, code_size = self.package_code(code_package) - - self.create_function( - code_package, func_name, package, - ) - - cached_cfg = code_package.cached_config - cached_cfg["code_size"] = code_size - cached_cfg["timeout"] = timeout - cached_cfg["memory"] = memory - cached_cfg["hash"] = code_package.hash - self.cache_client.update_function( - self.name(), benchmark, code_package.language_name, package, cached_cfg - ) - # FIXME: fix after dissociating code package and benchmark - code_package.query_cache() - - logging.info( - "Updating cached function {fname} in {loc}".format( - fname=func_name, loc=code_location - ) - ) - - return OpenwhiskFunction(func_name) - # no cached instance, create package and upload code + res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) + + # Add LibraryTrigger to a new function + trigger = LibraryTrigger(func_name) + trigger.logging_handlers = self.logging_handlers + res.add_trigger(trigger) + + return res + + def update_function(self, function: Function, code_package: Benchmark): + with open(code_package.code_location) as f: + image_tag = f.read() + subprocess.run(['wsk', '-i', 'action', 'update', function.name, + '--docker', image_tag, + '--memory', str(code_package.benchmark_config.memory)], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) + + def default_function_name(self, code_package: Benchmark) -> str: + return f"{code_package.benchmark}-{code_package.language_name}-{code_package.benchmark_config.memory}" + + def enforce_cold_start(self, functions: List[Function], code_package: Benchmark): + raise NotImplementedError() + + def download_metrics( + self, + function_name: str, + start_time: int, + end_time: int, + requests: Dict[str, ExecutionResult], + 
metrics: dict, + ): + pass + + def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) -> Trigger: + if trigger_type == Trigger.TriggerType.LIBRARY: + return function.triggers(Trigger.TriggerType.LIBRARY)[0] else: + raise RuntimeError("Not supported!") - language = code_package.language_name - language_runtime = code_package.language_version - timeout = code_package.benchmark_config.timeout - memory = code_package.benchmark_config.memory - - # Create function name, validation regexp if needed: \A([\w]|[\w][\w@ .-]*[\w@.-]+)\z - func_name = "{}-{}-{}".format(benchmark, language, memory) - - package, code_size = self.package_code(code_package) - # todo: check if function exists, if so delte otherwise create - - self.cache_client.add_function( - deployment=self.name(), - benchmark=benchmark, - language=language, - code_package=package, - language_config={ - "name": func_name, - "code_size": code_size, - "runtime": language_runtime, - "memory": memory, - "timeout": timeout, - "hash": code_package.hash, - }, - storage_config={ - "buckets": { - "input": self.storage.input_buckets, - "output": self.storage.output_buckets, - } - }, - ) - - self.create_function(code_package, func_name, package) + def shutdown(self) -> None: + super().shutdown() - # FIXME: fix after dissociating code package and benchmark - code_package.query_cache() - return OpenwhiskFunction(func_name) + def cached_function(self, function: Function): + pass diff --git a/sebs/openwhisk/triggers.py b/sebs/openwhisk/triggers.py new file mode 100644 index 00000000..0141fd81 --- /dev/null +++ b/sebs/openwhisk/triggers.py @@ -0,0 +1,67 @@ +import concurrent.futures +import datetime +import json +import subprocess +from typing import Dict, List, Optional # noqa + +from sebs.faas.function import ExecutionResult, Trigger + + +class LibraryTrigger(Trigger): + def __init__(self, fname: str): + super().__init__() + self.fname = fname + + @staticmethod + def trigger_type() -> 
"Trigger.TriggerType": + return Trigger.TriggerType.LIBRARY + + @staticmethod + def __add_params__(command: List[str], payload: dict) -> List[str]: + for key, value in payload.items(): + command.append("--param") + command.append(key) + command.append(json.dumps(value)) + return command + + def sync_invoke(self, payload: dict) -> ExecutionResult: + command = self.__add_params__(['wsk', '-i', 'action', 'invoke', '--result', self.fname], payload) + error = None + try: + self.logging.info(f"Executing {command}") + begin = datetime.datetime.now() + response = subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + end = datetime.datetime.now() + response = response.stdout.decode("utf-8") + except (subprocess.CalledProcessError, FileNotFoundError) as e: + end = datetime.datetime.now() + error = e + + openwhisk_result = ExecutionResult.from_times(begin, end) + if error is not None: + self.logging.error("Invocation of {} failed!".format(self.fname)) + openwhisk_result.stats.failure = True + return openwhisk_result + + return_content = json.loads(response) + self.logging.info(f"{return_content}") + + openwhisk_result.parse_benchmark_output(return_content) + return openwhisk_result + + def async_invoke(self, payload: dict) -> concurrent.futures.Future: + pool = concurrent.futures.ThreadPoolExecutor() + fut = pool.submit(self.sync_invoke, payload) + return fut + + def serialize(self) -> dict: + return {"type": "Library", "name": self.fname} + + @staticmethod + def deserialize(obj: dict) -> Trigger: + return LibraryTrigger(obj["name"]) From c71bffe97867df312353bcbfac802a4b2ac74b7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Wed, 18 Aug 2021 12:06:59 +0000 Subject: [PATCH 028/140] Add support for HTTP trigger --- benchmarks/wrappers/openwhisk/nodejs/index.js | 3 +- .../wrappers/openwhisk/python/__main__.py | 1 + sebs/faas/function.py | 5 ++- sebs/openwhisk/openwhisk.py | 5 ++- sebs/openwhisk/triggers.py | 
38 +++++++++++++++++++ 5 files changed, 49 insertions(+), 3 deletions(-) diff --git a/benchmarks/wrappers/openwhisk/nodejs/index.js b/benchmarks/wrappers/openwhisk/nodejs/index.js index c9499c4c..7f4ceff9 100644 --- a/benchmarks/wrappers/openwhisk/nodejs/index.js +++ b/benchmarks/wrappers/openwhisk/nodejs/index.js @@ -21,8 +21,9 @@ async function main(args) { compute_time: micro, results_time: 0, result: ret, + request_id: process.env.__OW_ACTIVATION_ID, is_cold: is_cold, }; } -exports.main = main; \ No newline at end of file +exports.main = main; diff --git a/benchmarks/wrappers/openwhisk/python/__main__.py b/benchmarks/wrappers/openwhisk/python/__main__.py index fdfc76bb..a2dbfcb5 100644 --- a/benchmarks/wrappers/openwhisk/python/__main__.py +++ b/benchmarks/wrappers/openwhisk/python/__main__.py @@ -29,6 +29,7 @@ def main(args): return { "begin": begin.strftime("%s.%f"), "end": end.strftime("%s.%f"), + "request_id": os.getenv('__OW_ACTIVATION_ID'), "results_time": results_time, "is_cold": is_cold, "result": log_data, diff --git a/sebs/faas/function.py b/sebs/faas/function.py index 56688779..a1b79ab0 100644 --- a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -180,7 +180,7 @@ def get(name: str) -> "Trigger.TriggerType": return member raise Exception("Unknown trigger type {}".format(member)) - def _http_invoke(self, payload: dict, url: str) -> ExecutionResult: + def _http_invoke(self, payload: dict, url: str, verify_ssl: bool = True) -> ExecutionResult: import pycurl from io import BytesIO @@ -188,6 +188,9 @@ def _http_invoke(self, payload: dict, url: str) -> ExecutionResult: c.setopt(pycurl.HTTPHEADER, ["Content-Type: application/json"]) c.setopt(pycurl.POST, 1) c.setopt(pycurl.URL, url) + if not verify_ssl: + c.setopt(pycurl.SSL_VERIFYHOST, 0) + c.setopt(pycurl.SSL_VERIFYPEER, 0) data = BytesIO() c.setopt(pycurl.WRITEFUNCTION, data.write) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index a48f05ca..920318ba 100644 --- 
a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -11,7 +11,7 @@ from sebs.faas import System, PersistentStorage from sebs.faas.function import Function, ExecutionResult, Trigger from .minio import Minio -from sebs.openwhisk.triggers import LibraryTrigger +from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger from sebs.utils import PROJECT_DIR, LoggingHandlers from .config import OpenWhiskConfig from .function import OpenwhiskFunction @@ -159,6 +159,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk docker_image = self.benchmark_base_image(code_package.benchmark, code_package.language_name, code_package.language_version) subprocess.run(['wsk', '-i', 'action', 'create', func_name, + '--web', 'true', '--docker', docker_image, '--memory', str(code_package.benchmark_config.memory), code_package.code_location @@ -210,6 +211,8 @@ def download_metrics( def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) -> Trigger: if trigger_type == Trigger.TriggerType.LIBRARY: return function.triggers(Trigger.TriggerType.LIBRARY)[0] + elif trigger_type == Trigger.TriggerType.HTTP: + return HTTPTrigger(function.name) else: raise RuntimeError("Not supported!") diff --git a/sebs/openwhisk/triggers.py b/sebs/openwhisk/triggers.py index 0141fd81..706eb779 100644 --- a/sebs/openwhisk/triggers.py +++ b/sebs/openwhisk/triggers.py @@ -65,3 +65,41 @@ def serialize(self) -> dict: @staticmethod def deserialize(obj: dict) -> Trigger: return LibraryTrigger(obj["name"]) + + @staticmethod + def typename() -> str: + return "OpenWhisk.LibraryTrigger" + + +class HTTPTrigger(Trigger): + def __init__(self, fname: str): + super().__init__() + self.fname = fname + response = subprocess.run(['wsk', '-i', 'action', 'get', fname, '--url'], + stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=True) + stdout = response.stdout.decode("utf-8") + self.url = stdout.strip().split('\n')[-1] + '.json' + + @staticmethod + def 
typename() -> str: + return "OpenWhisk.HTTPTrigger" + + @staticmethod + def trigger_type() -> Trigger.TriggerType: + return Trigger.TriggerType.HTTP + + def sync_invoke(self, payload: dict) -> ExecutionResult: + self.logging.debug(f"Invoke function {self.url}") + return self._http_invoke(payload, self.url, False) + + def async_invoke(self, payload: dict) -> concurrent.futures.Future: + pool = concurrent.futures.ThreadPoolExecutor() + fut = pool.submit(self.sync_invoke, payload) + return fut + + def serialize(self) -> dict: + return {"type": "HTTP", "fname": self.fname} + + @staticmethod + def deserialize(obj: dict) -> Trigger: + return HTTPTrigger(obj["fname"]) From fbec339ed3f38ad999d9b67d5e4cf4c88c964abc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Tue, 24 Aug 2021 12:42:53 +0000 Subject: [PATCH 029/140] Put test code in the container --- benchmarks/wrappers/openwhisk/nodejs/index.js | 2 +- benchmarks/wrappers/openwhisk/python/setup.py | 14 ++++++ config/systems.json | 2 +- docker/Dockerfile.run.openwhisk.node | 4 -- docker/Dockerfile.run.openwhisk.nodejs | 6 +++ docker/Dockerfile.run.openwhisk.python | 5 ++- sebs/openwhisk/openwhisk.py | 43 ++++++------------- 7 files changed, 37 insertions(+), 39 deletions(-) create mode 100644 benchmarks/wrappers/openwhisk/python/setup.py delete mode 100644 docker/Dockerfile.run.openwhisk.node create mode 100644 docker/Dockerfile.run.openwhisk.nodejs diff --git a/benchmarks/wrappers/openwhisk/nodejs/index.js b/benchmarks/wrappers/openwhisk/nodejs/index.js index 7f4ceff9..a0a68a3d 100644 --- a/benchmarks/wrappers/openwhisk/nodejs/index.js +++ b/benchmarks/wrappers/openwhisk/nodejs/index.js @@ -1,7 +1,7 @@ const path = require('path'), fs = require('fs'); async function main(args) { - var func = require('./function/function'); + var func = require('/function/function.js'); var begin = Date.now() / 1000; var start = process.hrtime(); var ret = await func.handler(args); diff --git 
a/benchmarks/wrappers/openwhisk/python/setup.py b/benchmarks/wrappers/openwhisk/python/setup.py new file mode 100644 index 00000000..b942d059 --- /dev/null +++ b/benchmarks/wrappers/openwhisk/python/setup.py @@ -0,0 +1,14 @@ +from distutils.core import setup +from glob import glob +from pkg_resources import parse_requirements + +with open('requirements.txt') as f: + requirements = [str(r) for r in parse_requirements(f)] + +setup( + name='function', + install_requires=requirements, + packages=['function'], + package_dir={'function': '.'}, + package_data={'function': glob('**', recursive=True)}, +) \ No newline at end of file diff --git a/config/systems.json b/config/systems.json index abcbc23f..0845e831 100644 --- a/config/systems.json +++ b/config/systems.json @@ -143,7 +143,7 @@ "images": [], "username": "docker_user", "deployment": { - "files": [ "__main__.py", "storage.py"], + "files": [ "__main__.py", "storage.py", "setup.py"], "packages": { "minio": "^5.0.10" } diff --git a/docker/Dockerfile.run.openwhisk.node b/docker/Dockerfile.run.openwhisk.node deleted file mode 100644 index cbe123b2..00000000 --- a/docker/Dockerfile.run.openwhisk.node +++ /dev/null @@ -1,4 +0,0 @@ -ARG BASE_IMAGE -FROM $BASE_IMAGE -COPY package.json / -RUN npm install -g diff --git a/docker/Dockerfile.run.openwhisk.nodejs b/docker/Dockerfile.run.openwhisk.nodejs new file mode 100644 index 00000000..e4f2f375 --- /dev/null +++ b/docker/Dockerfile.run.openwhisk.nodejs @@ -0,0 +1,6 @@ +ARG BASE_IMAGE +FROM $BASE_IMAGE +COPY . /function/ +RUN cd /function \ + && npm install --no-package-lock --production \ + && npm cache clean --force diff --git a/docker/Dockerfile.run.openwhisk.python b/docker/Dockerfile.run.openwhisk.python index 2be06d42..72d7da75 100644 --- a/docker/Dockerfile.run.openwhisk.python +++ b/docker/Dockerfile.run.openwhisk.python @@ -1,4 +1,5 @@ ARG BASE_IMAGE FROM $BASE_IMAGE -COPY requirements.txt / -RUN pip install --no-cache-dir -r /requirements.txt +COPY . 
function/ +RUN touch function/__init__.py \ + && pip install --no-cache-dir function/ diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 920318ba..17e00a51 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -70,10 +70,13 @@ def build_base_image(self, directory: str, language_name: str, language_version: os.path.join(PROJECT_DIR, 'docker', f'Dockerfile.run.{self.name()}.{language_name}'), os.path.join(build_dir, 'Dockerfile')) - for fn in ('requirements.txt', 'package.json'): - path = os.path.join(directory, fn) - if os.path.exists(path): - shutil.move(path, build_dir) + for fn in os.listdir(directory): + if fn not in ('index.js', '__main__.py'): + file = os.path.join(directory, fn) + shutil.move(file, build_dir) + + with open(os.path.join(build_dir, '.dockerignore'), 'w') as f: + f.write('Dockerfile') builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[language_version] tag = self.benchmark_base_image(benchmark, language_name, language_version) @@ -84,18 +87,16 @@ def build_base_image(self, directory: str, language_name: str, language_version: 'BASE_IMAGE': builder_image, }) - shutil.rmtree(build_dir) + # shutil.rmtree(build_dir) def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: node = 'nodejs' node_handler = 'index.js' CONFIG_FILES = { - 'python': ['virtualenv', '__main__.py', 'requirements.txt'], - node: [node_handler, 'package.json', 'node_modules'] + 'python': ['__main__.py'], + node: [node_handler] } package_config = CONFIG_FILES[language_name] - function_dir = os.path.join(directory, "function") - os.makedirs(function_dir) with open(os.path.join(directory, 'minioConfig.json'), 'w+') as minio_config: storage = self.get_storage() @@ -106,32 +107,12 @@ def package_code(self, directory: str, language_name: str, language_version: str } minio_config.write(json.dumps(minio_config_json)) - # openwhisk needs main 
function to be named in a package.json - - if language_name == node: - filename = 'code/package.json' - with open(filename, 'r') as f: - data = json.load(f) - data['main'] = node_handler - - os.remove(filename) - with open(filename, 'w') as f: - json.dump(data, f, indent=4) - - for file in os.listdir(directory): - if file not in package_config: - file = os.path.join(directory, file) - shutil.move(file, function_dir) - self.build_base_image(directory, language_name, language_version, benchmark) os.chdir(directory) - subprocess.run( - "zip -r {}.zip ./".format(benchmark).split(), + benchmark_archive = os.path.join(directory, f"{benchmark}.zip") + subprocess.run(['zip', benchmark_archive] + package_config, stdout=subprocess.DEVNULL, ) - benchmark_archive = "{}.zip".format( - os.path.join(directory, benchmark) - ) self.logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) return benchmark_archive, bytes_size From 7bbddd54cd3679456fcc748198f061668fb50154 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Tue, 24 Aug 2021 16:49:42 +0000 Subject: [PATCH 030/140] Add package.json to 010.sleep --- .../000.microbenchmarks/010.sleep/nodejs/package.json | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 benchmarks/000.microbenchmarks/010.sleep/nodejs/package.json diff --git a/benchmarks/000.microbenchmarks/010.sleep/nodejs/package.json b/benchmarks/000.microbenchmarks/010.sleep/nodejs/package.json new file mode 100644 index 00000000..967cd8b7 --- /dev/null +++ b/benchmarks/000.microbenchmarks/010.sleep/nodejs/package.json @@ -0,0 +1,9 @@ +{ + "name": "", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + } +} From 23c7932676b1b5cf34b96e8c661658d1303c93bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Mon, 6 Sep 2021 11:42:34 +0000 Subject: [PATCH 031/140] Fix nodejs benchmarks --- 
benchmarks/100.webapps/120.uploader/nodejs/package.json | 3 +-- benchmarks/wrappers/openwhisk/nodejs/storage.js | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/benchmarks/100.webapps/120.uploader/nodejs/package.json b/benchmarks/100.webapps/120.uploader/nodejs/package.json index 6108bedf..7dcc22b1 100644 --- a/benchmarks/100.webapps/120.uploader/nodejs/package.json +++ b/benchmarks/100.webapps/120.uploader/nodejs/package.json @@ -4,8 +4,7 @@ "description": "", "author": "", "license": "", - "dependencies": {}, - "devDependencies": { + "dependencies": { "request": "^2.88.0" } } diff --git a/benchmarks/wrappers/openwhisk/nodejs/storage.js b/benchmarks/wrappers/openwhisk/nodejs/storage.js index e078af75..3715aa91 100644 --- a/benchmarks/wrappers/openwhisk/nodejs/storage.js +++ b/benchmarks/wrappers/openwhisk/nodejs/storage.js @@ -8,7 +8,7 @@ const minio = require('minio'), class minio_storage { constructor() { - let minioConfig = JSON.parse(fs.readFileSync('minioConfig.json')); + let minioConfig = JSON.parse(fs.readFileSync('/function/minioConfig.json')); let address = minioConfig["url"]; let access_key = minioConfig["access_key"]; let secret_key = minioConfig["secret_key"]; @@ -60,4 +60,4 @@ class minio_storage { }; -exports.storage = minio_storage; \ No newline at end of file +exports.storage = minio_storage; From 20a5161938482b5a0c1a15cf1a27b47e13bae475 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20=C5=BBuk?= Date: Mon, 6 Sep 2021 11:43:05 +0000 Subject: [PATCH 032/140] Remove obsolete files --- docker/Dockerfile.build.openwhisk.nodejs | 10 ---------- docker/Dockerfile.build.openwhisk.python | 16 ---------------- 2 files changed, 26 deletions(-) delete mode 100755 docker/Dockerfile.build.openwhisk.nodejs delete mode 100755 docker/Dockerfile.build.openwhisk.python diff --git a/docker/Dockerfile.build.openwhisk.nodejs b/docker/Dockerfile.build.openwhisk.nodejs deleted file mode 100755 index d25b49e2..00000000 --- 
a/docker/Dockerfile.build.openwhisk.nodejs +++ /dev/null @@ -1,10 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ENV HOME=/home/${USER} - -RUN useradd --uid 1000 -m ${USER} -WORKDIR ${HOME} -USER ${USER}:${USER} - -CMD cd /mnt/function && npm install && rm -rf package-lock.json diff --git a/docker/Dockerfile.build.openwhisk.python b/docker/Dockerfile.build.openwhisk.python deleted file mode 100755 index 35577721..00000000 --- a/docker/Dockerfile.build.openwhisk.python +++ /dev/null @@ -1,16 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG VERSION -ENV HOME=/home/${USER} -ENV PYTHON_VERSION=${VERSION} - -RUN useradd --uid 1000 ${USER} -WORKDIR ${HOME} - - -ENV SCRIPT_FILE=/mnt/function/package.sh -CMD cd /mnt/function\ - && virtualenv virtualenv && source virtualenv/bin/activate && - && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip3 -q install -r requirements.txt -r requirements.txt.${PYTHON_VERSION}; else pip3 -q install -r requirements.txt ; fi\ - && if test -f "${SCRIPT_FILE}"; then /bin/bash ${SCRIPT_FILE} .virtualenv/lib/python3.6/site-packages ; fi From 1af7b02e700427e3cfc3af9726352723b2b09eea Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 17 Sep 2021 01:13:21 +0200 Subject: [PATCH 033/140] Enable Python 3.8 for local evaluation --- config/systems.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/config/systems.json b/config/systems.json index 7a3bf450..2610a44d 100644 --- a/config/systems.json +++ b/config/systems.json @@ -10,7 +10,8 @@ "languages": { "python": { "base_images": { - "3.6": "python:3.6-slim" + "3.6": "python:3.6-slim", + "3.8": "python:3.8-slim" }, "images": ["run", "build"], "username": "docker_user", From 3a7db85d04c6866bc83e9e08fd5c54f23ee28967 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 17 Sep 2021 02:04:43 +0200 Subject: [PATCH 034/140] Update benchmark image-recognition to use 3.8 --- .../400.inference/411.image-recognition/python/package.sh | 6 +++--- 
.../411.image-recognition/python/requirements.txt | 1 - .../411.image-recognition/python/requirements.txt.3.6 | 1 + .../411.image-recognition/python/requirements.txt.3.7 | 1 + .../411.image-recognition/python/requirements.txt.3.8 | 1 + 5 files changed, 6 insertions(+), 4 deletions(-) create mode 100644 benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 diff --git a/benchmarks/400.inference/411.image-recognition/python/package.sh b/benchmarks/400.inference/411.image-recognition/python/package.sh index 1133cbac..41c07ac7 100644 --- a/benchmarks/400.inference/411.image-recognition/python/package.sh +++ b/benchmarks/400.inference/411.image-recognition/python/package.sh @@ -9,11 +9,11 @@ cd $1 rm -rf external find . -type d -name "tests" -exec rm -rf {} + find . -type d -name "test" -exec rm -rf {} + -find . -type d -name "bin" -exec rm -rf {} + +find . -type d -name "bin" -not -path "*/torch/*" -exec rm -rf {} + # cleaning -find -name "*.so" -not -path "*/PIL/*" | xargs strip -find -name "*.so.*" -not -path "*/PIL/*" | xargs strip +find -name "*.so" -not -path "*/PIL/*" -not -path "*/Pillow.libs/*" | xargs strip +find -name "*.so.*" -not -path "*/PIL/*" -not -path "*/Pillow.libs/*" | xargs strip rm -r pip > /dev/null rm -r pip-* > /dev/null diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt b/benchmarks/400.inference/411.image-recognition/python/requirements.txt index 0deb86c1..3c51d232 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt @@ -2,6 +2,5 @@ #torchvision==0.4.0+cpu #https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl #torch==1.0.1.post2+cpu -Pillow==6.1 torchvision==0.2.1 numpy==1.16 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 index 
5f270c60..c09a9adf 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 @@ -1 +1,2 @@ +Pillow==6.1 https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp36-cp36m-linux_x86_64.whl diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 index 440811d5..330534cd 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 @@ -1 +1,2 @@ +Pillow==6.1 https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 new file mode 100644 index 00000000..07dd466c --- /dev/null +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 @@ -0,0 +1 @@ +https://download.pytorch.org/whl/cpu/torch-1.4.0%2Bcpu-cp38-cp38-linux_x86_64.whl From b150399a36de7b91f5d3b3efc9da5172cd3903b4 Mon Sep 17 00:00:00 2001 From: mcopik Date: Tue, 12 Oct 2021 02:44:23 +0200 Subject: [PATCH 035/140] Fix linting issues --- sebs/aws/aws.py | 4 +- sebs/azure/azure.py | 4 +- sebs/benchmark.py | 5 +- sebs/config.py | 10 +-- sebs/faas/system.py | 4 +- sebs/gcp/gcp.py | 4 +- sebs/local/local.py | 4 +- sebs/openwhisk/__init__.py | 6 +- sebs/openwhisk/config.py | 8 +- sebs/openwhisk/minio.py | 17 ++--- sebs/openwhisk/openwhisk.py | 141 +++++++++++++++++++++++------------- sebs/openwhisk/triggers.py | 18 +++-- sebs/sebs.py | 9 ++- 13 files changed, 144 insertions(+), 90 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 9c644666..6f20b3d6 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -122,7 +122,9 @@ def get_storage(self, replace_existing: bool = False) -> 
PersistentStorage: benchmark: benchmark name """ - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", "requirements.txt", ".python_packages"], diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index 9b5b5a5e..5949ef82 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -114,7 +114,9 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: # - function.json # host.json # requirements.txt/package.json - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: # In previous step we ran a Docker container which installed packages # Python packages are in .python_packages because this is expected by Azure diff --git a/sebs/benchmark.py b/sebs/benchmark.py index b00270cc..c3e9c168 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -477,7 +477,10 @@ def build( self.add_deployment_package(self._output_dir) self.install_dependencies(self._output_dir) self._code_location, self._code_size = deployment_build_step( - os.path.abspath(self._output_dir), self.language_name, self.language_version, self.benchmark + os.path.abspath(self._output_dir), + self.language_name, + self.language_version, + self.benchmark, ) self.logging.info( ( diff --git a/sebs/config.py b/sebs/config.py index 54d9c90d..331da55e 100644 --- a/sebs/config.py +++ b/sebs/config.py @@ -30,12 +30,8 @@ def supported_language_versions(self, deployment_name: str, language_name: str) "base_images" ].keys() - def benchmark_base_images( - self, deployment_name: str, language_name: str - ) -> Dict[str, str]: - return 
self._system_config[deployment_name]["languages"][language_name][ - "base_images" - ] - + def benchmark_base_images(self, deployment_name: str, language_name: str) -> Dict[str, str]: + return self._system_config[deployment_name]["languages"][language_name]["base_images"] + def username(self, deployment_name: str, language_name: str) -> str: return self._system_config[deployment_name]["languages"][language_name]["username"] diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 01574692..5b352be7 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -104,7 +104,9 @@ def get_storage(self, replace_existing: bool) -> PersistentStorage: """ @abstractmethod - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: pass @abstractmethod diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index 6fd95e0e..f2c8076e 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -130,7 +130,9 @@ def format_function_name(func_name: str) -> str: :return: path to packaged code and its size """ - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", ".python_packages"], diff --git a/sebs/local/local.py b/sebs/local/local.py index b93cbaee..712e0d96 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -115,7 +115,9 @@ def shutdown(self): benchmark: benchmark name """ - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: CONFIG_FILES = { "python": ["handler.py", 
"requirements.txt", ".python_packages"], diff --git a/sebs/openwhisk/__init__.py b/sebs/openwhisk/__init__.py index 875ddd59..9fbfc400 100644 --- a/sebs/openwhisk/__init__.py +++ b/sebs/openwhisk/__init__.py @@ -1,3 +1,3 @@ -from .openwhisk import OpenWhisk # noqa -from .config import OpenWhiskConfig # noqa -from .minio import Minio # noqa +from .openwhisk import OpenWhisk # noqa +from .config import OpenWhiskConfig # noqa +from .minio import Minio # noqa diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index e8d673e1..8e19bc1f 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -4,7 +4,6 @@ class OpenWhiskCredentials(Credentials): - @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Credentials: return OpenWhiskCredentials() @@ -14,7 +13,6 @@ def serialize(self) -> dict: class OpenWhiskResources(Resources): - @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resources: return OpenWhiskResources() @@ -32,9 +30,9 @@ def __init__(self, config: dict, cache: Cache): super().__init__() self._credentials = OpenWhiskCredentials() self._resources = OpenWhiskResources() - self.name = config['name'] - self.shutdownStorage = config['shutdownStorage'] - self.removeCluster = config['removeCluster'] + self.name = config["name"] + self.shutdownStorage = config["shutdownStorage"] + self.removeCluster = config["removeCluster"] self.cache = cache @property diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index 93124626..7fe0c55d 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -1,12 +1,11 @@ import sebs.local.storage -from typing import List, Tuple, Any +from typing import List, Any import secrets -import docker.errors +import docker from sebs.cache import Cache class Minio(sebs.local.storage.Minio): - @staticmethod def deployment_name() -> str: return "openwhisk" @@ -21,7 +20,7 @@ def deployment_name() -> str: location = 
"openwhiskBenchmark" connection: Any - def __init__(self,docker_client: docker.client, cache_client: Cache, replace_existing: bool): + def __init__(self, docker_client: docker.client, cache_client: Cache, replace_existing: bool): super(Minio, self).__init__(docker_client, cache_client, replace_existing) self.start() self.connection = self.get_connection() @@ -36,9 +35,9 @@ def startMinio(self): self.logging.info("Minio container already exists") envs = self._storage_container.attrs["Config"]["Env"] if isinstance(envs, (tuple, list)): - envs = dict([i.split('=', 1) for i in envs]) - self._access_key = envs['MINIO_ACCESS_KEY'] - self._secret_key = envs['MINIO_SECRET_KEY'] + envs = dict([i.split("=", 1) for i in envs]) + self._access_key = envs["MINIO_ACCESS_KEY"] + self._secret_key = envs["MINIO_SECRET_KEY"] except docker.errors.NotFound: self.logging.info("Minio container does not exists, starting") self._access_key = secrets.token_urlsafe(32) @@ -54,7 +53,7 @@ def startMinio(self): stdout=True, stderr=True, detach=True, - name="minio" + name="minio", ) self.logging.info("ACCESS_KEY={}".format(self._access_key)) @@ -64,4 +63,4 @@ def startMinio(self): self._url = "{IPAddress}:{Port}".format( IPAddress=networks["bridge"]["IPAddress"], Port=self.port ) - self.logging.info("Minio runs at {}".format(self._url)) \ No newline at end of file + self.logging.info("Minio runs at {}".format(self._url)) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 17e00a51..a207d093 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -2,7 +2,7 @@ import os import shutil import subprocess -from typing import Dict, List, Tuple +from typing import cast, Dict, List, Tuple, Type import docker @@ -22,8 +22,14 @@ class OpenWhisk(System): _config: OpenWhiskConfig storage: Minio - def __init__(self, system_config: SeBSConfig, config: OpenWhiskConfig, cache_client: Cache, - docker_client: docker.client, logger_handlers: LoggingHandlers): + def 
__init__( + self, + system_config: SeBSConfig, + config: OpenWhiskConfig, + cache_client: Cache, + docker_client: docker.client, + logger_handlers: LoggingHandlers, + ): super().__init__(system_config, cache_client, docker_client) self._config = config self.logging_handlers = logger_handlers @@ -42,10 +48,11 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: return self.storage def shutdown(self) -> None: - if self.storage and self.config.shutdown_storage: + if self.storage and self.config.shutdownStorage: self.storage.stop() if self.config.removeCluster: - from tools.openwhisk_preparation import delete_cluster + from tools.openwhisk_preparation import delete_cluster # type: ignore + delete_cluster() @staticmethod @@ -61,56 +68,65 @@ def function_type() -> "Type[Function]": return OpenwhiskFunction def benchmark_base_image(self, benchmark: str, language_name: str, language_version: str): - return f'spcleth/serverless-benchmarks:{self.name()}-{benchmark}-{language_name}-{language_version}' + return ( + f"spcleth/serverless-benchmarks:{self.name()}-{benchmark}-" + f"{language_name}-{language_version}" + ) - def build_base_image(self, directory: str, language_name: str, language_version: str, benchmark: str): - build_dir = os.path.join(directory, 'docker') + def build_base_image( + self, directory: str, language_name: str, language_version: str, benchmark: str + ): + build_dir = os.path.join(directory, "docker") os.makedirs(build_dir) shutil.copy( - os.path.join(PROJECT_DIR, 'docker', f'Dockerfile.run.{self.name()}.{language_name}'), - os.path.join(build_dir, 'Dockerfile')) + os.path.join(PROJECT_DIR, "docker", f"Dockerfile.run.{self.name()}.{language_name}"), + os.path.join(build_dir, "Dockerfile"), + ) for fn in os.listdir(directory): - if fn not in ('index.js', '__main__.py'): + if fn not in ("index.js", "__main__.py"): file = os.path.join(directory, fn) shutil.move(file, build_dir) - with open(os.path.join(build_dir, '.dockerignore'), 
'w') as f: - f.write('Dockerfile') + with open(os.path.join(build_dir, ".dockerignore"), "w") as f: + f.write("Dockerfile") - builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[language_version] + builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[ + language_version + ] tag = self.benchmark_base_image(benchmark, language_name, language_version) image, _ = self.docker_client.images.build( tag=tag, path=build_dir, buildargs={ - 'BASE_IMAGE': builder_image, - }) + "BASE_IMAGE": builder_image, + }, + ) # shutil.rmtree(build_dir) - def package_code(self, directory: str, language_name: str, language_version: str, benchmark: str) -> Tuple[str, int]: - node = 'nodejs' - node_handler = 'index.js' - CONFIG_FILES = { - 'python': ['__main__.py'], - node: [node_handler] - } + def package_code( + self, directory: str, language_name: str, language_version: str, benchmark: str + ) -> Tuple[str, int]: + node = "nodejs" + node_handler = "index.js" + CONFIG_FILES = {"python": ["__main__.py"], node: [node_handler]} package_config = CONFIG_FILES[language_name] - with open(os.path.join(directory, 'minioConfig.json'), 'w+') as minio_config: - storage = self.get_storage() + with open(os.path.join(directory, "minioConfig.json"), "w+") as minio_config: + storage = cast(Minio, self.get_storage()) minio_config_json = { - 'access_key': storage._access_key, - 'secret_key': storage._secret_key, - 'url': storage._url, + "access_key": storage._access_key, + "secret_key": storage._secret_key, + "url": storage._url, } minio_config.write(json.dumps(minio_config_json)) self.build_base_image(directory, language_name, language_version, benchmark) os.chdir(directory) benchmark_archive = os.path.join(directory, f"{benchmark}.zip") - subprocess.run(['zip', benchmark_archive] + package_config, + subprocess.run( + ["zip", benchmark_archive] + package_config, stdout=subprocess.DEVNULL, ) self.logging.info(f"Created {benchmark_archive} 
archive") @@ -137,18 +153,30 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk except (subprocess.CalledProcessError, FileNotFoundError) as e: self.logging.error(f"ERROR: {e}") try: - docker_image = self.benchmark_base_image(code_package.benchmark, code_package.language_name, - code_package.language_version) - subprocess.run(['wsk', '-i', 'action', 'create', func_name, - '--web', 'true', - '--docker', docker_image, - '--memory', str(code_package.benchmark_config.memory), - code_package.code_location - ], - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - check=True, - ) + docker_image = self.benchmark_base_image( + code_package.benchmark, + code_package.language_name, + code_package.language_version, + ) + subprocess.run( + [ + "wsk", + "-i", + "action", + "create", + func_name, + "--web", + "true", + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + code_package.code_location, + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) except (subprocess.CalledProcessError, FileNotFoundError) as e: self.logging.error(f"Cannot create action {func_name}, reason: {e}") exit(1) @@ -165,16 +193,28 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk def update_function(self, function: Function, code_package: Benchmark): with open(code_package.code_location) as f: image_tag = f.read() - subprocess.run(['wsk', '-i', 'action', 'update', function.name, - '--docker', image_tag, - '--memory', str(code_package.benchmark_config.memory)], - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - check=True, - ) + subprocess.run( + [ + "wsk", + "-i", + "action", + "update", + function.name, + "--docker", + image_tag, + "--memory", + str(code_package.benchmark_config.memory), + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) def default_function_name(self, code_package: Benchmark) -> str: - return 
f"{code_package.benchmark}-{code_package.language_name}-{code_package.benchmark_config.memory}" + return ( + f"{code_package.benchmark}-{code_package.language_name}-" + f"{code_package.benchmark_config.memory}" + ) def enforce_cold_start(self, functions: List[Function], code_package: Benchmark): raise NotImplementedError() @@ -197,8 +237,5 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) else: raise RuntimeError("Not supported!") - def shutdown(self) -> None: - super().shutdown() - def cached_function(self, function: Function): pass diff --git a/sebs/openwhisk/triggers.py b/sebs/openwhisk/triggers.py index 706eb779..67885c5f 100644 --- a/sebs/openwhisk/triggers.py +++ b/sebs/openwhisk/triggers.py @@ -25,7 +25,9 @@ def __add_params__(command: List[str], payload: dict) -> List[str]: return command def sync_invoke(self, payload: dict) -> ExecutionResult: - command = self.__add_params__(['wsk', '-i', 'action', 'invoke', '--result', self.fname], payload) + command = self.__add_params__( + ["wsk", "-i", "action", "invoke", "--result", self.fname], payload + ) error = None try: self.logging.info(f"Executing {command}") @@ -37,7 +39,7 @@ def sync_invoke(self, payload: dict) -> ExecutionResult: check=True, ) end = datetime.datetime.now() - response = response.stdout.decode("utf-8") + parsed_response = response.stdout.decode("utf-8") except (subprocess.CalledProcessError, FileNotFoundError) as e: end = datetime.datetime.now() error = e @@ -48,7 +50,7 @@ def sync_invoke(self, payload: dict) -> ExecutionResult: openwhisk_result.stats.failure = True return openwhisk_result - return_content = json.loads(response) + return_content = json.loads(parsed_response) self.logging.info(f"{return_content}") openwhisk_result.parse_benchmark_output(return_content) @@ -75,10 +77,14 @@ class HTTPTrigger(Trigger): def __init__(self, fname: str): super().__init__() self.fname = fname - response = subprocess.run(['wsk', '-i', 'action', 'get', fname, '--url'], - 
stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=True) + response = subprocess.run( + ["wsk", "-i", "action", "get", fname, "--url"], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) stdout = response.stdout.decode("utf-8") - self.url = stdout.strip().split('\n')[-1] + '.json' + self.url = stdout.strip().split("\n")[-1] + ".json" @staticmethod def typename() -> str: diff --git a/sebs/sebs.py b/sebs/sebs.py index 69a9b98b..a3be0462 100644 --- a/sebs/sebs.py +++ b/sebs/sebs.py @@ -15,7 +15,6 @@ from sebs.experiments.config import Config as ExperimentConfig from sebs.openwhisk import OpenWhisk -from sebs.openwhisk.config import OpenWhiskConfig from sebs.experiments import Experiment @@ -81,7 +80,13 @@ def get_deployment( deployment_config: Optional[Config] = None, ) -> FaaSSystem: name = config["name"] - implementations = {"aws": AWS, "azure": Azure, "gcp": GCP, "local": Local, "openwhisk": OpenWhisk} + implementations = { + "aws": AWS, + "azure": Azure, + "gcp": GCP, + "local": Local, + "openwhisk": OpenWhisk, + } if name not in implementations: raise RuntimeError("Deployment {name} not supported!".format(name=name)) From 28ebef44748c974c7123ed4361c28dedd2edc8b3 Mon Sep 17 00:00:00 2001 From: mcopik Date: Tue, 12 Oct 2021 14:54:20 +0200 Subject: [PATCH 036/140] Bump Python orb version in CircleCI --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7edfb97a..59e4f30b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,7 +1,7 @@ version: 2.1 orbs: - python: circleci/python@0.2.1 + python: circleci/python@1.4.0 jobs: linting: From dfabe72e10d2174ae62ddd5dcc0bdd957943f9fe Mon Sep 17 00:00:00 2001 From: mcopik Date: Tue, 12 Oct 2021 15:01:41 +0200 Subject: [PATCH 037/140] Install curl-config in CircleCI --- .circleci/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 
59e4f30b..af76abb2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,6 +10,10 @@ jobs: - checkout - restore_cache: key: deps1-{{ .Branch }}-{{ checksum "requirements.txt" }} + - run: + command: | + sudo apt update && sudo apt install libcurl4-openssl-dev + name: Install curl-config from Ubuntu APT - run: command: | python3 install.py --dont-rebuild-docker-images --no-local From 6a990d710318e8de0b4c25735c8fb8cf26a396eb Mon Sep 17 00:00:00 2001 From: mcopik Date: Tue, 12 Oct 2021 23:18:48 +0200 Subject: [PATCH 038/140] Update README --- README.md | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 9b866d4b..2b2ac93d 100644 --- a/README.md +++ b/README.md @@ -22,26 +22,35 @@ to learn how to provide SeBS with cloud credentials. The documentation describes in detail [the design and implementation of our tool](docs/design.md), and see the [modularity](docs/modularity.md) section to learn how SeBS can be extended with new platforms, benchmarks, and experiments. +Find out more about our project in [a paper summary](mcopik.github.io/projects/sebs/). -SeBS can be used with our Docker image `spcleth/serverless-benchmarks:latest`, or the tool -can be [installed locally](#installation). +Do you have further questions not answered by our documentation? +Did you encounter troubles with installing and using SeBS? +Or do you want to use SeBS in your work and you need new features? +Feel free to reach us through GitHub issues or by writing to . ### Paper -When using SeBS, please cite our Middleware '21 paper (link coming soon!). +When using SeBS, please cite our [Middleware '21 paper](https://dl.acm.org/doi/10.1145/3464298.3476133). An extended version of our paper is [available on arXiv](https://arxiv.org/abs/2012.15592), and you can find more details about research work [in this paper summary](mcopik.github.io/projects/sebs/). 
You can cite our software repository as well, using the citation button on the right. ``` @inproceedings{copik2021sebs, - author={Marcin Copik and Grzegorz Kwasniewski and Maciej Besta and Michal Podstawski and Torsten Hoefler}, - title={SeBS: A Serverless Benchmark Suite for Function-as-a-Service Computing}, + author = {Copik, Marcin and Kwasniewski, Grzegorz and Besta, Maciej and Podstawski, Michal and Hoefler, Torsten}, + title = {SeBS: A Serverless Benchmark Suite for Function-as-a-Service Computing}, year = {2021}, + isbn = {9781450385343}, publisher = {Association for Computing Machinery}, + address = {New York, NY, USA}, url = {https://doi.org/10.1145/3464298.3476133}, doi = {10.1145/3464298.3476133}, booktitle = {Proceedings of the 22nd International Middleware Conference}, + pages = {64–78}, + numpages = {15}, + keywords = {benchmark, serverless, FaaS, function-as-a-service}, + location = {Qu\'{e}bec city, Canada}, series = {Middleware '21} } ``` From d6c8f0de3c78bab1db417b2ba0b44a3298b7dcb7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 21 Jan 2022 21:51:07 +0100 Subject: [PATCH 039/140] Linting fixes --- sebs/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sebs/utils.py b/sebs/utils.py index 707fa07e..2f69e8a6 100644 --- a/sebs/utils.py +++ b/sebs/utils.py @@ -5,7 +5,7 @@ import subprocess import sys import uuid -from typing import List, Optional +from typing import List, Optional, Union PROJECT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir) PACK_CODE_APP = "pack_code_{}.sh" @@ -146,7 +146,7 @@ def __init__(self, verbose: bool = False, filename: Optional[str] = None): logging_format = "%(asctime)s,%(msecs)d %(levelname)s %(name)s: %(message)s" logging_date_format = "%H:%M:%S" formatter = logging.Formatter(logging_format, logging_date_format) - self.handlers = [] + self.handlers: List[Union[logging.StreamHandler, logging.FileHandler]] = [] # Add stdout output if verbose: From 
3ae12a9872a55a9b664ca4c622fbfb00ffc4f50b Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sat, 22 Jan 2022 01:10:07 +0100 Subject: [PATCH 040/140] [whisk] Update configuration of OpenWhisk --- config/example.json | 7 +++++++ config/openwhisk.json | 4 +++- sebs/openwhisk/config.py | 8 ++++++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/config/example.json b/config/example.json index 0690b85e..2ca9a9ee 100644 --- a/config/example.json +++ b/config/example.json @@ -1,5 +1,6 @@ { "experiments": { + "deployment": "openwhisk", "update_code": false, "update_storage": false, "download_results": false, @@ -52,6 +53,12 @@ "region": "europe-west1", "project_name": "", "credentials": "" + }, + "openwhisk": { + "shutdownStorage": false, + "removeCluster": false, + "wskBypassSecurity": "true", + "wskExec": "wsk" } } } diff --git a/config/openwhisk.json b/config/openwhisk.json index b58698e9..fd58f7f0 100644 --- a/config/openwhisk.json +++ b/config/openwhisk.json @@ -12,6 +12,8 @@ "deployment": { "name": "openwhisk", "shutdownStorage": false, - "removeCluster": false + "removeCluster": false, + "wskBypassSecurity": "true", + "wskExec": "wsk" } } diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 8e19bc1f..0c9f0e46 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -30,9 +30,10 @@ def __init__(self, config: dict, cache: Cache): super().__init__() self._credentials = OpenWhiskCredentials() self._resources = OpenWhiskResources() - self.name = config["name"] self.shutdownStorage = config["shutdownStorage"] self.removeCluster = config["removeCluster"] + self.wsk_exec = config["wskExec"] + self.wsk_bypass_security = config["wskBypassSecurity"] self.cache = cache @property @@ -52,13 +53,16 @@ def serialize(self) -> dict: "name": "openwhisk", "shutdownStorage": self.shutdownStorage, "removeCluster": self.removeCluster, + "wskExec": self.wsk_exec, + "wskBypassSecurity": self.wsk_bypass_security, "credentials": 
self._credentials.serialize(), "resources": self._resources.serialize(), } @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Config: - res = OpenWhiskConfig(config, cache) + cached_config = cache.get_config("openwhisk") + res = OpenWhiskConfig(config, cached_config) res.logging_handlers = handlers return res From 9ea0dd9d18db307e90c2d0d147114df81327b1e0 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sat, 22 Jan 2022 02:40:53 +0100 Subject: [PATCH 041/140] [whisk] Unify usage of OpenWhisk wsk --- sebs/openwhisk/openwhisk.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index a207d093..05308700 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -48,7 +48,7 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: return self.storage def shutdown(self) -> None: - if self.storage and self.config.shutdownStorage: + if hasattr(self, "storage") and self.config.shutdownStorage: self.storage.stop() if self.config.removeCluster: from tools.openwhisk_preparation import delete_cluster # type: ignore @@ -67,6 +67,12 @@ def typename(): def function_type() -> "Type[Function]": return OpenwhiskFunction + def get_wsk_cmd(self) -> List[str]: + cmd = [self.config.wsk_exec] + if self.config.wsk_bypass_security: + cmd.append("-i") + return cmd + def benchmark_base_image(self, benchmark: str, language_name: str, language_version: str): return ( f"spcleth/serverless-benchmarks:{self.name()}-{benchmark}-" @@ -95,6 +101,7 @@ def build_base_image( language_version ] tag = self.benchmark_base_image(benchmark, language_name, language_version) + self.logging.info(f"Build the benchmark base image {tag}.") image, _ = self.docker_client.images.build( tag=tag, path=build_dir, @@ -131,13 +138,14 @@ def package_code( ) self.logging.info(f"Created {benchmark_archive} archive") bytes_size = 
os.path.getsize(benchmark_archive) + self.logging.info("Zip archive size {:2f} MB".format(bytes_size/ 1024.0 / 1024.0)) return benchmark_archive, bytes_size def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": self.logging.info("Creating action on openwhisk") try: actions = subprocess.run( - "wsk -i action list".split(), + [*self.get_wsk_cmd(), "action", "list"], stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, ) @@ -160,8 +168,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk ) subprocess.run( [ - "wsk", - "-i", + *self.get_wsk_cmd(), "action", "create", func_name, @@ -195,8 +202,7 @@ def update_function(self, function: Function, code_package: Benchmark): image_tag = f.read() subprocess.run( [ - "wsk", - "-i", + *self.get_wsk_cmd(), "action", "update", function.name, From 2515cf8f5600b6795a1c86c47c6ec9da2601ac11 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sat, 22 Jan 2022 14:28:55 +0100 Subject: [PATCH 042/140] [faas] Clarify logging information --- sebs/faas/system.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 5b352be7..1fcc80a4 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -186,12 +186,18 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) ) # is the function up-to-date? if function.code_package_hash != code_package.hash or rebuilt: - self.logging.info( - f"Cached function {func_name} with hash " - f"{function.code_package_hash} is not up to date with " - f"current build {code_package.hash} in " - f"{code_location}, updating cloud version!" - ) + if function.code_package_hash != code_package.hash: + self.logging.info( + f"Cached function {func_name} with hash " + f"{function.code_package_hash} is not up to date with " + f"current build {code_package.hash} in " + f"{code_location}, updating cloud version!" 
+ ) + if rebuilt: + self.logging.info( + f"Enforcing rebuild and update of of cached function " + f"{func_name} with hash {function.code_package_hash}." + ) self.update_function(function, code_package) function.code_package_hash = code_package.hash function.updated_code = True From e4c4c0d389d12e83656df3be5f0405808f6a1665 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sat, 22 Jan 2022 18:42:18 +0100 Subject: [PATCH 043/140] [whisk] Fix code update procedure --- sebs/openwhisk/openwhisk.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 05308700..53a5c47a 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -138,7 +138,7 @@ def package_code( ) self.logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) - self.logging.info("Zip archive size {:2f} MB".format(bytes_size/ 1024.0 / 1024.0)) + self.logging.info("Zip archive size {:2f} MB".format(bytes_size / 1024.0 / 1024.0)) return benchmark_archive, bytes_size def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": @@ -198,8 +198,11 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk return res def update_function(self, function: Function, code_package: Benchmark): - with open(code_package.code_location) as f: - image_tag = f.read() + docker_image = self.benchmark_base_image( + code_package.benchmark, + code_package.language_name, + code_package.language_version, + ) subprocess.run( [ *self.get_wsk_cmd(), @@ -207,9 +210,10 @@ def update_function(self, function: Function, code_package: Benchmark): "update", function.name, "--docker", - image_tag, + docker_image, "--memory", str(code_package.benchmark_config.memory), + code_package.code_location, ], stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, From 35ff27be715e71828eacdbfc7524fcf04e78f400 Mon Sep 17 00:00:00 2001 From: Marcin Copik 
Date: Sat, 22 Jan 2022 21:07:12 +0100 Subject: [PATCH 044/140] [whisk] Reorganize trigger structure of OpenWhisk --- sebs/openwhisk/openwhisk.py | 18 +++++++++++--- sebs/openwhisk/triggers.py | 47 +++++++++++++++++++------------------ 2 files changed, 39 insertions(+), 26 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 53a5c47a..1d39ebd3 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -191,7 +191,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) # Add LibraryTrigger to a new function - trigger = LibraryTrigger(func_name) + trigger = LibraryTrigger(func_name, self.get_wsk_cmd()) trigger.logging_handlers = self.logging_handlers res.add_trigger(trigger) @@ -243,9 +243,21 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) if trigger_type == Trigger.TriggerType.LIBRARY: return function.triggers(Trigger.TriggerType.LIBRARY)[0] elif trigger_type == Trigger.TriggerType.HTTP: - return HTTPTrigger(function.name) + response = subprocess.run( + [*self.get_wsk_cmd(), "action", "get", function.name, "--url"], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + stdout = response.stdout.decode("utf-8") + url = stdout.strip().split("\n")[-1] + ".json" + return HTTPTrigger(function.name, url) else: raise RuntimeError("Not supported!") def cached_function(self, function: Function): - pass + for trigger in function.triggers(Trigger.TriggerType.LIBRARY): + trigger.logging_handlers = self.logging_handlers + cast(LibraryTrigger, trigger).wsk_cmd = self.get_wsk_cmd() + for trigger in function.triggers(Trigger.TriggerType.HTTP): + trigger.logging_handlers = self.logging_handlers diff --git a/sebs/openwhisk/triggers.py b/sebs/openwhisk/triggers.py index 67885c5f..bfdebd29 100644 --- a/sebs/openwhisk/triggers.py +++ b/sebs/openwhisk/triggers.py @@ -8,29 
+8,38 @@ class LibraryTrigger(Trigger): - def __init__(self, fname: str): + def __init__(self, fname: str, wsk_cmd: Optional[List[str]] = None): super().__init__() self.fname = fname + if wsk_cmd: + self._wsk_cmd = [*wsk_cmd, "action", "invoke", "--result", self.fname] @staticmethod def trigger_type() -> "Trigger.TriggerType": return Trigger.TriggerType.LIBRARY + @property + def wsk_cmd(self) -> List[str]: + assert self._wsk_cmd + return self._wsk_cmd + + @wsk_cmd.setter + def wsk_cmd(self, wsk_cmd: List[str]): + self._wsk_cmd = [*wsk_cmd, "action", "invoke", "--result", self.fname] + @staticmethod - def __add_params__(command: List[str], payload: dict) -> List[str]: + def get_command(payload: dict) -> List[str]: + params = [] for key, value in payload.items(): - command.append("--param") - command.append(key) - command.append(json.dumps(value)) - return command + params.append("--param") + params.append(key) + params.append(json.dumps(value)) + return params def sync_invoke(self, payload: dict) -> ExecutionResult: - command = self.__add_params__( - ["wsk", "-i", "action", "invoke", "--result", self.fname], payload - ) + command = self.wsk_cmd + self.get_command(payload) error = None try: - self.logging.info(f"Executing {command}") begin = datetime.datetime.now() response = subprocess.run( command, @@ -51,8 +60,6 @@ def sync_invoke(self, payload: dict) -> ExecutionResult: return openwhisk_result return_content = json.loads(parsed_response) - self.logging.info(f"{return_content}") - openwhisk_result.parse_benchmark_output(return_content) return openwhisk_result @@ -74,17 +81,10 @@ def typename() -> str: class HTTPTrigger(Trigger): - def __init__(self, fname: str): + def __init__(self, fname: str, url: str): super().__init__() self.fname = fname - response = subprocess.run( - ["wsk", "-i", "action", "get", fname, "--url"], - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - check=True, - ) - stdout = response.stdout.decode("utf-8") - self.url = 
stdout.strip().split("\n")[-1] + ".json" + self.url = url @staticmethod def typename() -> str: @@ -96,6 +96,7 @@ def trigger_type() -> Trigger.TriggerType: def sync_invoke(self, payload: dict) -> ExecutionResult: self.logging.debug(f"Invoke function {self.url}") + print("Test") return self._http_invoke(payload, self.url, False) def async_invoke(self, payload: dict) -> concurrent.futures.Future: @@ -104,8 +105,8 @@ def async_invoke(self, payload: dict) -> concurrent.futures.Future: return fut def serialize(self) -> dict: - return {"type": "HTTP", "fname": self.fname} + return {"type": "HTTP", "fname": self.fname, "url": self.url} @staticmethod def deserialize(obj: dict) -> Trigger: - return HTTPTrigger(obj["fname"]) + return HTTPTrigger(obj["fname"], obj["url"]) From 0b4c5b3ed9b4050b1a0fdb84a22f9c39f0e4cca9 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 23 Jan 2022 00:48:03 +0100 Subject: [PATCH 045/140] [whisk] Update the format of Docker image for benchmark --- sebs/config.py | 8 ++++++++ sebs/openwhisk/openwhisk.py | 22 ++++++++++------------ 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/sebs/config.py b/sebs/config.py index 331da55e..32c2e812 100644 --- a/sebs/config.py +++ b/sebs/config.py @@ -33,5 +33,13 @@ def supported_language_versions(self, deployment_name: str, language_name: str) def benchmark_base_images(self, deployment_name: str, language_name: str) -> Dict[str, str]: return self._system_config[deployment_name]["languages"][language_name]["base_images"] + def benchmark_image_name( + self, system: str, benchmark: str, language_name: str, language_version: str + ): + return ( + f"{self.docker_repository()}:run.{system}.{benchmark}." 
+ f"{language_name}-{language_version}" + ) + def username(self, deployment_name: str, language_name: str) -> str: return self._system_config[deployment_name]["languages"][language_name]["username"] diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 1d39ebd3..75fdfa5b 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -73,12 +73,6 @@ def get_wsk_cmd(self) -> List[str]: cmd.append("-i") return cmd - def benchmark_base_image(self, benchmark: str, language_name: str, language_version: str): - return ( - f"spcleth/serverless-benchmarks:{self.name()}-{benchmark}-" - f"{language_name}-{language_version}" - ) - def build_base_image( self, directory: str, language_name: str, language_version: str, benchmark: str ): @@ -100,7 +94,9 @@ def build_base_image( builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[ language_version ] - tag = self.benchmark_base_image(benchmark, language_name, language_version) + tag = self.system_config.benchmark_image_name( + self.name(), benchmark, language_name, language_version + ) self.logging.info(f"Build the benchmark base image {tag}.") image, _ = self.docker_client.images.build( tag=tag, @@ -158,10 +154,11 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk ) self.logging.info(f"Function {func_name} already exist") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - self.logging.error(f"ERROR: {e}") + except (subprocess.CalledProcessError, FileNotFoundError): + # grep will return error when there are no entries try: - docker_image = self.benchmark_base_image( + docker_image = self.system_config.benchmark_image_name( + self.name(), code_package.benchmark, code_package.language_name, code_package.language_version, @@ -198,7 +195,8 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk return res def update_function(self, function: Function, code_package: Benchmark): - docker_image 
= self.benchmark_base_image( + docker_image = self.system_config.benchmark_image_name( + self.name(), code_package.benchmark, code_package.language_name, code_package.language_version, @@ -223,7 +221,7 @@ def update_function(self, function: Function, code_package: Benchmark): def default_function_name(self, code_package: Benchmark) -> str: return ( f"{code_package.benchmark}-{code_package.language_name}-" - f"{code_package.benchmark_config.memory}" + f"{code_package.language_version}" ) def enforce_cold_start(self, functions: List[Function], code_package: Benchmark): From 83a3346bc95fe238e82a297733713d1d72b5eb53 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 23 Jan 2022 00:50:36 +0100 Subject: [PATCH 046/140] [whisk] Update OpenWhisk Python versions --- config/systems.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/config/systems.json b/config/systems.json index b31a5d23..da9494be 100644 --- a/config/systems.json +++ b/config/systems.json @@ -137,10 +137,11 @@ "languages": { "python": { "base_images": { - "3.6": "openwhisk/actionloop-python-v3.6-ai", - "2.7": "openwhisk/python2action" + "3.6": "openwhisk/python3action", + "3.7": "openwhisk/action-python-v3.7", + "3.9": "openwhisk/action-python-v3.9" }, - "versions": ["3.6", "2.7"], + "versions": ["3.6", "3.7", "3.9"], "images": [], "username": "docker_user", "deployment": { From fdcef76bd0f40e5629e05632788137380270506a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 23 Jan 2022 01:19:32 +0100 Subject: [PATCH 047/140] [whisk] Update OpenWhisk action when retrieving action with no cache --- sebs/openwhisk/openwhisk.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 75fdfa5b..973c7eb9 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -138,7 +138,7 @@ def package_code( return benchmark_archive, bytes_size def create_function(self, code_package: 
Benchmark, func_name: str) -> "OpenwhiskFunction": - self.logging.info("Creating action on openwhisk") + self.logging.info("Creating function as an action in OpenWhisk") try: actions = subprocess.run( [*self.get_wsk_cmd(), "action", "list"], @@ -154,6 +154,11 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk ) self.logging.info(f"Function {func_name} already exist") + res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) + # Update function - we don't know what version is stored + self.update_function(res, code_package) + self.logging.info(f"Retrieved OpenWhisk action {func_name}") + except (subprocess.CalledProcessError, FileNotFoundError): # grep will return error when there are no entries try: @@ -181,12 +186,12 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk stdout=subprocess.DEVNULL, check=True, ) + self.logging.info(f"Created new OpenWhisk action {func_name}") + res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) except (subprocess.CalledProcessError, FileNotFoundError) as e: self.logging.error(f"Cannot create action {func_name}, reason: {e}") exit(1) - res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) - # Add LibraryTrigger to a new function trigger = LibraryTrigger(func_name, self.get_wsk_cmd()) trigger.logging_handlers = self.logging_handlers From b9339608cabbff06c8f6847a6a9a76ef7b443579 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 23 Jan 2022 16:53:12 +0100 Subject: [PATCH 048/140] [whisk] Properly cache triggers --- sebs/openwhisk/openwhisk.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 973c7eb9..6e2d4753 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -254,7 +254,11 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) ) stdout = 
response.stdout.decode("utf-8") url = stdout.strip().split("\n")[-1] + ".json" - return HTTPTrigger(function.name, url) + trigger = HTTPTrigger(function.name, url) + trigger.logging_handlers = self.logging_handlers + function.add_trigger(trigger) + self.cache_client.update_function(function) + return trigger else: raise RuntimeError("Not supported!") From 479d2b93ea69e7b16a045c7feae264fa619fbe5a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 01:06:29 +0100 Subject: [PATCH 049/140] [whisk] Add missing deserialization of HTTPTrigger --- sebs/openwhisk/function.py | 4 ++-- sebs/openwhisk/triggers.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index ed4e7f18..719750e1 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -20,7 +20,7 @@ def serialize(self) -> dict: @staticmethod def deserialize(cached_config: dict) -> "OpenwhiskFunction": from sebs.faas.function import Trigger - from sebs.openwhisk.triggers import LibraryTrigger + from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger ret = OpenwhiskFunction( cached_config["name"], @@ -31,7 +31,7 @@ def deserialize(cached_config: dict) -> "OpenwhiskFunction": for trigger in cached_config["triggers"]: trigger_type = cast( Trigger, - {"Library": LibraryTrigger}.get(trigger["type"]), + {"Library": LibraryTrigger, "HTTP": HTTPTrigger}.get(trigger["type"]), ) assert trigger_type, "Unknown trigger type {}".format(trigger["type"]) ret.add_trigger(trigger_type.deserialize(trigger)) diff --git a/sebs/openwhisk/triggers.py b/sebs/openwhisk/triggers.py index bfdebd29..f0d8260b 100644 --- a/sebs/openwhisk/triggers.py +++ b/sebs/openwhisk/triggers.py @@ -96,7 +96,6 @@ def trigger_type() -> Trigger.TriggerType: def sync_invoke(self, payload: dict) -> ExecutionResult: self.logging.debug(f"Invoke function {self.url}") - print("Test") return self._http_invoke(payload, self.url, False) def async_invoke(self, 
payload: dict) -> concurrent.futures.Future: From 8b8ca2ad9ac518c501dec8a8f2bd6aa65f4e7b54 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 20:08:49 +0100 Subject: [PATCH 050/140] [system] Extend the interface of package building with flag signaling non-cached builds (first attempt) --- sebs/aws/aws.py | 7 ++++++- sebs/azure/azure.py | 7 ++++++- sebs/benchmark.py | 5 +++-- sebs/faas/system.py | 7 ++++++- sebs/gcp/gcp.py | 7 ++++++- sebs/local/local.py | 7 ++++++- 6 files changed, 33 insertions(+), 7 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 6f20b3d6..14974c87 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -123,7 +123,12 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: """ def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: CONFIG_FILES = { diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index 5949ef82..614f2bc3 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -115,7 +115,12 @@ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: # host.json # requirements.txt/package.json def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: # In previous step we ran a Docker container which installed packages diff --git a/sebs/benchmark.py b/sebs/benchmark.py index c3e9c168..a4c4f301 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -322,7 +322,7 @@ def install_dependencies(self, output_dir): self.logging.info( ( "Docker build image for {deployment} run in {language} " - "is not available, skipping" + "is not available, skipping." 
).format(deployment=self._deployment_name, language=self.language_name) ) else: @@ -447,7 +447,7 @@ def recalculate_code_size(self): return self._code_size def build( - self, deployment_build_step: Callable[[str, str, str, str], Tuple[str, int]] + self, deployment_build_step: Callable[[str, str, str, str, bool], Tuple[str, int]] ) -> Tuple[bool, str]: # Skip build if files are up to date and user didn't enforce rebuild @@ -481,6 +481,7 @@ def build( self.language_name, self.language_version, self.benchmark, + self.is_cached, ) self.logging.info( ( diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 1fcc80a4..18fddf56 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -105,7 +105,12 @@ def get_storage(self, replace_existing: bool) -> PersistentStorage: @abstractmethod def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: pass diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index 348f1d41..ade812f6 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -131,7 +131,12 @@ def format_function_name(func_name: str) -> str: """ def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: CONFIG_FILES = { diff --git a/sebs/local/local.py b/sebs/local/local.py index 712e0d96..6340b7f2 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -116,7 +116,12 @@ def shutdown(self): """ def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: CONFIG_FILES = { From 11dac0cd251511efa4968daa8337a333cc87ef4d Mon Sep 17 00:00:00 2001 From: Marcin 
Copik Date: Mon, 24 Jan 2022 20:09:49 +0100 Subject: [PATCH 051/140] [system] Extend global configuration with repository name --- sebs/config.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/sebs/config.py b/sebs/config.py index 32c2e812..be7f67c4 100644 --- a/sebs/config.py +++ b/sebs/config.py @@ -1,5 +1,5 @@ import json -from typing import Dict, List +from typing import Dict, List, Optional from sebs.utils import project_absolute_path @@ -34,12 +34,25 @@ def benchmark_base_images(self, deployment_name: str, language_name: str) -> Dic return self._system_config[deployment_name]["languages"][language_name]["base_images"] def benchmark_image_name( + self, + system: str, + benchmark: str, + language_name: str, + language_version: str, + registry: Optional[str] = None, + ) -> str: + + tag = self.benchmark_image_tag(system, benchmark, language_name, language_version) + repo_name = self.docker_repository() + if registry is not None: + return f"{registry}/{repo_name}:{tag}" + else: + return f"{repo_name}:{tag}" + + def benchmark_image_tag( self, system: str, benchmark: str, language_name: str, language_version: str - ): - return ( - f"{self.docker_repository()}:run.{system}.{benchmark}." 
- f"{language_name}-{language_version}" - ) + ) -> str: + return f"function.{system}.{benchmark}.{language_name}-{language_version}" def username(self, deployment_name: str, language_name: str) -> str: return self._system_config[deployment_name]["languages"][language_name]["username"] From 8cee2eb9ec78efd7f4f05a10c98632986722eb96 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 20:47:15 +0100 Subject: [PATCH 052/140] [whisk] Extend OpenWhisk config with Docker registry configuration --- sebs/openwhisk/config.py | 98 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 93 insertions(+), 5 deletions(-) diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 0c9f0e46..40f75fc7 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -2,6 +2,8 @@ from sebs.faas.config import Credentials, Resources, Config from sebs.utils import LoggingHandlers +from typing import cast, Optional + class OpenWhiskCredentials(Credentials): @staticmethod @@ -13,12 +15,84 @@ def serialize(self) -> dict: class OpenWhiskResources(Resources): + def __init__( + self, + registry: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + ): + super().__init__() + self._docker_registry = registry if registry != "" else None + self._docker_username = username if username != "" else None + self._docker_password = password if password != "" else None + + @staticmethod + def typename() -> str: + return "OpenWhisk.Credentials" + + @property + def docker_registry(self) -> Optional[str]: + return self._docker_registry + + @property + def docker_username(self) -> Optional[str]: + return self._docker_username + + @property + def docker_password(self) -> Optional[str]: + return self._docker_password + + @staticmethod + def initialize(dct: dict) -> Resources: + return OpenWhiskResources(dct["registry"], dct["username"], dct["password"]) + @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) 
-> Resources: - return OpenWhiskResources() + + cached_config = cache.get_config("openwhisk") + ret: OpenWhiskResources + # Load cached values + if ( + cached_config + and "resources" in cached_config + and "docker" in cached_config["resources"] + ): + ret = cast( + OpenWhiskResources, + OpenWhiskResources.initialize(cached_config["resources"]["docker"]), + ) + ret.logging_handlers = handlers + ret.logging.info("Using cached Docker registry for OpenWhisk") + # Check for new config + elif "docker_registry" in config: + ret = cast(OpenWhiskResources, OpenWhiskResources.initialize(config["docker_registry"])) + ret.logging.info("Using user-provided Docker registry for OpenWhisk.") + ret.logging_handlers = handlers + else: + ret.logging.info("Using default Docker registry for OpenWhisk.") + ret = OpenWhiskResources() + ret.logging_handlers = handlers + + return ret + + def update_cache(self, cache: Cache): + cache.update_config( + val=self.docker_registry, keys=["openwhisk", "resources", "docker", "registry"] + ) + cache.update_config( + val=self.docker_username, keys=["openwhisk", "resources", "docker", "username"] + ) + cache.update_config( + val=self.docker_password, keys=["openwhisk", "resources", "docker", "password"] + ) def serialize(self) -> dict: - return {} + out = { + "docker_registry": self.docker_registry, + "docker_username": self.docker_username, + "docker_password": self.docker_password, + } + return out class OpenWhiskConfig(Config): @@ -34,14 +108,15 @@ def __init__(self, config: dict, cache: Cache): self.removeCluster = config["removeCluster"] self.wsk_exec = config["wskExec"] self.wsk_bypass_security = config["wskBypassSecurity"] + self.experimentalManifest = config["experimentalManifest"] self.cache = cache @property - def credentials(self) -> Credentials: + def credentials(self) -> OpenWhiskCredentials: return self._credentials @property - def resources(self) -> Resources: + def resources(self) -> OpenWhiskResources: return self._resources 
@staticmethod @@ -55,6 +130,7 @@ def serialize(self) -> dict: "removeCluster": self.removeCluster, "wskExec": self.wsk_exec, "wskBypassSecurity": self.wsk_bypass_security, + "experimentalManifest": self.experimentalManifest, "credentials": self._credentials.serialize(), "resources": self._resources.serialize(), } @@ -62,9 +138,21 @@ def serialize(self) -> dict: @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Config: cached_config = cache.get_config("openwhisk") + resources = cast( + OpenWhiskResources, OpenWhiskResources.deserialize(config, cache, handlers) + ) + res = OpenWhiskConfig(config, cached_config) res.logging_handlers = handlers + res._resources = resources return res def update_cache(self, cache: Cache): - pass + cache.update_config(val=self.shutdownStorage, keys=["openwhisk", "shutdownStorage"]) + cache.update_config(val=self.removeCluster, keys=["openwhisk", "removeCluster"]) + cache.update_config(val=self.wsk_exec, keys=["openwhisk", "wskExec"]) + cache.update_config(val=self.wsk_bypass_security, keys=["openwhisk", "wskBypassSecurity"]) + cache.update_config( + val=self.experimentalManifest, keys=["openwhisk", "experimentalManifest"] + ) + self.resources.update_cache(cache) From c2d2f7f53d21e961e5b6f17759e13f3444292c7a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 20:47:35 +0100 Subject: [PATCH 053/140] [whisk] First iteration on managing images in a Docker registry --- config/example.json | 8 +- config/systems.json | 4 +- sebs/openwhisk/openwhisk.py | 159 ++++++++++++++++++++++++++---------- 3 files changed, 127 insertions(+), 44 deletions(-) diff --git a/config/example.json b/config/example.json index 2ca9a9ee..d978fced 100644 --- a/config/example.json +++ b/config/example.json @@ -58,7 +58,13 @@ "shutdownStorage": false, "removeCluster": false, "wskBypassSecurity": "true", - "wskExec": "wsk" + "wskExec": "wsk", + "experimentalManifest": false, + "docker_registry": { + "registry": "", + 
"username": "", + "password": "" + } } } } diff --git a/config/systems.json b/config/systems.json index da9494be..6ad650d2 100644 --- a/config/systems.json +++ b/config/systems.json @@ -142,7 +142,7 @@ "3.9": "openwhisk/action-python-v3.9" }, "versions": ["3.6", "3.7", "3.9"], - "images": [], + "images": ["function"], "username": "docker_user", "deployment": { "files": [ "__main__.py", "storage.py", "setup.py"], @@ -158,7 +158,7 @@ "12" : "openwhisk/action-nodejs-v12" }, "versions": [ "8", "10", "12"], - "images": [], + "images": ["function"], "username": "docker_user", "deployment": { "files": [ "index.js", "storage.js"], diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 6e2d4753..243d42bd 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -74,8 +74,55 @@ def get_wsk_cmd(self) -> List[str]: return cmd def build_base_image( - self, directory: str, language_name: str, language_version: str, benchmark: str - ): + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, + ) -> bool: + """ + When building function for the first time (according to SeBS cache), + check if Docker image is available in the registry. + If yes, then skip building. + If no, then continue building. + + For every subsequent build, we rebuild image and push it to the + registry. These are triggered by users modifying code and enforcing + a build. + """ + + # We need to retag created images when pushing to registry other + # than default + registry_name = self.config.resources.docker_registry + repository_name = self.system_config.docker_repository() + image_tag = self.system_config.benchmark_image_tag( + self.name(), benchmark, language_name, language_version + ) + if registry_name is not None: + repository_name = f"{registry_name}/{repository_name}" + else: + registry_name = "Docker Hub" + + # Check if we the image is already in the registry. 
+ if not is_cached: + try: + # check for image existence + # default version requires pulling for an image + self.docker_client.images.pull(repository=repository_name, tag=image_tag) + self.logging.info( + f"Skipping building OpenWhisk package for {benchmark}, using " + f"Docker image {repository_name}:{image_tag} from registry: " + f"{registry_name}." + ) + return False + except docker.errors.NotFound: + # image doesn't exist, let's continue + self.logging.info( + f"Image {repository_name}:{image_tag} doesn't exist in the registry, " + f"building OpenWhisk package for {benchmark}." + ) + build_dir = os.path.join(directory, "docker") os.makedirs(build_dir) shutil.copy( @@ -94,29 +141,47 @@ def build_base_image( builder_image = self.system_config.benchmark_base_images(self.name(), language_name)[ language_version ] - tag = self.system_config.benchmark_image_name( - self.name(), benchmark, language_name, language_version - ) - self.logging.info(f"Build the benchmark base image {tag}.") + self.logging.info(f"Build the benchmark base image {repository_name}:{image_tag}.") image, _ = self.docker_client.images.build( - tag=tag, + tag=f"{repository_name}:{image_tag}", path=build_dir, buildargs={ "BASE_IMAGE": builder_image, }, ) - # shutil.rmtree(build_dir) + # Now push the image to the registry + # image will be located in a private repository + self.logging.info( + f"Push the benchmark base image {repository_name}:{image_tag} " + f"to registry: {registry_name}." 
+ ) + self.docker_client.images.push(repository=repository_name, tag=image_tag) + return True def package_code( - self, directory: str, language_name: str, language_version: str, benchmark: str + self, + directory: str, + language_name: str, + language_version: str, + benchmark: str, + is_cached: bool, ) -> Tuple[str, int]: - node = "nodejs" - node_handler = "index.js" - CONFIG_FILES = {"python": ["__main__.py"], node: [node_handler]} + + # Regardless of Docker image status, we need to create .zip file + # to allow registration of function with OpenWhisk + self.build_base_image(directory, language_name, language_version, benchmark, is_cached) + + # We deploy Minio config in code package since this depends on local + # deployment - it cannnot be a part of Docker image + minio_config_path = "minioConfig.json" + CONFIG_FILES = { + "python": ["__main__.py", minio_config_path], + "nodejs": ["index.js", minio_config_path], + } package_config = CONFIG_FILES[language_name] - with open(os.path.join(directory, "minioConfig.json"), "w+") as minio_config: + with open(os.path.join(directory, minio_config_path), "w+") as minio_config: storage = cast(Minio, self.get_storage()) minio_config_json = { "access_key": storage._access_key, @@ -125,7 +190,6 @@ def package_code( } minio_config.write(json.dumps(minio_config_json)) - self.build_base_image(directory, language_name, language_version, benchmark) os.chdir(directory) benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( @@ -152,14 +216,17 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk input=actions.stdout, check=True, ) - self.logging.info(f"Function {func_name} already exist") res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) # Update function - we don't know what version is stored self.update_function(res, code_package) self.logging.info(f"Retrieved OpenWhisk action {func_name}") - except (subprocess.CalledProcessError, 
FileNotFoundError): + except FileNotFoundError as e: + self.logging.error("Could not retrieve OpenWhisk functions - is path to wsk correct?") + raise RuntimeError(e) + + except subprocess.CalledProcessError: # grep will return error when there are no entries try: docker_image = self.system_config.benchmark_image_name( @@ -188,9 +255,9 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk ) self.logging.info(f"Created new OpenWhisk action {func_name}") res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) - except (subprocess.CalledProcessError, FileNotFoundError) as e: - self.logging.error(f"Cannot create action {func_name}, reason: {e}") - exit(1) + except subprocess.CalledProcessError as e: + self.logging.error(f"Cannot create action {func_name}.") + raise RuntimeError(e) # Add LibraryTrigger to a new function trigger = LibraryTrigger(func_name, self.get_wsk_cmd()) @@ -206,22 +273,26 @@ def update_function(self, function: Function, code_package: Benchmark): code_package.language_name, code_package.language_version, ) - subprocess.run( - [ - *self.get_wsk_cmd(), - "action", - "update", - function.name, - "--docker", - docker_image, - "--memory", - str(code_package.benchmark_config.memory), - code_package.code_location, - ], - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - check=True, - ) + try: + subprocess.run( + [ + *self.get_wsk_cmd(), + "action", + "update", + function.name, + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + code_package.code_location, + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) + except FileNotFoundError as e: + self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") + raise RuntimeError(e) def default_function_name(self, code_package: Benchmark) -> str: return ( @@ -246,12 +317,18 @@ def create_trigger(self, function: Function, trigger_type: Trigger.TriggerType) if 
trigger_type == Trigger.TriggerType.LIBRARY: return function.triggers(Trigger.TriggerType.LIBRARY)[0] elif trigger_type == Trigger.TriggerType.HTTP: - response = subprocess.run( - [*self.get_wsk_cmd(), "action", "get", function.name, "--url"], - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - check=True, - ) + try: + response = subprocess.run( + [*self.get_wsk_cmd(), "action", "get", function.name, "--url"], + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + ) + except FileNotFoundError as e: + self.logging.error( + "Could not retrieve OpenWhisk configuration - is path to wsk correct?" + ) + raise RuntimeError(e) stdout = response.stdout.decode("utf-8") url = stdout.strip().split("\n")[-1] + ".json" trigger = HTTPTrigger(function.name, url) From 64134902ae901f1d01c4feffb9a66189cec597c0 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 20:51:49 +0100 Subject: [PATCH 054/140] [whisk] Correctly handle cache updates --- sebs/cache.py | 4 ++-- sebs/openwhisk/openwhisk.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/sebs/cache.py b/sebs/cache.py index dcce8ff7..5463e444 100644 --- a/sebs/cache.py +++ b/sebs/cache.py @@ -59,7 +59,7 @@ def typename() -> str: def load_config(self): with self._lock: - for cloud in ["azure", "aws", "gcp"]: + for cloud in ["azure", "aws", "gcp", "openwhisk"]: cloud_config_file = os.path.join(self.cache_dir, "{}.json".format(cloud)) if os.path.exists(cloud_config_file): self.cached_config[cloud] = json.load(open(cloud_config_file, "r")) @@ -86,7 +86,7 @@ def unlock(self): def shutdown(self): if self.config_updated: - for cloud in ["azure", "aws", "gcp"]: + for cloud in ["azure", "aws", "gcp", "openwhisk"]: if cloud in self.cached_config: cloud_config_file = os.path.join(self.cache_dir, "{}.json".format(cloud)) self.logging.info("Update cached config {}".format(cloud_config_file)) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 243d42bd..c148de59 
100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -54,6 +54,7 @@ def shutdown(self) -> None: from tools.openwhisk_preparation import delete_cluster # type: ignore delete_cluster() + super().shutdown() @staticmethod def name() -> str: From f7ad4c2e30062a95e64b0d03a8ec45822d98ac56 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 21:11:10 +0100 Subject: [PATCH 055/140] [whisk] Check image existence using Docker manifest --- sebs/openwhisk/openwhisk.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index c148de59..19fbcc34 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -12,7 +12,7 @@ from sebs.faas.function import Function, ExecutionResult, Trigger from .minio import Minio from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger -from sebs.utils import PROJECT_DIR, LoggingHandlers +from sebs.utils import PROJECT_DIR, LoggingHandlers, execute from .config import OpenWhiskConfig from .function import OpenwhiskFunction from ..config import SeBSConfig @@ -74,6 +74,24 @@ def get_wsk_cmd(self) -> List[str]: cmd.append("-i") return cmd + def find_image(self, repository_name, image_tag) -> bool: + + if self.config.experimentalManifest: + try: + # This requires enabling experimental Docker features + # Furthermore, it's not yet supported in the Python library + execute(f"docker manifest inspect {repository_name}:{image_tag}") + return True + except RuntimeError: + return False + else: + try: + # default version requires pulling for an image + self.docker_client.images.pull(repository=repository_name, tag=image_tag) + return True + except docker.errors.NotFound: + return False + def build_base_image( self, directory: str, @@ -107,17 +125,14 @@ def build_base_image( # Check if we the image is already in the registry. 
if not is_cached: - try: - # check for image existence - # default version requires pulling for an image - self.docker_client.images.pull(repository=repository_name, tag=image_tag) + if self.find_image(repository_name, image_tag): self.logging.info( f"Skipping building OpenWhisk package for {benchmark}, using " f"Docker image {repository_name}:{image_tag} from registry: " f"{registry_name}." ) return False - except docker.errors.NotFound: + else: # image doesn't exist, let's continue self.logging.info( f"Image {repository_name}:{image_tag} doesn't exist in the registry, " From aa798db6e9c210a1fe3b7b152b47f86c716e6c85 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 24 Jan 2022 21:22:44 +0100 Subject: [PATCH 056/140] [whisk] Add login optin for Docker --- sebs/openwhisk/openwhisk.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 19fbcc34..22aaf9c0 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -34,6 +34,19 @@ def __init__( self._config = config self.logging_handlers = logger_handlers + if self.config.resources.docker_username: + if self.config.resources.docker_registry: + docker_client.login( + login=self.config.resources.docker_username, + password=self.config.resources.docker_password, + registry=self.config.resources.docker_registry, + ) + else: + docker_client.login( + login=self.config.resources.docker_username, + password=self.config.resources.docker_password, + ) + @property def config(self) -> OpenWhiskConfig: return self._config From ac5e326beb4ad4094b962278155ff03f26848c4d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 00:46:50 +0100 Subject: [PATCH 057/140] [whisk] Fix login --- sebs/openwhisk/openwhisk.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 22aaf9c0..5d589aa2 100644 --- a/sebs/openwhisk/openwhisk.py +++ 
b/sebs/openwhisk/openwhisk.py @@ -37,13 +37,13 @@ def __init__( if self.config.resources.docker_username: if self.config.resources.docker_registry: docker_client.login( - login=self.config.resources.docker_username, + username=self.config.resources.docker_username, password=self.config.resources.docker_password, registry=self.config.resources.docker_registry, ) else: docker_client.login( - login=self.config.resources.docker_username, + username=self.config.resources.docker_username, password=self.config.resources.docker_password, ) From 57933576220b719ec8ee5d84c1dc4eb3a0007b7c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 00:48:09 +0100 Subject: [PATCH 058/140] [whisk] Mark unfinished problems --- sebs/openwhisk/minio.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py index 7fe0c55d..0bb31b9d 100644 --- a/sebs/openwhisk/minio.py +++ b/sebs/openwhisk/minio.py @@ -30,6 +30,8 @@ def start(self): def startMinio(self): minioVersion = "minio/minio:latest" + # FIXME: merge it with local/minio? 
+ # FIXME: check if the container is still runing try: self._storage_container = self._docker_client.containers.get("minio") self.logging.info("Minio container already exists") From b8934d848b02135d395e26a104a5c881baadaec7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 00:53:14 +0100 Subject: [PATCH 059/140] [whisk] Update OpenWhisk configuration --- config/openwhisk.json | 3 +- docs/design.md | 2 +- docs/modularity.md | 7 +++ docs/platforms.md | 105 +++++++++++++++++++++++++++++++++++++++++- 4 files changed, 114 insertions(+), 3 deletions(-) diff --git a/config/openwhisk.json b/config/openwhisk.json index fd58f7f0..c41b4966 100644 --- a/config/openwhisk.json +++ b/config/openwhisk.json @@ -14,6 +14,7 @@ "shutdownStorage": false, "removeCluster": false, "wskBypassSecurity": "true", - "wskExec": "wsk" + "wskExec": "wsk", + "experimentalManifest": "false" } } diff --git a/docs/design.md b/docs/design.md index 021fe4ec..2a320b2c 100644 --- a/docs/design.md +++ b/docs/design.md @@ -54,7 +54,7 @@ configuration. `sebs/experiments/` - implements the SeBS experiments. -`sebs/{aws,azure,gcp}/` - implementation of the FaaS interface for each platform. +`sebs/{aws,azure,gcp,openwhisk}/` - implementation of the FaaS interface for each platform. `sebs/local/` - implements the local invocations of functions with Docker containers and `minio` storage. diff --git a/docs/modularity.md b/docs/modularity.md index 5994a030..33bda56e 100644 --- a/docs/modularity.md +++ b/docs/modularity.md @@ -51,6 +51,13 @@ def handler(event): Configure dependencies in `requirements.txt` and `package.json`. By default, only source code is deployed. If you need to use additional resources, e.g., HTML template, use script `init.sh` (see an example in `110.dynamic-html`). +**Important** By default, SeBS deploys code packages using code packages. +Starting from OpenWhisk addition in release 1.1, we are adding function +deployment as Docker images. 
Docker images with existing benchmarks +are available on [Docker Hub](https://hub.docker.com/repository/docker/spcleth/serverless-benchmarks). +When adding a new benchmark, it is possible to use a local Docker registry +to push images with the new functions - see [OpenWhisk documentation for details](platforms.md). + ### How to add a new serverless platform? First, implement the interfaces in `sebs/faas/*.py` - details can be found in the diff --git a/docs/platforms.md b/docs/platforms.md index 372a770b..bc2e8e54 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -1,6 +1,7 @@ SeBS supports three commercial serverless platforms: AWS Lambda, Azure Functions, and Google Cloud -Functions. +Functions. +Furthermore, we support the open source FaaS system OpenWhisk. ## AWS Lambda @@ -71,3 +72,105 @@ export GCP_PROJECT_NAME = XXXX export GCP_SECRET_APPLICATION_CREDENTIALS = XXXX ``` +## OpenWhisk + +SeBS expects users have to deploy and configure OpenWhisk instance. +In `tools/openwhisk_preparation.py`, we include scripts that help to install +[kind (Kubernetes in Docker)](https://kind.sigs.k8s.io/) and deploy +OpenWhisk on a `kind` cluster. +An example of SeBS configuration for using OpenWhisk can be found in `config/example.json` +under the key `['deployment']['openwhisk']`. +In subsections below, we discuss the meaning and use of each parameter. +To correctly deploy SeBS functions to OpenWhisk, it is important to follow the +subsections on *Toolchain* and *Docker* configuration. + +### Toolchain + +We use OpenWhisk's CLI tool [wsk](https://github.com/apache/openwhisk-cli) +to manage the deployment of functions to OpenWhisk. +To deploy serverless functions, please install `wsk` and +configure it to point to your OpenWhisk installation. +By default, SeBS assumes that `wsk` is available in the `PATH`. +To override this, set the configuration option `wskExec` to the location +of your `wsk` executable. 
+If you are using a local deployment of OpenWhisk with a self-signed +certificate, you can skip certificate validation with the `wsk` flag `--insecure`. +To enable this option, set `wskBypassSecurity` to `true`. +At the moment, all functions are deployed as [*web actions*](https://github.com/apache/openwhisk/blob/master/docs/webactions.md) +that do not require using credentails to invoke functions. + +Furthermore, SeBS can be configured to automatically remove the `kind` +cluster after finishing experiments. This helps to automate the experiments +that should be conducted on fresh instances of the system. +To enable this option, set `removeCluster` to `true`. + +### Docker + +In FaaS platforms, function's code can be usually deployed as a code package +or as a Docker image with all dependencies preinstalled. +However, OpenWhisk as a very low limit on the code package size of only 48 +megabytes. +To circumvent this limit, we deploy functions using pre-built Docker images. + +**Important**: OpenWhisk requires that all Docker images are available +in the registry, even if they have been cached on a system serving OpenWhisk +functions. +When the image is not available, function invocations will fail after a timeout +with an error message that does not indicate directly image availability issues. +All SeBS benchmark functions are available on the Docker Hub. + +When adding new functions and extending existing functions with new languages +and new language versions, Docker images must be placed in the registry. +However, pushin the image to `spcleth/serverless-benchmarks` repository on Docker +Hub requires permissions. +Instead, OpenWhisk users can configure the FaaS platform to use a custom and +private Docker registry and push new images there. +Furthermore, a local Docker registry can speed up development when debugging +a new function. 
+See the documentation on +[Docker registry](https://github.com/apache/openwhisk-deploy-kube/blob/master/docs/private-docker-registry.md) +and [OpenWhisk configuration](https://github.com/apache/openwhisk-deploy-kube/blob/master/docs/private-docker-registry.md) +for details. +SeBS can use alternative Docker registry - see `dockerRegistry` settings +in the example to configure registry endpoint and credentials. +When `registry` URL is not provided, SeBS will use Docker Hub. +When `username` and `password` are provided, `SeBS` will use them to login +to the repository. + +### Code Deployment + +SeBS builds and deploys a new code package when constructing the local cache, +when function's content have change, and when user reuqests a forced rebuild. +In OpenWhisk, this set up is changed - SeBS will first attempt to verify +if the image exists already in the registry and skip building Docker +image when possible. +This allows SeBS to deploy seamlessly to OpenWhisk using default images +available on Docker Hub. +Furthermore, checking for image existence in the registry helps +to avoid failing invocations in OpenWhisk. +For performance reasons, this check is performed only once, when constructing +the local cache. + +When the function code must be updated, +SeBS will build the image and attempt to push it to the registry. +At the moment, the only available option of checking image existence in +the registry is pulling the image. +However, there is [an experimental `manifest` feature of Docker](https://docs.docker.com/engine/reference/commandline/manifest/) +that allow to check image status without downloading its contents, +saving bandwidth and time. +To use that feature in SeBS, set `experimentalManifest` flag to true. + +### Storage + +To provide persistent object storage in OpenWhisk, we deploy an instance +of [`Minio`](https://github.com/minio/minio) storage. +The storage instance is deployed as a Docker container, and it can be retained +across many experiments. 
+The behavior of SeBS is controlled by the `shutdownStorage` switch. +When set to true, SeBS will remove Minio instance after finishing all +work. +Otherwise, the container will be retained and future experiments with SeBS +will automatically detect an existing Minio instance. +Reusing Minio instance helps to run experiments faster and smoothly since +SeBS does not have to reupload function's inputs on each experiment. + From 4322a488326c5f82e6744e9c3a441d4ae3b8631a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 00:53:38 +0100 Subject: [PATCH 060/140] [system] Add OpenWhisk to main page --- README.md | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2b2ac93d..bb34e216 100644 --- a/README.md +++ b/README.md @@ -16,9 +16,14 @@ See the [installation instructions](#installation) to learn how to configure SeB cloud services and [usage instructions](#usage) to automatically launch experiments in the cloud! SeBS provides support for automatic deployment and invocation of benchmarks on -AWS Lambda, Azure Functions, Google Cloud Functions, and a custom, Docker-based local -evaluation platform. See the [documentation on cloud providers](docs/platforms.md) -to learn how to provide SeBS with cloud credentials. +commercial and black-box platforms +[AWS Lambda](https://aws.amazon.com/lambda/), +[Azure Functions](https://azure.microsoft.com/en-us/services/functions/), +and [Google Cloud Functions](https://cloud.google.com/functions). +Furthermore, we support the open-source platform [OpenWhisk](https://openwhisk.apache.org/), +and offer a custom, Docker-based local evaluation platform. +See the [documentation on cloud providers](docs/platforms.md) +for details on how to configure each platform in SeBS. 
The documentation describes in detail [the design and implementation of our tool](docs/design.md), and see the [modularity](docs/modularity.md) section to learn how SeBS can be extended with new platforms, benchmarks, and experiments. From 624705ad7754a889f36427e70b64eab4b211f178 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 03:21:40 +0100 Subject: [PATCH 061/140] [whisk] Grammar checks --- README.md | 4 +- docs/platforms.md | 103 ++++++++++++++++++++++++---------------------- 2 files changed, 56 insertions(+), 51 deletions(-) diff --git a/README.md b/README.md index bb34e216..a591f511 100644 --- a/README.md +++ b/README.md @@ -20,10 +20,10 @@ commercial and black-box platforms [AWS Lambda](https://aws.amazon.com/lambda/), [Azure Functions](https://azure.microsoft.com/en-us/services/functions/), and [Google Cloud Functions](https://cloud.google.com/functions). -Furthermore, we support the open-source platform [OpenWhisk](https://openwhisk.apache.org/), +Furthermore, we support the open-source platform [OpenWhisk](https://openwhisk.apache.org/) and offer a custom, Docker-based local evaluation platform. See the [documentation on cloud providers](docs/platforms.md) -for details on how to configure each platform in SeBS. +for details on configuring each platform in SeBS. The documentation describes in detail [the design and implementation of our tool](docs/design.md), and see the [modularity](docs/modularity.md) section to learn how SeBS can be extended with new platforms, benchmarks, and experiments. diff --git a/docs/platforms.md b/docs/platforms.md index bc2e8e54..ff6d22d6 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -74,22 +74,21 @@ export GCP_SECRET_APPLICATION_CREDENTIALS = XXXX ## OpenWhisk -SeBS expects users have to deploy and configure OpenWhisk instance. -In `tools/openwhisk_preparation.py`, we include scripts that help to install +SeBS expects users to deploy and configure an OpenWhisk instance. 
+In `tools/openwhisk_preparation.py`, we include scripts that help install
 [kind (Kubernetes in Docker)](https://kind.sigs.k8s.io/) and deploy
 OpenWhisk on a `kind` cluster.
-An example of SeBS configuration for using OpenWhisk can be found in `config/example.json`
-under the key `['deployment']['openwhisk']`.
-In subsections below, we discuss the meaning and use of each parameter.
-To correctly deploy SeBS functions to OpenWhisk, it is important to follow the
-subsections on *Toolchain* and *Docker* configuration.
+The configuration parameters of OpenWhisk for SeBS can be found
+in `config/example.json` under the key `['deployment']['openwhisk']`.
+In the subsections below, we discuss the meaning and use of each parameter.
+To correctly deploy SeBS functions to OpenWhisk, following the
+subsections on *Toolchain* and *Docker* configuration is particularly important.
 
 ### Toolchain
 
 We use OpenWhisk's CLI tool [wsk](https://github.com/apache/openwhisk-cli)
 to manage the deployment of functions to OpenWhisk.
-To deploy serverless functions, please install `wsk` and
-configure it to point to your OpenWhisk installation.
+Please install `wsk` and configure it to point to your OpenWhisk installation.
 By default, SeBS assumes that `wsk` is available in the `PATH`.
 To override this, set the configuration option `wskExec` to the location
 of your `wsk` executable.
@@ -97,68 +96,74 @@ If you are using a local deployment of OpenWhisk with a self-signed
 certificate, you can skip certificate validation with the `wsk` flag `--insecure`.
 To enable this option, set `wskBypassSecurity` to `true`.
 At the moment, all functions are deployed as [*web actions*](https://github.com/apache/openwhisk/blob/master/docs/webactions.md)
-that do not require using credentails to invoke functions.
+that do not require credentials to invoke functions.
 
-Furthermore, SeBS can be configured to automatically remove the `kind`
-cluster after finishing experiments. 
This helps to automate the experiments +Furthermore, SeBS can be configured to remove the `kind` +cluster after finishing experiments automatically. +The boolean option `removeCluster` helps to automate the experiments that should be conducted on fresh instances of the system. -To enable this option, set `removeCluster` to `true`. ### Docker -In FaaS platforms, function's code can be usually deployed as a code package -or as a Docker image with all dependencies preinstalled. -However, OpenWhisk as a very low limit on the code package size of only 48 -megabytes. -To circumvent this limit, we deploy functions using pre-built Docker images. +In FaaS platforms, the function's code can usually be deployed as a code package +or a Docker image with all dependencies preinstalled. +However, OpenWhisk has a very low code package size limit of only 48 megabytes. +So, to circumvent this limit, we deploy functions using pre-built Docker images. **Important**: OpenWhisk requires that all Docker images are available in the registry, even if they have been cached on a system serving OpenWhisk functions. -When the image is not available, function invocations will fail after a timeout -with an error message that does not indicate directly image availability issues. -All SeBS benchmark functions are available on the Docker Hub. +Function invocations will fail when the image is not available after a +timeout with an error message that does not directly indicate image availability issues. +Therefore, all SeBS benchmark functions are available on the Docker Hub. When adding new functions and extending existing functions with new languages and new language versions, Docker images must be placed in the registry. -However, pushin the image to `spcleth/serverless-benchmarks` repository on Docker -Hub requires permissions. 
-Instead, OpenWhisk users can configure the FaaS platform to use a custom and +However, pushing the image to the default `spcleth/serverless-benchmarks` +repository on Docker Hub requires permissions. +To use a different Docker Hub repository, change the key +`['general']['docker_repository']` in `config/systems.json`. + + +Alternatively, OpenWhisk users can configure the FaaS platform to use a custom and private Docker registry and push new images there. Furthermore, a local Docker registry can speed up development when debugging a new function. -See the documentation on +SeBS can use alternative Docker registry - see `dockerRegistry` settings +in the example to configure registry endpoint and credentials. +When the `registry` URL is not provided, SeBS will use Docker Hub. +When `username` and `password` are provided, SeBS will log in to the repository +and push new images before invoking functions. +See the documentation on the [Docker registry](https://github.com/apache/openwhisk-deploy-kube/blob/master/docs/private-docker-registry.md) and [OpenWhisk configuration](https://github.com/apache/openwhisk-deploy-kube/blob/master/docs/private-docker-registry.md) for details. -SeBS can use alternative Docker registry - see `dockerRegistry` settings -in the example to configure registry endpoint and credentials. -When `registry` URL is not provided, SeBS will use Docker Hub. -When `username` and `password` are provided, `SeBS` will use them to login -to the repository. + +**Warning**: this feature is experimental and has not been tested extensively. +At the moment, it cannot be used on a `kind` cluster due to issues with +Docker authorization on invoker nodes. ### Code Deployment SeBS builds and deploys a new code package when constructing the local cache, -when function's content have change, and when user reuqests a forced rebuild. 
-In OpenWhisk, this set up is changed - SeBS will first attempt to verify
-if the image exists already in the registry and skip building Docker
+when the function's contents have changed, and when the user requests a forced rebuild.
+In OpenWhisk, this setup is changed - SeBS will first attempt to verify
+if the image exists already in the registry and skip building the Docker
 image when possible.
-This allows SeBS to deploy seamlessly to OpenWhisk using default images
+Then, SeBS can deploy seamlessly to OpenWhisk using default images
 available on Docker Hub.
 Furthermore, checking for image existence in the registry helps
-to avoid failing invocations in OpenWhisk.
-For performance reasons, this check is performed only once, when constructing
-the local cache.
+avoid failing invocations in OpenWhisk.
+For performance reasons, this check is performed only once when
+initializing the local cache for the first time.
 
-When the function code must be updated,
-SeBS will build the image and attempt to push it to the registry.
-At the moment, the only available option of checking image existence in
+When the function code is updated,
+SeBS will build the image and push it to the registry.
+Currently, the only available option of checking image existence in
 the registry is pulling the image.
-However, there is [an experimental `manifest` feature of Docker](https://docs.docker.com/engine/reference/commandline/manifest/)
-that allow to check image status without downloading its contents,
-saving bandwidth and time.
+However, Docker's [experimental `manifest` feature](https://docs.docker.com/engine/reference/commandline/manifest/)
+allows checking image status without downloading its contents, saving bandwidth and time.
+To use that feature in SeBS, set the `experimentalManifest` flag to true.
### Storage @@ -166,11 +171,11 @@ To provide persistent object storage in OpenWhisk, we deploy an instance of [`Minio`](https://github.com/minio/minio) storage. The storage instance is deployed as a Docker container, and it can be retained across many experiments. -The behavior of SeBS is controlled by the `shutdownStorage` switch. -When set to true, SeBS will remove Minio instance after finishing all +The `shutdownStorage` switch controls the behavior of SeBS. +When set to true, SeBS will remove the Minio instance after finishing all work. -Otherwise, the container will be retained and future experiments with SeBS +Otherwise, the container will be retained, and future experiments with SeBS will automatically detect an existing Minio instance. -Reusing Minio instance helps to run experiments faster and smoothly since -SeBS does not have to reupload function's inputs on each experiment. +Reusing the Minio instance helps run experiments faster and smoothly since +SeBS does not have to re-upload function's data on each experiment. From 6054de295e0d14aeedaa445f429432897dc21a55 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 03:25:49 +0100 Subject: [PATCH 062/140] [whisk] Add contributor information --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a591f511..ccca3ac4 100644 --- a/README.md +++ b/README.md @@ -257,4 +257,5 @@ Currently supported only on AWS. * [Nico Graf (ETH Zurich)](https://github.com/ncograf/) - contributed implementation of regression tests, bugfixes, and helped with testing and documentation. * [Kacper Janda](https://github.com/Kacpro), [Mateusz Knapik](https://github.com/maknapik), [JmmCz](https://github.com/JmmCz), AGH University of Science and Technology - contributed together Google Cloud support. * [Grzegorz Kwaśniewski (ETH Zurich)](https://github.com/gkwasniewski) - worked on the modeling experiments. 
+* [Paweł Żuk (University of Warsaw)](https://github.com/pmzuk) - contributed OpenWhisk support. From 1690a5e3fe9f0b9b026ca607f38ccbfa1296f114 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 03:48:05 +0100 Subject: [PATCH 063/140] [whisk] Catch failing image push --- sebs/openwhisk/openwhisk.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 5d589aa2..cee80f45 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -185,7 +185,14 @@ def build_base_image( f"Push the benchmark base image {repository_name}:{image_tag} " f"to registry: {registry_name}." ) - self.docker_client.images.push(repository=repository_name, tag=image_tag) + ret = self.docker_client.images.push( + repository=repository_name, tag=image_tag, stream=True, decode=True + ) + # doesn't raise an exception for some reason + for val in ret: + if "error" in val: + self.logging.error(f"Failed to push the image to registry {registry_name}") + raise RuntimeError(val) return True def package_code( From a0090725efd635ae042c6c92d75aa25e256b36e4 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 25 Jan 2022 03:48:38 +0100 Subject: [PATCH 064/140] [whisk] Override Docker settings from user input --- sebs/openwhisk/config.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 40f75fc7..0c0dc391 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -51,8 +51,13 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour cached_config = cache.get_config("openwhisk") ret: OpenWhiskResources + # Check for new config + if "docker_registry" in config: + ret = cast(OpenWhiskResources, OpenWhiskResources.initialize(config["docker_registry"])) + ret.logging.info("Using user-provided Docker registry for OpenWhisk.") + ret.logging_handlers = handlers # Load cached 
values - if ( + elif ( cached_config and "resources" in cached_config and "docker" in cached_config["resources"] @@ -63,11 +68,6 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour ) ret.logging_handlers = handlers ret.logging.info("Using cached Docker registry for OpenWhisk") - # Check for new config - elif "docker_registry" in config: - ret = cast(OpenWhiskResources, OpenWhiskResources.initialize(config["docker_registry"])) - ret.logging.info("Using user-provided Docker registry for OpenWhisk.") - ret.logging_handlers = handlers else: ret.logging.info("Using default Docker registry for OpenWhisk.") ret = OpenWhiskResources() From 5cb61bbca69aa0bc197ec7dac73d94cfccb1ef0c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 12 Apr 2022 15:09:25 +0200 Subject: [PATCH 065/140] [whisk] Add OpenWhisk as an installation option --- install.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/install.py b/install.py index 3405be4e..3b7f5a6a 100755 --- a/install.py +++ b/install.py @@ -7,7 +7,7 @@ parser = argparse.ArgumentParser(description="Install SeBS and dependencies.") parser.add_argument('--venv', metavar='DIR', type=str, default="python-venv", help='destination of local Python virtual environment') parser.add_argument('--python-path', metavar='DIR', type=str, default="python3", help='Path to local Python installation.') -for deployment in ["aws", "azure", "gcp"]: +for deployment in ["aws", "azure", "gcp", "openwhisk"]: parser.add_argument(f"--{deployment}", action="store_const", const=True, dest=deployment) parser.add_argument(f"--no-{deployment}", action="store_const", const=False, default=True, dest=deployment) for deployment in ["local"]: @@ -79,6 +79,10 @@ def execute(cmd): execute(f'echo "export SEBS_WITH_GCP={flag}" >> {env_dir}/bin/activate') execute(f'echo "unset SEBS_WITH_GCP" >> {env_dir}/bin/deactivate') +flag = "TRUE" if args.openwhisk else "FALSE" +execute(f'echo "export SEBS_WITH_OPENWHISK={flag}" 
>> {env_dir}/bin/activate') +execute(f'echo "unset SEBS_WITH_OPENWHISK" >> {env_dir}/bin/deactivate') + if args.local: print("Install Python dependencies for local") execute(". {}/bin/activate && pip3 install -r requirements.local.txt".format(env_dir)) From 3c157aa28b5caea5873bbd6377a2bef3e9cebca2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 26 Apr 2022 14:50:28 +0200 Subject: [PATCH 066/140] [whisk] Improve documentation and logging of OpenWhisk management --- sebs/faas/function.py | 11 +++- sebs/faas/system.py | 2 + sebs/openwhisk/config.py | 2 +- sebs/openwhisk/function.py | 16 ++++-- sebs/openwhisk/openwhisk.py | 104 +++++++++++++++++++----------------- 5 files changed, 80 insertions(+), 55 deletions(-) diff --git a/sebs/faas/function.py b/sebs/faas/function.py index a1b79ab0..2ebfd8f4 100644 --- a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -255,12 +255,13 @@ def deserialize(cached_config: dict) -> "Trigger": class Function(LoggingBase): - def __init__(self, benchmark: str, name: str, code_hash: str): + def __init__(self, benchmark: str, name: str, code_hash: str, docker_image: str = ""): super().__init__() self._benchmark = benchmark self._name = name self._code_package_hash = code_hash self._updated_code = False + self._docker_image = docker_image self._triggers: Dict[Trigger.TriggerType, List[Trigger]] = {} @property @@ -279,6 +280,14 @@ def code_package_hash(self): def code_package_hash(self, new_hash: str): self._code_package_hash = new_hash + @property + def docker_image(self) -> str: + return self._docker_image + + @docker_image.setter + def docker_image(self, docker_image: str): + self._docker_image = docker_image + @property def updated_code(self) -> bool: return self._updated_code diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 18fddf56..93fd3ccc 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -213,6 +213,8 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) 
function=function, ) code_package.query_cache() + else: + self.logging.info(f"Cached function {func_name} is up to date.") return function @abstractmethod diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 0c0dc391..9ad33232 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -69,8 +69,8 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour ret.logging_handlers = handlers ret.logging.info("Using cached Docker registry for OpenWhisk") else: - ret.logging.info("Using default Docker registry for OpenWhisk.") ret = OpenWhiskResources() + ret.logging.info("Using default Docker registry for OpenWhisk.") ret.logging_handlers = handlers return ret diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 719750e1..fc2b5073 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -3,19 +3,24 @@ class OpenwhiskFunction(Function): - def __init__(self, name: str, benchmark: str, code_package_hash: str, namespace: str = "_"): + def __init__( + self, + name: str, + benchmark: str, + code_package_hash: str, + docker_image: str, + namespace: str = "_", + ): super().__init__(benchmark, name, code_package_hash) self.namespace = namespace + self.docker_image = docker_image @staticmethod def typename() -> str: return "OpenWhisk.Function" def serialize(self) -> dict: - return { - **super().serialize(), - "namespace": self.namespace, - } + return {**super().serialize(), "namespace": self.namespace, "image": self.docker_image} @staticmethod def deserialize(cached_config: dict) -> "OpenwhiskFunction": @@ -26,6 +31,7 @@ def deserialize(cached_config: dict) -> "OpenwhiskFunction": cached_config["name"], cached_config["benchmark"], cached_config["hash"], + cached_config["image"], cached_config["namespace"], ) for trigger in cached_config["triggers"]: diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index cee80f45..ad051d42 100644 --- a/sebs/openwhisk/openwhisk.py 
+++ b/sebs/openwhisk/openwhisk.py @@ -151,6 +151,8 @@ def build_base_image( f"Image {repository_name}:{image_tag} doesn't exist in the registry, " f"building OpenWhisk package for {benchmark}." ) + else: + self.logigng.info(f"Using cached image {image_tag}.") build_dir = os.path.join(directory, "docker") os.makedirs(build_dir) @@ -171,6 +173,7 @@ def build_base_image( language_version ] self.logging.info(f"Build the benchmark base image {repository_name}:{image_tag}.") + image, _ = self.docker_client.images.build( tag=f"{repository_name}:{image_tag}", path=build_dir, @@ -238,62 +241,64 @@ def package_code( return benchmark_archive, bytes_size def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": - self.logging.info("Creating function as an action in OpenWhisk") + self.logging.info("Creating function as an action in OpenWhisk.") try: actions = subprocess.run( [*self.get_wsk_cmd(), "action", "list"], stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, ) - subprocess.run( - f"grep {func_name}".split(), - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - input=actions.stdout, - check=True, - ) - - res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) - # Update function - we don't know what version is stored - self.update_function(res, code_package) - self.logging.info(f"Retrieved OpenWhisk action {func_name}") - except FileNotFoundError as e: + function_found = False + docker_image = "" + for line in actions.stdout.decode().split("\n"): + if line and func_name in line.split()[0]: + function_found = True + break + + if function_found: + # docker image is overwritten by the update + res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash, "") + # Update function - we don't know what version is stored + self.logging.info(f"Retrieved existing OpenWhisk action {func_name}.") + self.update_function(res, code_package) + else: + try: + self.logging.info(f"Creating new OpenWhisk action 
{func_name}") + docker_image = self.system_config.benchmark_image_name( + self.name(), + code_package.benchmark, + code_package.language_name, + code_package.language_version, + ) + subprocess.run( + [ + *self.get_wsk_cmd(), + "action", + "create", + func_name, + "--web", + "true", + "--docker", + docker_image, + "--memory", + str(code_package.benchmark_config.memory), + code_package.code_location, + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) + res = OpenwhiskFunction( + func_name, code_package.benchmark, code_package.hash, docker_image + ) + except subprocess.CalledProcessError as e: + self.logging.error(f"Cannot create action {func_name}.") + raise RuntimeError(e) + + except FileNotFoundError: self.logging.error("Could not retrieve OpenWhisk functions - is path to wsk correct?") - raise RuntimeError(e) - - except subprocess.CalledProcessError: - # grep will return error when there are no entries - try: - docker_image = self.system_config.benchmark_image_name( - self.name(), - code_package.benchmark, - code_package.language_name, - code_package.language_version, - ) - subprocess.run( - [ - *self.get_wsk_cmd(), - "action", - "create", - func_name, - "--web", - "true", - "--docker", - docker_image, - "--memory", - str(code_package.benchmark_config.memory), - code_package.code_location, - ], - stderr=subprocess.DEVNULL, - stdout=subprocess.DEVNULL, - check=True, - ) - self.logging.info(f"Created new OpenWhisk action {func_name}") - res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash) - except subprocess.CalledProcessError as e: - self.logging.error(f"Cannot create action {func_name}.") - raise RuntimeError(e) + raise RuntimeError("Failed to access wsk binary") # Add LibraryTrigger to a new function trigger = LibraryTrigger(func_name, self.get_wsk_cmd()) @@ -303,6 +308,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk return res def update_function(self, function: 
Function, code_package: Benchmark): + self.logging.info(f"Update an existing OpenWhisk action {function.name}.") docker_image = self.system_config.benchmark_image_name( self.name(), code_package.benchmark, @@ -326,6 +332,8 @@ def update_function(self, function: Function, code_package: Benchmark): stdout=subprocess.DEVNULL, check=True, ) + function.docker_image = docker_image + except FileNotFoundError as e: self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") raise RuntimeError(e) From 09269b24d1bed0e13105f0d4c34f232ee50b8dcf Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 27 Apr 2022 01:34:03 +0200 Subject: [PATCH 067/140] [benchmarks] Update requirements of dynamic-html to work with Python 3.9 --- benchmarks/100.webapps/110.dynamic-html/python/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt b/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt index 83e5040a..5ca56944 100644 --- a/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt +++ b/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt @@ -1 +1 @@ -jinja2==2.10.3 +jinja2>=2.10.3 From 45ecaab6b9cd2d218dad702a34ce515e5c262b6e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 27 Apr 2022 01:34:54 +0200 Subject: [PATCH 068/140] [whisk] Extend OpenWhisk images to support language-based requirements --- docker/Dockerfile.run.openwhisk.python | 5 ++++- sebs/benchmark.py | 4 ++-- sebs/openwhisk/openwhisk.py | 11 +++-------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/docker/Dockerfile.run.openwhisk.python b/docker/Dockerfile.run.openwhisk.python index 72d7da75..1b8fba7d 100644 --- a/docker/Dockerfile.run.openwhisk.python +++ b/docker/Dockerfile.run.openwhisk.python @@ -1,5 +1,8 @@ ARG BASE_IMAGE FROM $BASE_IMAGE +ENV PYTHON_VERSION=${VERSION} COPY . 
function/ + RUN touch function/__init__.py \ - && pip install --no-cache-dir function/ + && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip install --no-cache-dir -r function/requirements.txt -r function/requirements.txt.${PYTHON_VERSION} function/ ; else pip install --no-cache-dir -r function/requirements.txt function/ ; fi + diff --git a/sebs/benchmark.py b/sebs/benchmark.py index a4c4f301..3bf17bc1 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -321,8 +321,8 @@ def install_dependencies(self, output_dir): ): self.logging.info( ( - "Docker build image for {deployment} run in {language} " - "is not available, skipping." + "There is no Docker build image for {deployment} run in {language}, " + "thus skipping the Docker-based installation of dependencies." ).format(deployment=self._deployment_name, language=self.language_name) ) else: diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index ad051d42..d0b471df 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -140,7 +140,7 @@ def build_base_image( if not is_cached: if self.find_image(repository_name, image_tag): self.logging.info( - f"Skipping building OpenWhisk package for {benchmark}, using " + f"Skipping building OpenWhisk Docker package for {benchmark}, using " f"Docker image {repository_name}:{image_tag} from registry: " f"{registry_name}." ) @@ -151,8 +151,6 @@ def build_base_image( f"Image {repository_name}:{image_tag} doesn't exist in the registry, " f"building OpenWhisk package for {benchmark}." 
) - else: - self.logigng.info(f"Using cached image {image_tag}.") build_dir = os.path.join(directory, "docker") os.makedirs(build_dir) @@ -174,12 +172,9 @@ def build_base_image( ] self.logging.info(f"Build the benchmark base image {repository_name}:{image_tag}.") + buildargs = {"VERSION": language_version, "BASE_IMAGE": builder_image} image, _ = self.docker_client.images.build( - tag=f"{repository_name}:{image_tag}", - path=build_dir, - buildargs={ - "BASE_IMAGE": builder_image, - }, + tag=f"{repository_name}:{image_tag}", path=build_dir, buildargs=buildargs ) # Now push the image to the registry From 0df4070bbf740da8cd4e58fd474a2d476202ec5f Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 27 Apr 2022 18:12:10 +0200 Subject: [PATCH 069/140] [whisk] Define action timeout --- sebs/openwhisk/openwhisk.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index d0b471df..73fa8150 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -278,6 +278,8 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk docker_image, "--memory", str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), code_package.code_location, ], stderr=subprocess.DEVNULL, @@ -321,6 +323,8 @@ def update_function(self, function: Function, code_package: Benchmark): docker_image, "--memory", str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), code_package.code_location, ], stderr=subprocess.DEVNULL, From cce481518bfb6a6a1172b60c20455c6ce19f1d7b Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 13:23:52 +0200 Subject: [PATCH 070/140] [whisk] Changing definition of storage access to action parameters --- .../wrappers/openwhisk/python/__main__.py | 4 +++ .../wrappers/openwhisk/python/storage.py | 9 +++---- docker/Dockerfile.run.openwhisk.python | 3 ++- 
sebs/openwhisk/openwhisk.py | 26 ++++++++++--------- 4 files changed, 24 insertions(+), 18 deletions(-) diff --git a/benchmarks/wrappers/openwhisk/python/__main__.py b/benchmarks/wrappers/openwhisk/python/__main__.py index a2dbfcb5..51e6db4d 100644 --- a/benchmarks/wrappers/openwhisk/python/__main__.py +++ b/benchmarks/wrappers/openwhisk/python/__main__.py @@ -9,6 +9,10 @@ def main(args): args['request-id'] = os.getenv('__OW_ACTIVATION_ID') args['income-timestamp'] = begin.timestamp() + for arg in ["MINIO_STORAGE_CONNECTION_URL", "MINIO_STORAGE_ACCESS_KEY", "MINIO_STORAGE_SECRET_KEY"]: + os.environ[arg] = args[arg] + del args[arg] + from function import function ret = function.handler(args) diff --git a/benchmarks/wrappers/openwhisk/python/storage.py b/benchmarks/wrappers/openwhisk/python/storage.py index b94c5fc3..3eedb3ac 100644 --- a/benchmarks/wrappers/openwhisk/python/storage.py +++ b/benchmarks/wrappers/openwhisk/python/storage.py @@ -10,17 +10,16 @@ class storage: client = None def __init__(self): - file = open(os.path.join(os.path.dirname(__file__), "minioConfig.json"), "r") - minioConfig = json.load(file) try: self.client = minio.Minio( - minioConfig["url"], - access_key=minioConfig["access_key"], - secret_key=minioConfig["secret_key"], + os.getenv("MINIO_STORAGE_CONNECTION_URL"), + access_key=os.getenv("MINIO_STORAGE_ACCESS_KEY"), + secret_key=os.getenv("MINIO_STORAGE_SECRET_KEY"), secure=False, ) except Exception as e: logging.info(e) + raise e @staticmethod def unique_name(name): diff --git a/docker/Dockerfile.run.openwhisk.python b/docker/Dockerfile.run.openwhisk.python index 1b8fba7d..a6c4225e 100644 --- a/docker/Dockerfile.run.openwhisk.python +++ b/docker/Dockerfile.run.openwhisk.python @@ -1,8 +1,9 @@ ARG BASE_IMAGE FROM $BASE_IMAGE +ARG VERSION ENV PYTHON_VERSION=${VERSION} COPY . 
function/ RUN touch function/__init__.py \ - && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip install --no-cache-dir -r function/requirements.txt -r function/requirements.txt.${PYTHON_VERSION} function/ ; else pip install --no-cache-dir -r function/requirements.txt function/ ; fi + && if test -f "function/requirements.txt.${PYTHON_VERSION}"; then pip install --no-cache-dir -r function/requirements.txt -r function/requirements.txt.${PYTHON_VERSION} function/ ; else pip install --no-cache-dir -r function/requirements.txt function/ ; fi diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 73fa8150..e9b80adf 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -208,22 +208,12 @@ def package_code( # We deploy Minio config in code package since this depends on local # deployment - it cannnot be a part of Docker image - minio_config_path = "minioConfig.json" CONFIG_FILES = { - "python": ["__main__.py", minio_config_path], - "nodejs": ["index.js", minio_config_path], + "python": ["__main__.py"], + "nodejs": ["index.js"], } package_config = CONFIG_FILES[language_name] - with open(os.path.join(directory, minio_config_path), "w+") as minio_config: - storage = cast(Minio, self.get_storage()) - minio_config_json = { - "access_key": storage._access_key, - "secret_key": storage._secret_key, - "url": storage._url, - } - minio_config.write(json.dumps(minio_config_json)) - os.chdir(directory) benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( @@ -235,6 +225,14 @@ def package_code( self.logging.info("Zip archive size {:2f} MB".format(bytes_size / 1024.0 / 1024.0)) return benchmark_archive, bytes_size + def storage_arguments(self) -> dict: + storage = cast(Minio, self.get_storage()) + return [ + "-p", "MINIO_STORAGE_SECRET_KEY", storage._access_key, + "-p", "MINIO_STORAGE_ACCESS_KEY", storage._secret_key, + "-p", "MINIO_STORAGE_CONNECTION_URL", storage._url, + ] + def create_function(self, 
code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": self.logging.info("Creating function as an action in OpenWhisk.") try: @@ -280,6 +278,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk str(code_package.benchmark_config.memory), "--timeout", str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments(), code_package.code_location, ], stderr=subprocess.DEVNULL, @@ -319,12 +318,15 @@ def update_function(self, function: Function, code_package: Benchmark): "action", "update", function.name, + "--web", + "true", "--docker", docker_image, "--memory", str(code_package.benchmark_config.memory), "--timeout", str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments(), code_package.code_location, ], stderr=subprocess.DEVNULL, From 84d7d08e1330ac0d24e08325c71621c5da98f5f8 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 13:24:07 +0200 Subject: [PATCH 071/140] [whisk] Deprecate Python 3.6 for OpenWhisk --- config/systems.json | 3 +-- sebs/openwhisk/openwhisk.py | 15 ++++++++++----- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/config/systems.json b/config/systems.json index 6ad650d2..e2df8c9b 100644 --- a/config/systems.json +++ b/config/systems.json @@ -137,11 +137,10 @@ "languages": { "python": { "base_images": { - "3.6": "openwhisk/python3action", "3.7": "openwhisk/action-python-v3.7", "3.9": "openwhisk/action-python-v3.9" }, - "versions": ["3.6", "3.7", "3.9"], + "versions": ["3.7", "3.9"], "images": ["function"], "username": "docker_user", "deployment": { diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index e9b80adf..29808bf3 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -1,4 +1,3 @@ -import json import os import shutil import subprocess @@ -225,12 +224,18 @@ def package_code( self.logging.info("Zip archive size {:2f} MB".format(bytes_size / 1024.0 / 1024.0)) return benchmark_archive, bytes_size - 
def storage_arguments(self) -> dict: + def storage_arguments(self) -> List[str]: storage = cast(Minio, self.get_storage()) return [ - "-p", "MINIO_STORAGE_SECRET_KEY", storage._access_key, - "-p", "MINIO_STORAGE_ACCESS_KEY", storage._secret_key, - "-p", "MINIO_STORAGE_CONNECTION_URL", storage._url, + "-p", + "MINIO_STORAGE_SECRET_KEY", + storage._access_key, + "-p", + "MINIO_STORAGE_ACCESS_KEY", + storage._secret_key, + "-p", + "MINIO_STORAGE_CONNECTION_URL", + storage._url, ] def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": From 7df55587b336646711f37404c0c79f46d0ec10ed Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 13:58:38 +0200 Subject: [PATCH 072/140] [azure] Update Azure blob storage client --- requirements.azure.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.azure.txt b/requirements.azure.txt index f439c6a4..f7d82499 100644 --- a/requirements.azure.txt +++ b/requirements.azure.txt @@ -1 +1 @@ -azure-storage-blob==12.1.0 +azure-storage-blob==12.10.0 From befebe5b1079d6e0244a84db59f32d471d89a4f9 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 14:42:05 +0200 Subject: [PATCH 073/140] [aws] [azure] [gcp] Implement the exists method for a bucket in cloud storage --- sebs/aws/s3.py | 7 +++++++ sebs/azure/blob_storage.py | 7 +++++-- sebs/faas/storage.py | 41 ++++++++++++++++++++++++++++---------- sebs/gcp/storage.py | 18 ++++++++--------- sebs/local/storage.py | 14 +++++++++---- 5 files changed, 61 insertions(+), 26 deletions(-) diff --git a/sebs/aws/s3.py b/sebs/aws/s3.py index e47bd77f..765cace3 100644 --- a/sebs/aws/s3.py +++ b/sebs/aws/s3.py @@ -103,6 +103,13 @@ def download(self, bucket_name: str, key: str, filepath: str): self.logging.info("Download {}:{} to {}".format(bucket_name, key, filepath)) self.client.download_file(Bucket=bucket_name, Key=key, Filename=filepath) + def exists_bucket(self, bucket_name: str) -> bool: + try: + 
self.client.head_bucket(Bucket=bucket_name) + return True + except self.client.exceptions.ClientError: + return False + def list_bucket(self, bucket_name: str): objects_list = self.client.list_objects_v2(Bucket=bucket_name) objects: List[str] diff --git a/sebs/azure/blob_storage.py b/sebs/azure/blob_storage.py index cad108a8..96558ff6 100644 --- a/sebs/azure/blob_storage.py +++ b/sebs/azure/blob_storage.py @@ -18,7 +18,7 @@ def deployment_name(): def __init__(self, region: str, cache_client: Cache, conn_string: str, replace_existing: bool): super().__init__(region, cache_client, replace_existing) - self.client = BlobServiceClient.from_connection_string(conn_string) + self.client: BlobServiceClient = BlobServiceClient.from_connection_string(conn_string) """ Internal implementation of creating a new container. @@ -83,7 +83,10 @@ def upload(self, container_name: str, filepath: str, key: str): self.logging.info("Upload {} to {}".format(filepath, container_name)) client = self.client.get_blob_client(container_name, key) with open(filepath, "rb") as upload_file: - client.upload_blob(upload_file.read()) + client.upload_blob(upload_file) # type: ignore + + def exists_bucket(self, container: str) -> bool: + return self.client.get_container_client(container).exists() """ Return list of files in a container. 
diff --git a/sebs/faas/storage.py b/sebs/faas/storage.py index e54812e2..d3781f2e 100644 --- a/sebs/faas/storage.py +++ b/sebs/faas/storage.py @@ -152,6 +152,10 @@ def list_bucket(self, bucket_name: str) -> List[str]: def list_buckets(self, bucket_name: str) -> List[str]: pass + @abstractmethod + def exists_bucket(self, bucket_name: str) -> bool: + pass + @abstractmethod def clean_bucket(self, bucket_name: str): pass @@ -170,16 +174,33 @@ def allocate_buckets(self, benchmark: str, requested_buckets: Tuple[int, int]): # Load cached information cached_buckets = self.cache_client.get_storage_config(self.deployment_name(), benchmark) if cached_buckets: - self.input_buckets = cached_buckets["buckets"]["input"] - for bucket in self.input_buckets: - self.input_buckets_files.append(self.list_bucket(bucket)) - self.output_buckets = cached_buckets["buckets"]["output"] - # for bucket in self.output_buckets: - # self.clean_bucket(bucket) - self.cached = True - self.logging.info("Using cached storage input buckets {}".format(self.input_buckets)) - self.logging.info("Using cached storage output buckets {}".format(self.output_buckets)) - return + cache_valid = True + for bucket in [ + *cached_buckets["buckets"]["input"], + *cached_buckets["buckets"]["output"], + ]: + if not self.exists_bucket(bucket): + cache_valid = False + self.logging.info(f"Cached storage buckets {bucket} does not exist.") + break + + if cache_valid: + self.input_buckets = cached_buckets["buckets"]["input"] + for bucket in self.input_buckets: + self.input_buckets_files.append(self.list_bucket(bucket)) + self.output_buckets = cached_buckets["buckets"]["output"] + # for bucket in self.output_buckets: + # self.clean_bucket(bucket) + self.cached = True + self.logging.info( + "Using cached storage input buckets {}".format(self.input_buckets) + ) + self.logging.info( + "Using cached storage output buckets {}".format(self.output_buckets) + ) + return + else: + self.logging.info("Cached storage buckets are no longer 
valid, creating new ones.") buckets = self.list_buckets(self.correct_name(benchmark)) for i in range(0, requested_buckets[0]): diff --git a/sebs/gcp/storage.py b/sebs/gcp/storage.py index 8202cd0e..e6f705c3 100644 --- a/sebs/gcp/storage.py +++ b/sebs/gcp/storage.py @@ -3,6 +3,7 @@ from typing import List from google.cloud import storage as gcp_storage +from google.api_core import exceptions from sebs.cache import Cache from ..faas.storage import PersistentStorage @@ -64,6 +65,13 @@ def upload(self, bucket_name: str, filepath: str, key: str): gcp_storage.blob._MAX_MULTIPART_SIZE = 5 * 1024 * 1024 # workaround for connection timeout blob.upload_from_filename(filepath) + def exists_bucket(self, bucket_name: str) -> bool: + try: + return self.client.bucket(bucket_name).exists() + # 403 returned when the bucket exists but is owned by another user + except exceptions.Forbidden: + return False + def list_bucket(self, bucket_name: str) -> List[str]: bucket_instance = self.client.get_bucket(bucket_name) all_blobs = list(self.client.list_blobs(bucket_instance)) @@ -78,16 +86,6 @@ def list_buckets(self, bucket_name: str) -> List[str]: def clean_bucket(self, bucket: str): raise NotImplementedError() - """ - :param bucket_name: - :return: list of files in a given bucket - """ - - # def list_bucket(self, bucket_name: str) -> List[str]: - # name = "{}-{}".format(bucket_name, suffix) - # bucket_name = self.create_bucket(name) - # return bucket_name - def uploader_func(self, bucket_idx: int, key: str, filepath: str) -> None: if self.cached and not self.replace_existing: return diff --git a/sebs/local/storage.py b/sebs/local/storage.py index c34f4c0d..8147ae8d 100644 --- a/sebs/local/storage.py +++ b/sebs/local/storage.py @@ -145,10 +145,16 @@ def correct_name(self, name: str) -> str: def download(self, bucket_name: str, key: str, filepath: str): raise NotImplementedError() - def list_bucket(self, bucket_name: str): - objects_list = self.connection.list_objects(bucket_name) - 
objects: List[str] - return [obj.object_name for obj in objects_list] + def exists_bucket(self, bucket_name: str) -> bool: + return self.connection.bucket_exists(bucket_name) + + def list_bucket(self, bucket_name: str) -> List[str]: + try: + objects_list = self.connection.list_objects(bucket_name) + objects: List[str] + return [obj.object_name for obj in objects_list] + except minio.error.NoSuchBucket: + raise RuntimeError(f"Attempting to access a non-existing bucket {bucket_name}!") def list_buckets(self, bucket_name: str) -> List[str]: buckets = self.connection.list_buckets() From edf11afd20673d64193093df164b7b383df4b7da Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 14:42:17 +0200 Subject: [PATCH 074/140] [gcp] Ignore missing mypy impors --- .mypy.ini | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.mypy.ini b/.mypy.ini index fece12c6..a1adeaed 100644 --- a/.mypy.ini +++ b/.mypy.ini @@ -30,5 +30,11 @@ ignore_missing_imports = True [mypy-google.api_core] ignore_missing_imports = True +[mypy-googleapiclient.discovery] +ignore_missing_imports = True + +[mypy-googleapiclient.errors] +ignore_missing_imports = True + [mypy-testtools] ignore_missing_imports = True From 1190fc2bc7716f7d09557f82756cf1036a6b3c64 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 14:42:39 +0200 Subject: [PATCH 075/140] [benchmarks] Support Pillow in Python 3.9 --- .../200.multimedia/210.thumbnailer/python/requirements.txt.3.9 | 1 + 1 file changed, 1 insertion(+) create mode 100755 benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.9 diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.9 b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.9 new file mode 100755 index 00000000..8da721c2 --- /dev/null +++ b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt.3.9 @@ -0,0 +1 @@ +Pillow==9.0.0 From b58014277a9a7580cdde31826dd192984a712a6f Mon Sep 17 00:00:00 2001 From: Marcin 
Copik Date: Thu, 28 Apr 2022 15:39:27 +0200 Subject: [PATCH 076/140] [whisk] Avoid changing directory, leading to incorrect placement of output files --- sebs/openwhisk/openwhisk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 29808bf3..638be80f 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -213,11 +213,11 @@ def package_code( } package_config = CONFIG_FILES[language_name] - os.chdir(directory) benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( ["zip", benchmark_archive] + package_config, stdout=subprocess.DEVNULL, + cwd=directory ) self.logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) From 2dee0ca0b22d97edc0fef940d3d9ed877357324a Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Thu, 28 Apr 2022 18:08:08 +0200 Subject: [PATCH 077/140] [storage] Add new interface for independent storage deployment. 
--- sebs.py | 166 +++++++++++--------- sebs/openwhisk/minio.py | 68 -------- sebs/sebs.py | 10 ++ sebs/storage/__init__.py | 0 sebs/{local/storage.py => storage/minio.py} | 39 +++-- sebs/types.py | 16 ++ 6 files changed, 145 insertions(+), 154 deletions(-) delete mode 100644 sebs/openwhisk/minio.py create mode 100644 sebs/storage/__init__.py rename sebs/{local/storage.py => storage/minio.py} (85%) create mode 100644 sebs/types.py diff --git a/sebs.py b/sebs.py index 3d50830f..b8d13bcb 100755 --- a/sebs.py +++ b/sebs.py @@ -5,7 +5,6 @@ import logging import functools import os -import sys import traceback from typing import cast, Optional @@ -13,6 +12,7 @@ import sebs from sebs import SeBS +from sebs.types import Storage as StorageTypes from sebs.regression import regression_suite from sebs.utils import update_nested_dict from sebs.faas import System as FaaSSystem @@ -39,6 +39,7 @@ def __call__(self, *args, **kwargs): if sebs_client is not None: sebs_client.shutdown() + def simplified_common_params(func): @click.option( "--config", @@ -46,12 +47,8 @@ def simplified_common_params(func): type=click.Path(readable=True), help="Location of experiment config.", ) - @click.option( - "--output-dir", default=os.path.curdir, help="Output directory for results." - ) - @click.option( - "--output-file", default="out.log", help="Output filename for logging." 
- ) + @click.option("--output-dir", default=os.path.curdir, help="Output directory for results.") + @click.option("--output-file", default="out.log", help="Output filename for logging.") @click.option( "--cache", default=os.path.join(os.path.curdir, "cache"), @@ -69,15 +66,14 @@ def simplified_common_params(func): type=click.Choice(["python", "nodejs"]), help="Benchmark language", ) - @click.option( - "--language-version", default=None, type=str, help="Benchmark language version" - ) + @click.option("--language-version", default=None, type=str, help="Benchmark language version") @functools.wraps(func) def wrapper(*args, **kwargs): return func(*args, **kwargs) return wrapper + def common_params(func): @click.option( "--update-code/--no-update-code", @@ -116,7 +112,7 @@ def parse_common_params( language, language_version, initialize_deployment: bool = True, - ignore_cache: bool = False + ignore_cache: bool = False, ): global sebs_client, deployment_client config_obj = json.load(open(config, "r")) @@ -129,13 +125,10 @@ def parse_common_params( # CLI overrides JSON options update_nested_dict(config_obj, ["experiments", "runtime", "language"], language) - update_nested_dict( - config_obj, ["experiments", "runtime", "version"], language_version - ) + update_nested_dict(config_obj, ["experiments", "runtime", "version"], language_version) update_nested_dict(config_obj, ["deployment", "name"], deployment) update_nested_dict(config_obj, ["experiments", "update_code"], update_code) update_nested_dict(config_obj, ["experiments", "update_storage"], update_storage) - update_nested_dict(config_obj, ["experiments", "benchmark"], benchmark) if initialize_deployment: deployment_client = sebs_client.get_deployment( @@ -160,19 +153,18 @@ def cli(): def benchmark(): pass + @benchmark.command() @click.argument("benchmark", type=str) # , help="Benchmark to be used.") @click.argument( "benchmark-input-size", type=click.Choice(["test", "small", "large"]) ) # help="Input test size") 
-@click.option( - "--repetitions", default=5, type=int, help="Number of experimental repetitions." -) +@click.option("--repetitions", default=5, type=int, help="Number of experimental repetitions.") @click.option( "--trigger", type=click.Choice(["library", "http"]), default="http", - help="Function trigger to be used." + help="Function trigger to be used.", ) @click.option( "--function-name", @@ -191,6 +183,7 @@ def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, deployment_client, ) = parse_common_params(**kwargs) experiment_config = sebs_client.get_experiment_config(config["experiments"]) + update_nested_dict(config, ["experiments", "benchmark"], benchmark) benchmark_obj = sebs_client.get_benchmark( benchmark, deployment_client, @@ -198,26 +191,19 @@ def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, logging_filename=logging_filename, ) func = deployment_client.get_function( - benchmark_obj, function_name if function_name else deployment_client.default_function_name(benchmark_obj) - ) - storage = deployment_client.get_storage( - replace_existing=experiment_config.update_storage - ) - input_config = benchmark_obj.prepare_input( - storage=storage, size=benchmark_input_size + benchmark_obj, + function_name if function_name else deployment_client.default_function_name(benchmark_obj), ) + storage = deployment_client.get_storage(replace_existing=experiment_config.update_storage) + input_config = benchmark_obj.prepare_input(storage=storage, size=benchmark_input_size) - result = sebs.experiments.ExperimentResult( - experiment_config, deployment_client.config - ) + result = sebs.experiments.ExperimentResult(experiment_config, deployment_client.config) result.begin() trigger_type = Trigger.TriggerType.get(trigger) triggers = func.triggers(trigger_type) if len(triggers) == 0: - trigger = deployment_client.create_trigger( - func, trigger_type - ) + trigger = deployment_client.create_trigger(func, trigger_type) else: 
trigger = triggers[0] for i in range(repetitions): @@ -225,9 +211,9 @@ def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, ret = trigger.sync_invoke(input_config) if ret.stats.failure: sebs_client.logging.info(f"Failure on repetition {i+1}/{repetitions}") - #deployment_client.get_invocation_error( + # deployment_client.get_invocation_error( # function_name=func.name, start_time=start_time, end_time=end_time - #) + # ) result.add_invocation(func, ret) result.end() with open("experiments.json", "w") as out_f: @@ -263,6 +249,7 @@ def process(**kwargs): out_f.write(sebs.utils.serialize(experiments)) sebs_client.logging.info("Save results to {}".format(os.path.abspath("results.json"))) + @benchmark.command() @click.argument( "benchmark-input-size", type=click.Choice(["test", "small", "large"]) @@ -280,63 +267,89 @@ def process(**kwargs): help="Location of experiments cache.", ) @click.option( - "--output-dir", default=os.path.join(os.path.curdir, "regression-output"), help="Output directory for results." 
+ "--output-dir", + default=os.path.join(os.path.curdir, "regression-output"), + help="Output directory for results.", ) def regression(benchmark_input_size, benchmark_name, **kwargs): # for regression, deployment client is initialized locally # disable default initialization - ( - config, - output_dir, - logging_filename, - sebs_client, - _ - ) = parse_common_params( - initialize_deployment=False, - **kwargs + (config, output_dir, logging_filename, sebs_client, _) = parse_common_params( + initialize_deployment=False, **kwargs ) - succ = regression_suite( + regression_suite( sebs_client, config["experiments"], - set( (config['deployment']['name'],) ), + set((config["deployment"]["name"],)), config["deployment"], - benchmark_name + benchmark_name, ) + +@cli.group() +def storage(): + pass + + +@storage.command("start") +@click.argument("storage", type=click.Choice([StorageTypes.MINIO])) +@click.option("--output-json", type=click.Path(dir_okay=False, writable=True), default=None) +@click.option("--port", type=int, default=9000) +def storage_start(storage, output_json, port): + + import docker + + sebs.utils.global_logging() + storage_type = sebs.SeBS.get_storage_implementation(StorageTypes(storage)) + storage_instance = storage_type(docker.from_env(), None, True) + logging.info(f"Starting storage {str(storage)} on port {port}.") + storage_instance.start(port) + if output_json: + logging.info(f"Writing storage configuration to {output_json}.") + with open(output_json, "w") as f: + json.dump(storage_instance.serialize(), fp=f, indent=2) + else: + logging.info("Writing storage configuration to stdout.") + logging.info(json.dumps(storage_instance.serialize(), indent=2)) + + +@storage.command("stop") +@click.argument("input-json", type=click.Path(exists=True, dir_okay=False, readable=True)) +def storage_stop(input_json): + + sebs.utils.global_logging() + with open(input_json, "r") as f: + cfg = json.load(f) + storage_type = cfg["type"] + logging.info(f"Stopping storage 
deployment of {storage_type}.") + storage = sebs.SeBS.get_storage_implementation(storage_type).deserialize(cfg, None) + storage.stop() + logging.info(f"Stopped storage deployment of {storage_type}.") + + @cli.group() def local(): pass + @local.command() @click.argument("benchmark", type=str) -@click.argument( - "benchmark-input-size", type=click.Choice(["test", "small", "large"]) -) +@click.argument("benchmark-input-size", type=click.Choice(["test", "small", "large"])) @click.argument("output", type=str) +@click.option("--deployments", default=1, type=int, help="Number of deployed containers.") @click.option( - "--deployments", default=1, type=int, help="Number of deployed containers." -) -@click.option( - "--remove-containers/--no-remove-containers", default=True, help="Remove containers after stopping." + "--remove-containers/--no-remove-containers", + default=True, + help="Remove containers after stopping.", ) @simplified_common_params def start(benchmark, benchmark_input_size, output, deployments, remove_containers, **kwargs): """ - Start a given number of function instances and a storage instance. + Start a given number of function instances and a storage instance. 
""" - ( - config, - output_dir, - logging_filename, - sebs_client, - deployment_client - ) = parse_common_params( - ignore_cache = True, - update_code = False, - update_storage = False, - deployment = "local", - **kwargs + (config, output_dir, logging_filename, sebs_client, deployment_client) = parse_common_params( + ignore_cache=True, update_code=False, update_storage=False, deployment="local", **kwargs ) deployment_client = cast(sebs.local.Local, deployment_client) deployment_client.remove_containers = remove_containers @@ -349,13 +362,9 @@ def start(benchmark, benchmark_input_size, output, deployments, remove_container experiment_config, logging_filename=logging_filename, ) - storage = deployment_client.get_storage( - replace_existing=experiment_config.update_storage - ) + storage = deployment_client.get_storage(replace_existing=experiment_config.update_storage) result.set_storage(storage) - input_config = benchmark_obj.prepare_input( - storage=storage, size=benchmark_input_size - ) + input_config = benchmark_obj.prepare_input(storage=storage, size=benchmark_input_size) result.add_input(input_config) for i in range(deployments): func = deployment_client.get_function( @@ -369,12 +378,13 @@ def start(benchmark, benchmark_input_size, output, deployments, remove_container result.serialize(output) sebs_client.logging.info(f"Save results to {os.path.abspath(output)}") + @local.command() @click.argument("input-json", type=str) -#@simplified_common_params +# @simplified_common_params def stop(input_json, **kwargs): """ - Stop function and storage containers. + Stop function and storage containers. 
""" sebs.utils.global_logging() @@ -384,6 +394,7 @@ def stop(input_json, **kwargs): deployment.shutdown() logging.info(f"Stopped deployment from {os.path.abspath(input_json)}") + @cli.group() def experiment(): pass @@ -418,9 +429,10 @@ def experment_process(experiment, extend_time_interval, **kwargs): deployment_client, ) = parse_common_params(**kwargs) experiment = sebs_client.get_experiment(experiment, config["experiments"]) - experiment.process(sebs_client, deployment_client, output_dir, logging_filename, extend_time_interval) + experiment.process( + sebs_client, deployment_client, output_dir, logging_filename, extend_time_interval + ) if __name__ == "__main__": cli() - diff --git a/sebs/openwhisk/minio.py b/sebs/openwhisk/minio.py deleted file mode 100644 index 0bb31b9d..00000000 --- a/sebs/openwhisk/minio.py +++ /dev/null @@ -1,68 +0,0 @@ -import sebs.local.storage -from typing import List, Any -import secrets -import docker -from sebs.cache import Cache - - -class Minio(sebs.local.storage.Minio): - @staticmethod - def deployment_name() -> str: - return "openwhisk" - - input_buckets: List[str] = [] - output_buckets: List[str] = [] - input_index = 0 - output_index = 0 - access_key: str = "" - secret_key: str = "" - port = 9000 - location = "openwhiskBenchmark" - connection: Any - - def __init__(self, docker_client: docker.client, cache_client: Cache, replace_existing: bool): - super(Minio, self).__init__(docker_client, cache_client, replace_existing) - self.start() - self.connection = self.get_connection() - - def start(self): - self.startMinio() - - def startMinio(self): - minioVersion = "minio/minio:latest" - # FIXME: merge it with local/minio? 
- # FIXME: check if the container is still runing - try: - self._storage_container = self._docker_client.containers.get("minio") - self.logging.info("Minio container already exists") - envs = self._storage_container.attrs["Config"]["Env"] - if isinstance(envs, (tuple, list)): - envs = dict([i.split("=", 1) for i in envs]) - self._access_key = envs["MINIO_ACCESS_KEY"] - self._secret_key = envs["MINIO_SECRET_KEY"] - except docker.errors.NotFound: - self.logging.info("Minio container does not exists, starting") - self._access_key = secrets.token_urlsafe(32) - self._secret_key = secrets.token_hex(32) - self._storage_container = self._docker_client.containers.run( - minioVersion, - command="server /data", - environment={ - "MINIO_ACCESS_KEY": self._access_key, - "MINIO_SECRET_KEY": self._secret_key, - }, - remove=True, - stdout=True, - stderr=True, - detach=True, - name="minio", - ) - - self.logging.info("ACCESS_KEY={}".format(self._access_key)) - self.logging.info("SECRET_KEY={}".format(self._secret_key)) - self._storage_container.reload() - networks = self._storage_container.attrs["NetworkSettings"]["Networks"] - self._url = "{IPAddress}:{Port}".format( - IPAddress=networks["bridge"]["IPAddress"], Port=self.port - ) - self.logging.info("Minio runs at {}".format(self._url)) diff --git a/sebs/sebs.py b/sebs/sebs.py index 79ff81bd..1a54a40f 100644 --- a/sebs/sebs.py +++ b/sebs/sebs.py @@ -2,11 +2,14 @@ import docker +import sebs.storage +from sebs import types from sebs.local import Local from sebs.cache import Cache from sebs.config import SeBSConfig from sebs.benchmark import Benchmark from sebs.faas.system import System as FaaSSystem +from sebs.faas.storage import PersistentStorage from sebs.faas.config import Config from sebs.utils import has_platform, LoggingHandlers, LoggingBase @@ -168,6 +171,13 @@ def get_benchmark( ) return benchmark + @staticmethod + def get_storage_implementation(storage_type: types.Storage) -> Type[PersistentStorage]: + 
_storage_implementations = {types.Storage.MINIO: sebs.storage.minio.Minio} + impl = _storage_implementations.get(storage_type) + assert impl + return impl + def shutdown(self): self.cache_client.shutdown() diff --git a/sebs/storage/__init__.py b/sebs/storage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/sebs/local/storage.py b/sebs/storage/minio.py similarity index 85% rename from sebs/local/storage.py rename to sebs/storage/minio.py index 8147ae8d..90de0b29 100644 --- a/sebs/local/storage.py +++ b/sebs/storage/minio.py @@ -8,17 +8,18 @@ import minio from sebs.cache import Cache -from ..faas.storage import PersistentStorage +from sebs.types import Storage as StorageTypes +from sebs.faas.storage import PersistentStorage class Minio(PersistentStorage): @staticmethod def typename() -> str: - return "Local.Minio" + return f"{Minio.deployment_name()}.Minio" @staticmethod - def deployment_name(): - return "local" + def deployment_name() -> str: + return "Storage" # the location does not matter MINIO_REGION = "us-east-1" @@ -26,10 +27,22 @@ def deployment_name(): def __init__(self, docker_client: docker.client, cache_client: Cache, replace_existing: bool): super().__init__(self.MINIO_REGION, cache_client, replace_existing) self._docker_client = docker_client - self._port = 9000 self._storage_container: Optional[docker.container] = None - def start(self): + @staticmethod + def from_config( + config: dict, docker_client: docker.client, cache_client: Cache, replace_existing: bool + ): + + instance = Minio(docker_client, cache_client, replace_existing) + instance._port = config["port"] + instance._access_key = config["access-key"] + instance._secret_key = config["secret-key"] + instance._url = config["url"] + return instance + + def start(self, port: int = 9000): + self._port = port self._access_key = secrets.token_urlsafe(32) self._secret_key = secrets.token_hex(32) self.logging.info("Minio storage ACCESS_KEY={}".format(self._access_key)) @@ -38,8 
+51,8 @@ def start(self): self._storage_container = self._docker_client.containers.run( "minio/minio:latest", command="server /data", - # ports={str(self._port): self._port}, network_mode="bridge", + ports={"9000": str(self._port)}, environment={ "MINIO_ACCESS_KEY": self._access_key, "MINIO_SECRET_KEY": self._secret_key, @@ -80,6 +93,8 @@ def stop(self): self.logging.info("Stopping minio container at {url}".format(url=self._url)) self._storage_container.stop() self.logging.info("Stopped minio container at {url}".format(url=self._url)) + else: + self.logging.error("Stopping minio was not succesful, storage container not known!") def get_connection(self): return minio.Minio( @@ -168,10 +183,12 @@ def serialize(self) -> dict: return { "instance_id": self._storage_container.id, "address": self._url, + "port": self._port, "secret_key": self._secret_key, "access_key": self._access_key, "input": self.input_buckets, "output": self.output_buckets, + "type": StorageTypes.MINIO, } else: return {} @@ -179,11 +196,15 @@ def serialize(self) -> dict: @staticmethod def deserialize(cached_config: dict, cache_client: Cache) -> "Minio": try: - instance_id = cached_config["instance_id"] docker_client = docker.from_env() obj = Minio(docker_client, cache_client, False) - obj._storage_container = docker_client.containers.get(instance_id) + if "instance_id" in cached_config: + instance_id = cached_config["instance_id"] + obj._storage_container = docker_client.containers.get(instance_id) + else: + obj._storage_container = None obj._url = cached_config["address"] + obj._port = cached_config["port"] obj._access_key = cached_config["access_key"] obj._secret_key = cached_config["secret_key"] obj.input_buckets = cached_config["input"] diff --git a/sebs/types.py b/sebs/types.py new file mode 100644 index 00000000..43574337 --- /dev/null +++ b/sebs/types.py @@ -0,0 +1,16 @@ +from enum import Enum + + +class Platforms(str, Enum): + AWS = ("aws",) + AZURE = ("azure",) + GCP = ("gcp",) + LOCAL = 
("local",) + OPENWHISK = "openwhisk" + + +class Storage(str, Enum): + AWS_S3 = ("aws-s3",) + AZURE_BLOB_STORAGE = ("azure-blob-storage",) + GCP_STORAGE = ("google-cloud-storage",) + MINIO = "minio" From 37c8cd3654aab077383b3c9fbe7e518e61bdb223 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 29 Apr 2022 15:33:31 +0200 Subject: [PATCH 078/140] [system] Catch interrupt signal to display stacktrace --- sebs.py | 5 ++++- sebs/utils.py | 13 +++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/sebs.py b/sebs.py index b8d13bcb..f453172b 100755 --- a/sebs.py +++ b/sebs.py @@ -14,7 +14,7 @@ from sebs import SeBS from sebs.types import Storage as StorageTypes from sebs.regression import regression_suite -from sebs.utils import update_nested_dict +from sebs.utils import update_nested_dict, catch_interrupt from sebs.faas import System as FaaSSystem from sebs.faas.function import Trigger @@ -114,6 +114,7 @@ def parse_common_params( initialize_deployment: bool = True, ignore_cache: bool = False, ): + global sebs_client, deployment_client config_obj = json.load(open(config, "r")) os.makedirs(output_dir, exist_ok=True) @@ -141,6 +142,8 @@ def parse_common_params( if ignore_cache: sebs_client.ignore_cache() + catch_interrupt() + return config_obj, output_dir, logging_filename, sebs_client, deployment_client diff --git a/sebs/utils.py b/sebs/utils.py index eff58511..f79278b3 100644 --- a/sebs/utils.py +++ b/sebs/utils.py @@ -186,3 +186,16 @@ def logging_handlers(self, handlers: LoggingHandlers): def has_platform(name: str) -> bool: return os.environ.get(f"SEBS_WITH_{name.upper()}", "False").lower() == "true" + + +def catch_interrupt(): + + import signal + import sys + import traceback + + def handler(x, y): + traceback.print_stack() + sys.exit(signal.SIGINT) + + signal.signal(signal.SIGINT, handler) From cb88daf2cc3c7cdb9b5eae79bb654ceaa7ff88a7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 29 Apr 2022 15:59:37 +0200 Subject: [PATCH 079/140] 
[local] Define timeout for storage access and correctly initialize from cache --- sebs/storage/minio.py | 61 +++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 90de0b29..50d6376c 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -19,7 +19,7 @@ def typename() -> str: @staticmethod def deployment_name() -> str: - return "Storage" + return "minio" # the location does not matter MINIO_REGION = "us-east-1" @@ -30,21 +30,26 @@ def __init__(self, docker_client: docker.client, cache_client: Cache, replace_ex self._storage_container: Optional[docker.container] = None @staticmethod - def from_config( - config: dict, docker_client: docker.client, cache_client: Cache, replace_existing: bool - ): - - instance = Minio(docker_client, cache_client, replace_existing) - instance._port = config["port"] - instance._access_key = config["access-key"] - instance._secret_key = config["secret-key"] - instance._url = config["url"] - return instance + def _define_http_client(): + """ + Minio does not allow another way of configuring timeout for connection. + The rest of configuration is copied from source code of Minio. + """ + import urllib3 + from datetime import timedelta + timeout = timedelta(seconds=1).seconds + + return urllib3.PoolManager( + timeout=urllib3.util.Timeout(connect=timeout, read=timeout), + maxsize=10, + retries=urllib3.Retry(total=5,backoff_factor=0.2,status_forcelist=[500, 502, 503, 504]) + ) def start(self, port: int = 9000): self._port = port self._access_key = secrets.token_urlsafe(32) self._secret_key = secrets.token_hex(32) + self._url = "" self.logging.info("Minio storage ACCESS_KEY={}".format(self._access_key)) self.logging.info("Minio storage SECRET_KEY={}".format(self._secret_key)) try: @@ -72,20 +77,21 @@ def start(self, port: int = 9000): def configure_connection(self): # who knows why? 
otherwise attributes are not loaded - self._storage_container.reload() - networks = self._storage_container.attrs["NetworkSettings"]["Networks"] - self._url = "{IPAddress}:{Port}".format( - IPAddress=networks["bridge"]["IPAddress"], Port=self._port - ) - if not self._url: - self.logging.error( - f"Couldn't read the IP address of container from attributes " - f"{json.dumps(self._instance.attrs, indent=2)}" + if self._url == "": + self._storage_container.reload() + networks = self._storage_container.attrs["NetworkSettings"]["Networks"] + self._url = "{IPAddress}:{Port}".format( + IPAddress=networks["bridge"]["IPAddress"], Port=self._port ) - raise RuntimeError( - f"Incorrect detection of IP address for container with id {self._instance_id}" - ) - self.logging.info("Starting minio instance at {}".format(self._url)) + if not self._url: + self.logging.error( + f"Couldn't read the IP address of container from attributes " + f"{json.dumps(self._instance.attrs, indent=2)}" + ) + raise RuntimeError( + f"Incorrect detection of IP address for container with id {self._instance_id}" + ) + self.logging.info("Starting minio instance at {}".format(self._url)) self.connection = self.get_connection() def stop(self): @@ -98,7 +104,11 @@ def stop(self): def get_connection(self): return minio.Minio( - self._url, access_key=self._access_key, secret_key=self._secret_key, secure=False + self._url, + access_key=self._access_key, + secret_key=self._secret_key, + secure=False, + http_client=Minio._define_http_client() ) def _create_bucket(self, name: str, buckets: List[str] = []): @@ -209,6 +219,7 @@ def deserialize(cached_config: dict, cache_client: Cache) -> "Minio": obj._secret_key = cached_config["secret_key"] obj.input_buckets = cached_config["input"] obj.output_buckets = cached_config["output"] + obj.configure_connection() return obj except docker.errors.NotFound: raise RuntimeError(f"Cached container {instance_id} not available anymore!") From 17de3fdd3e18d1366e63e32b4794ad6730b5a866 
Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 29 Apr 2022 19:35:15 +0200 Subject: [PATCH 080/140] [whisk] [storage] Implement storage configuration --- sebs/openwhisk/__init__.py | 1 - sebs/openwhisk/config.py | 68 +++++++++++++++++++++- sebs/openwhisk/openwhisk.py | 23 ++++---- sebs/openwhisk/storage.py | 18 ++++++ sebs/storage/minio.py | 109 ++++++++++++++++++------------------ 5 files changed, 149 insertions(+), 70 deletions(-) create mode 100644 sebs/openwhisk/storage.py diff --git a/sebs/openwhisk/__init__.py b/sebs/openwhisk/__init__.py index 9fbfc400..614d9443 100644 --- a/sebs/openwhisk/__init__.py +++ b/sebs/openwhisk/__init__.py @@ -1,3 +1,2 @@ from .openwhisk import OpenWhisk # noqa from .config import OpenWhiskConfig # noqa -from .minio import Minio # noqa diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 9ad33232..8bda4b8e 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -1,6 +1,7 @@ from sebs.cache import Cache from sebs.faas.config import Credentials, Resources, Config from sebs.utils import LoggingHandlers +from sebs.storage.config import MinioConfig from typing import cast, Optional @@ -20,15 +21,19 @@ def __init__( registry: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, + registry_updated: bool = False, ): super().__init__() self._docker_registry = registry if registry != "" else None self._docker_username = username if username != "" else None self._docker_password = password if password != "" else None + self._registry_updated = registry_updated + self._storage: Optional[MinioConfig] = None + self._storage_updated = False @staticmethod def typename() -> str: - return "OpenWhisk.Credentials" + return "OpenWhisk.Resources" @property def docker_registry(self) -> Optional[str]: @@ -42,6 +47,18 @@ def docker_username(self) -> Optional[str]: def docker_password(self) -> Optional[str]: return self._docker_password + @property + def storage_config(self) -> 
Optional[MinioConfig]: + return self._storage + + @property + def storage_updated(self) -> bool: + return self._storage_updated + + @property + def registry_updated(self) -> bool: + return self._registry_updated + @staticmethod def initialize(dct: dict) -> Resources: return OpenWhiskResources(dct["registry"], dct["username"], dct["password"]) @@ -51,11 +68,21 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour cached_config = cache.get_config("openwhisk") ret: OpenWhiskResources - # Check for new config + # Check for new config - overrides but check if it's different if "docker_registry" in config: ret = cast(OpenWhiskResources, OpenWhiskResources.initialize(config["docker_registry"])) ret.logging.info("Using user-provided Docker registry for OpenWhisk.") ret.logging_handlers = handlers + + # check if there has been an update + if not ( + cached_config + and "resources" in cached_config + and "docker" in cached_config["resources"] + and cached_config["resources"]["docker"] == config["docker_registry"] + ): + ret._registry_updated = True + # Load cached values elif ( cached_config @@ -72,6 +99,37 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour ret = OpenWhiskResources() ret.logging.info("Using default Docker registry for OpenWhisk.") ret.logging_handlers = handlers + ret._registry_updated = True + + # Check for new config + if "storage" in config: + ret._storage = MinioConfig.deserialize(config["storage"]) + ret.logging.info("Using user-provided configuration of storage for OpenWhisk.") + + # check if there has been an update + if not ( + cached_config + and "resources" in cached_config + and "storage" in cached_config["resources"] + and cached_config["resources"]["storage"] == config["storage"] + ): + print(cached_config["resources"]["storage"]) + print(config["storage"]) + ret.logging.info( + "User-provided configuration is different from cached storage, " + "we will update existing OpenWhisk 
actions." + ) + print(ret._storage) + ret._storage_updated = True + + # Load cached values + elif ( + cached_config + and "resources" in cached_config + and "storage" in cached_config["resources"] + ): + ret._storage = MinioConfig.deserialize(cached_config["resources"]["storage"]) + ret.logging.info("Using cached configuration of storage for OpenWhisk.") return ret @@ -85,13 +143,17 @@ def update_cache(self, cache: Cache): cache.update_config( val=self.docker_password, keys=["openwhisk", "resources", "docker", "password"] ) + if self._storage: + self._storage.update_cache(["openwhisk", "resources", "storage"], cache) def serialize(self) -> dict: - out = { + out: dict = { "docker_registry": self.docker_registry, "docker_username": self.docker_username, "docker_password": self.docker_password, } + if self._storage: + out = {**out, "storage": self._storage.serialize()} return out diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 638be80f..949ef042 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -9,7 +9,7 @@ from sebs.cache import Cache from sebs.faas import System, PersistentStorage from sebs.faas.function import Function, ExecutionResult, Trigger -from .minio import Minio +from sebs.openwhisk.storage import Minio from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger from sebs.utils import PROJECT_DIR, LoggingHandlers, execute from .config import OpenWhiskConfig @@ -19,7 +19,6 @@ class OpenWhisk(System): _config: OpenWhiskConfig - storage: Minio def __init__( self, @@ -52,9 +51,15 @@ def config(self) -> OpenWhiskConfig: def get_storage(self, replace_existing: bool = False) -> PersistentStorage: if not hasattr(self, "storage"): - self.storage = Minio(self.docker_client, self.cache_client, replace_existing) + + if not self.config.resources.storage_config: + raise RuntimeError( + "OpenWhisk is missing the configuration of pre-allocated storage!" 
+ ) + self.storage = Minio.deserialize( + self.config.resources.storage_config, self.cache_client + ) self.storage.logging_handlers = self.logging_handlers - self.storage.start() else: self.storage.replace_existing = replace_existing return self.storage @@ -215,9 +220,7 @@ def package_code( benchmark_archive = os.path.join(directory, f"{benchmark}.zip") subprocess.run( - ["zip", benchmark_archive] + package_config, - stdout=subprocess.DEVNULL, - cwd=directory + ["zip", benchmark_archive] + package_config, stdout=subprocess.DEVNULL, cwd=directory ) self.logging.info(f"Created {benchmark_archive} archive") bytes_size = os.path.getsize(benchmark_archive) @@ -229,13 +232,13 @@ def storage_arguments(self) -> List[str]: return [ "-p", "MINIO_STORAGE_SECRET_KEY", - storage._access_key, + storage.config.secret_key, "-p", "MINIO_STORAGE_ACCESS_KEY", - storage._secret_key, + storage.config.access_key, "-p", "MINIO_STORAGE_CONNECTION_URL", - storage._url, + storage.config.address, ] def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": diff --git a/sebs/openwhisk/storage.py b/sebs/openwhisk/storage.py new file mode 100644 index 00000000..d94182c4 --- /dev/null +++ b/sebs/openwhisk/storage.py @@ -0,0 +1,18 @@ +import docker + +from sebs.storage import minio +from sebs.storage.config import MinioConfig +from sebs.cache import Cache + + +class Minio(minio.Minio): + @staticmethod + def deployment_name() -> str: + return "openwhisk" + + def __init__(self, docker_client: docker.client, cache_client: Cache, replace_existing: bool): + super().__init__(docker_client, cache_client, replace_existing) + + @staticmethod + def deserialize(cached_config: MinioConfig, cache_client: Cache) -> "Minio": + return super(Minio, Minio)._deserialize(cached_config, cache_client, Minio) diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 50d6376c..810e621d 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -2,7 +2,7 @@ import os import 
secrets import uuid -from typing import List, Optional +from typing import List, Optional, Type, TypeVar import docker import minio @@ -10,6 +10,7 @@ from sebs.cache import Cache from sebs.types import Storage as StorageTypes from sebs.faas.storage import PersistentStorage +from sebs.storage.config import MinioConfig class Minio(PersistentStorage): @@ -28,39 +29,48 @@ def __init__(self, docker_client: docker.client, cache_client: Cache, replace_ex super().__init__(self.MINIO_REGION, cache_client, replace_existing) self._docker_client = docker_client self._storage_container: Optional[docker.container] = None + self._cfg = MinioConfig() + + @property + def config(self) -> MinioConfig: + return self._cfg @staticmethod def _define_http_client(): """ - Minio does not allow another way of configuring timeout for connection. - The rest of configuration is copied from source code of Minio. + Minio does not allow another way of configuring timeout for connection. + The rest of configuration is copied from source code of Minio. 
""" import urllib3 from datetime import timedelta + timeout = timedelta(seconds=1).seconds return urllib3.PoolManager( timeout=urllib3.util.Timeout(connect=timeout, read=timeout), maxsize=10, - retries=urllib3.Retry(total=5,backoff_factor=0.2,status_forcelist=[500, 502, 503, 504]) + retries=urllib3.Retry( + total=5, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] + ), ) def start(self, port: int = 9000): - self._port = port - self._access_key = secrets.token_urlsafe(32) - self._secret_key = secrets.token_hex(32) - self._url = "" - self.logging.info("Minio storage ACCESS_KEY={}".format(self._access_key)) - self.logging.info("Minio storage SECRET_KEY={}".format(self._secret_key)) + + self._cfg.port = port + self._cfg.access_key = secrets.token_urlsafe(32) + self._cfg.secret_key = secrets.token_hex(32) + self._cfg.address = "" + self.logging.info("Minio storage ACCESS_KEY={}".format(self._cfg.access_key)) + self.logging.info("Minio storage SECRET_KEY={}".format(self._cfg.secret_key)) try: self._storage_container = self._docker_client.containers.run( "minio/minio:latest", command="server /data", network_mode="bridge", - ports={"9000": str(self._port)}, + ports={"9000": str(self._cfg.port)}, environment={ - "MINIO_ACCESS_KEY": self._access_key, - "MINIO_SECRET_KEY": self._secret_key, + "MINIO_ACCESS_KEY": self._cfg.access_key, + "MINIO_SECRET_KEY": self._cfg.secret_key, }, remove=True, stdout=True, @@ -77,13 +87,13 @@ def start(self, port: int = 9000): def configure_connection(self): # who knows why? 
otherwise attributes are not loaded - if self._url == "": + if self._cfg.address == "": self._storage_container.reload() networks = self._storage_container.attrs["NetworkSettings"]["Networks"] - self._url = "{IPAddress}:{Port}".format( - IPAddress=networks["bridge"]["IPAddress"], Port=self._port + self._cfg.address = "{IPAddress}:{Port}".format( + IPAddress=networks["bridge"]["IPAddress"], Port=self._cfg.port ) - if not self._url: + if not self._cfg.address: self.logging.error( f"Couldn't read the IP address of container from attributes " f"{json.dumps(self._instance.attrs, indent=2)}" @@ -91,24 +101,24 @@ def configure_connection(self): raise RuntimeError( f"Incorrect detection of IP address for container with id {self._instance_id}" ) - self.logging.info("Starting minio instance at {}".format(self._url)) + self.logging.info("Starting minio instance at {}".format(self._cfg.address)) self.connection = self.get_connection() def stop(self): if self._storage_container is not None: - self.logging.info("Stopping minio container at {url}".format(url=self._url)) + self.logging.info(f"Stopping minio container at {self._cfg.address}.") self._storage_container.stop() - self.logging.info("Stopped minio container at {url}".format(url=self._url)) + self.logging.info("Stopped minio container at {self._cfg.address}.") else: self.logging.error("Stopping minio was not succesful, storage container not known!") def get_connection(self): return minio.Minio( - self._url, - access_key=self._access_key, - secret_key=self._secret_key, + self._cfg.address, + access_key=self._cfg.access_key, + secret_key=self._cfg.secret_key, secure=False, - http_client=Minio._define_http_client() + http_client=Minio._define_http_client(), ) def _create_bucket(self, name: str, buckets: List[str] = []): @@ -189,37 +199,24 @@ def upload(self, bucket_name: str, filepath: str, key: str): raise NotImplementedError() def serialize(self) -> dict: - if self._storage_container is not None: - return { - 
"instance_id": self._storage_container.id, - "address": self._url, - "port": self._port, - "secret_key": self._secret_key, - "access_key": self._access_key, - "input": self.input_buckets, - "output": self.output_buckets, - "type": StorageTypes.MINIO, - } - else: - return {} + return { + **self._cfg.serialize(), + "type": StorageTypes.MINIO, + } + + T = TypeVar("T", bound="Minio") @staticmethod - def deserialize(cached_config: dict, cache_client: Cache) -> "Minio": - try: - docker_client = docker.from_env() - obj = Minio(docker_client, cache_client, False) - if "instance_id" in cached_config: - instance_id = cached_config["instance_id"] - obj._storage_container = docker_client.containers.get(instance_id) - else: - obj._storage_container = None - obj._url = cached_config["address"] - obj._port = cached_config["port"] - obj._access_key = cached_config["access_key"] - obj._secret_key = cached_config["secret_key"] - obj.input_buckets = cached_config["input"] - obj.output_buckets = cached_config["output"] - obj.configure_connection() - return obj - except docker.errors.NotFound: - raise RuntimeError(f"Cached container {instance_id} not available anymore!") + def _deserialize(cached_config: MinioConfig, cache_client: Cache, obj_type: Type[T]) -> T: + docker_client = docker.from_env() + obj = obj_type(docker_client, cache_client, False) + obj._cfg = cached_config + if cached_config.instance_id: + instance_id = cached_config.instance_id + obj._storage_container = docker_client.containers.get(instance_id) + else: + obj._storage_container = None + obj.input_buckets = cached_config.input_buckets + obj.output_buckets = cached_config.output_buckets + obj.configure_connection() + return obj From 5b3b03aef53de94a7030aa56bf49cc877c3b5cc0 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 29 Apr 2022 19:47:44 +0200 Subject: [PATCH 081/140] [system] Correctly stop and start Minio with the new interface --- sebs.py | 3 ++- sebs/sebs.py | 7 +++++++ sebs/storage/minio.py | 6 +++++- 
3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/sebs.py b/sebs.py index f453172b..fe978e50 100755 --- a/sebs.py +++ b/sebs.py @@ -324,8 +324,9 @@ def storage_stop(input_json): with open(input_json, "r") as f: cfg = json.load(f) storage_type = cfg["type"] + storage_cfg = sebs.SeBS.get_storage_config_implementation(storage_type).deserialize(cfg) logging.info(f"Stopping storage deployment of {storage_type}.") - storage = sebs.SeBS.get_storage_implementation(storage_type).deserialize(cfg, None) + storage = sebs.SeBS.get_storage_implementation(storage_type).deserialize(storage_cfg, None) storage.stop() logging.info(f"Stopped storage deployment of {storage_type}.") diff --git a/sebs/sebs.py b/sebs/sebs.py index 1a54a40f..07b90093 100644 --- a/sebs/sebs.py +++ b/sebs/sebs.py @@ -178,6 +178,13 @@ def get_storage_implementation(storage_type: types.Storage) -> Type[PersistentSt assert impl return impl + @staticmethod + def get_storage_config_implementation(storage_type: types.Storage): + _storage_implementations = {types.Storage.MINIO: sebs.storage.config.MinioConfig} + impl = _storage_implementations.get(storage_type) + assert impl + return impl + def shutdown(self): self.cache_client.shutdown() diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 810e621d..7a150a4f 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -108,7 +108,7 @@ def stop(self): if self._storage_container is not None: self.logging.info(f"Stopping minio container at {self._cfg.address}.") self._storage_container.stop() - self.logging.info("Stopped minio container at {self._cfg.address}.") + self.logging.info(f"Stopped minio container at {self._cfg.address}.") else: self.logging.error("Stopping minio was not succesful, storage container not known!") @@ -220,3 +220,7 @@ def _deserialize(cached_config: MinioConfig, cache_client: Cache, obj_type: Type obj.output_buckets = cached_config.output_buckets obj.configure_connection() return obj + + @staticmethod + def 
deserialize(cached_config: MinioConfig, cache_client: Cache) -> "Minio": + return Minio._deserialize(cached_config, cache_client, Minio) From 051a567fbb301b2cf1697217523b1cbbc815fbf5 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 29 Apr 2022 19:48:01 +0200 Subject: [PATCH 082/140] [system] Add new storage config class --- sebs/storage/config.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 sebs/storage/config.py diff --git a/sebs/storage/config.py b/sebs/storage/config.py new file mode 100644 index 00000000..c73f0b8c --- /dev/null +++ b/sebs/storage/config.py @@ -0,0 +1,30 @@ +from typing import List + +from dataclasses import dataclass, field + +from sebs.cache import Cache + + +@dataclass +class MinioConfig: + address: str = "" + port: int = -1 + access_key: str = "" + secret_key: str = "" + instance_id: str = "" + input_buckets: List[str] = field(default_factory=list) + output_buckets: List[str] = field(default_factory=list) + type: str = "minio" + + def update_cache(self, path: List[str], cache: Cache): + for key in MinioConfig.__dataclass_fields__.keys(): + cache.update_config(val=getattr(self, key), keys=[*path, key]) + + @staticmethod + def deserialize(data: dict) -> "MinioConfig": + keys = list(MinioConfig.__dataclass_fields__.keys()) + data = {k: v for k, v in data.items() if k in keys} + return MinioConfig(**data) + + def serialize(self) -> dict: + return self.__dict__ From a27e9768390ea0a39c98748e04257df096e34fce Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sat, 30 Apr 2022 01:02:01 +0200 Subject: [PATCH 083/140] [system] Correctly derive Docker instance id for storage --- sebs/storage/minio.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 7a150a4f..29c27759 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -77,6 +77,7 @@ def start(self, port: int = 9000): stderr=True, detach=True, ) + self._cfg.instance_id = 
self._storage_container.id self.configure_connection() except docker.errors.APIError as e: self.logging.error("Starting Minio storage failed! Reason: {}".format(e)) From 9cc447eb8e8d786bca6b9b62b49c01304bf31cf5 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 01:18:16 +0200 Subject: [PATCH 084/140] [system] Expose mapped port of a storage instance --- sebs/storage/config.py | 2 +- sebs/storage/minio.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sebs/storage/config.py b/sebs/storage/config.py index c73f0b8c..8e0d6b8c 100644 --- a/sebs/storage/config.py +++ b/sebs/storage/config.py @@ -8,7 +8,7 @@ @dataclass class MinioConfig: address: str = "" - port: int = -1 + mapped_port: int = -1 access_key: str = "" secret_key: str = "" instance_id: str = "" diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 29c27759..376dd0a3 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -56,7 +56,7 @@ def _define_http_client(): def start(self, port: int = 9000): - self._cfg.port = port + self._cfg.mapped_port = port self._cfg.access_key = secrets.token_urlsafe(32) self._cfg.secret_key = secrets.token_hex(32) self._cfg.address = "" @@ -67,7 +67,7 @@ def start(self, port: int = 9000): "minio/minio:latest", command="server /data", network_mode="bridge", - ports={"9000": str(self._cfg.port)}, + ports={"9000": str(self._cfg.mapped_port)}, environment={ "MINIO_ACCESS_KEY": self._cfg.access_key, "MINIO_SECRET_KEY": self._cfg.secret_key, From 1e60dcb24f7155dd26c67bb5f4e523a8dcba329e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 01:18:57 +0200 Subject: [PATCH 085/140] [system] Add option for user-defined prefixes for Docker image tags --- sebs.py | 11 ++++++++++- sebs/config.py | 14 +++++++++++++- sebs/sebs.py | 4 ++++ 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/sebs.py b/sebs.py index fe978e50..255c1caa 100755 --- a/sebs.py +++ b/sebs.py @@ -175,8 +175,14 @@ def benchmark(): type=str, 
help="Override function name for random generation.", ) +@click.option( + "--image-tag-prefix", + default=None, + type=str, + help="Attach prefix to generated Docker image tag.", +) @common_params -def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, **kwargs): +def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, image_tag_prefix, **kwargs): ( config, @@ -185,6 +191,9 @@ def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, sebs_client, deployment_client, ) = parse_common_params(**kwargs) + if image_tag_prefix is not None: + sebs_client.config.image_tag_prefix = image_tag_prefix + experiment_config = sebs_client.get_experiment_config(config["experiments"]) update_nested_dict(config, ["experiments", "benchmark"], benchmark) benchmark_obj = sebs_client.get_benchmark( diff --git a/sebs/config.py b/sebs/config.py index be7f67c4..cfafbf00 100644 --- a/sebs/config.py +++ b/sebs/config.py @@ -8,6 +8,15 @@ class SeBSConfig: def __init__(self): with open(project_absolute_path("config", "systems.json"), "r") as cfg: self._system_config = json.load(cfg) + self._image_tag_prefix = "" + + @property + def image_tag_prefix(self) -> str: + return self._image_tag_prefix + + @image_tag_prefix.setter + def image_tag_prefix(self, tag: str): + self._image_tag_prefix = tag def docker_repository(self) -> str: return self._system_config["general"]["docker_repository"] @@ -52,7 +61,10 @@ def benchmark_image_name( def benchmark_image_tag( self, system: str, benchmark: str, language_name: str, language_version: str ) -> str: - return f"function.{system}.{benchmark}.{language_name}-{language_version}" + tag = f"function.{system}.{benchmark}.{language_name}-{language_version}" + if self.image_tag_prefix: + tag = f"{tag}-{self.image_tag_prefix}" + return tag def username(self, deployment_name: str, language_name: str) -> str: return self._system_config[deployment_name]["languages"][language_name]["username"] 
diff --git a/sebs/sebs.py b/sebs/sebs.py index 07b90093..1a7aa65f 100644 --- a/sebs/sebs.py +++ b/sebs/sebs.py @@ -38,6 +38,10 @@ def verbose(self) -> bool: def logging_filename(self) -> Optional[str]: return self._logging_filename + @property + def config(self) -> SeBSConfig: + return self._config + def generate_logging_handlers(self, logging_filename: Optional[str] = None) -> LoggingHandlers: filename = logging_filename if logging_filename else self.logging_filename if filename in self._handlers: From 0616e33f669f7db64284877ec3a9c87b5cd4b024 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 01:33:51 +0200 Subject: [PATCH 086/140] [system] [openwhisk] Add infrastructure for function configuration update --- sebs/faas/system.py | 5 +++++ sebs/openwhisk/config.py | 3 --- sebs/openwhisk/openwhisk.py | 26 ++++++++++++++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 93fd3ccc..c0777c24 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -213,6 +213,11 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) function=function, ) code_package.query_cache() + # code up to date, but configuration needs to be updated + # FIXME: detect change in function config + elif self.update_function_configuration_enforced(): + self.update_function_configuration(function, code_package) + code_package.query_cache() else: self.logging.info(f"Cached function {func_name} is up to date.") return function diff --git a/sebs/openwhisk/config.py b/sebs/openwhisk/config.py index 8bda4b8e..dfaad3fc 100644 --- a/sebs/openwhisk/config.py +++ b/sebs/openwhisk/config.py @@ -113,13 +113,10 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resour and "storage" in cached_config["resources"] and cached_config["resources"]["storage"] == config["storage"] ): - print(cached_config["resources"]["storage"]) - print(config["storage"]) ret.logging.info( 
"User-provided configuration is different from cached storage, " "we will update existing OpenWhisk actions." ) - print(ret._storage) ret._storage_updated = True # Load cached values diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 949ef042..0ff98635 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -347,6 +347,32 @@ def update_function(self, function: Function, code_package: Benchmark): self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") raise RuntimeError(e) + def update_function_configuration(self, function: Function, code_package: Benchmark): + self.logging.info(f"Update configuration of an existing OpenWhisk action {function.name}.") + try: + subprocess.run( + [ + *self.get_wsk_cmd(), + "action", + "update", + function.name, + "--memory", + str(code_package.benchmark_config.memory), + "--timeout", + str(code_package.benchmark_config.timeout * 1000), + *self.storage_arguments() + ], + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + check=True, + ) + except FileNotFoundError as e: + self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") + raise RuntimeError(e) + + def update_function_configuration_enforced(self) -> bool: + return self._config.resources.storage_updated + def default_function_name(self, code_package: Benchmark) -> str: return ( f"{code_package.benchmark}-{code_package.language_name}-" From 1521dcca93fa56e9653b8cad25fd2644d3207321 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 21:23:31 +0200 Subject: [PATCH 087/140] [system] Linting --- sebs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sebs.py b/sebs.py index 255c1caa..d56ec018 100755 --- a/sebs.py +++ b/sebs.py @@ -182,7 +182,9 @@ def benchmark(): help="Attach prefix to generated Docker image tag.", ) @common_params -def invoke(benchmark, benchmark_input_size, repetitions, trigger, function_name, image_tag_prefix, **kwargs): 
+def invoke( + benchmark, benchmark_input_size, repetitions, trigger, function_name, image_tag_prefix, **kwargs +): ( config, From f8beaf59cb0a4bc0ad4f7d4c66f2bf0e24f2db54 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 21:26:10 +0200 Subject: [PATCH 088/140] [system] Major simplification of function configuration architecture --- sebs/benchmark.py | 6 +-- sebs/cache.py | 5 ++- sebs/experiments/config.py | 46 +------------------- sebs/faas/function.py | 87 ++++++++++++++++++++++++++++++++++++- sebs/faas/system.py | 36 ++++++++++++++- sebs/openwhisk/function.py | 65 +++++++++++++++++++-------- sebs/openwhisk/openwhisk.py | 21 +++++---- 7 files changed, 187 insertions(+), 79 deletions(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 3bf17bc1..7c82eb1d 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -16,7 +16,7 @@ if TYPE_CHECKING: from sebs.experiments.config import Config as ExperimentConfig - from sebs.experiments.config import Language + from sebs.faas.function import Language class BenchmarkConfig: @@ -40,7 +40,7 @@ def languages(self) -> List["Language"]: # FIXME: 3.7+ python with future annotations @staticmethod def deserialize(json_object: dict) -> "BenchmarkConfig": - from sebs.experiments.config import Language + from sebs.faas.function import Language return BenchmarkConfig( json_object["timeout"], @@ -297,7 +297,7 @@ def add_deployment_package_nodejs(self, output_dir): json.dump(package_json, package_file, indent=2) def add_deployment_package(self, output_dir): - from sebs.experiments.config import Language + from sebs.faas.function import Language if self.language == Language.PYTHON: self.add_deployment_package_python(output_dir) diff --git a/sebs/cache.py b/sebs/cache.py index 5463e444..c3620e45 100644 --- a/sebs/cache.py +++ b/sebs/cache.py @@ -7,7 +7,7 @@ import threading from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING # noqa -from sebs.utils import LoggingBase +from sebs.utils import 
LoggingBase, serialize if TYPE_CHECKING: from sebs.benchmark import Benchmark @@ -287,7 +287,8 @@ def add_function( ) config = cached_config with open(cache_config, "w") as fp: - json.dump(config, fp, indent=2) + fp.write(serialize(config)) + # json.dump(config, fp, indent=2) else: raise RuntimeError( "Can't cache function {} for a non-existing code package!".format(function.name) diff --git a/sebs/experiments/config.py b/sebs/experiments/config.py index 8f6bb7bf..a5ca3f0b 100644 --- a/sebs/experiments/config.py +++ b/sebs/experiments/config.py @@ -1,48 +1,6 @@ -from enum import Enum from typing import Dict - -class Language(Enum): - PYTHON = "python" - NODEJS = "nodejs" - - # FIXME: 3.7+ python with future annotations - @staticmethod - def deserialize(val: str) -> "Language": - for member in Language: - if member.value == val: - return member - raise Exception("Unknown language type {}".format(member)) - - -class Runtime: - - _language: Language - _version: str - - @property - def language(self) -> Language: - return self._language - - @property - def version(self) -> str: - return self._version - - @version.setter - def version(self, val: str): - self._version = val - - def serialize(self) -> dict: - return {"language": self._language.value, "version": self._version} - - # FIXME: 3.7+ python with future annotations - @staticmethod - def deserialize(config: dict) -> "Runtime": - cfg = Runtime() - languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS} - cfg._language = languages[config["language"]] - cfg._version = config["version"] - return cfg +from sebs.faas.function import Runtime class Config: @@ -52,7 +10,7 @@ def __init__(self): self._download_results: bool = False self._flags: Dict[str, bool] = {} self._experiment_configs: Dict[str, dict] = {} - self._runtime = Runtime() + self._runtime = Runtime(None, None) @property def update_code(self) -> bool: diff --git a/sebs/faas/function.py b/sebs/faas/function.py index 2ebfd8f4..4649e756 100644 --- 
a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -1,11 +1,15 @@ +from __future__ import annotations + import json +import concurrent.futures from abc import ABC from abc import abstractmethod -import concurrent.futures +from dataclasses import dataclass from datetime import datetime, timedelta from enum import Enum -from typing import Callable, Dict, List, Optional # noqa +from typing import Callable, Dict, List, Optional, Type, TypeVar # noqa +from sebs.benchmark import Benchmark from sebs.utils import LoggingBase """ @@ -247,6 +251,85 @@ def deserialize(cached_config: dict) -> "Trigger": pass +class Language(Enum): + PYTHON = "python" + NODEJS = "nodejs" + + # FIXME: 3.7+ python with future annotations + @staticmethod + def deserialize(val: str) -> Language: + for member in Language: + if member.value == val: + return member + raise Exception(f"Unknown language type {member}") + + +class Architecture(Enum): + X86 = "x86" + ARM = "arm" + + def serialize(self) -> str: + return self.value + + @staticmethod + def deserialize(val: str) -> Architecture: + for member in Architecture: + if member.value == val: + return member + raise Exception(f"Unknown architecture type {member}") + + +@dataclass +class Runtime: + + language: Language + version: str + + def serialize(self) -> dict: + return {"language": self.language.value, "version": self.version} + + @staticmethod + def deserialize(config: dict) -> Runtime: + languages = {"python": Language.PYTHON, "nodejs": Language.NODEJS} + return Runtime(language=languages[config["language"]], version=config["version"]) + + +T = TypeVar("T", bound="FunctionConfig") + + +@dataclass +class FunctionConfig: + timeout: int + memory: int + runtime: Runtime + architecture: Architecture = Architecture.X86 + + @staticmethod + def _from_benchmark(benchmark: Benchmark, obj_type: Type[T]) -> T: + runtime = Runtime(language=benchmark.language, version=benchmark.language_version) + cfg = obj_type( + 
timeout=benchmark.benchmark_config.timeout, + memory=benchmark.benchmark_config.memory, + runtime=runtime, + ) + # FIXME: configure architecture + return cfg + + @staticmethod + def from_benchmark(benchmark: Benchmark) -> FunctionConfig: + return FunctionConfig._from_benchmark(benchmark, FunctionConfig) + + @staticmethod + def deserialize(data: dict) -> FunctionConfig: + keys = list(FunctionConfig.__dataclass_fields__.keys()) + data = {k: v for k, v in data.items() if k in keys} + data["runtime"] = Runtime.deserialize(data["runtime"]) + return FunctionConfig(**data) + + def serialize(self) -> dict: + return self.__dict__ + + """ Abstraction base class for FaaS function. Contains a list of associated triggers and might implement non-trigger execution if supported by the SDK. diff --git a/sebs/faas/system.py b/sebs/faas/system.py index c0777c24..a989bf3c 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -215,13 +215,47 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) code_package.query_cache() # code up to date, but configuration needs to be updated # FIXME: detect change in function config - elif self.update_function_configuration_enforced(): + elif self.is_configuration_changed(function, code_package): self.update_function_configuration(function, code_package) code_package.query_cache() else: self.logging.info(f"Cached function {func_name} is up to date.") return function + # FIXME: abstract method + def update_function_configuration(self, cached_function: Function, benchmark: Benchmark): + pass + + """ + This function checks for common function parameters to verify if their value is + still up to date. 
+ """ + + def is_configuration_changed(self, cached_function: Function, benchmark: Benchmark) -> bool: + + changed = False + for attr in ["timeout", "memory"]: + new_val = getattr(benchmark.benchmark_config, attr) + old_val = getattr(cached_function.config, attr) + if new_val != old_val: + self.logging.info( + f"Updating function configuration due to changed attribute {attr}: " + f"cached function has value {old_val} whereas {new_val} has been requested." + ) + changed = True + + for lang_attr in [["language"] * 2, ["language_version", "version"]]: + new_val = getattr(benchmark, lang_attr[0]) + old_val = getattr(cached_function.config.runtime, lang_attr[1]) + if new_val != old_val: + self.logging.info( + f"Updating function configuration due to changed runtime attribute {attr}: " + f"cached function has value {old_val} whereas {new_val} has been requested." + ) + changed = True + + return changed + @abstractmethod def default_function_name(self, code_package: Benchmark) -> str: pass diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index fc2b5073..26877607 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -1,38 +1,65 @@ -from sebs.faas.function import Function -from typing import cast +from __future__ import annotations +from typing import cast, Optional +from dataclasses import dataclass -class OpenwhiskFunction(Function): +from sebs.benchmark import Benchmark +from sebs.faas.function import Function, FunctionConfig, Runtime +from sebs.storage.config import MinioConfig + + +@dataclass +class OpenWhiskFunctionConfig(FunctionConfig): + + # FIXME: merge with higher level abstraction for images + docker_image: str = "" + namespace: str = "_" + storage: Optional[MinioConfig] = None + + @staticmethod + def deserialize(data: dict) -> OpenWhiskFunctionConfig: + keys = list(OpenWhiskFunctionConfig.__dataclass_fields__.keys()) + data = {k: v for k, v in data.items() if k in keys} + data["runtime"] = 
Runtime.deserialize(data["runtime"]) + print(data) + return OpenWhiskFunctionConfig(**data) + + def serialize(self) -> dict: + return self.__dict__ + + @staticmethod + def from_benchmark(benchmark: Benchmark) -> OpenWhiskFunctionConfig: + return super(OpenWhiskFunctionConfig, OpenWhiskFunctionConfig)._from_benchmark( + benchmark, OpenWhiskFunctionConfig + ) + + +class OpenWhiskFunction(Function): def __init__( - self, - name: str, - benchmark: str, - code_package_hash: str, - docker_image: str, - namespace: str = "_", + self, name: str, benchmark: str, code_package_hash: str, cfg: OpenWhiskFunctionConfig ): super().__init__(benchmark, name, code_package_hash) - self.namespace = namespace - self.docker_image = docker_image + self._cfg = cfg + + @property + def config(self) -> FunctionConfig: + return self._cfg @staticmethod def typename() -> str: return "OpenWhisk.Function" def serialize(self) -> dict: - return {**super().serialize(), "namespace": self.namespace, "image": self.docker_image} + return {**super().serialize(), "config": self._cfg.serialize()} @staticmethod - def deserialize(cached_config: dict) -> "OpenwhiskFunction": + def deserialize(cached_config: dict) -> OpenWhiskFunction: from sebs.faas.function import Trigger from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger - ret = OpenwhiskFunction( - cached_config["name"], - cached_config["benchmark"], - cached_config["hash"], - cached_config["image"], - cached_config["namespace"], + cfg = OpenWhiskFunctionConfig.deserialize(cached_config["config"]) + ret = OpenWhiskFunction( + cached_config["name"], cached_config["benchmark"], cached_config["hash"], cfg ) for trigger in cached_config["triggers"]: trigger_type = cast( diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 0ff98635..bf202951 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -13,7 +13,7 @@ from sebs.openwhisk.triggers import LibraryTrigger, HTTPTrigger from sebs.utils import 
PROJECT_DIR, LoggingHandlers, execute from .config import OpenWhiskConfig -from .function import OpenwhiskFunction +from .function import OpenWhiskFunction, OpenWhiskFunctionConfig from ..config import SeBSConfig @@ -83,7 +83,7 @@ def typename(): @staticmethod def function_type() -> "Type[Function]": - return OpenwhiskFunction + return OpenWhiskFunction def get_wsk_cmd(self) -> List[str]: cmd = [self.config.wsk_exec] @@ -241,7 +241,7 @@ def storage_arguments(self) -> List[str]: storage.config.address, ] - def create_function(self, code_package: Benchmark, func_name: str) -> "OpenwhiskFunction": + def create_function(self, code_package: Benchmark, func_name: str) -> "OpenWhiskFunction": self.logging.info("Creating function as an action in OpenWhisk.") try: actions = subprocess.run( @@ -257,9 +257,13 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk function_found = True break + function_cfg = OpenWhiskFunctionConfig.from_benchmark(code_package) + function_cfg.storage = self.get_storage().config if function_found: # docker image is overwritten by the update - res = OpenwhiskFunction(func_name, code_package.benchmark, code_package.hash, "") + res = OpenWhiskFunction( + func_name, code_package.benchmark, code_package.hash, function_cfg + ) # Update function - we don't know what version is stored self.logging.info(f"Retrieved existing OpenWhisk action {func_name}.") self.update_function(res, code_package) @@ -293,8 +297,9 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "Openwhisk stdout=subprocess.DEVNULL, check=True, ) - res = OpenwhiskFunction( - func_name, code_package.benchmark, code_package.hash, docker_image + function_cfg.docker_image = docker_image + res = OpenWhiskFunction( + func_name, code_package.benchmark, code_package.hash, function_cfg ) except subprocess.CalledProcessError as e: self.logging.error(f"Cannot create action {func_name}.") @@ -341,7 +346,7 @@ def update_function(self, function: 
Function, code_package: Benchmark): stdout=subprocess.DEVNULL, check=True, ) - function.docker_image = docker_image + function.config.docker_image = docker_image except FileNotFoundError as e: self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") @@ -360,7 +365,7 @@ def update_function_configuration(self, function: Function, code_package: Benchm str(code_package.benchmark_config.memory), "--timeout", str(code_package.benchmark_config.timeout * 1000), - *self.storage_arguments() + *self.storage_arguments(), ], stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL, From ecf80ac18f85b9bdc89c200a44c1987ffd8c9a94 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 21:41:31 +0200 Subject: [PATCH 089/140] [system] Add configuration of benchmark parameters --- sebs.py | 27 ++++++++++++++++++++++++++- sebs/benchmark.py | 8 ++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/sebs.py b/sebs.py index d56ec018..866e6106 100755 --- a/sebs.py +++ b/sebs.py @@ -169,6 +169,18 @@ def benchmark(): default="http", help="Function trigger to be used.", ) +@click.option( + "--memory", + default=None, + type=int, + help="Override default memory settings for the benchmark function.", +) +@click.option( + "--timeout", + default=None, + type=int, + help="Override default timeout settings for the benchmark function.", +) @click.option( "--function-name", default=None, @@ -183,7 +195,15 @@ def benchmark(): ) @common_params def invoke( - benchmark, benchmark_input_size, repetitions, trigger, function_name, image_tag_prefix, **kwargs + benchmark, + benchmark_input_size, + repetitions, + trigger, + memory, + timeout, + function_name, + image_tag_prefix, + **kwargs, ): ( @@ -204,6 +224,11 @@ def invoke( experiment_config, logging_filename=logging_filename, ) + if memory is not None: + benchmark_obj.benchmark_config.memory = memory + if timeout is not None: + benchmark_obj.benchmark_config.timeout = timeout + func = 
deployment_client.get_function( benchmark_obj, function_name if function_name else deployment_client.default_function_name(benchmark_obj), diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 7c82eb1d..e9773bcc 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -29,10 +29,18 @@ def __init__(self, timeout: int, memory: int, languages: List["Language"]): def timeout(self) -> int: return self._timeout + @timeout.setter + def timeout(self, val: int): + self._timeout = val + @property def memory(self) -> int: return self._memory + @memory.setter + def memory(self, val: int): + self._memory = val + @property def languages(self) -> List["Language"]: return self._languages From a91fd7525b3ae5e5819c40e6f23abe3b996348cf Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 21:42:19 +0200 Subject: [PATCH 090/140] [whisk] Correctly update function parameters --- sebs/cache.py | 3 +-- sebs/faas/system.py | 3 +++ sebs/openwhisk/openwhisk.py | 3 --- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/sebs/cache.py b/sebs/cache.py index c3620e45..25c7c7cc 100644 --- a/sebs/cache.py +++ b/sebs/cache.py @@ -288,7 +288,6 @@ def add_function( config = cached_config with open(cache_config, "w") as fp: fp.write(serialize(config)) - # json.dump(config, fp, indent=2) else: raise RuntimeError( "Can't cache function {} for a non-existing code package!".format(function.name) @@ -315,7 +314,7 @@ def update_function(self, function: "Function"): name ] = function.serialize() with open(cache_config, "w") as fp: - json.dump(cached_config, fp, indent=2) + fp.write(serialize(cached_config)) else: raise RuntimeError( "Can't cache function {} for a non-existing code package!".format(function.name) diff --git a/sebs/faas/system.py b/sebs/faas/system.py index a989bf3c..9253c075 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -217,6 +217,7 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) # FIXME: detect change in function 
config elif self.is_configuration_changed(function, code_package): self.update_function_configuration(function, code_package) + self.cache_client.update_function(function) code_package.query_cache() else: self.logging.info(f"Cached function {func_name} is up to date.") @@ -243,6 +244,7 @@ def is_configuration_changed(self, cached_function: Function, benchmark: Benchma f"cached function has value {old_val} whereas {new_val} has been requested." ) changed = True + setattr(cached_function.config, attr, new_val) for lang_attr in [["language"] * 2, ["language_version", "version"]]: new_val = getattr(benchmark, lang_attr[0]) @@ -253,6 +255,7 @@ def is_configuration_changed(self, cached_function: Function, benchmark: Benchma f"cached function has value {old_val} whereas {new_val} has been requested." ) changed = True + setattr(cached_function.config.runtime, lang_attr[1], new_val) return changed diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index bf202951..1bd8f404 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -375,9 +375,6 @@ def update_function_configuration(self, function: Function, code_package: Benchm self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") raise RuntimeError(e) - def update_function_configuration_enforced(self) -> bool: - return self._config.resources.storage_updated - def default_function_name(self, code_package: Benchmark) -> str: return ( f"{code_package.benchmark}-{code_package.language_name}-" From 2f3249bf06f40bbe6cd23a88f8ccd7205759fb2d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 22:00:07 +0200 Subject: [PATCH 091/140] [system] Update wrong parameter name --- sebs/storage/minio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index 376dd0a3..bd10a859 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -92,7 +92,7 @@ def configure_connection(self): 
self._storage_container.reload() networks = self._storage_container.attrs["NetworkSettings"]["Networks"] self._cfg.address = "{IPAddress}:{Port}".format( - IPAddress=networks["bridge"]["IPAddress"], Port=self._cfg.port + IPAddress=networks["bridge"]["IPAddress"], Port=9000 ) if not self._cfg.address: self.logging.error( From ffd489b6af9778e2d679e3964a8396c66ea7b04b Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 22:00:30 +0200 Subject: [PATCH 092/140] [whisk] Correctly update storage in cached function --- sebs/openwhisk/function.py | 4 ++-- sebs/openwhisk/openwhisk.py | 18 +++++++++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index 26877607..f6566fd2 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -21,7 +21,7 @@ def deserialize(data: dict) -> OpenWhiskFunctionConfig: keys = list(OpenWhiskFunctionConfig.__dataclass_fields__.keys()) data = {k: v for k, v in data.items() if k in keys} data["runtime"] = Runtime.deserialize(data["runtime"]) - print(data) + data["storage"] = MinioConfig.deserialize(data["storage"]) return OpenWhiskFunctionConfig(**data) def serialize(self) -> dict: @@ -42,7 +42,7 @@ def __init__( self._cfg = cfg @property - def config(self) -> FunctionConfig: + def config(self) -> OpenWhiskFunctionConfig: return self._cfg @staticmethod diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 1bd8f404..6c0d043e 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -258,7 +258,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "OpenWhisk break function_cfg = OpenWhiskFunctionConfig.from_benchmark(code_package) - function_cfg.storage = self.get_storage().config + function_cfg.storage = cast(Minio, self.get_storage()).config if function_found: # docker image is overwritten by the update res = OpenWhiskFunction( @@ -318,6 +318,7 @@ def create_function(self, 
code_package: Benchmark, func_name: str) -> "OpenWhisk def update_function(self, function: Function, code_package: Benchmark): self.logging.info(f"Update an existing OpenWhisk action {function.name}.") + function = cast(OpenWhiskFunction, function) docker_image = self.system_config.benchmark_image_name( self.name(), code_package.benchmark, @@ -375,6 +376,21 @@ def update_function_configuration(self, function: Function, code_package: Benchm self.logging.error("Could not update OpenWhisk function - is path to wsk correct?") raise RuntimeError(e) + def is_configuration_changed(self, cached_function: Function, benchmark: Benchmark) -> bool: + changed = super().is_configuration_changed(cached_function, benchmark) + + storage = cast(Minio, self.get_storage()) + function = cast(OpenWhiskFunction, cached_function) + # check if now we're using a new storage + if function.config.storage != storage.config: + self.logging.info( + "Updating function configuration due to changed storage configuration." 
+ ) + changed = True + function.config.storage = storage.config + + return changed + def default_function_name(self, code_package: Benchmark) -> str: return ( f"{code_package.benchmark}-{code_package.language_name}-" From 9edcf51ee8c92c3aa939299565e44d441dbbd754 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 22:34:14 +0200 Subject: [PATCH 093/140] [whisk] [aws] Adjust the function configuration interface in AWS --- sebs/aws/aws.py | 27 ++++++++++++++++----------- sebs/aws/function.py | 17 ++++++----------- sebs/faas/function.py | 17 +++++++---------- sebs/faas/system.py | 4 +++- sebs/openwhisk/function.py | 5 ++--- 5 files changed, 34 insertions(+), 36 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index c3342fa3..8db3859d 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -16,7 +16,7 @@ from sebs.cache import Cache from sebs.config import SeBSConfig from sebs.utils import LoggingHandlers -from sebs.faas.function import Function, ExecutionResult, Trigger +from sebs.faas.function import Function, ExecutionResult, Trigger, FunctionConfig from sebs.faas.storage import PersistentStorage from sebs.faas.system import System @@ -168,6 +168,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LambdaFun code_bucket: Optional[str] = None func_name = AWS.format_function_name(func_name) storage_client = self.get_storage() + function_cfg = FunctionConfig.from_benchmark(code_package) # we can either check for exception or use list_functions # there's no API for test @@ -182,10 +183,9 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LambdaFun code_package.benchmark, ret["Configuration"]["FunctionArn"], code_package.hash, - timeout, - memory, language_runtime, self.config.resources.lambda_role(self.session), + function_cfg, ) self.update_function(lambda_function, code_package) lambda_function.updated_code = True @@ -224,10 +224,9 @@ def create_function(self, code_package: Benchmark, func_name: str) 
-> "LambdaFun code_package.benchmark, ret["FunctionArn"], code_package.hash, - timeout, - memory, language_runtime, self.config.resources.lambda_role(self.session), + function_cfg, code_bucket, ) @@ -288,17 +287,23 @@ def update_function(self, function: Function, code_package: Benchmark): time.sleep(5) # and update config self.client.update_function_configuration( - FunctionName=name, Timeout=function.timeout, MemorySize=function.memory + FunctionName=name, Timeout=function.config.timeout, MemorySize=function.config.memory ) self.logging.info("Published new function code") + def update_function_configuration(self, function: Function, benchmark: Benchmark): + function = cast(LambdaFunction, function) + self.client.update_function_configuration( + FunctionName=function.name, + Timeout=function.config.timeout, + MemorySize=function.config.memory, + ) + @staticmethod def default_function_name(code_package: Benchmark) -> str: # Create function name func_name = "{}-{}-{}".format( - code_package.benchmark, - code_package.language_name, - code_package.benchmark_config.memory, + code_package.benchmark, code_package.language_name, code_package.language_version ) return AWS.format_function_name(func_name) @@ -493,8 +498,8 @@ def _enforce_cold_start(self, function: Function): func = cast(LambdaFunction, function) self.get_lambda_client().update_function_configuration( FunctionName=func.name, - Timeout=func.timeout, - MemorySize=func.memory, + Timeout=func.config.timeout, + MemorySize=func.config.memory, Environment={"Variables": {"ForceColdStart": str(self.cold_start_counter)}}, ) diff --git a/sebs/aws/function.py b/sebs/aws/function.py index 36b52c27..a36dc821 100644 --- a/sebs/aws/function.py +++ b/sebs/aws/function.py @@ -1,7 +1,7 @@ from typing import cast, Optional from sebs.aws.s3 import S3 -from sebs.faas.function import Function +from sebs.faas.function import Function, FunctionConfig class LambdaFunction(Function): @@ -11,18 +11,15 @@ def __init__( benchmark: str, 
arn: str, code_package_hash: str, - timeout: int, - memory: int, runtime: str, role: str, + cfg: FunctionConfig, bucket: Optional[str] = None, ): - super().__init__(benchmark, name, code_package_hash) + super().__init__(benchmark, name, code_package_hash, cfg) self.arn = arn - self.timeout = timeout - self.memory = memory - self.runtime = runtime self.role = role + self.runtime = runtime self.bucket = bucket @staticmethod @@ -33,8 +30,6 @@ def serialize(self) -> dict: return { **super().serialize(), "arn": self.arn, - "timeout": self.timeout, - "memory": self.memory, "runtime": self.runtime, "role": self.role, "bucket": self.bucket, @@ -45,15 +40,15 @@ def deserialize(cached_config: dict) -> "LambdaFunction": from sebs.faas.function import Trigger from sebs.aws.triggers import LibraryTrigger, HTTPTrigger + cfg = FunctionConfig.deserialize(cached_config["config"]) ret = LambdaFunction( cached_config["name"], cached_config["benchmark"], cached_config["arn"], cached_config["hash"], - cached_config["timeout"], - cached_config["memory"], cached_config["runtime"], cached_config["role"], + cfg, cached_config["bucket"], ) for trigger in cached_config["triggers"]: diff --git a/sebs/faas/function.py b/sebs/faas/function.py index 4649e756..df59bccb 100644 --- a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -338,14 +338,18 @@ def serialize(self) -> dict: class Function(LoggingBase): - def __init__(self, benchmark: str, name: str, code_hash: str, docker_image: str = ""): + def __init__(self, benchmark: str, name: str, code_hash: str, cfg: FunctionConfig): super().__init__() self._benchmark = benchmark self._name = name self._code_package_hash = code_hash self._updated_code = False - self._docker_image = docker_image self._triggers: Dict[Trigger.TriggerType, List[Trigger]] = {} + self._cfg = cfg + + @property + def config(self) -> FunctionConfig: + return self._cfg @property def name(self): @@ -363,14 +367,6 @@ def code_package_hash(self): def code_package_hash(self, 
new_hash: str): self._code_package_hash = new_hash - @property - def docker_image(self) -> str: - return self._docker_image - - @docker_image.setter - def docker_image(self, docker_image: str): - self._docker_image = docker_image - @property def updated_code(self) -> bool: return self._updated_code @@ -399,6 +395,7 @@ def serialize(self) -> dict: "name": self._name, "hash": self._code_package_hash, "benchmark": self._benchmark, + "config": self.config.serialize(), "triggers": [ obj.serialize() for t_type, triggers in self._triggers.items() for obj in triggers ], diff --git a/sebs/faas/system.py b/sebs/faas/system.py index 9253c075..64923255 100644 --- a/sebs/faas/system.py +++ b/sebs/faas/system.py @@ -223,7 +223,7 @@ def get_function(self, code_package: Benchmark, func_name: Optional[str] = None) self.logging.info(f"Cached function {func_name} is up to date.") return function - # FIXME: abstract method + @abstractmethod def update_function_configuration(self, cached_function: Function, benchmark: Benchmark): pass @@ -250,6 +250,8 @@ def is_configuration_changed(self, cached_function: Function, benchmark: Benchma new_val = getattr(benchmark, lang_attr[0]) old_val = getattr(cached_function.config.runtime, lang_attr[1]) if new_val != old_val: + # FIXME: should this even happen? we should never pick the function with + # different runtime - that should be encoded in the name self.logging.info( f"Updating function configuration due to changed runtime attribute {attr}: " f"cached function has value {old_val} whereas {new_val} has been requested." 
diff --git a/sebs/openwhisk/function.py b/sebs/openwhisk/function.py index f6566fd2..624b1250 100644 --- a/sebs/openwhisk/function.py +++ b/sebs/openwhisk/function.py @@ -38,12 +38,11 @@ class OpenWhiskFunction(Function): def __init__( self, name: str, benchmark: str, code_package_hash: str, cfg: OpenWhiskFunctionConfig ): - super().__init__(benchmark, name, code_package_hash) - self._cfg = cfg + super().__init__(benchmark, name, code_package_hash, cfg) @property def config(self) -> OpenWhiskFunctionConfig: - return self._cfg + return cast(OpenWhiskFunctionConfig, self._cfg) @staticmethod def typename() -> str: From efeab9f05a72681521d439c96e323ab7610a98c6 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 23:00:27 +0200 Subject: [PATCH 094/140] [azure] Update Azure functions to new function configuration --- sebs/azure/azure.py | 11 +++++++++-- sebs/azure/function.py | 7 +++++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index 614f2bc3..e4160e1e 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -17,7 +17,7 @@ from sebs.cache import Cache from sebs.config import SeBSConfig from sebs.utils import LoggingHandlers, execute -from ..faas.function import Function, ExecutionResult +from ..faas.function import Function, FunctionConfig, ExecutionResult from ..faas.storage import PersistentStorage from ..faas.system import System @@ -244,6 +244,10 @@ def update_function(self, function: Function, code_package: Benchmark): trigger.logging_handlers = self.logging_handlers function.add_trigger(trigger) + def update_function_configuration(self, function: Function, code_package: Benchmark): + # FIXME: this does nothing currently - we don't specify timeout + self.logging.warn("Updating function's memory and timeout configuration is not supported.") + def _mount_function_code(self, code_package: Benchmark): self.cli_instance.upload_package(code_package.code_location, "/mnt/function/") @@ -252,9 
+256,10 @@ def default_function_name(self, code_package: Benchmark) -> str: Functionapp names must be globally unique in Azure. """ func_name = ( - "{}-{}-{}".format( + "{}-{}-{}-{}".format( code_package.benchmark, code_package.language_name, + code_package.language_version, self.config.resources_id, ) .replace(".", "-") @@ -268,6 +273,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> AzureFunct language_runtime = code_package.language_version resource_group = self.config.resources.resource_group(self.cli_instance) region = self.config.region + function_cfg = FunctionConfig.from_benchmark(code_package) config = { "resource_group": resource_group, @@ -327,6 +333,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> AzureFunct benchmark=code_package.benchmark, code_hash=code_package.hash, function_storage=function_storage_account, + cfg=function_cfg, ) # update existing function app diff --git a/sebs/azure/function.py b/sebs/azure/function.py index ade7e980..61ef4c57 100644 --- a/sebs/azure/function.py +++ b/sebs/azure/function.py @@ -1,5 +1,5 @@ from sebs.azure.config import AzureResources -from sebs.faas.function import Function +from sebs.faas.function import Function, FunctionConfig class AzureFunction(Function): @@ -9,8 +9,9 @@ def __init__( benchmark: str, code_hash: str, function_storage: AzureResources.Storage, + cfg: FunctionConfig, ): - super().__init__(benchmark, name, code_hash) + super().__init__(benchmark, name, code_hash, cfg) self.function_storage = function_storage def serialize(self) -> dict: @@ -21,11 +22,13 @@ def serialize(self) -> dict: @staticmethod def deserialize(cached_config: dict) -> Function: + cfg = FunctionConfig.deserialize(cached_config["config"]) ret = AzureFunction( cached_config["name"], cached_config["benchmark"], cached_config["hash"], AzureResources.Storage.deserialize(cached_config["function_storage"]), + cfg, ) from sebs.azure.triggers import HTTPTrigger From 
62c392719cc3123d77b865ca5f03f5439444ee5d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 23:23:40 +0200 Subject: [PATCH 095/140] [gcp] Update GCP functions to new function configuration --- sebs/gcp/function.py | 15 +++++---------- sebs/gcp/gcp.py | 43 ++++++++++++++++++++++++++++++++++--------- sebs/gcp/storage.py | 1 - 3 files changed, 39 insertions(+), 20 deletions(-) diff --git a/sebs/gcp/function.py b/sebs/gcp/function.py index 80d32096..d9c55a03 100644 --- a/sebs/gcp/function.py +++ b/sebs/gcp/function.py @@ -1,6 +1,6 @@ from typing import cast, Optional -from sebs.faas.function import Function +from sebs.faas.function import Function, FunctionConfig from sebs.gcp.storage import GCPStorage @@ -10,13 +10,10 @@ def __init__( name: str, benchmark: str, code_package_hash: str, - timeout: int, - memory: int, + cfg: FunctionConfig, bucket: Optional[str] = None, ): - super().__init__(benchmark, name, code_package_hash) - self.timeout = timeout - self.memory = memory + super().__init__(benchmark, name, code_package_hash, cfg) self.bucket = bucket @staticmethod @@ -26,8 +23,6 @@ def typename() -> str: def serialize(self) -> dict: return { **super().serialize(), - "timeout": self.timeout, - "memory": self.memory, "bucket": self.bucket, } @@ -36,12 +31,12 @@ def deserialize(cached_config: dict) -> "GCPFunction": from sebs.faas.function import Trigger from sebs.gcp.triggers import LibraryTrigger, HTTPTrigger + cfg = FunctionConfig.deserialize(cached_config["config"]) ret = GCPFunction( cached_config["name"], cached_config["benchmark"], cached_config["hash"], - cached_config["timeout"], - cached_config["memory"], + cfg, cached_config["bucket"], ) for trigger in cached_config["triggers"]: diff --git a/sebs/gcp/gcp.py b/sebs/gcp/gcp.py index a0310adc..cd97ab9e 100644 --- a/sebs/gcp/gcp.py +++ b/sebs/gcp/gcp.py @@ -16,7 +16,7 @@ from sebs.cache import Cache from sebs.config import SeBSConfig from sebs.benchmark import Benchmark -from ..faas.function import 
Function, Trigger +from ..faas.function import Function, FunctionConfig, Trigger from .storage import PersistentStorage from ..faas.system import System from sebs.gcp.config import GCPConfig @@ -103,9 +103,7 @@ def get_storage( def default_function_name(code_package: Benchmark) -> str: # Create function name func_name = "{}-{}-{}".format( - code_package.benchmark, - code_package.language_name, - code_package.benchmark_config.memory, + code_package.benchmark, code_package.language_name, code_package.language_version ) return GCP.format_function_name(func_name) @@ -201,6 +199,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "GCPFuncti storage_client = self.get_storage() location = self.config.region project_name = self.config.project_name + function_cfg = FunctionConfig.from_benchmark(code_package) code_package_name = cast(str, os.path.basename(package)) code_bucket, idx = storage_client.add_input_bucket(benchmark) @@ -254,7 +253,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "GCPFuncti self.logging.info(f"Function {func_name} accepts now unauthenticated invocations!") function = GCPFunction( - func_name, benchmark, code_package.hash, timeout, memory, code_bucket + func_name, benchmark, code_package.hash, function_cfg, code_bucket ) else: # if result is not empty, then function does exists @@ -264,8 +263,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "GCPFuncti name=func_name, benchmark=benchmark, code_package_hash=code_package.hash, - timeout=timeout, - memory=memory, + cfg=function_cfg, bucket=code_bucket, ) self.update_function(function, code_package) @@ -343,8 +341,8 @@ def update_function(self, function: Function, code_package: Benchmark): "name": full_func_name, "entryPoint": "handler", "runtime": code_package.language_name + language_runtime.replace(".", ""), - "availableMemoryMb": function.memory, - "timeout": str(function.timeout) + "s", + "availableMemoryMb": 
function.config.memory, + "timeout": str(function.config.timeout) + "s", "httpsTrigger": {}, "sourceArchiveUrl": "gs://" + bucket + "/" + code_package_name, }, @@ -359,6 +357,33 @@ def update_function(self, function: Function, code_package: Benchmark): break self.logging.info("Published new function code and configuration.") + def update_function_configuration(self, function: Function, benchmark: Benchmark): + function = cast(GCPFunction, function) + full_func_name = GCP.get_full_function_name( + self.config.project_name, self.config.region, function.name + ) + req = ( + self.function_client.projects() + .locations() + .functions() + .patch( + name=full_func_name, + updateMask="availableMemoryMb,timeout", + body={ + "availableMemoryMb": function.config.memory, + "timeout": str(function.config.timeout) + "s", + }, + ) + ) + res = req.execute() + versionId = res["metadata"]["versionId"] + while True: + if not self.is_deployed(function.name, versionId): + time.sleep(5) + else: + break + self.logging.info("Published new function configuration.") + @staticmethod def get_full_function_name(project_name: str, location: str, func_name: str): return f"projects/{project_name}/locations/{location}/functions/{func_name}" diff --git a/sebs/gcp/storage.py b/sebs/gcp/storage.py index e6f705c3..b59b18e0 100644 --- a/sebs/gcp/storage.py +++ b/sebs/gcp/storage.py @@ -90,7 +90,6 @@ def uploader_func(self, bucket_idx: int, key: str, filepath: str) -> None: if self.cached and not self.replace_existing: return bucket_name = self.input_buckets[bucket_idx] - print(self.input_buckets_files[bucket_idx]) if not self.replace_existing: for blob in self.input_buckets_files[bucket_idx]: if key == blob: From 5a0a7a490bd0bca3ef7e8c7191b844a3b9290c62 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 23:32:43 +0200 Subject: [PATCH 096/140] [whisk] correct timeout for minio in function wrapper --- benchmarks/wrappers/openwhisk/python/storage.py | 17 +++++++++++++++++ 1 file changed, 
17 insertions(+) diff --git a/benchmarks/wrappers/openwhisk/python/storage.py b/benchmarks/wrappers/openwhisk/python/storage.py index 3eedb3ac..920a4807 100644 --- a/benchmarks/wrappers/openwhisk/python/storage.py +++ b/benchmarks/wrappers/openwhisk/python/storage.py @@ -11,11 +11,28 @@ class storage: def __init__(self): try: + """ + Minio does not allow another way of configuring timeout for connection. + The rest of configuration is copied from source code of Minio. + """ + import urllib3 + from datetime import timedelta + + timeout = timedelta(seconds=1).seconds + + mgr = urllib3.PoolManager( + timeout=urllib3.util.Timeout(connect=timeout, read=timeout), + maxsize=10, + retries=urllib3.Retry( + total=5, backoff_factor=0.2, status_forcelist=[500, 502, 503, 504] + ) + ) self.client = minio.Minio( os.getenv("MINIO_STORAGE_CONNECTION_URL"), access_key=os.getenv("MINIO_STORAGE_ACCESS_KEY"), secret_key=os.getenv("MINIO_STORAGE_SECRET_KEY"), secure=False, + http_client=mgr ) except Exception as e: logging.info(e) From 330d2900505a792bd516639a0bdaf341328e32c2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Sun, 1 May 2022 23:41:43 +0200 Subject: [PATCH 097/140] [system] Assign new ids to failed invocations --- sebs/experiments/result.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/sebs/experiments/result.py b/sebs/experiments/result.py index 1a56684c..b28de75c 100644 --- a/sebs/experiments/result.py +++ b/sebs/experiments/result.py @@ -44,10 +44,16 @@ def add_result_bucket(self, result_bucket: str): self.result_bucket = result_bucket def add_invocation(self, func: Function, invocation: ExecutionResult): + # the function has most likely failed, thus no request id + if invocation.request_id: + req_id = invocation.request_id + else: + req_id = f"failed-{len(self._invocations.get(func.name, []))}" + if func.name in self._invocations: - self._invocations.get(func.name)[invocation.request_id] = invocation # type: ignore + 
self._invocations.get(func.name)[req_id] = invocation # type: ignore else: - self._invocations[func.name] = {invocation.request_id: invocation} + self._invocations[func.name] = {req_id: invocation} def functions(self) -> List[str]: return list(self._invocations.keys()) From 6b6ee7cc86895f6ebc7fe09f69da7fb753a34bea Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 00:41:37 +0200 Subject: [PATCH 098/140] [system] Adjust local deployment to work with new storage definition --- sebs/local/config.py | 31 +++++++++++++++++++- sebs/local/deployment.py | 7 +++-- sebs/local/function.py | 14 +++++++-- sebs/local/local.py | 62 +++++++++++++++++++++------------------- 4 files changed, 77 insertions(+), 37 deletions(-) diff --git a/sebs/local/config.py b/sebs/local/config.py index 3c5e18ec..5b091664 100644 --- a/sebs/local/config.py +++ b/sebs/local/config.py @@ -1,5 +1,8 @@ +from typing import cast, Optional + from sebs.cache import Cache from sebs.faas.config import Config, Credentials, Resources +from sebs.storage.minio import MinioConfig from sebs.utils import LoggingHandlers @@ -12,13 +15,32 @@ def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Creden return LocalCredentials() +""" + No need to cache and store - we prepare the benchmark and finish. + The rest is used later by the user. 
+""" + + class LocalResources(Resources): + def __init__(self, storage_cfg: Optional[MinioConfig] = None): + super().__init__() + self._storage = storage_cfg + + @property + def storage_config(self) -> Optional[MinioConfig]: + return self._storage + def serialize(self) -> dict: return {} @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resources: - return LocalResources() + ret = LocalResources() + # Check for new config + if "storage" in config: + ret._storage = MinioConfig.deserialize(config["storage"]) + ret.logging.info("Using user-provided configuration of storage for local containers.") + return ret class LocalConfig(Config): @@ -43,10 +65,17 @@ def credentials(self) -> LocalCredentials: def resources(self) -> LocalResources: return self._resources + @resources.setter + def resources(self, val: LocalResources): + self._resources = val + @staticmethod def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Config: config_obj = LocalConfig() + config_obj.resources = cast( + LocalResources, LocalResources.deserialize(config, cache, handlers) + ) config_obj.logging_handlers = handlers return config_obj diff --git a/sebs/local/deployment.py b/sebs/local/deployment.py index d3f0e4b7..d23d87d4 100644 --- a/sebs/local/deployment.py +++ b/sebs/local/deployment.py @@ -3,7 +3,7 @@ from sebs.cache import Cache from sebs.local.function import LocalFunction -from sebs.local.storage import Minio +from sebs.storage.minio import Minio, MinioConfig from sebs.utils import serialize @@ -39,10 +39,11 @@ def deserialize(path: str, cache_client: Cache) -> "Deployment": deployment._inputs.append(input_cfg) for func in input_data["functions"]: deployment._functions.append(LocalFunction.deserialize(func)) - deployment._storage = Minio.deserialize(input_data["storage"], cache_client) + deployment._storage = Minio.deserialize( + MinioConfig.deserialize(input_data["storage"]), cache_client + ) return deployment def shutdown(self): 
for func in self._functions: func.stop() - self._storage.stop() diff --git a/sebs/local/function.py b/sebs/local/function.py index 8bf408be..169cb457 100644 --- a/sebs/local/function.py +++ b/sebs/local/function.py @@ -2,7 +2,7 @@ import docker import json -from sebs.faas.function import ExecutionResult, Function, Trigger +from sebs.faas.function import ExecutionResult, Function, FunctionConfig, Trigger class HTTPTrigger(Trigger): @@ -37,9 +37,15 @@ def deserialize(obj: dict) -> Trigger: class LocalFunction(Function): def __init__( - self, docker_container, port: int, name: str, benchmark: str, code_package_hash: str + self, + docker_container, + port: int, + name: str, + benchmark: str, + code_package_hash: str, + config: FunctionConfig, ): - super().__init__(benchmark, name, code_package_hash) + super().__init__(benchmark, name, code_package_hash, config) self._instance = docker_container self._instance_id = docker_container.id self._instance.reload() @@ -74,12 +80,14 @@ def deserialize(cached_config: dict) -> "LocalFunction": try: instance_id = cached_config["instance_id"] instance = docker.from_env().containers.get(instance_id) + cfg = FunctionConfig.deserialize(cached_config["config"]) return LocalFunction( instance, cached_config["port"], cached_config["name"], cached_config["benchmark"], cached_config["hash"], + cfg, ) except docker.errors.NotFound: raise RuntimeError(f"Cached container {instance_id} not available anymore!") diff --git a/sebs/local/local.py b/sebs/local/local.py index 6340b7f2..602cc16e 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -4,14 +4,13 @@ import docker -# from sebs.local.minio import Minio from sebs.cache import Cache from sebs.config import SeBSConfig from sebs.utils import LoggingHandlers from sebs.local.config import LocalConfig -from sebs.local.storage import Minio +from sebs.storage.minio import Minio from sebs.local.function import LocalFunction -from sebs.faas.function import Function, ExecutionResult, Trigger 
+from sebs.faas.function import Function, FunctionConfig, ExecutionResult, Trigger from sebs.faas.storage import PersistentStorage from sebs.faas.system import System from sebs.benchmark import Benchmark @@ -45,14 +44,6 @@ def remove_containers(self) -> bool: def remove_containers(self, val: bool): self._remove_containers = val - @property - def shutdown_storage(self) -> bool: - return self._shutdown_storage - - @shutdown_storage.setter - def shutdown_storage(self, val: bool): - self._shutdown_storage = val - def __init__( self, sebs_config: SeBSConfig, @@ -64,9 +55,7 @@ def __init__( super().__init__(sebs_config, cache_client, docker_client) self.logging_handlers = logger_handlers self._config = config - self._storage_instance: Optional[Minio] = None self._remove_containers = True - self._shutdown_storage = True """ Create wrapper object for minio storage and fill buckets. @@ -79,23 +68,26 @@ def __init__( """ def get_storage(self, replace_existing: bool = False) -> PersistentStorage: - if not self._storage_instance: - self._storage_instance = Minio( - self._docker_client, self._cache_client, replace_existing + if not hasattr(self, "storage"): + + if not self.config.resources.storage_config: + raise RuntimeError( + "The local deployment is missing the configuration of pre-allocated storage!" + ) + self.storage = Minio.deserialize( + self.config.resources.storage_config, self.cache_client ) - self._storage_instance.logging_handlers = self.logging_handlers - self._storage_instance.start() + self.storage.logging_handlers = self.logging_handlers else: - self._storage_instance.replace_existing = replace_existing - return self._storage_instance + self.storage.replace_existing = replace_existing + return self.storage """ Shut down minio storage instance. """ def shutdown(self): - if self._storage_instance and self.shutdown_storage: - self._storage_instance.stop() + pass """ It would be sufficient to just pack the code and ship it as zip to AWS. 
@@ -154,11 +146,11 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc code_package.language_version, ) environment: Dict[str, str] = {} - if self._storage_instance: + if self.config.resources.storage_config: environment = { - "MINIO_ADDRESS": self._storage_instance._url, - "MINIO_ACCESS_KEY": self._storage_instance._access_key, - "MINIO_SECRET_KEY": self._storage_instance._secret_key, + "MINIO_ADDRESS": self.config.resources.storage_config.address, + "MINIO_ACCESS_KEY": self.config.resources.storage_config.access_key, + "MINIO_SECRET_KEY": self.config.resources.storage_config.secret_key, } container = self._docker_client.containers.run( image=container_name, @@ -168,6 +160,8 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc }, environment=environment, # FIXME: make CPUs configurable + # FIXME: configure memory + # FIXME: configure timeout # cpuset_cpus=cpuset, # required to access perf counters # alternative: use custom seccomp profile @@ -182,8 +176,14 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc detach=True, # tty=True, ) + function_cfg = FunctionConfig.from_benchmark(code_package) func = LocalFunction( - container, self.DEFAULT_PORT, func_name, code_package.benchmark, code_package.hash + container, + self.DEFAULT_PORT, + func_name, + code_package.benchmark, + code_package.hash, + function_cfg, ) self.logging.info( f"Started {func_name} function at container {container.id} , running on {func._url}" @@ -219,6 +219,10 @@ def create_trigger(self, func: Function, trigger_type: Trigger.TriggerType) -> T def cached_function(self, function: Function): pass + def update_function_configuration(self, function: Function, code_package: Benchmark): + self.logging.error("Updating function configuration of local deployment is not supported") + raise RuntimeError("Updating function configuration of local deployment is not supported") + def download_metrics( self, 
function_name: str, @@ -236,9 +240,7 @@ def enforce_cold_start(self, functions: List[Function], code_package: Benchmark) def default_function_name(code_package: Benchmark) -> str: # Create function name func_name = "{}-{}-{}".format( - code_package.benchmark, - code_package.language_name, - code_package.benchmark_config.memory, + code_package.benchmark, code_package.language_name, code_package.language_version ) return func_name From e7baee73962890c9c15abc0aa42d0fc88d2191ba Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 00:57:46 +0200 Subject: [PATCH 099/140] [system] Use separate directories for building different versions of benchmarks --- sebs/benchmark.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index e9773bcc..1f2f023e 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -178,7 +178,7 @@ def __init__( self._docker_client = docker_client self._system_config = system_config self._hash_value = None - self._output_dir = os.path.join(output_dir, f"{benchmark}_code") + self._output_dir = os.path.join(output_dir, f"{benchmark}_code", self._language.value, self._language_version) # verify existence of function in cache self.query_cache() From a62f65aa099dc30126b6140fa0be4b05fad5c76e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 01:51:23 +0200 Subject: [PATCH 100/140] [system] Linting --- sebs/benchmark.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 1f2f023e..160f5257 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -178,7 +178,9 @@ def __init__( self._docker_client = docker_client self._system_config = system_config self._hash_value = None - self._output_dir = os.path.join(output_dir, f"{benchmark}_code", self._language.value, self._language_version) + self._output_dir = os.path.join( + output_dir, f"{benchmark}_code", self._language.value, self._language_version + ) # verify existence 
of function in cache self.query_cache() From 7fb73ec4e3143e01e68d414412fcad9f7637a2da Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 17:27:59 +0200 Subject: [PATCH 101/140] [aws] Add sleep after creating HTTP trigger to avoid AWS errors --- sebs/aws/aws.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 8db3859d..7442bb97 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -483,6 +483,11 @@ def create_trigger(self, func: Function, trigger_type: Trigger.TriggerType) -> T SourceArn=f"{http_api.arn}/*/*", ) trigger = HTTPTrigger(http_api.endpoint, api_name) + self.logging.info( + f"Created HTTP trigger for {func.name} function. " + "Sleep 5 seconds to avoid cloud errors." + ) + time.sleep(5) trigger.logging_handlers = self.logging_handlers elif trigger_type == Trigger.TriggerType.LIBRARY: # should already exist From 680a618bca088725af2d488fab144358993a37f6 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 17:46:43 +0200 Subject: [PATCH 102/140] [system] Add script for Docker images to dynamically create non-root user with correct UID --- docker/entrypoint.sh | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100755 docker/entrypoint.sh diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100755 index 00000000..8cb341d7 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +USER_ID=${CONTAINER_UID} +GROUP_ID=${CONTAINER_GID} +USER=${CONTAINER_USER} + +useradd --non-unique -m -u ${USER_ID} ${USER} +groupmod -g ${GROUP_ID} ${USER} +mkdir -p /mnt/function && chown -R ${USER}:${USER} /mnt/function +export HOME=/home/${USER} +echo "Running as ${USER}, with ${USER_ID} and ${GROUP_ID}" + +if [ ! 
-z "$CMD" ]; then + gosu ${USER} $CMD +fi + +exec gosu ${USER} "$@" + From 80fa6e1c2395ab124b0d5e57840046a1fde73bac Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 17:55:44 +0200 Subject: [PATCH 103/140] [system] Update API of processing invocation metrics --- sebs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sebs.py b/sebs.py index 866e6106..ce78036c 100755 --- a/sebs.py +++ b/sebs.py @@ -277,12 +277,12 @@ def process(**kwargs): experiments = sebs.experiments.ExperimentResult.deserialize( config, sebs_client.cache_client, - sebs_client.logging_handlers(logging_filename), + sebs_client.generate_logging_handlers(logging_filename), ) for func in experiments.functions(): deployment_client.download_metrics( - func, *experiments.times(), experiments.invocations(func) + func, *experiments.times(), experiments.invocations(func), experiments.metrics(func) ) with open("results.json", "w") as out_f: out_f.write(sebs.utils.serialize(experiments)) From ca269c9fc947f96d1e587d33cd34792c8ecb8a5b Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 18:03:38 +0200 Subject: [PATCH 104/140] [aws] Update Python images and simplify permission handling --- docker/Dockerfile.build.aws.python | 17 -------------- docker/aws/python/Dockerfile.build.aws.python | 22 +++++++++++++++++++ tools/build_docker_images.py | 13 +++++------ 3 files changed, 27 insertions(+), 25 deletions(-) delete mode 100755 docker/Dockerfile.build.aws.python create mode 100755 docker/aws/python/Dockerfile.build.aws.python diff --git a/docker/Dockerfile.build.aws.python b/docker/Dockerfile.build.aws.python deleted file mode 100755 index af5ae7a1..00000000 --- a/docker/Dockerfile.build.aws.python +++ /dev/null @@ -1,17 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ARG VERSION -ENV HOME=/home/${USER} -ENV PYTHON_VERSION=${VERSION} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN mkdir -p /mnt/function && chown -R 
${USER}:${USER} /mnt/function -USER ${USER}:${USER} - -COPY --chown=${USER}:${USER} docker/python_installer.sh installer.sh - -ENV SCRIPT_FILE=/mnt/function/package.sh -CMD /bin/bash installer.sh diff --git a/docker/aws/python/Dockerfile.build.aws.python b/docker/aws/python/Dockerfile.build.aws.python new file mode 100755 index 00000000..960fc300 --- /dev/null +++ b/docker/aws/python/Dockerfile.build.aws.python @@ -0,0 +1,22 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV PYTHON_VERSION=${VERSION} + +# useradd, groupmod +RUN yum install -y shadow-utils +ENV GOSU_VERSION 1.14 +# https://github.com/tianon/gosu/releases/tag/1.14 +# key https://keys.openpgp.org/search?q=tianon%40debian.org +RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64" \ + && chmod +x /usr/local/bin/gosu +RUN mkdir -p /sebs/ +COPY docker/python_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 52ae9e9d..5ca43fe3 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -17,7 +17,7 @@ config = json.load(open(os.path.join(PROJECT_DIR, 'config', 'systems.json'), 'r')) client = docker.from_env() -def build(image_type, system, username, language=None,version=None, version_name=None): +def build(image_type, system, language=None,version=None, version_name=None): msg = 'Build *{}* Dockerfile for *{}* system'.format(image_type, system) if language: @@ -35,9 +35,7 @@ def build(image_type, system, username, language=None,version=None, version_name # if we pass an integer, the build will fail with 'connection reset by peer' buildargs={ - 'USER': username, 'VERSION': 
version, - 'UID': str(os.getuid()) } if version: buildargs['BASE_IMAGE'] = version_name @@ -50,7 +48,6 @@ def build(image_type, system, username, language=None,version=None, version_name ) def build_language(system, language, language_config): - username = language_config['username'] configs = [] if 'base_images' in language_config: for version, base_image in language_config['base_images'].items(): @@ -61,15 +58,15 @@ def build_language(system, language, language_config): for image in configs: if args.type is None: for image_type in language_config['images']: - build(image_type, system, username, language, *image) + build(image_type, system, language, *image) else: - build(args.type, system, username, language, *image) + build(args.type, system, language, *image) def build_systems(system, system_config): if args.type == 'manage': if 'images' in system_config: - build(args.type, system, system_config['images']['manage']['username']) + build(args.type, system) else: print(f'Skipping manage image for {system}') else: @@ -81,7 +78,7 @@ def build_systems(system, system_config): # Build additional types if 'images' in system_config: for image_type, image_config in system_config['images'].items(): - build(image_type, system, image_config['username']) + build(image_type, system) if args.deployment is None: for system, system_dict in config.items(): From 919b0b1fb8d3b9fe0587a2fdfd21a490077b1617 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 18:43:27 +0200 Subject: [PATCH 105/140] [system] Correctly cache different language versions of the same package --- sebs/cache.py | 56 ++++++++++++++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 18 deletions(-) diff --git a/sebs/cache.py b/sebs/cache.py index 25c7c7cc..ed5096e6 100644 --- a/sebs/cache.py +++ b/sebs/cache.py @@ -121,11 +121,11 @@ def get_benchmark_config(self, deployment: str, benchmark: str): """ def get_code_package( - self, deployment: str, benchmark: str, language: str + 
self, deployment: str, benchmark: str, language: str, language_version: str ) -> Optional[Dict[str, Any]]: cfg = self.get_benchmark_config(deployment, benchmark) - if cfg and language in cfg: - return cfg[language]["code_package"] + if cfg and language in cfg and language_version in cfg[language]["code_package"]: + return cfg[language]["code_package"][language_version] else: return None @@ -165,10 +165,11 @@ def update_storage(self, deployment: str, benchmark: str, config: dict): def add_code_package(self, deployment_name: str, language_name: str, code_package: "Benchmark"): with self._lock: language = code_package.language_name + language_version = code_package.language_version benchmark_dir = os.path.join(self.cache_dir, code_package.benchmark) os.makedirs(benchmark_dir, exist_ok=True) # Check if cache directory for this deployment exist - cached_dir = os.path.join(benchmark_dir, deployment_name, language) + cached_dir = os.path.join(benchmark_dir, deployment_name, language, language_version) if not os.path.exists(cached_dir): os.makedirs(cached_dir, exist_ok=True) @@ -181,29 +182,43 @@ def add_code_package(self, deployment_name: str, language_name: str, code_packag package_name = os.path.basename(code_package.code_location) cached_location = os.path.join(cached_dir, package_name) shutil.copy2(code_package.code_location, cached_dir) - language_config: Dict[str, Any] = { - "code_package": code_package.serialize(), - "functions": {}, - } + language_config = code_package.serialize() # don't store absolute path to avoid problems with moving cache dir relative_cached_loc = os.path.relpath(cached_location, self.cache_dir) - language_config["code_package"]["location"] = relative_cached_loc + language_config["location"] = relative_cached_loc date = str(datetime.datetime.now()) - language_config["code_package"]["date"] = { + language_config["date"] = { "created": date, "modified": date, } - config = {deployment_name: {language: language_config}} + # config = 
{deployment_name: {language: language_config}} + config = { + deployment_name: { + language: { + "code_package": {language_version: language_config}, + "functions": {}, + } + } + } + # make sure to not replace other entries if os.path.exists(os.path.join(benchmark_dir, "config.json")): with open(os.path.join(benchmark_dir, "config.json"), "r") as fp: cached_config = json.load(fp) if deployment_name in cached_config: - cached_config[deployment_name][language] = language_config + # language known, platform known, extend dictionary + if language in cached_config[deployment_name]: + cached_config[deployment_name][language]["code_package"][ + language_version + ] = language_config + # language unknown, platform known - add new dictionary + else: + cached_config[deployment_name][language] = config[deployment_name][ + language + ] else: - cached_config[deployment_name] = { - language: language_config, - } + # language unknown, platform unknown - add new dictionary + cached_config[deployment_name] = config[deployment_name] config = cached_config with open(os.path.join(benchmark_dir, "config.json"), "w") as fp: json.dump(config, fp, indent=2) @@ -220,9 +235,10 @@ def update_code_package( ): with self._lock: language = code_package.language_name + language_version = code_package.language_version benchmark_dir = os.path.join(self.cache_dir, code_package.benchmark) # Check if cache directory for this deployment exist - cached_dir = os.path.join(benchmark_dir, deployment_name, language) + cached_dir = os.path.join(benchmark_dir, deployment_name, language, language_version) if os.path.exists(cached_dir): # copy code @@ -242,8 +258,12 @@ def update_code_package( with open(os.path.join(benchmark_dir, "config.json"), "r") as fp: config = json.load(fp) date = str(datetime.datetime.now()) - config[deployment_name][language]["code_package"]["date"]["modified"] = date - config[deployment_name][language]["code_package"]["hash"] = code_package.hash + 
config[deployment_name][language]["code_package"][language_version]["date"][ + "modified" + ] = date + config[deployment_name][language]["code_package"][language_version][ + "hash" + ] = code_package.hash with open(os.path.join(benchmark_dir, "config.json"), "w") as fp: json.dump(config, fp, indent=2) else: From fd4bf1ed1bd8d180efd094d0363f3e252fc1ecd2 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 19:09:18 +0200 Subject: [PATCH 106/140] [aws] Update AWS Docker images for Node.js and drop support for old Python versions --- config/systems.json | 18 ++++++++--------- docker/Dockerfile.build.aws.nodejs | 14 ------------- docker/aws/nodejs/Dockerfile.build | 20 +++++++++++++++++++ ...file.build.aws.python => Dockerfile.build} | 0 sebs/benchmark.py | 15 ++++++++++---- tools/build_docker_images.py | 2 +- 6 files changed, 40 insertions(+), 29 deletions(-) delete mode 100755 docker/Dockerfile.build.aws.nodejs create mode 100755 docker/aws/nodejs/Dockerfile.build rename docker/aws/python/{Dockerfile.build.aws.python => Dockerfile.build} (100%) diff --git a/config/systems.json b/config/systems.json index e2df8c9b..1468841b 100644 --- a/config/systems.json +++ b/config/systems.json @@ -37,13 +37,12 @@ "languages": { "python": { "base_images": { - "3.8": "lambci/lambda:build-python3.8", - "3.7": "lambci/lambda:build-python3.7", - "3.6": "lambci/lambda:build-python3.6" + "3.9": "amazon/aws-lambda-python:3.9", + "3.8": "amazon/aws-lambda-python:3.8", + "3.7": "amazon/aws-lambda-python:3.7" }, - "versions": ["3.6", "3.7", "3.8"], + "versions": ["3.7", "3.8", "3.9"], "images": ["build"], - "username": "docker_user", "deployment": { "files": [ "handler.py", "storage.py"], "packages": [] @@ -51,12 +50,11 @@ }, "nodejs": { "base_images": { - "12.x" : "lambci/lambda:build-nodejs12.x", - "10.x" : "lambci/lambda:build-nodejs10.x" + "14.x" : "amazon/aws-lambda-nodejs:14", + "12.x" : "amazon/aws-lambda-nodejs:12" }, - "versions": ["10.x", "12.x"], + "versions": 
["12.x", "14.x"], "images": ["build"], - "username": "docker_user", "deployment": { "files": [ "handler.js", "storage.js"], "packages": { @@ -71,7 +69,7 @@ "python": { "base_images": { "3.7": "mcr.microsoft.com/azure-functions/python:2.0-python3.7", - "3.6": "mcr.microsoft.com/azure-functions/python:2.0-python3.6" + "3.8": "mcr.microsoft.com/azure-functions/python:3.0-python3.8" }, "images": ["build"], "username": "docker_user", diff --git a/docker/Dockerfile.build.aws.nodejs b/docker/Dockerfile.build.aws.nodejs deleted file mode 100755 index 23806417..00000000 --- a/docker/Dockerfile.build.aws.nodejs +++ /dev/null @@ -1,14 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ENV HOME=/home/${USER} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN mkdir -p /mnt/function && chown -R ${USER}:${USER} /mnt/function -USER ${USER}:${USER} - -COPY --chown=${USER}:${USER} docker/nodejs_installer.sh installer.sh - -CMD /bin/bash installer.sh diff --git a/docker/aws/nodejs/Dockerfile.build b/docker/aws/nodejs/Dockerfile.build new file mode 100755 index 00000000..63dbb37a --- /dev/null +++ b/docker/aws/nodejs/Dockerfile.build @@ -0,0 +1,20 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +# useradd, groupmod +RUN yum install -y shadow-utils +ENV GOSU_VERSION 1.14 +# https://github.com/tianon/gosu/releases/tag/1.14 +# key https://keys.openpgp.org/search?q=tianon%40debian.org +RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/${GOSU_VERSION}/gosu-amd64" \ + && chmod +x /usr/local/bin/gosu +RUN mkdir -p /sebs/ +COPY docker/nodejs_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/aws/python/Dockerfile.build.aws.python 
b/docker/aws/python/Dockerfile.build similarity index 100% rename from docker/aws/python/Dockerfile.build.aws.python rename to docker/aws/python/Dockerfile.build diff --git a/sebs/benchmark.py b/sebs/benchmark.py index 160f5257..0c51b2cd 100644 --- a/sebs/benchmark.py +++ b/sebs/benchmark.py @@ -225,6 +225,7 @@ def query_cache(self): deployment=self._deployment_name, benchmark=self._benchmark, language=self.language_name, + language_version=self.language_version, ) self._functions = self._cache_client.get_functions( deployment=self._deployment_name, @@ -388,9 +389,12 @@ def install_dependencies(self, output_dir): stdout = self._docker_client.containers.run( "{}:{}".format(repo_name, image_name), volumes=volumes, - environment={"APP": self.benchmark}, - # user="1000:1000", - user=uid, + environment={ + "CONTAINER_UID": str(os.getuid()), + "CONTAINER_GID": str(os.getgid()), + "CONTAINER_USER": "docker_user", + "APP": self.benchmark, + }, remove=True, stdout=True, stderr=True, @@ -423,7 +427,10 @@ def install_dependencies(self, output_dir): container.put_archive("/mnt/function", data.read()) # do the build step exit_code, stdout = container.exec_run( - cmd="/bin/bash installer.sh", stdout=True, stderr=True + cmd="/bin/bash /sebs/installer.sh", + user="docker_user", + stdout=True, + stderr=True, ) # copy updated code with package data, stat = container.get_archive("/mnt/function") diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 5ca43fe3..5c767b53 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -25,7 +25,7 @@ def build(image_type, system, language=None,version=None, version_name=None): if version: msg += ' with version *' + version + '*' print(msg) - dockerfile = os.path.join(PROJECT_DIR, 'docker', 'Dockerfile.{}.{}'.format(image_type, system)) + dockerfile = os.path.join(PROJECT_DIR, 'docker', system, language, f'Dockerfile.{image_type}') target = 
f'{config["general"]["docker_repository"]}:{image_type}.{system}' if language: dockerfile += '.' + language From 7bcdc979777a787bff39431e64f8a44ca6f476f3 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:12:35 +0200 Subject: [PATCH 107/140] [system] Remove incorrect filename suffix --- tools/build_docker_images.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 5c767b53..6bdb808d 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -28,7 +28,6 @@ def build(image_type, system, language=None,version=None, version_name=None): dockerfile = os.path.join(PROJECT_DIR, 'docker', system, language, f'Dockerfile.{image_type}') target = f'{config["general"]["docker_repository"]}:{image_type}.{system}' if language: - dockerfile += '.' + language target += '.' + language if version: target += '.' + version From f7c4f9e5cdb5ea07dd4b8d3a9c51ed12328ce9da Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:12:59 +0200 Subject: [PATCH 108/140] [system] Linting --- tools/build_docker_images.py | 74 ++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 36 deletions(-) diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 6bdb808d..83e1762c 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -4,87 +4,89 @@ import docker import json import os -import shutil PROJECT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir) -DOCKER_DIR = os.path.join(PROJECT_DIR, 'docker') +DOCKER_DIR = os.path.join(PROJECT_DIR, "docker") -parser = argparse.ArgumentParser(description='Run local app experiments.') -parser.add_argument('--deployment', default=None, choices=['local', 'aws', 'azure', 'gcp'], action='store') -parser.add_argument('--type', default=None, choices=['build', 'run', 'manage'], action='store') -parser.add_argument('--language', default=None, choices=['python', 
'nodejs'], action='store') +parser = argparse.ArgumentParser(description="Run local app experiments.") +parser.add_argument( + "--deployment", default=None, choices=["local", "aws", "azure", "gcp"], action="store" +) +parser.add_argument("--type", default=None, choices=["build", "run", "manage"], action="store") +parser.add_argument("--language", default=None, choices=["python", "nodejs"], action="store") args = parser.parse_args() -config = json.load(open(os.path.join(PROJECT_DIR, 'config', 'systems.json'), 'r')) +config = json.load(open(os.path.join(PROJECT_DIR, "config", "systems.json"), "r")) client = docker.from_env() -def build(image_type, system, language=None,version=None, version_name=None): - msg = 'Build *{}* Dockerfile for *{}* system'.format(image_type, system) +def build(image_type, system, language=None, version=None, version_name=None): + + msg = "Build *{}* Dockerfile for *{}* system".format(image_type, system) if language: - msg += ' with language *' + language + '*' + msg += " with language *" + language + "*" if version: - msg += ' with version *' + version + '*' + msg += " with version *" + version + "*" print(msg) - dockerfile = os.path.join(PROJECT_DIR, 'docker', system, language, f'Dockerfile.{image_type}') + dockerfile = os.path.join(PROJECT_DIR, "docker", system, language, f"Dockerfile.{image_type}") target = f'{config["general"]["docker_repository"]}:{image_type}.{system}' if language: - target += '.' + language + target += "." + language if version: - target += '.' + version + target += "." 
+ version # if we pass an integer, the build will fail with 'connection reset by peer' - buildargs={ - 'VERSION': version, + buildargs = { + "VERSION": version, } if version: - buildargs['BASE_IMAGE'] = version_name - print('Build img {} in {} from file {} with args {}'.format(target, PROJECT_DIR, dockerfile, buildargs)) - client.images.build( - path=PROJECT_DIR, - dockerfile=dockerfile, - buildargs=buildargs, - tag=target + buildargs["BASE_IMAGE"] = version_name + print( + "Build img {} in {} from file {} with args {}".format( + target, PROJECT_DIR, dockerfile, buildargs + ) ) + client.images.build(path=PROJECT_DIR, dockerfile=dockerfile, buildargs=buildargs, tag=target) + def build_language(system, language, language_config): configs = [] - if 'base_images' in language_config: - for version, base_image in language_config['base_images'].items(): + if "base_images" in language_config: + for version, base_image in language_config["base_images"].items(): configs.append([version, base_image]) else: configs.append([None, None]) for image in configs: if args.type is None: - for image_type in language_config['images']: + for image_type in language_config["images"]: build(image_type, system, language, *image) else: build(args.type, system, language, *image) + def build_systems(system, system_config): - if args.type == 'manage': - if 'images' in system_config: + if args.type == "manage": + if "images" in system_config: build(args.type, system) else: - print(f'Skipping manage image for {system}') + print(f"Skipping manage image for {system}") else: if args.language: - build_language(system, args.language, system_config['languages'][args.language]) + build_language(system, args.language, system_config["languages"][args.language]) else: - for language, language_dict in system_config['languages'].items(): + for language, language_dict in system_config["languages"].items(): build_language(system, language, language_dict) # Build additional types - if 'images' in system_config: 
- for image_type, image_config in system_config['images'].items(): + if "images" in system_config: + for image_type, image_config in system_config["images"].items(): build(image_type, system) + if args.deployment is None: for system, system_dict in config.items(): - if system == 'general': + if system == "general": continue build_systems(system, system_dict) else: build_systems(args.deployment, config[args.deployment]) - - From 8ef4ae4ebafebf667555171876c7f3b712250686 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:15:07 +0200 Subject: [PATCH 109/140] [azure] typo --- sebs/azure/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/azure/config.py b/sebs/azure/config.py index 20591595..6bb5ee51 100644 --- a/sebs/azure/config.py +++ b/sebs/azure/config.py @@ -297,7 +297,7 @@ def initialize(cfg: Config, dct: dict): else: config._resources_id = str(uuid.uuid1())[0:8] config.logging.info( - f"Azure: generating unique resource name for" + f"Azure: generating unique resource name for " f"the experiments: {config._resources_id}" ) From 4d7aef985dd2683b50f79689d2b0eb3992730a8c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:16:28 +0200 Subject: [PATCH 110/140] [azure] Update Docker images to use better permissions handling --- docker/Dockerfile.build.azure.nodejs | 12 ------------ docker/Dockerfile.build.azure.python | 19 ------------------- docker/azure/nodejs/Dockerfile.build | 15 +++++++++++++++ docker/azure/python/Dockerfile.build | 18 ++++++++++++++++++ 4 files changed, 33 insertions(+), 31 deletions(-) delete mode 100755 docker/Dockerfile.build.azure.nodejs delete mode 100755 docker/Dockerfile.build.azure.python create mode 100755 docker/azure/nodejs/Dockerfile.build create mode 100755 docker/azure/python/Dockerfile.build diff --git a/docker/Dockerfile.build.azure.nodejs b/docker/Dockerfile.build.azure.nodejs deleted file mode 100755 index b13db48a..00000000 --- 
a/docker/Dockerfile.build.azure.nodejs +++ /dev/null @@ -1,12 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ENV HOME=/home/${USER} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN chown -R ${USER}:${USER} /home/${USER}/ -USER ${USER}:${USER} - -CMD cd /mnt/function && npm install && rm -rf package-lock.json diff --git a/docker/Dockerfile.build.azure.python b/docker/Dockerfile.build.azure.python deleted file mode 100755 index 59d79059..00000000 --- a/docker/Dockerfile.build.azure.python +++ /dev/null @@ -1,19 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ARG VERSION -ENV HOME=/home/${USER} -ENV PYTHON_VERSION=${VERSION} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN apt-get update\ - && apt-get install -y gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev\ - && apt-get purge -y --auto-remove -USER ${USER}:${USER} - -ENV SCRIPT_FILE=/mnt/function/package.sh -CMD cd /mnt/function\ - && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip3 -q install -r requirements.txt -r requirements.txt.${PYTHON_VERSION} -t .python_packages/lib/site-packages ; else pip3 -q install -r requirements.txt -t .python_packages/lib/site-packages ; fi\ - && if test -f "${SCRIPT_FILE}"; then /bin/bash ${SCRIPT_FILE} .python_packages/lib/site-packages ; fi diff --git a/docker/azure/nodejs/Dockerfile.build b/docker/azure/nodejs/Dockerfile.build new file mode 100755 index 00000000..c19d43e7 --- /dev/null +++ b/docker/azure/nodejs/Dockerfile.build @@ -0,0 +1,15 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +RUN apt-get update && apt-get install -y gosu + +RUN mkdir -p /sebs/ +COPY docker/nodejs_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] 
diff --git a/docker/azure/python/Dockerfile.build b/docker/azure/python/Dockerfile.build new file mode 100755 index 00000000..810bd358 --- /dev/null +++ b/docker/azure/python/Dockerfile.build @@ -0,0 +1,18 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV PYTHON_VERSION=${VERSION} + +RUN apt-get update\ + && apt-get install -y gosu gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev\ + && apt-get purge -y --auto-remove + +RUN mkdir -p /sebs/ +COPY docker/python_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] From e86e7b495e18b687aede2b2b786f716e9b8a7d71 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:18:13 +0200 Subject: [PATCH 111/140] [azure] Update supported versions of Python and Node.js --- config/systems.json | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/config/systems.json b/config/systems.json index 1468841b..54b5e1f1 100644 --- a/config/systems.json +++ b/config/systems.json @@ -68,8 +68,9 @@ "languages": { "python": { "base_images": { - "3.7": "mcr.microsoft.com/azure-functions/python:2.0-python3.7", - "3.8": "mcr.microsoft.com/azure-functions/python:3.0-python3.8" + "3.7": "mcr.microsoft.com/azure-functions/python:3.0-python3.7", + "3.8": "mcr.microsoft.com/azure-functions/python:3.0-python3.8", + "3.9": "mcr.microsoft.com/azure-functions/python:3.0-python3.9" }, "images": ["build"], "username": "docker_user", @@ -80,8 +81,8 @@ }, "nodejs": { "base_images": { - "10" : "mcr.microsoft.com/azure-functions/node:2.0-node10", - "8" : "mcr.microsoft.com/azure-functions/node:2.0-node8" + "14" : "mcr.microsoft.com/azure-functions/node:3.0-node14", + "12" : "mcr.microsoft.com/azure-functions/node:3.0-node12" }, "images": ["build"], "username": 
"docker_user", From 0aa978de81d2ef2f5c4423e7b75c97816a5a4782 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 21:23:58 +0200 Subject: [PATCH 112/140] [azure] Adjust CLI container to the new permissions system --- .../Dockerfile.manage} | 27 ++++++++++++------- sebs/azure/azure.py | 2 ++ sebs/azure/cli.py | 25 ++++++++++++++--- tools/build_docker_images.py | 5 +++- 4 files changed, 45 insertions(+), 14 deletions(-) rename docker/{Dockerfile.manage.azure => azure/Dockerfile.manage} (68%) diff --git a/docker/Dockerfile.manage.azure b/docker/azure/Dockerfile.manage similarity index 68% rename from docker/Dockerfile.manage.azure rename to docker/azure/Dockerfile.manage index 8c76a825..f1274949 100644 --- a/docker/Dockerfile.manage.azure +++ b/docker/azure/Dockerfile.manage @@ -1,6 +1,6 @@ FROM python:3.7-slim-stretch -ARG USER -ARG UID +#ARG USER +#ARG UID # disable telemetry by default ENV FUNCTIONS_CORE_TOOLS_TELEMETRY_OPTOUT=1 @@ -20,13 +20,22 @@ RUN apt-get clean && apt-get update\ # https://github.com/moby/moby/issues/20295 # https://github.com/moby/moby/issues/20295 -ENV HOME=/home/${USER} -RUN useradd --non-unique --uid ${UID} -m ${USER}\ - && chown ${USER}:${USER} ${HOME}\ - && chown ${USER}:${USER} /mnt -WORKDIR ${HOME} -USER ${USER}:${USER} +#ENV HOME=/home/${USER} +#RUN useradd --non-unique --uid ${UID} -m ${USER}\ +# && chown ${USER}:${USER} ${HOME}\ +# && chown ${USER}:${USER} /mnt +#WORKDIR ${HOME} +#USER ${USER}:${USER} # Extension must be installed for a specific user, I guess. # Installed with root does not work for user. 
-RUN az extension add --name application-insights +#RUN /usr/bin/az extension add --name application-insights + +RUN apt-get -y --no-install-recommends install gosu +RUN mkdir -p /sebs/ +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +ENV SCRIPT_FILE=/mnt/function/package.sh +#ENV CMD='/usr/bin/ extension add --name application-insights' +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/sebs/azure/azure.py b/sebs/azure/azure.py index e4160e1e..e957d693 100644 --- a/sebs/azure/azure.py +++ b/sebs/azure/azure.py @@ -376,6 +376,8 @@ def download_metrics( metrics: Dict[str, dict], ): + self.cli_instance.install_insights() + resource_group = self.config.resources.resource_group(self.cli_instance) # Avoid warnings in the next step ret = self.cli_instance.execute( diff --git a/sebs/azure/cli.py b/sebs/azure/cli.py index f98226e4..03bbc4dd 100644 --- a/sebs/azure/cli.py +++ b/sebs/azure/cli.py @@ -25,14 +25,27 @@ def __init__(self, system_config: SeBSConfig, docker_client: docker.client): self.docker_instance = docker_client.containers.run( image=repo_name + ":" + image_name, command="/bin/bash", - user="1000:1000", - volumes={}, - # remove=True, + environment={ + "CONTAINER_UID": str(os.getuid()), + "CONTAINER_GID": str(os.getgid()), + "CONTAINER_USER": "docker_user", + }, + remove=True, stdout=True, stderr=True, detach=True, tty=True, ) + self._insights_installed = False + logging.info("Started Azure CLI container.") + while True: + try: + dkg = self.docker_instance.logs(stream=True, follow=True) + next(dkg).decode("utf-8") + break + except StopIteration: + pass + logging.info("Starting Azure manage Docker instance") """ @@ -55,7 +68,7 @@ def execute(self, cmd: str): """ def login(self, appId: str, tenant: str, password: str): - self.execute( + out = self.execute( "az login -u {0} --service-principal --tenant {1} -p {2}".format( appId, tenant, @@ -74,6 +87,10 @@ def upload_package(self, directory: str, dest: str): self.execute("mkdir -p 
{}".format(dest)) self.docker_instance.put_archive(path=dest, data=handle.read()) + def install_insights(self): + if not self._insights_installed: + self.execute("az extension add --name application-insights") + """ Shutdowns Docker instance. """ diff --git a/tools/build_docker_images.py b/tools/build_docker_images.py index 83e1762c..8f1eb320 100755 --- a/tools/build_docker_images.py +++ b/tools/build_docker_images.py @@ -27,7 +27,10 @@ def build(image_type, system, language=None, version=None, version_name=None): if version: msg += " with version *" + version + "*" print(msg) - dockerfile = os.path.join(PROJECT_DIR, "docker", system, language, f"Dockerfile.{image_type}") + if language is not None: + dockerfile = os.path.join(PROJECT_DIR, "docker", system, language, f"Dockerfile.{image_type}") + else: + dockerfile = os.path.join(PROJECT_DIR, "docker", system, f"Dockerfile.{image_type}") target = f'{config["general"]["docker_repository"]}:{image_type}.{system}' if language: target += "." + language From 0dcac3eeb675ce21d6a5520b6b48c722eaf9b49c Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 22:10:09 +0200 Subject: [PATCH 113/140] [azure] Linting --- sebs/azure/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sebs/azure/cli.py b/sebs/azure/cli.py index 03bbc4dd..97fbe706 100644 --- a/sebs/azure/cli.py +++ b/sebs/azure/cli.py @@ -68,7 +68,7 @@ def execute(self, cmd: str): """ def login(self, appId: str, tenant: str, password: str): - out = self.execute( + self.execute( "az login -u {0} --service-principal --tenant {1} -p {2}".format( appId, tenant, From 3611e051669e5b7e24cebe3c67e522a2d4e87494 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 22:10:45 +0200 Subject: [PATCH 114/140] [gcp] Update Docker images to a new format --- docker/Dockerfile.build.gcp.nodejs | 16 ---------------- docker/Dockerfile.build.gcp.python | 26 -------------------------- docker/gcp/nodejs/Dockerfile.build | 18 ++++++++++++++++++ 
docker/gcp/python/Dockerfile.build | 23 +++++++++++++++++++++++ 4 files changed, 41 insertions(+), 42 deletions(-) delete mode 100755 docker/Dockerfile.build.gcp.nodejs delete mode 100755 docker/Dockerfile.build.gcp.python create mode 100755 docker/gcp/nodejs/Dockerfile.build create mode 100755 docker/gcp/python/Dockerfile.build diff --git a/docker/Dockerfile.build.gcp.nodejs b/docker/Dockerfile.build.gcp.nodejs deleted file mode 100755 index 00d811a4..00000000 --- a/docker/Dockerfile.build.gcp.nodejs +++ /dev/null @@ -1,16 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ARG VERSION -ENV HOME=/home/${USER} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} - -RUN install_node --ignore-verification-failure v${VERSION} -RUN chown -R ${USER}:${USER} /home/${USER}/ -USER ${USER}:${USER} - -CMD cd /mnt/function && npm install && rm -rf package-lock.json - diff --git a/docker/Dockerfile.build.gcp.python b/docker/Dockerfile.build.gcp.python deleted file mode 100755 index be471f7d..00000000 --- a/docker/Dockerfile.build.gcp.python +++ /dev/null @@ -1,26 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ARG VERSION -ENV HOME=/home/${USER} -ENV PYTHON_VERSION=${VERSION} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN chmod a+w ${HOME} -USER ${USER}:${USER} - -RUN export PATH=/opt/python3.7/bin:/opt/python3.6/bin:/opt/python3.5/bin:/opt/python3.4/bin:$PATH -RUN echo $PATH -RUN which python - -RUN ls -al $HOME -RUN virtualenv -p python${PYTHON_VERSION} ${HOME}/env -ENV VIRTUAL_ENV ${HOME}/env -ENV PATH ${HOME}/env/bin:${PATH} - -ENV SCRIPT_FILE=/mnt/function/package.sh -CMD cd /mnt/function\ - && if test -f "requirements.txt.${PYTHON_VERSION}"; then pip3 -q install -r requirements.txt -r requirements.txt.${PYTHON_VERSION} -t .python_packages/lib/site-packages ; else pip3 -q install -r requirements.txt -t .python_packages/lib/site-packages ; fi\ - && if test -f "${SCRIPT_FILE}"; then /bin/bash ${SCRIPT_FILE} 
.python_packages/lib/site-packages ; fi diff --git a/docker/gcp/nodejs/Dockerfile.build b/docker/gcp/nodejs/Dockerfile.build new file mode 100755 index 00000000..a09ff331 --- /dev/null +++ b/docker/gcp/nodejs/Dockerfile.build @@ -0,0 +1,18 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV HOME=/home/${USER} + +RUN install_node --ignore-verification-failure v${VERSION} +RUN apt-get update && apt-get install -y gosu + +RUN mkdir -p /sebs/ +COPY docker/nodejs_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV PATH=/usr/sbin:$PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/gcp/python/Dockerfile.build b/docker/gcp/python/Dockerfile.build new file mode 100755 index 00000000..62130ebb --- /dev/null +++ b/docker/gcp/python/Dockerfile.build @@ -0,0 +1,23 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV PYTHON_VERSION=${VERSION} + +RUN apt-get update\ + && apt-get install -y gosu gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev\ + && apt-get purge -y --auto-remove + +RUN export PATH=/opt/python3.7/bin:/opt/python3.6/bin:/opt/python3.5/bin:/opt/python3.4/bin:$PATH +RUN virtualenv -p python${PYTHON_VERSION} /sebs/env +ENV VIRTUAL_ENV /sebs/env +ENV PATH /sebs/env/bin:${PATH} + +RUN mkdir -p /sebs/ +COPY docker/python_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] From fa8beab1c33743d09b05adb5185a3f4ebbf3d381 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 22:13:38 +0200 Subject: [PATCH 115/140] [gcp] Update Node.js versions --- config/systems.json | 6 +++--- 1 
file changed, 3 insertions(+), 3 deletions(-) diff --git a/config/systems.json b/config/systems.json index 54b5e1f1..7a3c8621 100644 --- a/config/systems.json +++ b/config/systems.json @@ -116,9 +116,9 @@ }, "nodejs": { "base_images": { - "6" : "gcr.io/google-appengine/nodejs", - "8" : "gcr.io/google-appengine/nodejs", - "10" : "gcr.io/google-appengine/nodejs" + "10" : "gcr.io/google-appengine/nodejs", + "12" : "gcr.io/google-appengine/nodejs", + "14" : "gcr.io/google-appengine/nodejs" }, "images": ["build"], "username": "docker_user", From d08e7a18db3a083a4805a9db829d989dda26f173 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 22:14:05 +0200 Subject: [PATCH 116/140] [system] Remove unnecessary reinstallation of Docker images --- .circleci/config.yml | 6 +++--- install.py | 20 -------------------- 2 files changed, 3 insertions(+), 23 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9d5fcd76..1c461ea8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ jobs: name: Install curl-config from Ubuntu APT - run: command: | - python3 install.py --aws --azure --gcp --dont-rebuild-docker-images --no-local + python3 install.py --aws --azure --gcp --no-local name: Install pip dependencies - run: command: | @@ -44,8 +44,8 @@ jobs: then ls $HOME/docker/*.tar.gz | xargs -I {file} sh -c "zcat {file} | docker load"; else - docker pull mcopik/serverless-benchmarks:build.aws.python.3.6 - docker pull mcopik/serverless-benchmarks:build.aws.nodejs.10.x + docker pull mcopik/serverless-benchmarks:build.aws.python.3.7 + docker pull mcopik/serverless-benchmarks:build.aws.nodejs.12.x fi name: Load Docker images - run: diff --git a/install.py b/install.py index 3b7f5a6a..f98c2a29 100755 --- a/install.py +++ b/install.py @@ -14,8 +14,6 @@ parser.add_argument(f"--{deployment}", action="store_const", default=True, const=True, dest=deployment) parser.add_argument(f"--no-{deployment}", action="store_const", const=False, 
dest=deployment) parser.add_argument("--with-pypapi", action="store_true") -parser.add_argument("--force-rebuild-docker-images", default=False, action="store_true") -parser.add_argument("--dont-rebuild-docker-images", default=False, action="store_true") args = parser.parse_args() def execute(cmd): @@ -43,12 +41,6 @@ def execute(cmd): if args.aws: print("Install Python dependencies for AWS") execute(". {}/bin/activate && pip3 install -r requirements.aws.txt".format(env_dir)) - if args.force_rebuild_docker_images or (os.getuid() != 1000 and not args.dont_rebuild_docker_images): - print(f"AWS: rebuild Docker images for current user ID: {os.getuid()}") - execute(". {}/bin/activate && tools/build_docker_images.py --deployment aws".format(env_dir)) - elif os.getuid() != 1000 and args.dont_rebuild_docker_images: - print(f"AWS: Docker images are built for user with UID 1000, current UID: {os.getuid()}." - "Skipping rebuild as requested by user, but recommending to rebuild the images") flag = "TRUE" if args.aws else "FALSE" execute(f'echo "export SEBS_WITH_AWS={flag}" >> {env_dir}/bin/activate') execute(f'echo "unset SEBS_WITH_AWS" >> {env_dir}/bin/deactivate') @@ -56,12 +48,6 @@ def execute(cmd): if args.azure: print("Install Python dependencies for Azure") execute(". {}/bin/activate && pip3 install -r requirements.azure.txt".format(env_dir)) - if args.force_rebuild_docker_images or (os.getuid() != 1000 and not args.dont_rebuild_docker_images): - print(f"Azure: rebuild Docker images for current user ID: {os.getuid()}") - execute(". {}/bin/activate && tools/build_docker_images.py --deployment azure".format(env_dir)) - elif os.getuid() != 1000 and args.dont_rebuild_docker_images: - print(f"Azure: Docker images are built for user with UID 1000, current UID: {os.getuid()}." 
- "Skipping rebuild as requested by user, but recommending to rebuild the images") flag = "TRUE" if args.azure else "FALSE" execute(f'echo "export SEBS_WITH_AZURE={flag}" >> {env_dir}/bin/activate') execute(f'echo "unset SEBS_WITH_AZURE" >> {env_dir}/bin/deactivate') @@ -69,12 +55,6 @@ def execute(cmd): if args.gcp: print("Install Python dependencies for GCP") execute(". {}/bin/activate && pip3 install -r requirements.gcp.txt".format(env_dir)) - if args.force_rebuild_docker_images or (os.getuid() != 1000 and not args.dont_rebuild_docker_images): - print(f"GCP: rebuild Docker images for current user ID: {os.getuid()}") - execute(". {}/bin/activate && tools/build_docker_images.py --deployment gcp".format(env_dir)) - elif os.getuid() != 1000 and args.dont_rebuild_docker_images: - print(f"GCP: Docker images are built for user with UID 1000, current UID: {os.getuid()}." - "Skipping rebuild as requested by user, but recommending to rebuild the images") flag = "TRUE" if args.gcp else "FALSE" execute(f'echo "export SEBS_WITH_GCP={flag}" >> {env_dir}/bin/activate') execute(f'echo "unset SEBS_WITH_GCP" >> {env_dir}/bin/deactivate') From 2044762b7dd4ece0a754466cec3962c8cae75635 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 2 May 2022 23:04:01 +0200 Subject: [PATCH 117/140] [whisk] Working Docker image for Python --- .../python/Dockerfile.function} | 0 sebs/openwhisk/openwhisk.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename docker/{Dockerfile.run.openwhisk.python => openwhisk/python/Dockerfile.function} (100%) diff --git a/docker/Dockerfile.run.openwhisk.python b/docker/openwhisk/python/Dockerfile.function similarity index 100% rename from docker/Dockerfile.run.openwhisk.python rename to docker/openwhisk/python/Dockerfile.function diff --git a/sebs/openwhisk/openwhisk.py b/sebs/openwhisk/openwhisk.py index 6c0d043e..0337a9bb 100644 --- a/sebs/openwhisk/openwhisk.py +++ b/sebs/openwhisk/openwhisk.py @@ -159,7 +159,7 @@ def build_base_image( build_dir = 
os.path.join(directory, "docker") os.makedirs(build_dir) shutil.copy( - os.path.join(PROJECT_DIR, "docker", f"Dockerfile.run.{self.name()}.{language_name}"), + os.path.join(PROJECT_DIR, "docker", self.name(), language_name, "Dockerfile.function"), os.path.join(build_dir, "Dockerfile"), ) From 5f8062ad505bbf7b3ab67bbcb19031d5b8adc6bc Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 01:55:26 +0200 Subject: [PATCH 118/140] [benchmarks] Update 411.image-recognition to Python 3.9 --- .../411.image-recognition/python/requirements.txt | 2 -- .../411.image-recognition/python/requirements.txt.3.6 | 2 ++ .../411.image-recognition/python/requirements.txt.3.7 | 2 ++ .../411.image-recognition/python/requirements.txt.3.8 | 2 ++ .../411.image-recognition/python/requirements.txt.3.9 | 3 +++ 5 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.9 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt b/benchmarks/400.inference/411.image-recognition/python/requirements.txt index 3c51d232..d191dc6d 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt @@ -2,5 +2,3 @@ #torchvision==0.4.0+cpu #https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl #torch==1.0.1.post2+cpu -torchvision==0.2.1 -numpy==1.16 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 index c09a9adf..63409aca 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.6 @@ -1,2 +1,4 @@ Pillow==6.1 +numpy==1.16 https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp36-cp36m-linux_x86_64.whl +torchvision==0.2.1 diff --git 
a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 index 330534cd..54bddbd5 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.7 @@ -1,2 +1,4 @@ Pillow==6.1 +numpy==1.16 https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl +torchvision==0.2.1 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 index 07dd466c..7d543dd8 100644 --- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.8 @@ -1 +1,3 @@ +numpy==1.16 https://download.pytorch.org/whl/cpu/torch-1.4.0%2Bcpu-cp38-cp38-linux_x86_64.whl +torchvision==0.5 diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.9 b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.9 new file mode 100644 index 00000000..fcf863e9 --- /dev/null +++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt.3.9 @@ -0,0 +1,3 @@ +numpy==1.18 +https://download.pytorch.org/whl/cpu/torch-1.8.0%2Bcpu-cp39-cp39-linux_x86_64.whl +torchvision==0.9.0 From 27eb0408f5be070798922175d6d92171decc2a0d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 02:52:45 +0200 Subject: [PATCH 119/140] [whisk] Update storage configuration in Node.js --- benchmarks/wrappers/openwhisk/nodejs/index.js | 7 +++++++ benchmarks/wrappers/openwhisk/nodejs/storage.js | 7 +++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/benchmarks/wrappers/openwhisk/nodejs/index.js b/benchmarks/wrappers/openwhisk/nodejs/index.js index a0a68a3d..1cea01df 100644 --- a/benchmarks/wrappers/openwhisk/nodejs/index.js +++ 
b/benchmarks/wrappers/openwhisk/nodejs/index.js @@ -1,6 +1,13 @@ const path = require('path'), fs = require('fs'); async function main(args) { + + var minio_args = ["MINIO_STORAGE_CONNECTION_URL", "MINIO_STORAGE_ACCESS_KEY", "MINIO_STORAGE_SECRET_KEY"]; + minio_args.forEach(function(arg){ + process.env[arg] = args[arg]; + delete args[arg]; + }); + var func = require('/function/function.js'); var begin = Date.now() / 1000; var start = process.hrtime(); diff --git a/benchmarks/wrappers/openwhisk/nodejs/storage.js b/benchmarks/wrappers/openwhisk/nodejs/storage.js index 3715aa91..1a57123c 100644 --- a/benchmarks/wrappers/openwhisk/nodejs/storage.js +++ b/benchmarks/wrappers/openwhisk/nodejs/storage.js @@ -8,10 +8,9 @@ const minio = require('minio'), class minio_storage { constructor() { - let minioConfig = JSON.parse(fs.readFileSync('/function/minioConfig.json')); - let address = minioConfig["url"]; - let access_key = minioConfig["access_key"]; - let secret_key = minioConfig["secret_key"]; + let address = process.env.MINIO_STORAGE_CONNECTION_URL; + let access_key = process.env.MINIO_STORAGE_ACCESS_KEY; + let secret_key = process.env.MINIO_STORAGE_SECRET_KEY; this.client = new minio.Client( { From 00df368670513c32b68fbc9e6d7784c8a2d1c868 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 02:53:16 +0200 Subject: [PATCH 120/140] [system] Correctly handle failed invocations that do not return all of invocation data --- sebs/faas/function.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sebs/faas/function.py b/sebs/faas/function.py index df59bccb..5b1bf748 100644 --- a/sebs/faas/function.py +++ b/sebs/faas/function.py @@ -218,6 +218,9 @@ def _http_invoke(self, payload: dict, url: str, verify_ssl: bool = True) -> Exec result = ExecutionResult.from_times(begin, end) result.times.http_startup = conn_time result.times.http_first_byte_return = receive_time + # OpenWhisk will not return id on a failure + if "request_id" not in output: + raise 
RuntimeError(f"Cannot process allocation with output: {output}") result.request_id = output["request_id"] # General benchmark output parsing result.parse_benchmark_output(output) From 2205e53ab6b4fe64d1b96c683831726aba9559a0 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 02:57:31 +0200 Subject: [PATCH 121/140] [whisk] Node.js functions --- .../nodejs/Dockerfile.function} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docker/{Dockerfile.run.openwhisk.nodejs => openwhisk/nodejs/Dockerfile.function} (100%) diff --git a/docker/Dockerfile.run.openwhisk.nodejs b/docker/openwhisk/nodejs/Dockerfile.function similarity index 100% rename from docker/Dockerfile.run.openwhisk.nodejs rename to docker/openwhisk/nodejs/Dockerfile.function From f4d75b513e12bd793a212330c835ec0c113d7f4f Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 03:02:42 +0200 Subject: [PATCH 122/140] [whisk] Update Node.js configuration --- config/systems.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/config/systems.json b/config/systems.json index 7a3c8621..bcd730f6 100644 --- a/config/systems.json +++ b/config/systems.json @@ -139,7 +139,6 @@ "3.7": "openwhisk/action-python-v3.7", "3.9": "openwhisk/action-python-v3.9" }, - "versions": ["3.7", "3.9"], "images": ["function"], "username": "docker_user", "deployment": { @@ -151,11 +150,9 @@ }, "nodejs": { "base_images": { - "8" : "openwhisk/action-nodejs-v8", "10" : "openwhisk/action-nodejs-v10", "12" : "openwhisk/action-nodejs-v12" }, - "versions": [ "8", "10", "12"], "images": ["function"], "username": "docker_user", "deployment": { From 37c64562bcd75d620a28d22df93d243f3262e8fd Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 12:54:32 +0200 Subject: [PATCH 123/140] [whisk] Move around all helper scripts for deploying OpenWhisk --- .../openwhisk}/couchdb-service.yaml | 0 .../openwhisk}/kind-cluster.yaml | 0 .../openwhisk}/mycluster_template.yaml | 0 tools/openwhisk_preparation.py | 154 
++++++++++-------- 4 files changed, 84 insertions(+), 70 deletions(-) rename {openwhisk => tools/openwhisk}/couchdb-service.yaml (100%) rename {openwhisk => tools/openwhisk}/kind-cluster.yaml (100%) rename {openwhisk => tools/openwhisk}/mycluster_template.yaml (100%) diff --git a/openwhisk/couchdb-service.yaml b/tools/openwhisk/couchdb-service.yaml similarity index 100% rename from openwhisk/couchdb-service.yaml rename to tools/openwhisk/couchdb-service.yaml diff --git a/openwhisk/kind-cluster.yaml b/tools/openwhisk/kind-cluster.yaml similarity index 100% rename from openwhisk/kind-cluster.yaml rename to tools/openwhisk/kind-cluster.yaml diff --git a/openwhisk/mycluster_template.yaml b/tools/openwhisk/mycluster_template.yaml similarity index 100% rename from openwhisk/mycluster_template.yaml rename to tools/openwhisk/mycluster_template.yaml diff --git a/tools/openwhisk_preparation.py b/tools/openwhisk_preparation.py index 73c8da8a..67ca4699 100644 --- a/tools/openwhisk_preparation.py +++ b/tools/openwhisk_preparation.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import logging import os import subprocess @@ -7,6 +9,7 @@ # Common utils + def run_check_process(cmd: str, **kwargs) -> None: env = os.environ.copy() env = {**env, **kwargs} @@ -22,9 +25,10 @@ def run_check_process(cmd: str, **kwargs) -> None: # helm utils + def install_helm() -> None: try: - logging.info('Installing helm...') + logging.info("Installing helm...") helm_package = subprocess.run( "curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3".split(), stdout=subprocess.PIPE, @@ -37,82 +41,86 @@ def install_helm() -> None: stderr=subprocess.DEVNULL, check=True, ) - logging.info('Helm has been installed') + logging.info("Helm has been installed") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install helm, reason: {}'.format(e)) + logging.error("Cannot install helm, reason: {}".format(e)) exit(1) def check_helm_installation() -> None: try: 
logging.info("Checking helm installation...") - run_check_process('helm version') + run_check_process("helm version") logging.info("helm is installed") except (subprocess.CalledProcessError, FileNotFoundError): - logging.error('helm is not installed, attempting to install...') + logging.error("helm is not installed, attempting to install...") install_helm() # kubectl utils + def install_kubectl(kubectl_version: str = "v1.18.0") -> None: try: - logging.info('Installing kubectl...') - home_path = os.environ['HOME'] - kubectl_path = '{}/.local/bin/kubectl'.format(home_path) - run_check_process("curl -L -o {} " - "https://storage.googleapis.com/kubernetes-release/release/{}/bin" - "/linux/amd64/kubectl".format(kubectl_path, kubectl_version)) + logging.info("Installing kubectl...") + home_path = os.environ["HOME"] + kubectl_path = "{}/.local/bin/kubectl".format(home_path) + run_check_process( + "curl -L -o {} " + "https://storage.googleapis.com/kubernetes-release/release/{}/bin" + "/linux/amd64/kubectl".format(kubectl_path, kubectl_version) + ) run_check_process("chmod +x {}".format(kubectl_path)) - logging.info('Kubectl has been installed') + logging.info("Kubectl has been installed") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install kubectl, reason: {}'.format(e)) + logging.error("Cannot install kubectl, reason: {}".format(e)) exit(1) def check_kubectl_installation() -> None: try: logging.info("Checking kubectl installation...") - run_check_process('kubectl version --client=true') + run_check_process("kubectl version --client=true") logging.info("kubectl is installed") - except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Kubectl is not installed, attempting to install...') + except (subprocess.CalledProcessError, FileNotFoundError): + logging.error("Kubectl is not installed, attempting to install...") install_kubectl() # kind utils + def install_kind(kind_version: str = "v0.8.1") -> None: try: 
- logging.info('Installing kind...') + logging.info("Installing kind...") env = os.environ.copy() - env['GO111MODULE'] = "on" + env["GO111MODULE"] = "on" subprocess.run( "go get sigs.k8s.io/kind@{}".format(kind_version).split(), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env, ) - logging.info('Kind has been installed') + logging.info("Kind has been installed") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install kind, reason: {}'.format(e)) + logging.error("Cannot install kind, reason: {}".format(e)) exit(1) def check_kind_installation() -> None: try: logging.info("Checking go installation...") - run_check_process('go version') + run_check_process("go version") logging.info("go is installed") try: logging.info("Checking kind installation...") - run_check_process('kind version') - logging.info('kind is installed') + run_check_process("kind version") + logging.info("kind is installed") except (subprocess.CalledProcessError, FileNotFoundError): - logging.warning('Cannot find kind, proceeding with installation') + logging.warning("Cannot find kind, proceeding with installation") install_kind() except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot find go, reason: {}'.format(e)) + logging.error("Cannot find go, reason: {}".format(e)) exit(1) @@ -121,11 +129,11 @@ def label_node(node: str, role: str) -> None: run_check_process("kubectl label node {} openwhisk-role={}".format(node, role)) try: - logging.info('Labelling nodes') - label_node('kind-worker', 'core') - label_node('kind-worker2', 'invoker') + logging.info("Labelling nodes") + label_node("kind-worker", "core") + label_node("kind-worker2", "invoker") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot label nodes, reason: {}'.format(e)) + logging.error("Cannot label nodes, reason: {}".format(e)) exit(1) @@ -133,8 +141,14 @@ def get_worker_ip(worker_node_name: str = "kind-worker") -> str: 
try: logging.info("Retrieving worker IP...") internal_ip_proc = subprocess.run( - ["kubectl", "get", "node", worker_node_name, "-o", - "go-template='{{ (index .status.addresses 0).address }}'"], + [ + "kubectl", + "get", + "node", + worker_node_name, + "-o", + "go-template='{{ (index .status.addresses 0).address }}'", + ], check=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, @@ -161,14 +175,14 @@ def create_kind_cluster() -> None: stderr=subprocess.DEVNULL, ) awk = subprocess.run( - ["awk", r'{print $2}'], + ["awk", r"{print $2}"], check=True, input=node_grep.stdout, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, ) - node_statuses = awk.stdout.decode('utf-8').split() - if all(node_status == 'Ready' for node_status in node_statuses): + node_statuses = awk.stdout.decode("utf-8").split() + if all(node_status == "Ready" for node_status in node_statuses): break time.sleep(1) except (subprocess.CalledProcessError, FileNotFoundError) as e: @@ -184,7 +198,7 @@ def check_kind_cluster() -> None: stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, ) - kind_clusters = set(kind_clusters_process.stdout.decode('utf-8').split()) + kind_clusters = set(kind_clusters_process.stdout.decode("utf-8").split()) if "kind" not in kind_clusters: logging.info("Creating kind cluster...") create_kind_cluster() @@ -194,34 +208,26 @@ def check_kind_cluster() -> None: def delete_cluster(): try: - logging.info('Deleting KinD cluster...') - run_check_process('kind delete cluster') - logging.info('KinD cluster deleted...') + logging.info("Deleting KinD cluster...") + run_check_process("kind delete cluster") + logging.info("KinD cluster deleted...") except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot delete cluster, reason: {}".format(e)) # openwhisk deployment utils -def check_wsk_installation() -> None: - try: - logging.info("Checking wsk installation...") - run_check_process('wsk') - logging.info("wsk is installed") - except 
(subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error(f'Cannot find wsk, reason: {e}') - exit(1) - def prepare_wsk() -> None: try: ip = get_worker_ip() + # default key auth = "23bc46b1-71f6-4ed5-8c54-816aa4f8c502:123zO3xZCLrMN6v2BKK1dXYFpXlPkccOFqm12CdAsMgRU4VrNZ9lyGVCGuMDGIwP" subprocess.run( f"wsk property set --apihost {ip} --auth {auth}", check=True, stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL + stderr=subprocess.DEVNULL, ) except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error(f"Cannot find wsk on system, reason: {e}") @@ -237,27 +243,30 @@ def expose_couchdb() -> None: def clone_openwhisk_chart() -> None: try: - run_check_process("git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube") + run_check_process( + "git clone git@github.com:apache/openwhisk-deploy-kube.git /tmp/openwhisk-deploy-kube" + ) except (subprocess.CalledProcessError, FileNotFoundError) as e: logging.error("Cannot clone openwhisk chart, reason: {}".format(e)) def prepare_openwhisk_config() -> None: worker_ip = get_worker_ip() - with open('openwhisk/mycluster_template.yaml', 'r') as openwhisk_config_template: + with open("openwhisk/mycluster_template.yaml", "r") as openwhisk_config_template: data = yaml.unsafe_load(openwhisk_config_template) - data['whisk']['ingress']['apiHostName'] = worker_ip - data['whisk']['ingress']['apiHostPort'] = 31001 - data['nginx']['httpsNodePort'] = 31001 - if not os.path.exists('/tmp/openwhisk-deploy-kube/mycluster.yaml'): - with open('/tmp/openwhisk-deploy-kube/mycluster.yaml', 'a+') as openwhisk_config: + data["whisk"]["ingress"]["apiHostName"] = worker_ip + data["whisk"]["ingress"]["apiHostPort"] = 31001 + data["nginx"]["httpsNodePort"] = 31001 + if not os.path.exists("/tmp/openwhisk-deploy-kube/mycluster.yaml"): + with open("/tmp/openwhisk-deploy-kube/mycluster.yaml", "a+") as openwhisk_config: openwhisk_config.write(yaml.dump(data, default_flow_style=False)) def 
deploy_openwhisk_on_k8s(namespace: str = "openwhisk") -> None: try: run_check_process( - "helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n {} --create-namespace -f " + "helm install owdev /tmp/openwhisk-deploy-kube/helm/openwhisk -n {} " + "--create-namespace -f " "/tmp/openwhisk-deploy-kube/mycluster.yaml".format(namespace) ) while True: @@ -272,8 +281,8 @@ def deploy_openwhisk_on_k8s(namespace: str = "openwhisk") -> None: stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, ) - install_packages_status = check_result.stdout.decode('utf-8').split()[2] - if install_packages_status == 'Completed': + install_packages_status = check_result.stdout.decode("utf-8").split()[2] + if install_packages_status == "Completed": break time.sleep(1) @@ -284,34 +293,33 @@ def deploy_openwhisk_on_k8s(namespace: str = "openwhisk") -> None: def get_openwhisk_url() -> str: ip = get_worker_ip() - return '{}:{}'.format(ip, 31001) + return "{}:{}".format(ip, 31001) def get_couchdb_url() -> str: ip = get_worker_ip() - return '{}:{}'.format(ip, 31201) + return "{}:{}".format(ip, 31201) def install_wsk() -> None: try: - logging.info('Installing wsk...') - home_path = os.environ['HOME'] - wsk_path = '{}/.local/bin/wsk'.format(home_path) + logging.info("Installing wsk...") + home_path = os.environ["HOME"] + wsk_path = "{}/.local/bin/wsk".format(home_path) subprocess.run("go get github.com/apache/openwhisk-cli".split()) run_check_process("go get -u github.com/jteeuwen/go-bindata/...") - instalation_dir = "{}/src/github.com/apache/openwhisk-cli".format(os.environ['GOPATH']) + instalation_dir = "{}/src/github.com/apache/openwhisk-cli".format(os.environ["GOPATH"]) def custom_subproces(comand): - subprocess.run(comand.split(), - cwd=instalation_dir, - check=True) + subprocess.run(comand.split(), cwd=instalation_dir, check=True) + custom_subproces("go-bindata -pkg wski18n -o wski18n/i18n_resources.go wski18n/resources") custom_subproces("go build -o wsk") run_check_process("ln -sf {}/wsk 
{}".format(instalation_dir, wsk_path)) run_check_process("chmod +x {}".format(wsk_path)) - logging.info('Wsk has been installed') + logging.info("Wsk has been installed") except (subprocess.CalledProcessError, FileNotFoundError) as e: - logging.error('Cannot install wsk, reason: {}'.format(e)) + logging.error("Cannot install wsk, reason: {}".format(e)) exit(1) @@ -320,9 +328,11 @@ def check_wsk_installation() -> None: logging.info("Checking wsk installation...") run_check_process("wsk") logging.info("Wsk is installed") - except (subprocess.CalledProcessError, FileNotFoundError) as e: + except (subprocess.CalledProcessError, FileNotFoundError): logging.info("Wsk is not installed, proceeding to install...") install_wsk() + + # mixup @@ -337,3 +347,7 @@ def initiate_all(): prepare_openwhisk_config() deploy_openwhisk_on_k8s() expose_couchdb() + + +if __name__ == "__main__": + initiate_all() From 5751551edceb0823cbdff73ec0334ac1f730b918 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 12:55:54 +0200 Subject: [PATCH 124/140] [system] Update system config --- config/example.json | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/config/example.json b/config/example.json index d978fced..dc4da9ad 100644 --- a/config/example.json +++ b/config/example.json @@ -6,7 +6,7 @@ "download_results": false, "runtime": { "language": "python", - "version": "3.6" + "version": "3.7" }, "type": "invocation-overhead", "perf-cost": { @@ -54,6 +54,18 @@ "project_name": "", "credentials": "" }, + "local": { + "storage": { + "address": "", + "mapped_port": -1, + "access_key": "", + "secret_key": "", + "instance_id": "", + "input_buckets": [], + "output_buckets": [], + "type": "minio" + } + }, "openwhisk": { "shutdownStorage": false, "removeCluster": false, @@ -64,6 +76,16 @@ "registry": "", "username": "", "password": "" + }, + "storage": { + "address": "", + "mapped_port": -1, + "access_key": "", + "secret_key": "", + "instance_id": "", 
+ "input_buckets": [], + "output_buckets": [], + "type": "minio" } } } From 36ea06bad0607a3126fe73b32b489498887ecdff Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 14:42:50 +0200 Subject: [PATCH 125/140] [system] Add version to package --- sebs/__init__.py | 9 ++++++--- sebs/version.py | 1 + 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 sebs/version.py diff --git a/sebs/__init__.py b/sebs/__init__.py index 6eceb356..0de36fa9 100644 --- a/sebs/__init__.py +++ b/sebs/__init__.py @@ -1,7 +1,10 @@ -from .sebs import SeBS # noqa -# from .aws import * # noqa -# from .azure import * # noqa +""" + SeBS +""" + +from .version import __version__ +from .sebs import SeBS # noqa from .cache import Cache # noqa from .benchmark import Benchmark # noqa diff --git a/sebs/version.py b/sebs/version.py new file mode 100644 index 00000000..1a72d32e --- /dev/null +++ b/sebs/version.py @@ -0,0 +1 @@ +__version__ = '1.1.0' From 8e7bcb08d55a578b24e19fbd10b566c9fab5b5c7 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 20:54:00 +0200 Subject: [PATCH 126/140] [system] Extend dockerignore to skip local code packages --- .dockerignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.dockerignore b/.dockerignore index a6790432..84416f19 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,3 +6,4 @@ config cache python-venv regression-* +*_code From ac4c1d5c662088d644c1431247c45ab75113d9ae Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Tue, 3 May 2022 20:55:03 +0200 Subject: [PATCH 127/140] [local] Update Python local images to new format --- config/systems.json | 2 +- docker/Dockerfile.build.local.python | 20 ---------------- docker/Dockerfile.run.local.python | 36 ---------------------------- docker/local/python/Dockerfile.build | 18 ++++++++++++++ docker/local/python/Dockerfile.run | 24 +++++++++++++++++++ docker/local/python/entrypoint.sh | 21 ++++++++++++++++ docker/local/python/server.py | 2 +- sebs/local/local.py | 11 ++++----- 
8 files changed, 70 insertions(+), 64 deletions(-) delete mode 100755 docker/Dockerfile.build.local.python delete mode 100755 docker/Dockerfile.run.local.python create mode 100755 docker/local/python/Dockerfile.build create mode 100755 docker/local/python/Dockerfile.run create mode 100755 docker/local/python/entrypoint.sh diff --git a/config/systems.json b/config/systems.json index bcd730f6..f1a1359e 100644 --- a/config/systems.json +++ b/config/systems.json @@ -10,7 +10,7 @@ "languages": { "python": { "base_images": { - "3.6": "python:3.6-slim", + "3.7": "python:3.7-slim", "3.8": "python:3.8-slim" }, "images": ["run", "build"], diff --git a/docker/Dockerfile.build.local.python b/docker/Dockerfile.build.local.python deleted file mode 100755 index 2dd6c0a4..00000000 --- a/docker/Dockerfile.build.local.python +++ /dev/null @@ -1,20 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ARG VERSION -ENV HOME=/home/${USER} -ENV PYTHON_VERSION=${VERSION} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN apt-get update\ - && apt-get install -y gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev\ - && apt-get purge -y --auto-remove -RUN mkdir -p /mnt/function && chown -R ${USER}:${USER} /mnt/function -USER ${USER}:${USER} - -COPY --chown=${USER}:${USER} docker/python_installer.sh installer.sh - -ENV SCRIPT_FILE=/mnt/function/package.sh -CMD /bin/bash installer.sh diff --git a/docker/Dockerfile.run.local.python b/docker/Dockerfile.run.local.python deleted file mode 100755 index 43b8f735..00000000 --- a/docker/Dockerfile.run.local.python +++ /dev/null @@ -1,36 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ENV HOME=/home/${USER} - -WORKDIR ${HOME} -# must be run as root for some reason -RUN deps=''\ - && apt-get update\ - # for route and sudo - && apt-get install -y curl net-tools sudo ${deps}\ - && apt-get purge -y --auto-remove ${deps}\ - && pip3 install cffi minio bottle -RUN useradd -u ${UID} -m ${USER}\ - # Let the 
user use sudo - && usermod -aG sudo ${USER}\ - # Set correct permission on home directory - && chown -R ${USER}:${USER} ${HOME}\ - # Enable non-password use of sudo - && echo "$USER ALL=(ALL:ALL) NOPASSWD: ALL" | tee /etc/sudoers.d/dont-prompt-$USER-for-password -RUN chown -R ${USER}:${USER} ${HOME} - - -USER ${USER}:${USER} -COPY --chown=${USER}:${USER} docker/local/run.sh . -COPY --chown=${USER}:${USER} docker/local/*.py ${HOME}/ -COPY --chown=${USER}:${USER} docker/local/python/*.py ${HOME}/ -COPY --chown=${USER}:${USER} docker/local/python/timeit.sh . -COPY --chown=${USER}:${USER} docker/local/python/runners.json . -# https://github.com/moby/moby/issues/35018 :-( -ADD --chown=docker_user:docker_user third-party/pypapi/pypapi ${HOME}/pypapi - -ENV PYTHONPATH=${HOME}/.python_packages/lib/site-packages:$PYTHONPATH - -RUN chmod +x ${HOME}/run.sh diff --git a/docker/local/python/Dockerfile.build b/docker/local/python/Dockerfile.build new file mode 100755 index 00000000..874b05a7 --- /dev/null +++ b/docker/local/python/Dockerfile.build @@ -0,0 +1,18 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} +ARG VERSION +ENV PYTHON_VERSION=${VERSION} + +RUN apt-get update\ + && apt-get install -y --no-install-recommends gcc build-essential python-dev libxml2 libxml2-dev zlib1g-dev gosu\ + && apt-get purge -y --auto-remove + +RUN mkdir -p /sebs/ +COPY docker/python_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/local/python/Dockerfile.run b/docker/local/python/Dockerfile.run new file mode 100755 index 00000000..20f7f1f8 --- /dev/null +++ b/docker/local/python/Dockerfile.run @@ -0,0 +1,24 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +RUN deps=''\ + && apt-get update\ + # for route and sudo + && apt-get install 
--no-install-recommends -y curl gosu net-tools sudo ${deps}\ + && apt-get purge -y --auto-remove ${deps}\ + && pip3 install cffi minio bottle + +RUN mkdir -p /sebs +COPY docker/local/run.sh /sebs/ +COPY docker/local/*.py /sebs/ +COPY docker/local/python/*.py /sebs/ +COPY docker/local/python/timeit.sh /sebs/ +COPY docker/local/python/runners.json /sebs/ +ADD third-party/pypapi/pypapi /sebs/pypapi +ENV PYTHONPATH=/sebs/.python_packages/lib/site-packages:$PYTHONPATH + +COPY docker/local/python/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh +RUN chmod +x /sebs/run.sh + +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/local/python/entrypoint.sh b/docker/local/python/entrypoint.sh new file mode 100755 index 00000000..3e569a03 --- /dev/null +++ b/docker/local/python/entrypoint.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +USER_ID=${CONTAINER_UID} +GROUP_ID=${CONTAINER_GID} +USER=${CONTAINER_USER} + +useradd --non-unique -m -u ${USER_ID} ${USER} +groupmod -g ${GROUP_ID} ${USER} +export HOME=/home/${USER} +echo "Running as ${USER}, with ${USER_ID} and ${GROUP_ID}" + +if [ ! 
-z "$CMD" ]; then + gosu ${USER} $CMD +fi + +chown -R ${USER}:${USER} /sebs/ +echo "$USER ALL=(ALL:ALL) NOPASSWD: ALL" | tee /etc/sudoers.d/dont-prompt-$USER-for-password +usermod -aG sudo ${USER} + +exec gosu ${USER} "$@" + diff --git a/docker/local/python/server.py b/docker/local/python/server.py index 268c2da3..e86327dc 100644 --- a/docker/local/python/server.py +++ b/docker/local/python/server.py @@ -6,7 +6,7 @@ import bottle from bottle import route, run, template, request -CODE_LOCATION='code' +CODE_LOCATION='/function' @route('/', method='POST') def flush_log(): diff --git a/sebs/local/local.py b/sebs/local/local.py index 602cc16e..5c403b87 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -137,9 +137,6 @@ def package_code( def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunction": - home_dir = os.path.join( - "/home", self._system_config.username(self.name(), code_package.language_name) - ) container_name = "{}:run.local.{}.{}".format( self._system_config.docker_repository(), code_package.language_name, @@ -151,12 +148,15 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc "MINIO_ADDRESS": self.config.resources.storage_config.address, "MINIO_ACCESS_KEY": self.config.resources.storage_config.access_key, "MINIO_SECRET_KEY": self.config.resources.storage_config.secret_key, + "CONTAINER_UID": str(os.getuid()), + "CONTAINER_GID": str(os.getgid()), + "CONTAINER_USER": self._system_config.username(self.name(), code_package.language_name) } container = self._docker_client.containers.run( image=container_name, - command=f"python3 server.py {self.DEFAULT_PORT}", + command=f"python3 /sebs/server.py {self.DEFAULT_PORT}", volumes={ - code_package.code_location: {"bind": os.path.join(home_dir, "code"), "mode": "ro"} + code_package.code_location: {"bind": "/function", "mode": "ro"} }, environment=environment, # FIXME: make CPUs configurable @@ -166,7 +166,6 @@ def create_function(self, code_package: 
Benchmark, func_name: str) -> "LocalFunc # required to access perf counters # alternative: use custom seccomp profile privileged=True, - user=os.getuid(), security_opt=["seccomp:unconfined"], network_mode="bridge", # somehow removal of containers prevents checkpointing from working? From 8849ecdfe3fa12eb4fe09cf4bb2e35a7db24d599 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 01:58:43 +0200 Subject: [PATCH 128/140] [storage] Correctly handle an exception --- sebs/storage/minio.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sebs/storage/minio.py b/sebs/storage/minio.py index bd10a859..6c79f05d 100644 --- a/sebs/storage/minio.py +++ b/sebs/storage/minio.py @@ -214,7 +214,10 @@ def _deserialize(cached_config: MinioConfig, cache_client: Cache, obj_type: Type obj._cfg = cached_config if cached_config.instance_id: instance_id = cached_config.instance_id - obj._storage_container = docker_client.containers.get(instance_id) + try: + obj._storage_container = docker_client.containers.get(instance_id) + except docker.errors.NotFound: + raise RuntimeError(f"Storage container {instance_id} does not exist!") else: obj._storage_container = None obj.input_buckets = cached_config.input_buckets From d6d83fc9d0dd1e06d916c5e86fe928e7e2d78efd Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 01:59:02 +0200 Subject: [PATCH 129/140] [local] Hide server architecture behind simple shell abstraction --- docker/local/{python => }/entrypoint.sh | 2 +- docker/local/python/Dockerfile.run | 3 ++- docker/local/python/run_server.sh | 3 +++ sebs/local/local.py | 10 +++++----- 4 files changed, 11 insertions(+), 7 deletions(-) rename docker/local/{python => }/entrypoint.sh (90%) create mode 100755 docker/local/python/run_server.sh diff --git a/docker/local/python/entrypoint.sh b/docker/local/entrypoint.sh similarity index 90% rename from docker/local/python/entrypoint.sh rename to docker/local/entrypoint.sh index 3e569a03..5451f551 100755 --- 
a/docker/local/python/entrypoint.sh +++ b/docker/local/entrypoint.sh @@ -5,7 +5,7 @@ GROUP_ID=${CONTAINER_GID} USER=${CONTAINER_USER} useradd --non-unique -m -u ${USER_ID} ${USER} -groupmod -g ${GROUP_ID} ${USER} +groupmod --non-unique -g ${GROUP_ID} ${USER} export HOME=/home/${USER} echo "Running as ${USER}, with ${USER_ID} and ${GROUP_ID}" diff --git a/docker/local/python/Dockerfile.run b/docker/local/python/Dockerfile.run index 20f7f1f8..84f9852e 100755 --- a/docker/local/python/Dockerfile.run +++ b/docker/local/python/Dockerfile.run @@ -12,12 +12,13 @@ RUN mkdir -p /sebs COPY docker/local/run.sh /sebs/ COPY docker/local/*.py /sebs/ COPY docker/local/python/*.py /sebs/ +COPY docker/local/python/run_server.sh /sebs/ COPY docker/local/python/timeit.sh /sebs/ COPY docker/local/python/runners.json /sebs/ ADD third-party/pypapi/pypapi /sebs/pypapi ENV PYTHONPATH=/sebs/.python_packages/lib/site-packages:$PYTHONPATH -COPY docker/local/python/entrypoint.sh /sebs/entrypoint.sh +COPY docker/local/entrypoint.sh /sebs/entrypoint.sh RUN chmod +x /sebs/entrypoint.sh RUN chmod +x /sebs/run.sh diff --git a/docker/local/python/run_server.sh b/docker/local/python/run_server.sh new file mode 100755 index 00000000..fa9a8229 --- /dev/null +++ b/docker/local/python/run_server.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +python3 /sebs/server.py "$@" diff --git a/sebs/local/local.py b/sebs/local/local.py index 5c403b87..ad18551e 100644 --- a/sebs/local/local.py +++ b/sebs/local/local.py @@ -150,14 +150,14 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LocalFunc "MINIO_SECRET_KEY": self.config.resources.storage_config.secret_key, "CONTAINER_UID": str(os.getuid()), "CONTAINER_GID": str(os.getgid()), - "CONTAINER_USER": self._system_config.username(self.name(), code_package.language_name) + "CONTAINER_USER": self._system_config.username( + self.name(), code_package.language_name + ), } container = self._docker_client.containers.run( image=container_name, - 
command=f"python3 /sebs/server.py {self.DEFAULT_PORT}", - volumes={ - code_package.code_location: {"bind": "/function", "mode": "ro"} - }, + command=f"/bin/bash /sebs/run_server.sh {self.DEFAULT_PORT}", + volumes={code_package.code_location: {"bind": "/function", "mode": "ro"}}, environment=environment, # FIXME: make CPUs configurable # FIXME: configure memory From 612307522e9815e5b095c19d91a84f7916528cfb Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 01:59:49 +0200 Subject: [PATCH 130/140] [system] Avoid failures on Docker images with existing group --- docker/entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 8cb341d7..c8e24cd4 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -5,7 +5,7 @@ GROUP_ID=${CONTAINER_GID} USER=${CONTAINER_USER} useradd --non-unique -m -u ${USER_ID} ${USER} -groupmod -g ${GROUP_ID} ${USER} +groupmod --non-unique -g ${GROUP_ID} ${USER} mkdir -p /mnt/function && chown -R ${USER}:${USER} /mnt/function export HOME=/home/${USER} echo "Running as ${USER}, with ${USER_ID} and ${GROUP_ID}" From dd6b79babc34f03c49795d2c43fc025c77c641ea Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 02:02:34 +0200 Subject: [PATCH 131/140] [local] Local server for Node.js --- config/systems.json | 3 +- docker/Dockerfile.build.local.nodejs | 14 ---------- docker/Dockerfile.run.local.nodejs | 33 ---------------------- docker/local/nodejs/Dockerfile.build | 16 +++++++++++ docker/local/nodejs/Dockerfile.run | 27 ++++++++++++++++++ docker/local/nodejs/package.json | 2 -- docker/local/nodejs/run_server.sh | 3 ++ docker/local/nodejs/server.js | 42 ++++++++++++++++++++++++++++ 8 files changed, 90 insertions(+), 50 deletions(-) delete mode 100755 docker/Dockerfile.build.local.nodejs delete mode 100755 docker/Dockerfile.run.local.nodejs create mode 100755 docker/local/nodejs/Dockerfile.build create mode 100755 docker/local/nodejs/Dockerfile.run 
create mode 100755 docker/local/nodejs/run_server.sh create mode 100644 docker/local/nodejs/server.js diff --git a/config/systems.json b/config/systems.json index f1a1359e..c38f1233 100644 --- a/config/systems.json +++ b/config/systems.json @@ -22,7 +22,8 @@ }, "nodejs": { "base_images": { - "13.6": "node:13.6-slim" + "12": "node:12-slim", + "14": "node:14-slim" }, "images": ["run", "build"], "username": "docker_user", diff --git a/docker/Dockerfile.build.local.nodejs b/docker/Dockerfile.build.local.nodejs deleted file mode 100755 index 64ecd75e..00000000 --- a/docker/Dockerfile.build.local.nodejs +++ /dev/null @@ -1,14 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ENV HOME=/home/${USER} - -RUN useradd --non-unique -u $UID ${USER} -WORKDIR ${HOME} -RUN apt-get update\ - && apt-get install -y zip\ - && apt-get purge -y --auto-remove -USER ${USER}:${USER} - -CMD cd /mnt/function && npm install && rm -rf package-lock.json diff --git a/docker/Dockerfile.run.local.nodejs b/docker/Dockerfile.run.local.nodejs deleted file mode 100755 index f6dc1201..00000000 --- a/docker/Dockerfile.run.local.nodejs +++ /dev/null @@ -1,33 +0,0 @@ -ARG BASE_IMAGE -FROM ${BASE_IMAGE} -ARG USER -ARG UID -ENV HOME=/home/${USER} - -WORKDIR ${HOME} -RUN deps=''\ - && apt-get update\ - && apt-get install -y curl net-tools python3 sudo ${deps}\ - && apt-get purge -y --auto-remove ${deps} - -RUN useradd --non-unique -u ${UID} -m ${USER}\ - # Set correct permission on home directory - && chown -R ${USER}:${USER} ${HOME}\ - # Enable non-password use of sudo - && echo "$USER ALL=(ALL:ALL) NOPASSWD: ALL" | tee /etc/sudoers.d/dont-prompt-$USER-for-password - -USER ${USER}:${USER} -COPY --chown=${USER}:${USER} docker/local/*.py ${HOME}/ -COPY --chown=${USER}:${USER} docker/local/run.sh . -COPY --chown=${USER}:${USER} docker/local/nodejs/*.js ${HOME}/ -COPY --chown=${USER}:${USER} docker/local/nodejs/timeit.sh . -COPY --chown=${USER}:${USER} docker/local/nodejs/runners.json . 
-COPY --chown=${USER}:${USER} docker/local/nodejs/package.json . -# must be run as root for some reason -# minio - minio storage SDK -# strftime - format timestamp easily -# csv-writer - export csv -RUN npm install - -# pypapi dependnecy -RUN chmod +x ${HOME}/run.sh diff --git a/docker/local/nodejs/Dockerfile.build b/docker/local/nodejs/Dockerfile.build new file mode 100755 index 00000000..1ba18a49 --- /dev/null +++ b/docker/local/nodejs/Dockerfile.build @@ -0,0 +1,16 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +RUN apt-get update\ + && apt-get install -y --no-install-recommends zip gosu\ + && apt-get purge -y --auto-remove + +RUN mkdir -p /sebs/ +COPY docker/nodejs_installer.sh /sebs/installer.sh +COPY docker/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh + +# useradd and groupmod is installed in /usr/sbin which is not in PATH +ENV SCRIPT_FILE=/mnt/function/package.sh +CMD /bin/bash /sebs/installer.sh +ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/local/nodejs/Dockerfile.run b/docker/local/nodejs/Dockerfile.run new file mode 100755 index 00000000..4f2f604c --- /dev/null +++ b/docker/local/nodejs/Dockerfile.run @@ -0,0 +1,27 @@ +ARG BASE_IMAGE +FROM ${BASE_IMAGE} + +RUN deps=''\ + && apt-get update\ + && apt-get install -y --no-install-recommends curl net-tools gosu python3 sudo ${deps}\ + && apt-get purge -y --auto-remove ${deps} + +RUN mkdir -p /sebs +RUN cd /sebs/ && npm install -g uuid strftime express minio +# NODE_PATH=$(npm root --quiet -g) +# https://github.com/moby/moby/issues/29110 +ENV NODE_PATH=/usr/local/lib/node_modules + +COPY docker/local/*.py /sebs/ +COPY docker/local/run.sh /sebs/ +COPY docker/local/nodejs/*.js /sebs/ +COPY docker/local/nodejs/run_server.sh /sebs/ +COPY docker/local/nodejs/timeit.sh /sebs/ +COPY docker/local/nodejs/runners.json /sebs/ +COPY docker/local/nodejs/package.json /sebs/ + +COPY docker/local/entrypoint.sh /sebs/entrypoint.sh +RUN chmod +x /sebs/entrypoint.sh +RUN chmod +x /sebs/run.sh + 
+ENTRYPOINT ["/sebs/entrypoint.sh"] diff --git a/docker/local/nodejs/package.json b/docker/local/nodejs/package.json index 219ee514..635c8b69 100644 --- a/docker/local/nodejs/package.json +++ b/docker/local/nodejs/package.json @@ -1,7 +1,5 @@ { "dependencies": { - "csv-writer": "^1.5.0", - "glob": "^7.1.6", "minio": "^7.0.13", "strftime": "^0.10.0", "uuid": "^3.4.0" diff --git a/docker/local/nodejs/run_server.sh b/docker/local/nodejs/run_server.sh new file mode 100755 index 00000000..c257e1fb --- /dev/null +++ b/docker/local/nodejs/run_server.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +node /sebs/server.js "$@" diff --git a/docker/local/nodejs/server.js b/docker/local/nodejs/server.js new file mode 100644 index 00000000..b40696d7 --- /dev/null +++ b/docker/local/nodejs/server.js @@ -0,0 +1,42 @@ +const http = require('http'), + strftime = require('strftime'), + express = require('express'), + f = require('/function/function/function'); +//import { v4 as uuidv4 } from 'uuid'; +const { v4: uuidv4 } = require('uuid'); + + +var app = express(); +app.use(express.json()); + +app.post('/', function (req, res) { + + let begin = Date.now(); + let ret = f.handler(req.body); + ret.then((func_res) => { + + let end = Date.now(); + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({ + begin: strftime('%s.%L', new Date(begin)), + end: strftime('%s.%L', new Date(end)), + request_id: uuidv4(), + is_cold: false, + result: { + output: func_res + } + })); + }, + (reason) => { + console.log('Function invocation failed!'); + console.log(reason); + process.exit(1); + } + ); +}); + +app.listen(port=process.argv[2], function () { + console.log(`Server listening on port ${process.argv[2]}.`); +}); + + From 7ffedfa985ae4d589f5b76419b9d289ab2d39278 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 02:08:08 +0200 Subject: [PATCH 132/140] [benchmarks] Update dependency to work with Node 14 --- benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json index a284651f..774a1492 100644 --- a/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json +++ b/benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json @@ -5,6 +5,6 @@ "author": "", "license": "", "dependencies": { - "sharp": "^0.23.4" + "sharp": "^0.25" } } From f96df990af4adce542e492935d6d9bc616403242 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Wed, 4 May 2022 02:08:20 +0200 Subject: [PATCH 133/140] [system] Linting --- sebs/__init__.py | 3 +-- sebs/version.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sebs/__init__.py b/sebs/__init__.py index 0de36fa9..b92b9f25 100644 --- a/sebs/__init__.py +++ b/sebs/__init__.py @@ -1,9 +1,8 @@ - """ SeBS """ -from .version import __version__ +from .version import __version__ # noqa from .sebs import SeBS # noqa from .cache import Cache # noqa diff --git a/sebs/version.py b/sebs/version.py index 1a72d32e..6849410a 100644 --- a/sebs/version.py +++ b/sebs/version.py @@ -1 +1 @@ -__version__ = '1.1.0' +__version__ = "1.1.0" From 70e77e9fc8864a19f20bea60295b5d74e432e46d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 May 2022 20:21:52 +0200 Subject: [PATCH 134/140] [system] Remove old option from the installation script --- install.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/install.py b/install.py index f98c2a29..0be56542 100755 --- a/install.py +++ b/install.py @@ -66,9 +66,8 @@ def execute(cmd): if args.local: print("Install Python dependencies for local") execute(". 
{}/bin/activate && pip3 install -r requirements.local.txt".format(env_dir)) - if not args.dont_rebuild_docker_images: - print("Initialize Docker image for local storage.") - execute("docker pull minio/minio:latest") + print("Initialize Docker image for local storage.") + execute("docker pull minio/minio:latest") print("Initialize git submodules") execute("git submodule update --init --recursive") From 3cdbdc9d0786efc6644111742e3f62af7eff911d Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 May 2022 20:50:10 +0200 Subject: [PATCH 135/140] [aws] Implement waiting for changes in function states --- sebs/aws/aws.py | 37 ++++++++++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 9 deletions(-) diff --git a/sebs/aws/aws.py b/sebs/aws/aws.py index 7442bb97..6c34af90 100644 --- a/sebs/aws/aws.py +++ b/sebs/aws/aws.py @@ -217,8 +217,7 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LambdaFun Timeout=timeout, Code=code_config, ) - # url = self.create_http_trigger(func_name, None, None) - # print(url) + lambda_function = LambdaFunction( func_name, code_package.benchmark, @@ -230,6 +229,8 @@ def create_function(self, code_package: Benchmark, func_name: str) -> "LambdaFun code_bucket, ) + self.wait_function_active(lambda_function) + # Add LibraryTrigger to a new function from sebs.aws.triggers import LibraryTrigger @@ -280,15 +281,15 @@ def update_function(self, function: Function, code_package: Benchmark): self.client.update_function_code( FunctionName=name, S3Bucket=bucket, S3Key=code_package_name ) - self.logging.info( - f"Updated code of {name} function. " - "Sleep 5 seconds before updating configuration to avoid cloud errors." - ) - time.sleep(5) + self.wait_function_updated(function) + self.logging.info(f"Updated code of {name} function. 
") # and update config self.client.update_function_configuration( FunctionName=name, Timeout=function.config.timeout, MemorySize=function.config.memory ) + self.wait_function_updated(function) + self.logging.info(f"Updated configuration of {name} function. ") + self.wait_function_updated(function) self.logging.info("Published new function code") def update_function_configuration(self, function: Function, benchmark: Benchmark): @@ -298,6 +299,8 @@ def update_function_configuration(self, function: Function, benchmark: Benchmark Timeout=function.config.timeout, MemorySize=function.config.memory, ) + self.wait_function_updated(function) + self.logging.info(f"Updated configuration of {function.name} function. ") @staticmethod def default_function_name(code_package: Benchmark) -> str: @@ -512,6 +515,22 @@ def enforce_cold_start(self, functions: List[Function], code_package: Benchmark) self.cold_start_counter += 1 for func in functions: self._enforce_cold_start(func) - import time + self.logging.info("Sent function updates enforcing cold starts.") + for func in functions: + lambda_function = cast(LambdaFunction, func) + self.wait_function_updated(lambda_function) + self.logging.info("Finished function updates enforcing cold starts.") + + def wait_function_active(self, func: LambdaFunction): + + self.logging.info("Waiting for Lambda function to be created...") + waiter = self.client.get_waiter("function_active_v2") + waiter.wait(FunctionName=func.name) + self.logging.info("Lambda function has been created.") + + def wait_function_updated(self, func: LambdaFunction): - time.sleep(5) + self.logging.info("Waiting for Lambda function to be updated...") + waiter = self.client.get_waiter("function_updated_v2") + waiter.wait(FunctionName=func.name) + self.logging.info("Lambda function has been updated.") From dc9fd80cee15899c574082301de5ec5008a88910 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Fri, 6 May 2022 20:58:09 +0200 Subject: [PATCH 136/140] [experiments] Improve 
output --- sebs/experiments/perf_cost.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/sebs/experiments/perf_cost.py b/sebs/experiments/perf_cost.py index 3fc81482..36cde660 100644 --- a/sebs/experiments/perf_cost.py +++ b/sebs/experiments/perf_cost.py @@ -87,7 +87,7 @@ def run(self): def compute_statistics(self, times: List[float]): mean, median, std, cv = basic_stats(times) - self.logging.info(f"Mean {mean}, median {median}, std {std}, CV {cv}") + self.logging.info(f"Mean {mean} [ms], median {median} [ms], std {std}, CV {cv}") for alpha in [0.95, 0.99]: ci_interval = ci_tstudents(alpha, times) interval_width = ci_interval[1] - ci_interval[0] @@ -173,15 +173,11 @@ def _run_configuration( ret = res.get() if first_iteration: continue - if (run_type == PerfCost.RunType.COLD and not ret.stats.cold_start) or ( - run_type == PerfCost.RunType.WARM and ret.stats.cold_start - ): - self.logging.info( - f"Invocation {ret.request_id} " - f"cold: {ret.stats.cold_start} " - f"on experiment {run_type.str()}!" - ) + if run_type == PerfCost.RunType.COLD and not ret.stats.cold_start: + self.logging.info(f"Invocation {ret.request_id} is not cold!") incorrect.append(ret) + elif run_type == PerfCost.RunType.WARM and ret.stats.cold_start: + self.logging.info(f"Invocation {ret.request_id} is cold!") else: result.add_invocation(self._function, ret) colds_count += ret.stats.cold_start @@ -190,14 +186,15 @@ def _run_configuration( except Exception as e: error_count += 1 error_executions.append(str(e)) - self.logging.info( - f"Processed {samples_gathered} samples out of {repetitions}," - f"{error_count} errors" - ) samples_generated += invocations if first_iteration: self.logging.info( - f"Processed {samples_gathered} warm-up samples, ignore results." + f"Processed {samples_gathered} warm-up samples, ignoring these results." 
+ ) + else: + self.logging.info( + f"Processed {samples_gathered} samples out of {repetitions}," + f" {error_count} errors" ) first_iteration = False From bad897290adc78b47b23d9e9e60093e83f7113e5 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 30 May 2022 11:08:34 +0200 Subject: [PATCH 137/140] [system] Update documentation --- README.md | 198 +++++++--------------------------------------- docs/overview.png | Bin 0 -> 114429 bytes 2 files changed, 28 insertions(+), 170 deletions(-) create mode 100644 docs/overview.png diff --git a/README.md b/README.md index a45dc330..007cd924 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,3 @@ -# SeBS: Serverless Benchmark Suite - -**FaaS benchmarking suite for serverless functions with automatic build, deployment, and measurements.** [![CircleCI](https://circleci.com/gh/spcl/serverless-benchmarks.svg?style=shield)](https://circleci.com/gh/spcl/serverless-benchmarks) ![Release](https://img.shields.io/github/v/release/spcl/serverless-benchmarks) @@ -8,14 +5,21 @@ ![GitHub issues](https://img.shields.io/github/issues/spcl/serverless-benchmarks) ![GitHub pull requests](https://img.shields.io/github/issues-pr/spcl/serverless-benchmarks) -SeBS is a diverse suite of FaaS benchmarks that allows an automatic performance analysis of +# SeBS: Serverless Benchmark Suite + +**FaaS benchmarking suite for serverless functions with automatic build, deployment, and measurements.** + +![Overview of SeBS features and components.](docs/overview.png) + +SeBS is a diverse suite of FaaS benchmarks that allows automatic performance analysis of commercial and open-source serverless platforms. We provide a suite of -[benchmark applications](#benchmark-applications) and [experiments](#experiments), +[benchmark applications](#benchmark-applications) and [experiments](#experiments) and use them to test and evaluate different components of FaaS systems. 
See the [installation instructions](#installation) to learn how to configure SeBS to use selected cloud services and [usage instructions](#usage) to automatically launch experiments in the cloud! -SeBS provides support for automatic deployment and invocation of benchmarks on + +SeBS provides support for **automatic deployment** and invocation of benchmarks on commercial and black-box platforms [AWS Lambda](https://aws.amazon.com/lambda/), [Azure Functions](https://azure.microsoft.com/en-us/services/functions/), @@ -34,7 +38,19 @@ Did you encounter troubles with installing and using SeBS? Or do you want to use SeBS in your work and you need new features? Feel free to reach us through GitHub issues or by writing to . -### Paper + +For more information on how to configure, use and extend SeBS, see our +documentation: + +* [How to use SeBS?](docs/usage.md) +* [Which benchmark applications are offered?](docs/benchmarks.md) +* [Which experiments can be launched to evaluate FaaS platforms?](docs/experiment.md) +* [How to configure serverless platforms?](docs/platforms.md) +* [How SeBS builds and deploys functions?](docs/build.md) +* [How SeBS package is designed?](docs/design.md) +* [How to extend SeBS with new benchmarks, experiments, and platforms?](docs/modularity.md) + +### Publication When using SeBS, please cite our [Middleware '21 paper](https://dl.acm.org/doi/abs/10.1145/3464298.3476133). An extended version of our paper is [available on arXiv](https://arxiv.org/abs/2012.14132), and you can @@ -60,28 +76,11 @@ You can cite our software repository as well, using the citation button on the r } ``` -## Benchmark Applications - -For details on benchmark selection and their characterization, please refer to [our paper](#paper). - -| Type | Benchmark | Languages | Description | -| :--- | :---: | :---: | :---: | -| Webapps | 110.dynamic-html | Python, Node.js | Generate dynamic HTML from a template. 
| -| Webapps | 120.uploader | Python, Node.js | Uploader file from provided URL to cloud storage. | -| Multimedia | 210.thumbnailer | Python, Node.js | Generate a thumbnail of an image. | -| Multimedia | 220.video-processing | Python | Add a watermark and generate gif of a video file. | -| Utilities | 311.compression | Python | Create a .zip file for a group of files in storage and return to user to download. | -| Utilities | 504.dna-visualization | Python | Creates a visualization data for DNA sequence. | -| Inference | 411.image-recognition | Python | Image recognition with ResNet and pytorch. | -| Scientific | 501.graph-pagerank | Python | PageRank implementation with igraph. | -| Scientific | 501.graph-mst | Python | Minimum spanning tree (MST) implementation with igraph. | -| Scientific | 501.graph-bfs | Python | Breadth-first search (BFS) implementation with igraph. | - ## Installation Requirements: - Docker (at least 19) -- Python 3.6+ with: +- Python 3.7+ with: - pip - venv - `libcurl` and its headers must be available on your system to install `pycurl` @@ -92,7 +91,7 @@ Requirements: To install the benchmarks with a support for all platforms, use: ``` -./install.py --aws --azure --gcp --local +./install.py --aws --azure --gcp --openwhisk --local ``` It will create a virtual environment in `python-virtualenv`, install necessary Python @@ -106,153 +105,12 @@ virtual environment: Now you can deploy serverless experiments :-) The installation of additional platforms is controlled with the `--platform` and `--no-platform` -switches. Currently, the default behavior for `install.py` is to install only the local -environment. +switches. Currently, the default behavior for `install.py` is to install only the +local environment. **Make sure** that your Docker daemon is running and your user has sufficient permissions to use it. Otherwise you might see a lot of "Connection refused" and "Permission denied" errors when using SeBS. 
-To verify the correctness of installation, you can use [our regression testing](#regression). - -## Usage - -SeBS has three basic commands: `benchmark`, `experiment`, and `local`. -For each command you can pass `--verbose` flag to increase the verbosity of the output. -By default, all scripts will create a cache in directory `cache` to store code with -dependencies and information on allocated cloud resources. -Benchmarks will be rebuilt after a change in source code is detected. -To enforce redeployment of code and benchmark input please use flags `--update-code` -and `--update-storage`, respectively. -**Note:** the cache does not support updating cloud region. If you want to deploy benchmarks -to a new cloud region, then use a new cache directory. - -### Benchmark - -This command is used to build, deploy, and execute serverless benchmark in cloud. -The example below invokes the benchmark `110.dynamic-html` on AWS via the standard HTTP trigger. - -``` -./sebs.py benchmark invoke 110.dynamic-html test --config config/example.json --deployment aws --verbose -``` - -To configure your benchmark, change settings in the config file or use command-line options. -The full list is available by running `./sebs.py benchmark invoke --help`. - -### Regression - -Additionally, we provide a regression option to execute all benchmarks on a given platform. -The example below demonstrates how to run the regression suite with `test` input size on AWS. - -``` -./sebs.py benchmark regression test --config config/example.json --deployment aws -``` - -The regression can be executed on a single benchmark as well: - -``` -./sebs.py benchmark regression test --config config/example.json --deployment aws --benchmark-name 120.uploader -``` - -### Experiment - -This command is used to execute benchmarks described in the paper. 
The example below runs the experiment **perf-cost**: - -``` -./sebs.py experiment invoke perf-cost --config config/example.json --deployment aws -``` - -The configuration specifies that benchmark **110.dynamic-html** is executed 50 times, with 50 concurrent invocations, and both cold and warm invocations are recorded. - -```json -"perf-cost": { - "benchmark": "110.dynamic-html", - "experiments": ["cold", "warm"], - "input-size": "test", - "repetitions": 50, - "concurrent-invocations": 50, - "memory-sizes": [128, 256] -} -``` - -To download cloud metrics and process the invocations into a .csv file with data, run the process construct - -``` -./sebs.py experiment process perf-cost --config example.json --deployment aws -``` - -### Local - -In addition to the cloud deployment, we provide an opportunity to launch benchmarks locally with the help of [minio](https://min.io/) storage. -This allows us to conduct debugging and a local characterization of the benchmarks. - -To launch Docker containers, use the following command - this example launches benchmark `110.dynamic-html` with size `test`: - -``` -./sebs.py local start 110.dynamic-html test out.json --config config/example.json --deployments 1 -``` - -The output file `out.json` will contain the information on containers deployed and the endpoints that can be used to invoke functions: - -``` -{ - "functions": [ - { - "benchmark": "110.dynamic-html", - "hash": "5ff0657337d17b0cf6156f712f697610", - "instance_id": "e4797ae01c52ac54bfc22aece1e413130806165eea58c544b2a15c740ec7d75f", - "name": "110.dynamic-html-python-128", - "port": 9000, - "triggers": [], - "url": "172.17.0.3:9000" - } - ], - "inputs": [ - { - "random_len": 10, - "username": "testname" - } - ] -} -``` - -In our example, we can use `curl` to invoke the function with provided input: - -``` -curl 172.17.0.3:9000 --request POST --data '{"random_len": 10,"username": "testname"}' --header 'Content-Type: application/json' -``` - -To stop containers, you can use 
the following command: - -``` -./sebs.py local stop out.json -``` - -The stopped containers won't be automatically removed unless the option `--remove-containers` has been passed to the `start` command. - -## Experiments - -For details on experiments and methodology, please refer to [our paper](#paper). - -#### Performance & cost - -Invokes given benchmark a selected number of times, measuring the time and cost of invocations. -Supports `cold` and `warm` invocations with a selected number of concurrent invocations. -In addition, to accurately measure the overheads of Azure Function Apps, we offer `burst` and `sequential` invocation type that doesn't distinguish -between cold and warm startups. - -#### Network ping-pong - -Measures the distribution of network latency between benchmark driver and function instance. - -#### Invocation overhead - -The experiment performs the clock drift synchronization protocol to accurately measure the startup time of a function by comparing -benchmark driver and function timestamps. - -#### Eviction model - -Executes test functions multiple times, with varying size, memory and runtime configurations, to test for how long function instances stay alive. -The result helps to estimate the analytical models describing cold startups. -Currently supported only on AWS. +To verify the correctness of installation, you can use [our regression testing](docs/usage.md#regression). 
## Authors diff --git a/docs/overview.png b/docs/overview.png new file mode 100644 index 0000000000000000000000000000000000000000..fc47c3e5260590ff48ca1efc34c0c5fd54682739 GIT binary patch literal 114429 zcmd3OcR1Gn`?jV^3oS&*j3_&MmQ7@@viFu5EtQl#vPX&tA!VyjX0kU$wve9%4K@7L>kUDtV@*LmFms>+uR?4#UAL_~BzPF7lth-g$Gj%XF zq%Z#GE%2KNv!$!6qaY`zySqDwJ2!`evjyk*3l}bMp5x-=;yR0GoOSWEcfICu*52hb z-eDi@-}jI&?znlXW`HhMx^pC5}XmEY{L#Pta@#J zlM3sp`#fWAzA)HBQK8K~w(W1s5E0$9c2(W=pPz{`r9%Gu^UWQ5^8fp1MgdwcsKpM}RMDRi0txlmKO+U-V9 z7Z(?wJbBXH-MzZHT3uaz=#;3>+Pn<=v4*V=6H|g0F7P@G_Se+ZG&N-_#qlKwxgXoJ zGXp1@@c3G=c5Sb!MK%64j-Qi(;cnLcg!p*#=0p*bkNXuA6s~aoecHRO{SwDA*6&JQ zGd2DGOd)!{{GuCBKlS(-kgr#cS)ebui*iI81WMZ||_ z8WTm<@d3Qc0jDoM67ZZ)yLD^ZnNxqC^fl`?ElIo1`}glFH-Ar$dY4RCurXI z$5h+chnb9mVJac@?lh__*p<2LS*e_P*fhi6dFO}3?H>%FBB^(lJw zO7#{eQPR+>Yf@TTU3_-E-90_L_Wj)-K743zzoMej{O+AN1KD2x53Q}OfByWLnVEV0 z`n5`uXiar>ndiduvNHXY0T(Byz5Dh}4VDW99r=49;s*Y>xm{R%yS_I!wl+o9mDt(Y znN$*m?Rt3*9Xccvtjj4Z z?DOq|e`si^Et$}T3raCuHJ?7EOWi+s@7}#LzOI^@o&F@pZ{NA|bFe%vI{K4&_zU&) z+3vim#>V}`|Mnv}2>bE65BK~j_*65~v+V32QD^l&KBi(myhmW@aeRC{uTAHlr7k1M z6jgg;1%-y%+FJ9b*l)l79ffwrJ^44^oZ;jgD)(NwcYtc&zI{*BmuGrh#u{UA85%3A zp@=^9bY*Ix>oWtT?v1Pi{r&j&wo-S8lb8Ok$4$W<634!1Hbip{y|yBEp>%Osz$+i0 zypYf2{t}U|m2242(&DT>Smb2#_1!Ijh=1GZcIRLw(Q9QVr@YF_tsrWlC*cQ9oTC*K zEV*}@U6`i1ww8r~;d^f7rgcl|E|+==ZgcF&ZsLDikZk`>_ShG$qeqWQ-P^zS;6ZcQ zwk)mOA-}D1o35O~(kwGGvrj?PefU5%H8O)(CZ*VuB3|u%eN>nJNw5JPw1xe0-hn-VqT~{tGjOYYZfo_Ys*j zfydj*vvPB{epYT-+urs6G%`6c5ijE1+0YQk@qT^fr&hME|BeHHUzJ>bE^OE9s)?4T z%}HV^E3>W76h=A5U(%J8m3^qHVjXI2Y4Kc~2u)9CK1e9{$g_MOu$0>`1=BnWd^IsK z(U>5l75MSy&Rx5TOG?x#7|8;BZrm`c^nHB#Kk@b$AHXvZc$~AWynIq~ya#)}U@Ucp zEU?C?!beVCzP`Tx3@hu4oE$zrzBl>#&aNn5j$c|XGb!q5Xe|ABJ7iew>_0zXKTsm# zvl`MoKJsOUma&OR@F}sx@Nko>SHF&q^7{VqXy5zw$gX!U9#|6-sS@wh5q}yP6(uMr zXl`MlULj=pB^ew2f|JJSdu=GTeFKr1Z$vJd!$h<$1|zDmwyncT1=$w6A$0m zSQ&C9jmj}7O%(Gpq$WLTytZ6S;XVlO^$u6#{$KWoflQtLL#C%*@PUVnbiPe8C!h{`|SBO3KJ+ zi%qz+BC$6kCx0Rp{&P@ir4wg)|A7M?b18wx&i0nNb2d~v 
z43_B>T5G7Ps>;efxN~R6y2@m4;q;%i1su?`>z$|oNSWQ`-tIZ_i@&~AB_|Ws*v#C# zz9K6z-&1r9K?d-gl zXD-PT4ug`#PpABj96zpJ!FG7h{{8!rW@mo<(0gnD1CQb1+wwcU z3@WFkrl$IBZMIsBbLtmey>{)^cqS1~^a*kGfl9xmS$H&-xYF zmZrP5#*_Sx{4RR=lAVo>t-e2&&n`API^8Q^u*|cqz(OTT=1WJ%#<%;YP#h1OvK;Lk zwC~CpKzU+2b7o51Zs*ROj(xBA?faiR*(!1xdGIqTGO{D@4gLA^p`%}Ok!^5#QrtRt z+%}JlMyay!oI9tQq4aBhUeJB^Wahzi`b+}_q0r-;hP8vF)iFDg^Inx4w#@O{T)RK- zhMI)x_`)Ov*Zpp7VLXy5T~}B4H^Q-P*F6>;9i66F-oW|n@87?Z9OuaJ&1$Qv8bQ3V zx1nzT7$}Y2V4=xD!e+_qz`7o1*Zw*-wmi{t+AjA%NncBBcnVXg`_IJEQg2z=J#p19 zUc6{aSFzAOL?ga{jgE?rW_w-t^iuG}V44ja)MH1FrflE#I)^+op+;r4CR3HAdcmHk za2K+@s+E;hG^f#XADfm`nW@$4JHP`XUJ-Qfjr=y;?tgrDYX_r2a&9hXgRibJszp>( zRD?r zb?*S%*|Urlwc`s01_s{9xlb=0!pZK*vqHUH2jhrQI{Lv&zw{{lzZFjvf2``}g)+$tcRZcI^`QLd3h@CI2Q9**^b@ii)+hwR{Ip zFRvg9?t2R=WysMpJp~jw`c(Y(iu_Y)$;q_z^pf*i`T6;%y&r385a@Yp`gQw@5)%{i z-H)R(d*goU!kO7F$0N%E(rIXFp2@S5lMA?>a2AyamE}=zutR@QUR3ADmpTPN87G#{ ziHiCTl(@BuVLvuUg2h@H#}?M81zlGuDfflS^4N5;51;2ccTPLcxF-IB%ggNSfT1tb zUAe@#eqv$;!xX)45^*YAHI1-G$f`VcLqbBr)^-NC$9||nX{J|FQj*WMTijNKs6tn_ zJ%GpZ%h%e_(?agEf96L60|Ml>Ha9484kZbD(kGNNGBGioJ=@gYe&jw?G7~Ce>;&Kh z>PNbd%~bmfY%lWO`5`LXo_xjuy}q{&%}q^_n`?6;pHMxqe0hH+94gk#YeVSrM>MOA z=TN9W2A?v@H58EvBtLON_Ci+HcV{1zex+ER7nzy$wY3i(Jt}kmNi#9;btgPL zytugd`xc-Q?k`F7Mk>`R&f^Lpy$ZU@D%e!Ldv~=xY(lRm|e-@hQ=;-!k@7kUG?a`x0x^L~zu6rS4 z9y_ahgo5G%s(wVo5#yx7tM9j$P`oGl5@2c0%!S(Hxbfvc8y_9*NS4^qPU9njEmQJp)J!R^ z!tNxKZzJ*t&7&L#?Qn8(%Fh)N5b)XBSdlmN82?NSgn#-{P;EsasywPytBARz^l{VT z>px;=0y{c7a9NkXdS^T>@Z0jOUka0yk~(_!Cl;O6DCyj_?Ny*SeywS80RPlHmLW_@ zGzX~o(z5zci*vL2D$K)pyPo;Q#>Vnlx9{oLF8%@)r&tf?SNaKQB5|c(MTafp z&+5HV4Pv7Za7-pE2>(7hiqnA%(BhWIX;fhvu9Bsh&BCAVozKd`V%hh`F73rviq+r0 zONxpRA3=*=ii(W@@zm7RJ;yJcJ^Ogx?Q@b3X@E4YsT|wn;o))d;ze_Fb2m3PaGuJnvTjRNtg9rCbz&JpMW1t;3L}rY0`Cc5u)aXYkDD zu8^>>`LFNq5H88o6p&9_Pp_}LdvU6xVQ?@pYk-53lUZ3rTUYlCS%7KR&96I|Yt~lh zOfFvzeer_TDk~!+1IZq^zCG8d618dv@xj-xUWJq)>qW)K>+9%P85_60wo>=+%;sca zkyTPU8d!siHC<>(c~hT3cj;1QQPJwRs{8o(#=5!}85ytMys__n{USX*{d-SqY~x+M 
zg9i_$C0>47u}hfdXVv{f;81(_?v3Y>HqH}ZVj4qL0Jg@$qNo5y|{@bERF|w@rpA3O+qKd8P!6vZ(0E>O*nbb38olA%iEA z?a6jnhNY&`gD%v&ETP&w9E(ayYJOFXl-AbKv6T94W`IdwyI+Vrgdcg^Ezc7=gmZMX7s6k;GGqa8P22N2?QIwjAfzp*`v8_L&4aa$JaLT&u#+tIeULydSIE7$Yhypja z?O>@qj#yJrRJ`IU2B@?$(=+nzn}oZ(>%^CDAQ|+b55leK{C}lBeOjD-{a~P!zQ^X< zA>Z&sNxbUxsFkVd;%{N~2*#n93^q`rW-#n}Npsft|)1a|huzP`Audo0}C zE`T7ew`43V#vfDh1G99<+sJ9{0Pwcydy{QDhS=+WV;47byfQV_7*`eHw2zaMbNTEw zFR$NYO>yOAWhW#rK!qVbcrxSV%l43Cw7k5LMLvn*w5ree$IHm+MDO*dND=eJ#S6JB zQ;K(Wb@?q#9W|&oYlxELc0kH-bK{*G08wOJ-0=&1wKX1FJPT6J(-6s~#iHi|&K(v; zCgajq=E?1Hs;$_?qQovBe0z>zc`Vm~1+_~ELJdoYp97_rVuC9wuneN>KECTe`0IV} z=+E55rlKW{t~`qr}Lw(Esw#v%TTJEYD)R{Cfy#d3szx6_27uITFPzdR}drc#9X z7z-tZgn;vJvc>xC+qZl-$rnKLb#--_S|pPr6&uTX2dCrvh7TWlA1t=Xax5@BENtRK z1eiditxUZ35D>@dK;>3wpGoP>r-v?zfZXKg#X1;&rIu`8pYo4Y_=?B|rAy@Im0bkuIIt09}%CjBU z+E8DAq`cv?Bi-eN@(TnZZ%Sq*++*|4n*4V@*9q0J*>9ykE1vd62@pN8SCq$=Q*aIC zn*~aG>#35bK25&_>j5k9^fzj?JJqL9;6gJM(4 z*wxkTkT}%GJg3@On-1k8UqX(1b-@uplmxQF0x3YS`GsZe4}*i7KlqzIXQz@)racp7f!vWIt!?OE|k=$5D=B@3{%JGJT zhmTB7)-f#*u$Xg^+w`$Gmaw3p&j`E|Qx824P|M?@ENJdNn?06{jExAWHvRbV&p0kvSE2_mw2y->kH>SLeUi zRgbHuiM5Q14mfUY-auk#H6hh|V;k`3k)o2)jg{E}+}m*ueM?hQY0BX%y1JS2g^)b! z5{1S7{JKN-bg7v)OnmYN(kto+7wsjYDAw3ct5O(vfTQe z>kOsoH=pLAGEb?-SBB+Y;7^AR98dxwYihc&FxC`$`VxvRlSmsE7gu{k!0+4as9+4U ziH{#Y?$48)5|sk)YtJzdI!KiY6duOc($+?WB#Vqtf^|osd<=rYb2QrYwT#l6sOrXz zKYt$Zz8hb-;p3zEQv2J_pF^NFJ@(UJbAU9b=89>!xeI_1b9D=?L80;oQo-2=E=MI~ zz62bHbR~4-Ho2w(iwoIj= z_07%921A?(lhjm6zR~gKB)LlME4euUz4cKIb#;^9>%!|r*m9mDgKe9e@2an#!kLS_ zy9Q;&ez2_Ac~pM5=hR1p7JdR(*hnezMPViYG3&%UHCKAzfO4#F_+)BM2vs$3UG5Xd$)b~=VW830(0Qn|g zj36)0qaZn(_4@Ud**j+=sGA|%Jg|DMlJr@WQu4KB3%$%raa!74=ZG=Ty`%NxKuZy! 
zBxLI9ZHx+0Y3b<&bqfToY^BrH&5iYbf2rDT~EyT7SpvKd9U zeg;ueQf999OIum7nwe^H8I9oJbnoBDxHYUwv;ESgOJkD{O>--mm13*oMUn13a!a8u z#~NK+7Ncp;8?WZ&f_9zW?^!Fy)W2H?0;!MBpIBZSS({vT!!m7vLIAv@N2kP*+c(zVZH{Q_2_Yhw*yX;O#9+I)XSL#`O|+cDMYf!}GC79PWGvQdDIr zNl%iJrl?K|SZ$M~+i~zTd%QcnF16{4fq!Sg4Ju~p%(%943-c0z$`x2N7<8x@870jp zHiD=H)bTkCvd0lbUT448Bs*`T_8V0C8l(_blX29JLxwHf{yLQoz&e4sc^gNJH79YF z+FiX$Yv5JyAI18!svzK|D%kz%WO`EfBk^x#e6PKfkI@A4o7Nt#?^7pZWo50tqXzz5 zlGXU(gXFODRUD`2((Rv7xdOcN-G3S$Wn*Sz3%q8ygP8a}_LKX_8}o;!z4s_8Dnd#f zomKPl5}eaU6X%@RghcF+E6yQSIO^{TgJf5C&vj<&MGEXdz(>*sCMFs_-D+rUReNK= zd;a{j1-Gm^?z`dlSVTn=wYNYXoT(f`WnI_iNBjHxV{Ol}a-TYHytAr11j$<;Mp z%uh6q_jqykfxM_(4p1{j_lvt*^@EY6BI-7AppEmnEk4%{#31t-6S=Pb`j*+HqNCGu zdl$(GrXeRs$JOmQ zbKY~qyUtgQHbfU3%Fh_wep1xuN%t!%3JU*w_cBL>YBZQr%O#E-m~7gHX6Dz#8S)x3 zP)3V_H8cs1#*!xFuSXjcGttrAiWp1s+q^uy-5puXR$hYg(&@+TwdK#mt9RB} z^7Z=l^_K1MwW@pPeOwI<6K9x2yqCY9tEJ}!wg>H{%sLQ&#DET&k)m`Z;wqltS~(&=(cM zAqU@sFZ4tQy1sR6d~)8EwIDzLfy*&9QTF5*Gt-3;2_`Y59 z@)S07a+*V$d628EtzG*$ae8u+0l=L%g^*6!$F9_BPJaEL+xdmP}zvMxGd>N zLcz*4ETBpB+zbW=Ca)EPPKr>I9{9Q6Q4uw^|wRfal*lcKM z(6P@jCOM3L?e!ULt2^eVrmh>mmk>#rOIc}Nh-02ie&UQBh~oqE%Rls%orqJ>DbCd| zR%0N`+j@y^?ctvy^6_YI5LyHt@|KpCfIIs7@ryM85!a{MC%>dfMWhf5j*gB37lyK1 zybF26kt$shSDjc;uz-wmnx3BJlyq1jVjMzGxZ$IqpeQBev4-qNnORwKajFL$Qrgqx z$oaRUGt;xg6@cEv@bom>%I&c5-c`yIR8-_!?5wPzN?=D}k&%oW;TsNAtr|k>1-B14 zIQ)F|>J_*f_@yFWtfr361-GeolTSe%_DZf|p}$b-v&LmskK}TA#Tta8 zSM6eP``dABCVe6^8o!W?^z%*SE?(TdJ}|;S{%mb^^~#131gmGyou7DIJ>y_Kt(rz*YMMTz9y^nIXeQe6b8W4F)37v3%Osakk>j-k z?H8LymYwK|)%VMOv}zEsI(_=|g$s;^6+SPXdwH+TGoBZwGrM~A@TpTq;Wd|s4<9xt z_lhjKFdFU!MZG$gY*bL1Qtx7MU#45LT(Gz7#PqbvlXJK1&ztj_rS^|VIv@Xp7eWPF^Roq?7LTb0X`fj+esy(j1Q}eNNdREg z*SDg?W=B!EMUCp(83?*fC}JE&6^fL*H4F_aTqo7ep7{)E=E3QofV!;SiAh54K?2tw z&Fj!LiJ2BU=Vu#Liqg{FY!}qHawT5avlWg46j?Ny!-VK>BCkqcx@3XIT$=X|D0rgo zv+-||=+dY)jVC^Jz0Ax!!@`n*)#>c)L{rDG%!42-LxH^Uqqh*P=KD|;@bLHV_!AWR z{3i$k+rY^8spqQ9w+G9{N_P`IhB?AgBE&zX16+EEKm=D`YG&((8lq9GE+MhaY#7B` zHrZ7b5eav-3KH8?+uQPUzg&`0X~FlquGA_8N>d=X8kH 
zPwhuiI&E-QZh9GOKb3Rf>#vuVt!V-1ALy>QOngZ}Pbjajkg>vJX-cEjkw}RnC?|(w zj^?>43+{uSwFx=l*%mr6=bfSsXA7C~mW@O8#XSJ>r^P>@L$Jg4fMT9DCp}dwoec~k z?WL%;97>!mBqggLL~6mg-5iP=p22P2=<)08HYW~dB^Xz>p(Qn$1V;!U$?&fW?7VLD z6HPW1Hbh^vOER{9ISG1^U*PfUMAEhjm+>IyBpJ%Ou+UJtQq9yWXivH(F9EZ{fRS!2 zhm>3Bv*zs2ciCzG19pD?w@5O$SMv0@P^^t7Vkpetr0??SyZKb<+<+)NE z(+2n1vpW>bHfZWri#R{qBmw+5L1&N;AyRBGK)0#?eZP7_KNmAt8k%9w_$q*RQ_;-2C1Q-I>f{^SO_v?DRpM>={qALrUDH zqY@KojvRSy)5S5|SZJK*Q5VLTty>r-WXH!GlW`|H`uJS)@bGYdzy737+GC47FI2u+ ze=!?4cKkR$Y+A#S8qpjEQJhADm41FbDdMPcyr)dR&3OP&4Weh*{zCm(E)I7-YCgn? zwr=H}(8B&~or)vGo2!aCxA3RLh={amVo>pDo~3}uNDX8`B}K)9G{Um-{aIR^Rl7AIQn@KB}hw2~zq<=O_M;?NWu7e{rU^jKo!a3C=JRAyS&)OJ#Szq&jKz_VE&d64RctijjK}AAhn$DWdeTw23tN!*o^Kf8|E6I+wqn%(@)NUMy_pl^=q`wi5MX;r;XST6! zM3!SUvM`*C3CEcNFfg7`SvZwx{Uv$ZoQhhz%0i$NWs)=Wz&bxQb@hOTy0!x*Egz*c zOTFzaEsGq#eE8Vg% zXLVoGZua2J_3!{zj5-P$%212>#?QOK4C~f)?~0q-5<&}p3fLgj12rz|EiNquL|5Y6 zyN(p_?6g{;aNhd9+%O z%4@?&-VsVa0?o17!vFQ$ekW&V`rYrz$;cw8hJG(S2o7deKFZ_#T~^7E^y9mCU(pMx zo=!rwK`VkZDX2YOS$aMpY}1JSSiuDFjuI)mGwE z%VW_jXA@HwMEoKTjg`fT%fm_Nk48~{NOrT>GfDF>SCXzn;$#(Vs)r0v# z`XJYe4dLSFUs+mWtQh|OU7EsTZe@j8dHW@1US2owUsvzc5Ku)(OE{(X=8F?y5jw20 zm$&2IcR@Fl(&}%rs3E{UerQB%EhR91maiR|LvJ}XD9$;8z?RUeSPl+?t%0;sAIfBY z;K?pyCv8eGU*Wl!EeJ!;($Ws+j!+~XZH?lx{+bx>o62L|p7Dn3at(Yco15zn!)(wE zGIk(;9sxKAFReoFYjbn6m3X$|Oj^>=e*Qd$BI>HFc!$c3>u40E*>KlymUIe*!m)5~ zHGSsHq%GUBqTOAHuY~=NE$?=uAs$G79N@Zi)KTMx$**6<*GywN z6X>u#OhJpV#+?%L^^uX;W$+uI(rvci6OmAuZ>gN^;^;`bxrUm3Uf8R^cf-qH(0++v zCUjzd0;tVn)e1PGQKwFwu*Zr0EW932&~d9Q++}XlnmD|Wh}th3q;2D^*72A!>>^4| zsV0iOJ3nTK7S%*6n+SP!Wo0G9{a3Hfl`L6~eNKprjFfph7JBCQ@(%@_c-V*Q&$Xe| zsjc}H)%!~S+v`7iH8eEp#?Zi)VpOr?s!_wN&riWZzqt776% zHz}0R9oMt&$v2x?HSCAgY?T$TYM-65u2m_r^(+%i6m<5^zrIS8M4$neb{ZatPs}Qb z=S*p+smo!iK<<)~s(Scv-{{ocqQmzJ3zvYhOpJ}C=}B38;`kkK%?iJDd%C-i968c% z>J%Zc(^d9RJ!v4H&#EKe1 zugZQ#7DL`vuyz|JiQu9D(+^CkQ&ZNh{+(fr3Z{CW!K@%J)Gs@!KUe(r%5|UJI-0eu zMUh;xU8 zHO-vNiqU+Lh}BZdE|lPZtZ&!!Q7aI}8OjqUR^iu{Hwt+8aDH)d25<{kfsC5{^5xF$ 
z5-6w6&hxl%Dfp4Nr$)!d>OWK0UIf8>78nR89TJ-petG`~_N9rzK(FSS12h3h#jTfhtJRff3nY8hK2r8AK#ipdjt&nm0Ga~r+)pj| z8s|q@Ss9QF63{vfn&|q-HJ{jA?sG^hA7&ueAOxYkoOj7{LgT`9AcQZG4kCk~%b236 zY1(4sg9i_mfAp0W7rRY-d0$_jiL2t^m<1!M{z)=l4BA5l?S zS~@*3!NANs1VoDu#ZI>Jsn^ILeZky>yVy_3BkOe-Tpwmb@rP_3Ud;LM@1YLJZb8DL zfzF23G?LLZbMs{|Nn}9T>8MX5KX4oXi2HM-zR%3`KUbofe7Ob91DiNIROyF{K<|V` z%=Zt3G2Cak{c$T`pX7kU7TX`^JARX_CtpH;!!-`xxQi{tb9%2Wu_dA5;Zj~+Q5hhf zX`cIvhrfNhtgX$W7lN<_KdV>bFes_6I7sO8yWX9KA)+znzQ2Eq$OJP z#q&vQEc}xNjS_A3wZ3}xEAjKJvSU|sW;mR}0?wS)IRYUNk`=4n1*vX+=_E0~=W)u3 zVTp;*&{rxxgt8mw0pqZIU1oZU0j6kiTDVtE zSqXD8pVl$`BwU|*lHP!gaB}W~c1L0P)cA%P?_}LSK=oYJ!jYh2XP&7y3kXO^5~ODb z49l0|VLV~Zq#!GM)#yGHCPauDi!?mQrAVe%qc?ixMHEUs8xNa0Dwmkg>N_qM z`@kx@5P#>7pFkB^VuQybF1Oef_bve8wVeH;oSK_kY1PJ55^{ekh6Rl{>400ea1_92 z*5Oh-mO!_Um(Wv&j$l%dgceBfqem~-652y(1GU*9KV7ZZIXve9M*-@+WJ<^ri)>;Y zg02@_8S~@`25zjcFsnh9gTyuV5_krI#VMB?M0hb%lbHe+1kt8{I=X7^#DrzZx2FBn zeGrDwUDmn28$}!19Uz5X)1i~FE|*^ak;=BqDpb;#<(9MV;o_7KR3k4BkJ=w&eru!A z-@lv6ETCTuRhGpnVY?1A61Z##owc;9cVP;o4pbIQH*xx zptvYd-iYYYS?;b`id1Q-MMMkSOyTo<0kE+Etuh9WcGfGO3BBNys5`0e+(@cz)id3Z z_37|RZf0h0q0Mo#2k&9*1m%Q)7I85&?UEl#ZsFi|Q*(2&E@hxBaE!Kh$^>^LgtYu( zl(`(;!iOGyvLy%afL%)|GrA4xcqea=g`mZ-_xGU4N%at!Hvy1EEif~CX$;rm$jAhP>6bx>2_+>>R% zwF6gnEpweIKdZ&8XJs`BmnL*?^j47fAm4!XhzJWGv93leaCdZ4r0PpWz@&?!K+x;V z%rKM!3!Q*~fGfNX0$op8?q0yPyk-oHOQU+ zbpm4nh^8(~E~sq-9fMd8I@d4tT>1b$T?|SWKJ;&J>}D*X;-o%$w7+EUAYoWcH|5nq%Z|(r921ryN=$&jR?+@Ck>}q{ILZ=h z5K-dLU5q#_1ZOmi9P{(@Q)<`v?Ru-vk^IE(FzDn1H31RFpZNEp*!>usl7j8Y#%3A{ zPtRKb7=DY-@g{+Se?++6r~dE(?( zF$eY$4{Q8{b#^^2te^C(3611 zzmK-2Cd>u_w_utOwM>3(79mDZj@`n)qFCWtGhe@sm>)n=%ho9vL$$>cpTvmHf@%U> z>tDZK1;K;e6$70LEJudp|3yK@Sr7d^uirt$B0fMVyUJa6NmK0PL0h1u5lPa{Ybmp@V> zp+Xg(l8b^?4Yt*oSb zufWF$=#`zBX}q9=#wGyxGw(*i_&gr%+XzE_MWv-iC9VPXyQ2W7MU#eL6m2^Beh$(J z6m<(zQ`L%w@dbEYUYj>j-b--6@i-**2PWcGyfuuIW5LK*LO|%o#;M43j~^dUruqBs zHz{w1vL`E{Z(-Tb+P{j$dMUXB5fhT76fO6PRix@-L zi;%SdpwZbD@ca8_MD6>sNN(}LW7|{dR~dBhhbL6NL7?QCj~}f+ivo-e*3|gZeHfmZ 
zA*kBR6c|w6BA9zP^f9y)7N^Z}BuA&Kh%b%Mbww2wm4H9U_y}+K*~m#r=~e=A1}?SlhrQLf=Aqc;>K)HRfKl*JYZ&=> z0aM|Yjbn-+8DX&MbwNRu&cEaH?kvVObtmBn?ZXt@!pXNiJv{(v+HY)iAbly#_JIh% zfBME@=<~^I7#up`8GtTkDhkrqj#JqA)Q#BhJweZe$!~@R!n0?L z%5?wU^B`g1GY>39zrv>&L)VRUzyrPjm#FBl+z}HKyU=KZRN)e5Fr3jpFfe*i8$Pa2 zkEz_hec1JxkalwMI)WwuuG{bS5*9PZ&3|w1NP>8s4yYLhh#)XUv)>4SY=hFywl?*p zV0iwG;BmmNp)agspO1k)Br>-y!aOH*tC8X12{H^Zw!DA8jrkDa4U@8eVEXr;7l4T9 z{$bNwCA6HJ4xKM`aGp#m<*;+57^T91Ec*CGB_(_|osXKw8wN7-^XHM7AV;9`J_4VB zTNVD+wE&z`qyuU$(>-x9>Q|zZlKdcC0kkdP+4(b{@0%H-q}`^F}}nF$4i) zLm62gB^6a4<|N_7kBf_ojh&jFraE!rfi!)JszBlwd@^Q&Fi43k!pO|rHEa+2hbJazAdX>lq4TbF z9j39Ho8RW;S)sk4O!n`UpD}{vrg`~tOR~frOx0akIAv#IQh?Qg7&XH3?>Rmqge89o zvl7Nw0T-*=wrF$ShdkllE~fuexgtYtib*wKLE+-R$={vK`Q``(I(=vPPUlrJ3yXwZO#;{q~y zm=^FbT=fDcHHpM+9_^Ffl^SNV3(Xe4QH^tOtwG{`@k9>$hbmi`S_Tk~plubS;|I1_3%y5~fJKkSow)4PI!68)WOG z0+?oygwA`lRfxF?!oNGD-i!GteL=+4Usr_{vUE^wIKCP=3i z1f=%8L>2RvJgPeiH7*pLBSHhK==jN#@v*UIHVSZcNPvxT7|?+Rr1U#ooc0Ni6J}CjsHYAN4pLH52OHEhHLw3(w3eP<;3G1k2-G^PBF>zD&mpbSr=8z1Hm%vrRw{@D~gKEzHlu z|7oDF57gNO35lB8?SQ^U358bbfoOaBHLWT5igBNjn>{dmBd>kAVSn-BM?gm)85E@} z`uet4uU@sY`!O~K08)tt&BdDExJi^wSA2Pby?xG$7ghvqXmN7n#}B2MFBE*u_4R^C zR3NIzJs5XKk_U{!ByoC{f>U8sObqK9LHWT2DUcD|Zd~W@U^qG?Bm^;w+c|#x_=1Np z5|*Bx9-u8|8cIq_+YBCC{?yuJv&ByF>RhlcIrlZwQ){jyT8ShoAz=VA3y>?W7p4VF zudwKLgU8?<0RxzDxW>l=rRgD@z*#{E0|;FJzw%eEnwt{_*U-1foS3!JeVRSYDRH#) z9AkwlC?Q#ZbgLFfEU3SxMG1rMaPA3x*YJUGhTpKnHTzG~Z!-L(Uzxch1)LI)!T zwlo5!$%27rpoNE@-v~{11WJ_!fFLQqReJ`h7TC2D#GD6{@EFQOt1acD%Ty;)9uo`; z%eQ>922fm!J6*c?RJiLM|VsqG6R5|liiB_C4KtVRG zE&5PPOs@kT<@XSVma2cJ_m^d6T0`@RIQ|Lp7zV$XX2nsAF*6~*MwoPm<#tOuQ5OGz zFR~zUF~b<6CEEBnrfpHV!g0#q{<}+=`X?L*%Upa1vMfn32Z4#~^UH+km;zKR_>I2s z%Hv~k+R;<6sPqow3+HR z)Z(J6u~0@<7P?0@WdW|=930Bwci7`2WNK&|KoLQJ&hXy{ zCTJ-fN#5)GXYSJEd!SR^+w4WOn}T=PLDoSS_bbJS(z?2_feN)$S5EoySoSA z6GVf39hIsqiwg@TwFSs#sGP2GT34=MP?XCy=jqea*$>ei7=A@956etx|LGz??q&Y( 
zJGZ|F0j=~|OI#&sPCt;7wzvdWNKC4eX9d_9CMKM7)r2#xU-;6_ioOR@xf{uqrC`;+{Ih>?ma4iRr<9^#*spFw6C@c6NrZ%{x6ETI|={?A?()fWNE$9d+=!|>;c;pSybci3T<+xO!adJ9LUC@}WX91;hiFK^6@pLt;7 zY3cL*hYlSNtbsshs(u$OrSnIwA!g+t-QeTq_J$&worEZ3`Qe8%dDv8Mg_;6kDxwRN>}OL2nl$p891Tp-qMxi)|{9$>+0?^KafH>M3zU59@DlD;!ZZD)keq zY711Ho%G8`v~q6i)z-kCUmbiut;=W4dI+lrgChA3Z3e&&)EL-()G~bW%?q;e#No~> zPyGDz`pbLmUU&5V$n5rS@dV9*Ite!MEEnI#62TSw^oIf2S?QN@r*%MwwCZ#*Hffd! zy%{L6n%tj`K{1w`&~1&VgHwaCU}vFik26|sko~INk)etApAZ);inwg2RrNP=dc|cP4M{iblfj79IgDM3Ur6n#_-{%$C7aTJ?4znC|lbi!KP2t-PX&e2^SSLcbw3*ZRwHO_U9t-Y}^8EORp ziK=QyQqn0>()0MXpH>lZ==iU$zQ2tbY>Y!o~j$Br?|K{>M3+zSQ#;0bIR|}x3MuMr7tjO_T1X<`eOmG(1_3% znCUuPY(S_R7!@xErwsLubB|q)-^-rGH zfh_)HWf{`S-)IBufYp}NWqoZtiI=#kHg^C@sA8D#Uj@%=Bv+5zIAm}ZL+0{rwJ=P_LH0)}hmZj_{Ae~kaH_9f z(*tZ3`T5Qr0%3g;sHimsV<+Y}#k>|DthR&=lUG)v>POqMx#vR3$2S1rD{&~GL#(^f zR8_zeWB~gXC~#|MQaEj(B=fz8HP?))S|b_?l*kXeNbL#sMv!+WEBwi&hN>L@;acI&S0cBjVOzihD^S7;&bf`-T>To1>$utGQE(YV8*( z(zY+V3aX8r@U2A*#A=lFOt){Oi+5QPy0cga9G(<$!pbm_ec^-R4uTFj^XkEtf?hWvc7A+!|)eul$^%`F(r9%r6#TPdu9d-qbE0O zQBH2Im{n^U+IRQzuV`rO+P#~M*t#=I%K_RkCU-OS6N~I`iYHFh-q+MFt8k!nHlai3qroFX?_SPho_MW7H78Ri(mGK;(@Atm%=l6O& ze?9%-MH-*a`?{|4I?v-gj^niYfS}^=&jtS~3J_>iYzQstwO6kQdAi|Wh^BM+{2!aI zB8QHk9r_Y&AYcl*zlb&ddyFpf!bE*_wZT9}1C)IWipJG9?u%Z)L*slf#e4UHwUw2U z%cqH&@`*p;cXEy-HsgH<9uP(ah^F$RkPw~e3jGR_i_b&z3JNBwuBU+2W;Z8r`}Q`J zL5A*c4@!_24&QvGTT`TImXe-LLEM{ov$yy*y?rAZV;WK^yu`cE-eu#8{Ov=HlbBfa zA!715l+jPCZ&yy)=C8-5rklh~I6RbenWuiY{_Wf4Wo!FvRoSqt->`yE(o3+GO_vnl~K-wQ?2#%A=o**8?%3rU}o7_eQYhh z|HNw3T5Ic5zH^+8^eb``Ke36f(2x+$YcfWn)};E(SYOEBSa-k#UWz+62O7 z%zCjVnQ2;&@|iI7G|(ntk{!=XdAsGT$>TydM`gH)~rflrBIRjbN|0&f=t7#6LWo z^K_x9(y3eBjJmc(Dmb-%BO{p&n%1zYcXSFpo z%HH98bXK_q1y_OVt^Oh4KtFbs?Gqc)`edY1v;Ta{G>=X*-YZA%F$F{Ng*@Lfk?Djn z<>O`yx{qG)`71{|($~Hic>Vf{lEA6(AoG%yYp|jUh*8v+r17GT z-;^NPc^}8fV-?+_!4H;)<&})mp=H!MPGvVY07z6sT0gx4R0uo8_a8p6CEh!!aRuEs z>;2R@{M?VaKOS!%gjZl^%EaHt&sX=h=Pl0s^hox99$Q93Q^hkcdyzSwy~sI7FDBk5 z+i>(!cCMYZW_EV;bbFGd>;&=dAW2Ovon3NtVb2dAm!3)9cuW!7d}q1bf8V$5`qu3b 
zm#?QcCRUSNmN)*8T5e@ti_vsb6c%jb8M`4{22YpS`T505Rs-+oU)z|ZJ(iTeOF?{x z@~f2DM&2nR^s^HM`j|XdDqov&bfFt_R-Zi>X~2(>+Y1k#nSR|h@}!BbfT=g-OdolA z!I8*{`#GG=#AALZ8FNL;6y9?}bi`?SutTAWTjI7+^~j5rO!oY1A=fO=vwn<-k5}%@ zLg!ph^G#b@+vsSFN0-X8LUICk9sH+qy-RSg1jQ721fmvFi34U{9~Kuo4R+K+vA<>8 ziXsF8b+GVdynYI!Ks=P?f~ke5#aNzh zKAkvd`Pn*rENBw~e(XzL=RvhW!-ou8yr*_ zSXujw1Ax)u{>LGUi(|C0$8Z0$m|tvgD5%9^P_+h?0RVgCj>wik3=(jVc;H4Es-alO z0C%j@9Q%``+z=Dhehlokq&sc#ifmL&p;l!=08rdf4Jr)*fH*gXqBwlHK&bb6YY^Ka{&%yUW&ae zE1C}4m)m=KvUuR~VTG#@J>M%7{FbM)~dWHgU|NclEa_Eoe z0OEj4l%76~YOtI9C@Kd50kwAx63iC1KGDb2&Q7ma z+W>%A}_DDUk2!Rsz7h0N|@tn`WM!8B1hPRw7`?&d%;y?c$uL zK*q(z)v;fQg_${u^fQ4Yfqt~v?Jp8QZ9s|5IuLvD?{QjHh9^nEq#EbUJm%=jzTL78 zD{UhtCLZ}hgpAqp_!+0^T^}z705U?B+IBE*CC|3QhZ@ES)DGi94Zk8b3Hr0zw+xoX z5N5W&0h$8K4eo}*++4P=N1>U#MB;CA`fnN!fnrn8oExF!lg@+Yx5DrhH&xNIu?4$V zATw?^OqrX2r%cfpF@NZ(hcP)B*C-;V&ffjKejuM44eOYViH@*nonMv%+1zi9xgDR4 zv}(uh2dTcS{%QzmkAR`8M99)egkN-nM(!OhRsPwv40boMXAAqN1n6oG&a|~;7F~QRI{SG$dz~YSeh#-PyCDVraf=Z26t3G`>OItsdR?rw7JV}P8@3)FhDJ;@ z#7Ie!g6%xLVvjO_+!nKGk8Cce*k{p!_rJ>!8yzjOR0w0W(W(ceR1rBehsq*??h3pe zmP_o)?0MEIRQAc8@pXQCljq^0qV^`H_RD z&9$Y{Q2&#c`oO!Uvl=rGdS5@^(i9nHd3q_eeWh%9m#&@FfdjFSUf$4q+1IBL{nX`Q zd;}$BHN`?Kr~+Z%{gocGjok6$r0qacj-VtDak_XtEY|zjTjr|TTBFGejQh{^ww-)< zKUy&~m(em=QATFadmI}&<&b0)>3hsol+nBqQ1sfWi2n;1PW_{oKP;}YyKaNzI-3;Me~C>Uv(gzx|E#@TsnzDb zcX&jEGh$LU<;AJ7F~gQ`3JElHbO;jnsHayqt;AVL$}=qxe_SPuoMor#Y{31gGh0{) zTCz7u%g^#lW+3gcoTR9DX$SS%HyAJ`nE{n7)qc?M3VSf_Y}*LGYuPw5p#JMfFJP+I z59&@(y3)I0gw0Z4wFpp8ZORp{4}N5iRvDS}U6W@9ha9C8@y?y#Qza!O;ZL~FqUPy! 
zo*b4$C7YBY_Gn2$LIPxpq^b6USJ4FQs{ir*`-h2%wl;rLBcN))(ZZkQs#J{-Io|={ z6ivc|z5MHI3m1Jg?d|zCB{$`Q%O!O7wB(FGO6ys$?QcS!onAV93;hvL`wZ+>DaUgc-I&IU<*sGLPD{IK2Y5P*lCw$asZ?;B?`^-PGO5uLq1L$g@kNjfdzYl)1>fNxv zQ|0B!Lf+7%wuAj^p11!FI^Dc^#UeLIE*(iTSZk02VxNFghv*IovY`Nj*qjgyWwG9a zj}>?U1*31~IvWfqXYl?aq2RfD%5%*Y#St#hp{fb6gvlrTud22=N9j2z*OXtAH29)un~}YZb!%`$k#?IYg}?b_aej|<~oRkkQ%XH7&o1c+QqI~o{nA~(Do43FM#w`Ve}UELU!l<_h{tC zRC(y>G1yU!VAm2do3w^?a!ETjU#p=A-nd&KUyN-H{3-VA8CaqNqrn;FJP>zO${h}+ zKrrDtWP^mk%KiLV|1gd+?KRKl;>btl?}wlz{93=-d-L5Lf9hHYOkiGDSGNs%DH1^W z1Mi0eLUVd5bN`bNW&LO_5Knn)&~>O;xQLf`ygtban1e-V6BDpgy|e%m!Q4-M;;3bWqD)f4)AdZ+6E| z1f_io0AT<)@pILvqNMfj#-)8Sw?bx7@a?Y9f$Hg6`RxyrFPBW8P!1Oi>j)-gi+&;a z+lq=t#GFG-=K(EltGoy|w++st+U4xnlUz)+0UZ`(?TKM9;=@h#nDV6zGCugUdyGdU zKP5{8zl~|-ruFsf8Y-CxQw03IGcs@*k2S6|(5q{LXK~-%yMM7bkjnik?~}okUKq>@ zhA6U&nAsA#Vhg=(qFSHN%{#3b1>}=jS(?A)h{3;gE0s}>DrC2Af9deotz z#9>TtPPrcoOf8ExM~9YpX)!QVvk!6BNbHk%L2a))jG4< zH|<`sqUyZ^-nUNts?{ep*f?iZawSz+tqw2*0ntt$fXGBRMnK%m>$~5NxAE%PoYaO1 z^iVh-$(kdfUdEGE;mwEY71|GMhD}P8swhK=ipkV`&dh4B{ zzfz3eynSn`zeCUSG9s?!C4I)re%Z|*erFZj9`TDd3GhzJnC7b(8{*8K#6u{LDnn~r z)Vzd77U(p&HW`z%%FggC!w=KbqKNbJj$U5Ut8WJew7wtop&_x0dRVRhR?)EPm6|cT zM~O{8sG6O(KXY+g!++_v;X>>-7BH22-5sOzF1|}alZFx&_!$z z-~)Pr>Jb+Cb&-(#Q~bFkOXhzNt3hDmye{_}aBqi(K_hnpRb^!*+R%*0uQKizQ`!Mq zVblrEydoEXdxzQNw1&ZKB#`m$qNnPb-Hof#j$;*YI6C_MA0&Tz{Xv^P@i(njqDT-^ z5W3xxL(8@yXOEC+?-;$8n!|tX>JmAz5M>4PRH)TtKBH*-f#-v7V3X@}Xa)5o2}uHz zXYK7lL93Fx4*`!wo{J(dy&_1=ItfA7<@{7(`=#s8TY_#k`OuTy!CgnbGXq_*-euJ$ zl_^slolI!xD`mlc{{8;JKC6s(pymGt6Hmv>_DKo_RhRvvN7>wuas89-+7*_+iG1+$ zc$J2`QfGH}65asXSa{=UJzg#_R3th48v1XGtEW0X?n8qGoba_J(3o9XX*5L!f1;4K z>MN?RPqO0yYnkQR($}vV>LX>_gsKW9@u$~Nmy!Nl57$rvdY(4=Y<7<`DQ!ki8r3M_ zpoGSQEwC!!gy7#jp|D$G*fH@?-^lBEQM;PLT6nFy>?OlpaQe4he=z)Pgesjv><6O}(%7@9z$!lxDM;6|cazvIr zNxlo$zN6dB@>LhLjg2R$cBX0m94unK_Feb{C1r7LAD$2$EBxuiHo7@cENL5^AN3W1 zK?8t;E{0$+=*e?%SO$bePO**%0p4quN2{fvB}Gv;W0vOYy zsgQf_YwewFaQk3*0z$UHl^uhLp6GMW>d&qwA?tda&a*O(k~ebw?vKw*n}SONosh>H 
zw3(Dy?OFW-Mul6YU@K^sqP1yfkDPRwr`&Y3w&t3- z56VN}z3`dFxZ6k3`s-% z;^9YSs65E=8{pEA`GtxawYOLE>3X@yQuQ7?rQ zYwHM0vq-C}k<&whvd6ZF)%)kUXEhJ^veY6_63BG0nFR?VGIW8DDmR=+fsRt0= z&>h@Btdjy;{1-%Pzd@cOeLB^}QsE}^#a_}cx6&VMoERm5QrWM01Lp*JiW``Ss0~lb zF;In7QAeHlF1C`$CqYR;5qUZtr2Z1~=GI3nPz<~+Hai0b&zR_nUWFR19s`imi}}(N zWB#RO?7_W#B$goku=RKa)z!sE|edJkS8w9~h6cF>GOT7zDL z*VaN#CJT#$jOC;}C8O|r>|(Gh0iEn${iP@$4syE{UXcF937t|Cl@vF?vaa$;VBa?D$gWI<&!8VDuS_4ENNv# zRXTh&IA3q5$j5-7Ihxei;m_*bo|jY5p1$gmw)w4j--uCaY;5d4=98N18Mi$=&|bMT zS>y&IfL`}`xgU1Qj*f-Xhls)W)s2;ue_iPBJK!)FY;%Xgu1y2!Bg3xv!5$@Gl-jM(t_n064)Cs3<7R5*m z;BvSdIAZe73+t$|kB*vJ6Lh)jDS3SqTa1L`Jwj_J+=`n)E3D4E+63#`u}qXW#;Mb2vDv`{c{M9hWowfTv3~mgkJV1(6dWET*23JOo)R87-s+v zmHk51uy%s$e)}M;JVS^GlbqfWYOrgc8g%^n<)x>|qmF)V2Wy%S{TJ}~+dq8@!dHRM z{cM{4B?wMX#%qVmvwrjufo<9RFvqN5zuh`o?Pqkiyj~6uyTUyf8Ct@6QFZmn-Z$F< zh+?|N#sYPlxLHm<@1~YTVZ@yx%};H}t)9fQ^y%CXeha1tv~tQ~B+2s(( zrs4}q$>EY$1Fw;5uVY)EnYJ;)R4Fd{rt%IinZJ=5GP#9?BQ}NdHrI@5oVl(riN1UV zBtNY*D9!bsWlCfUa(%Dskd^29W|)@6)X8y_-ZSCiTB&gV(zl{!x8gr?c#IH(o%&jz zp>h`+96GBF%8REh87tgQemQ7r<*_omhkqw;myovqMcM!XK^?12)U0tAFhr#Anl91< zw7;6{jH#)qOYUsD=jv<$r_bY2Gk)&rhJ4;CNB;=kh&=lM*l$vBrjV)tL4qZb+_E63 zJ^isuBO`@hQ}K(f&eyN?<-`5BB_%!3_Y=5zZa%FO?;{+2MeE%eJQ!1P5!8P!Dvh`t zZG5@CoSd&u+XFD!2xZKn&Ym7e#8^7{++$^$bo2;mG9+cCrST`OAO2WS(DSUIj-YWW zFA6|)4iFC+Y-V$%oxFSt!TmAC-GiN?)qp+x>i9E3h%HYNg^bziT< zm1|~EIeLOl{x@XY1^s+vX@Ic_gL+yt58C%o)UvdW9Bh`LHvGgL6cS?f!WU;qePDoqN2rmXrTU>~V4!q`0?s1()@MK~2Ei8>4>~(I7jQO;Y|RyMy?w0P zNQH-PaNsi-r3Qx^R@!@e)s>aOw~JB90#_ErYm!)445Xqju=T+~K)no+5zszEAD-;m z+AU~sP|l+=gx>{_L0w~0wSn3;QAV6r;gH@WJ52JX;xtg7J<%Nhfz!o7l z!M%1WT@<`WV6?a}&Ugat z?z?v_dmrVFE2lAq!~h7z)4=P(Q=P(Chva0w{1iM<^p~lJ7#1*CpC=!1ycn3 zHo&pLD#)f$TVB0#|W760?IQ_mlkzI6jhKUl_a5wJrl z*MCHC_y2viXLm=%!~p4(#Qi2Dw2$r8%=gp_MhE}Tuji?EIaaLJ(@q>&L!KP1ijM;)ZFn5*#VFpCqtxGaNxz8S)7R~L-SOCs65&WX88?Ty*{RO4=8>_k zLi&__9}Xr+g5<8GrsnHz+wp%Mz{_*tC7%ss&;2S-&+(XJG!LO^V$87erD>&;C9@0> zQCKhjRJ>$%S?;e;F42saZwUH%u0`^j(?c?waYBsQWcCUZIb+TAQ4j5Z 
zo{g1ykzSL^3jlFa-WpcZ9wU9)d$+-+^GJ5A~mMLHAZ*^yZ~ zNp6^ZEn{nE=!tV4O?iHG*Df4B!aO-NR2aR#;y97G*nbu{wd0dD)N37oY^{) z#ks^nm(Pd`5fPE4QS5W!xW@3Ex@p~=-|eA5g+BH4vrCO~Oum{AzNlt2xN&vn%b!YT zU9S0M`!1)ruFC!4@!j|4ckMF$XDg-J-OJyvt80*HK;=im=Kc7*^b6h!=mq?&C1_%k zzfp4(rg}Qi=@i$7(HI=G69{=uUMl~j!iP~mqt=mHp;}M&NkE@3exMDh!M|s*U;L$Y zj*g&_pzfV2=?z}#ZJGXhBBIM7%^KW#!zzq_56kSM`gVQpzRRUkr)Ce3mv0spbI&RB zUTa_*Zm%Mv88C<|7FG?|{Aj=Oes3>z3JH<;pPnE&_8%3!GkbRJ43q;XiGiJ;_VET` z2;l6|E#m*2>4aCZDn6@Cx&nC*+btGfE>$okKriI0>Fzy{@mYJY_-KWH+b^0YG>eT< zo4?nZGj57%TAruxjjIkBmJJXh3-^|1)Yl1qvH7t#t@x~!h^ZWVIEJXad?J~;1A*=D z_Pf6ZteBm%_VwLgP)!kfpUB*i`fU!b0w@TCd7Qv`@4P1e@2lCVdsA1?Fym2%X4{P$ zbUP6ssw+a~UazX~ck`_VojOxiM8?#^aN@@yiz_U$p&3=wJE=O`g;YJY-gI=4ynXj3 zm4^=me>6{FTv`e7lXw8l^!xYUkOgq~0y9S&a=quoI|D<*J-d8x(XsjB%)X3*5wfsB z;eR*Ho_l^;dC|>JRdmG|lDlpsFIm}MA|kRS4^kER@cMzs^1#?*UG5T>Q?AmX1FDOb z)1wrER3c6y@ooW1>T(CJ-e8tx{Ciu_dO?TUFvJI?Jpq-t_3g;%)4x#>mlg5c(}GME zLd{TWsLjuljg60=l9C!U1J)$S#WjZ@kAY`oVC;!L82RsW31LywolbY;qo=o&GmlnD zkos1;eD4_v(Y>^61z)m@`wz=XA2yrMR(}~3I38yI)G3NVbctFrnda-#apg_{-_{ef zU~t67?->Y4Bu4B(GY(`8IEF!MBY|7#P1?DCBjU?Xxz$DX9V&rAB%3TSjdJg=u%Eaa zlv!646j_$*t9JEX^VlU#$4eA4%$lPEeN+cog(+!OaH}Up>%Zx3dmymNUX|+9WD}ya zJB?J4t;W!wOrOKZv9V=NqW>pRs>@DdVoZCamW80{+qZARZ=Bft#Qq7Mt$TH=<+GeE z$dz?!|L#^~TYt-%ArY>dakohF)VyIM0B7HMTILwMEBbrIj|T>xzZ`XBHG!IlJf zn9AL_x#`7=bZZ>4QxW#RooSdw5!?fNfyEv~$cO}3=_Hy+GTFPwW7?G`Y0=ltaZ zA!#)`&VKy1>2?1s=8}E+^hwG8nh52;A^%<{|8fN7`tcI?`FO+gb#~*QTXyU4DVpo# z4-Q>Uey4qdPCNW(<}aZ?l}!N~gAQffZ|_9g8ENi2Q_XYlg+FtQnTEUa`qz(p z&f6TZh}y3$rxoS)bl?w-k^oWa8K{QfKq?0M{Q1Ll{~j?-$4Qe2Hp6$SUjlz5eoQU5 z9baj=t;3g-&J_Hc->Z0Eny#+ZHr1h!kZDp)+3zujuhW0vyY`Wkw`#|4suNEei*wk{ zQ*#6hn@RjKpWDOU)!rpF{LSM$DSMb9(OG`bFTnxFe8+PQU;5J?X!6LBxuFk(SyChV zX6WS+lTt#gEPB@ZKS%9O(VNKLZ?zh}?)~*Uxcr`SYxz0n*<737{rSG0Zn#mYxJWR} zV-&Pm52rM&X?{7Af_vd23lMb?!#6)My zi3I*hp3bJsa!uFEB|99j-9vT#*2-PoT;nWq8-e#cnR_WV67>RaDHxRAjr20TOJB4v z^}y9a!SRz98QFY!yX7idGM^1zZuJq-z5Z5P|M=;K0=6M)8u9}!)pQ>eJf57h*>TTi 
z*S{%Qs@c=Fqa;Vl+K=YcXGd4o>`9sNKCFL~UHSfsNdT`LZ-ZjE8(oBEb!wU3)W9Xt z?~lb)@}Gp3U+bvR@t`lO^vU1#DLL)jX@63?7n@66`}6~OTC=knWBjmLMci_49^M&E zLX@!oKbzA#?U>*9-GcLbF85kThFxMfy#3kEiC)%rPWUg)pF#tY8)LlD?mBE*MslNC zpX{9G(jy-S=eMZGWS^;@N~=3Gd&vI1Ud;Drq>tOoZr3kv=cbksweI0Iy@AO6wA5T} zKb--1Tjb>ApcX>|?JJvx9t89k!0`dQ0MIFV1V8V&g@rvPDzOx_L8uRVy1Mu=>!Pp_ zdkK?VkubyMSFPRPxi(~c(=8Mb5(6HIRzX<%^7%7>HB8NzNP5e};Pub4|CQeuQmx-x zeEw7z=WI;RsUHCzbt@K&smJk}eI6<_(;BbEuCOSJMDWxpmfCl%UiPoMc^)(CzEqm(r8pObM}XL2Hu+Co$`9NH4`3lqm-zUhc+fl`gXX!j)3j$cfqpG%EP zRvh=LgIECl9856+)fv;tQCj0FP+vL(ISQ&BxDDeT11$ZxPhW`07N`ycN}gQ*s3-X$P%<=oM*Ycc21~xTLZH?cz{SVHk4Ane2Wh``-UX+4goohBJ=q_8@%+t#TXFW`34yDVH7vi9iRO$|JAZ$8WqJ5nKFIn0{ud{sH>+c_S zpY^>&&GF_)rtlK=wFXBEPde$8{_L4M zEUOcq*g(Mr90qC)31gADIk-Ssz%~Z#DaZ5!wKahPkTW*GK?8bGYFTu7uf)53-}Hip z6g&Yy>Nl4s#%|>a(vj~wEDaq{GkiWmN+Dg9liSrDAV`?V4u)J?YwP^%Y(R?zsAXtr zTu7G2%!R31mI*v|R(g6!rJn=VL_-cm{G{$nqID6U72#$-Sb5{y=bwE`C{ zWGj1eN;}X}M*9&9D0cZJ(a1;EPTrkIDprQHj@#Dte(l6xmp)99j))*n8-5s8l z^&LZJF`g_$q91AtHtZ+7d;mQ|fezE?A7ivwGgN0Q&EOsb5ylR1hfo?v3={f%x^@sv zyz%(9-P5Qm&%E^R7iU!LQ#q^q->O#VV0{nC%Ypjl~~zbzT4V+M|v|(wQ)=KFJJjU?H&g zL8Jzkx1bmy0R2|yKfgSJ?8ODJQ~?2G7�QSjHDV$fvH%W)j3JF)?N2_%@YMv37Wr zxBN!O1_Y`oG3GrfeH+NI*VZ!EAu^%lmZDTf_yAc8HYHh|m>gE-{qw_a_4^pmc>CAS z@6f|rHBEq+1^FDvSeT$3ekY=B2E*TRHvAc9K>X2NI7hgFmM4@?F_+TCfb0c3qM%g< zjx%TO0(=B8m9&lVVYhIO0S1D)CTd)Ns}%e+3?lXVy&k*332hH@vd*B6T@~UGc5Z`| z*I}}9SSpGDCbJZVT5`(T;z)tgH_cpa2VNY(HW#w-gChdBAk!xBg1iZgg(~|Iaw*Dh zW$*W>?MgdyL+HMIbi~&?(Zx}h+<#t@pkwfTT7P5ic7bk0P5ehO{ZTD4R|b+-GR2NY z^y=LTOn%y|2X^cvVk*jfolZ(G7jCBUo3U`Bc=w4xW$LO*)dyd-9{lkMxDa+wUDj1A z?;O1>)pZlW{WOE6?GbTtw-yGu#`-{|CPmypFPWcz`IRX;sj$M0c>er1#===QV5Iq7 z+|f87A?5iYj3PcBEa(C8mr-H{Nn+;#x*M+!lvh+RpSWfW(#*o{HJGnrhd#gcUG|xm zyZaBAvHSQ$ePjfgg)gUxmYG@FZjdv^2pH-pKz*==Zhm>|h6i4D2}SK)bg%GyuTE5J z{cr&qp12=rEeYCE|g=(c%vk-D|-V36ntxHB>DH#x5CUa#DkxX z9E$>Z&S{etjnRAXSX|M!a&Ukn5>@0`3Sn9xU4%iDEBjFV{VR>5~}6yAWFF z=huRefH`%L)XR(wtD_-Ii6M^iMxuNPtG=93Y3L@Gm4W*^sm8txY7pGOp1)Fo!XwP% 
zIVKBAjnubl`P|EH2NR@M%?4O?4MZC1>b*Eh2rC^oE}|*y-|GLgun>-D$1_Uc1_-Gs z`s2{w{lx|Y^$m=P%dHo{-#|0na-_Lr2hmxLr@ue)3Pr^RsEQh0p7~rInOocBLoG}H zI{K@Q8gK7J_g5#S=QJ_ws*}iGAv-$1C{vIv2SS4P)vd-Ecv92zgc{9Qu;Zj(4qw0g;hZCF~ z`*q&+o7W*9?lL+X!Yt_w|5ZM2%X!q4yZ8Qj_xI1yOEOYYMl+1HJr5D^3G*sbyjD8k z89NvM6yx8ZB}O;e_y<4Q<91h@ARk``r6Jh5>YBa%7TzSko`EemvLLpw=n=6TetZ*O zfK-(~A*{t>^eA$b^A9PtCS8w!WFXdi>~R~ML3M^M?SEF04AO@Ld{_uNcXvYqQLU@% zw&6OjkkA#=eFw$KxLX*=EF(KUuEN@)jd2(*gXlEoqusGK4!UB;;uv4qmA=E=?J)=+ z2zN3+-xIe!2Fu$MDUyhY2=pFnM*&-aBY01hn8?0yRhL(k+3jx%!_$(Ih@U)K2Gr@! z4f-$4{A>gWu;Fb9Sy`XnvS~A?p~8CK>EAa`&?~UrV_RGf4mud<+>op=bvm?sZ|N`3 z>WnuL+gGyoYd4-!E_ylm#@27MEvQ?zp)f$C>Bmhcr|_N$0ft~}K>;H=6wo-d+5^Au z!bu=$GY$QWxj9Q1eQbVYeI(HeLH)3bfA_*PNGO-R^-!)f8r z1v<>=g>Kpouy)P#F0>!cNSCD)7h~LP#yA2zgjszgzF^*3PJyIuCr~C_d>2Fs^!M6h z_E1v(LYRxM3bJ-4B09Oe*i#VhaA2obpogNGdWu^Ie)eQy*L-;y3YmZseAO#JQP3Ww@x8d78ESZ&TbtWBZ#Gf zE=Gxs8hXj3&|H8U!O-BKfS@3(2<#mld)oQw9p-CWN~ zO9w9hwJ43-!|Y&b+1wsnrUu)Mxz1>GyvWjEJcW$Ax2y2n8BWk8&^AQb#6Y$Se`2?- zywz;HF4x8Qk|_1)&*xLw>URn&47&E*&A*R`)6~Q>S68HE)tX*Qh&JqpQs&(C7Ov#5 z79ScJ^X+r+j$JUdXpP@>w09!^LO}a&?zCdM?dnr(sV$Ll30kIt7L=Mg-`wxKVTXFBX(Q6JA>$%qOE<9Oc!CIjK+L{Ql>1-oW~X%_bIQ{3zY}Ny#An+ zIRr(VGoh6`3NB;T>xTz%qu@YTf8;Q{$!Tdu{p`*6D*R{A?K~yUEo3=p-oP+X1cQr^ zfnlWg(7?d=2m563sOBfGn472BokmroNQOJA6txpNt%y(yVD@EX%-}?RQmVq1L5}?! 
znwz*Dg7M?^E?OB|IOC!Zj9hsf9<#Wdu0WW8ICt~LjX2O#uU;+qvHn5eE@BTZI#}es zfBzm9bC}ubXL*z6KLTMfwmjO~ZFQ>3 z+2(>C$^Enaa3UV{M;Bx32P5jOh>d^)axvLEn5KLJAE~F}L zoU#r2U~%&C$xXQIfX~REcqRDoX@XbJ!m*$De=cr5qth6n1J;76sr1n-H(14ZE)`at zJRu|~NGM9NZgiiaO|(LFnCYI8|0nexsz*`hW<(>msS)KdSuuZ(FWVoYcrPYPd!*m` z3y&3oyQg#_QEIMw{_`olh2Cd|{tY7r=I6*hL_ARWQPdwo!X`JFs5bd2r=!8wu3KI0 zXT5H*+t1Wk{nb%Jo**N7Llgdi{*WyH6GkVRDcRP(lLaf^sW8#LmSQ>8IBB&-bn!mJ z?YGQ=${q>3iM1$lz8R)%d7rM<8f6QyI&P=bsP>bpwE0uSg0BHqhirL%yt$8x&6iJT zn4eeY{IN8tVEQ0o%d~^c0m2FaeC-mgFZ-~SXMP#K8>Q~qGq~ZyTx)%XFn;oP31ZYi z)`TG+87?mQQB#Gg8?~&fY!`%ZW#deBAqAzf3V)d2iotfwo)G&{SK+G-KxR(q`wG1R=p>ls z08T6BG2DU{wl$RlIwANHYV|)Ws3nYo023^8wlV(8t?$z%d!A*E)E?VDpik0rH)QLY}?!?eAFh}&E_%d^J`4A zMdq$Q2JUnJ^|oJ_vGAcTtDI#uxhh#x9N6l8{y_4Z=Ci|1yoI#6ORq1UA^Om|`LMm< z{L}Cu8r3ki&WLM@ax|L{4!5qhttpV{>XL_lQl2rE4ZY*nt|>5k$(O&ABJ|>s_tG~# z(vI#hyd|B?cb`n2o#x8Vos~QF^3uQdZw%Zpi5i^tQM3qwq8=xD0&WOfkvLV`KYxai z;H|$Rvjb{k-CglXlN$m3pyTj21X56M`2%R4W6}s7qS*D1-SI9Ee{z2c$MBE3y4#m8 zzXv;MK?4m|d>Ebr1?y2z9>oq@dwWkr4(y%Rf+`A+lOeRTwdHY)29#5drwZP*QTbN; z3i!f*!FZ)|=-GIe0+oU#r=F%JE}J^s9dE|Q22#|J`hZRxVs!(2EYwi7Ovg{0Kw^3K z>+=l)BQT@{up)*$eL_+KaKi3xJR#M^++Fe_j{Ix*qA7$nrpP2PH8CYKNx)1`53|<2 zZ7SX(&_G`ncOCNi8oav`Q@1$8#l-~#LR>GrHoqID2sb8JE>E&Sh01*R@E%c({KDbj4pwPF|>Jo9S;T@0`O0Q+g6SCL0>9jq6bC(%y)G7 z^ur&QFMspek`VKgKWG&iUeF(Wvc+LTqA!8Nd3O7GJ&mlu*i(;Jje@-S zwF#Vac2Q#`!AHNoj=#ET=WaWGXne$dvqD%tB}7<+i@nYue)S{O3D)hP_s`n}W~PVeB&wej*fcvbyIhInsv<6>r-ubN1!v7ggMBUNC6NTe1SjUSEOsf-}5 zE?;jJsjccGzCmS028J^afAvr`k+Xw==Y7d+urd7&nUF8~E?Dhr*N!c@3`VkO2JvlP zi6=`GfcuZQ3`lbPDSfe(3Cc=JnkP)ORO{_~eolB<+YV)iq6dR@x10wp`nh>CsdGNi zG@+w8y@doQ)k?AnKle9k<~4_KrOcB}OE3iL+VWdlp1d?!ei9y6`_&1KxZO_#Lm`&{ zM!q_}jXmjRkb*#2+KB*@Om`8=z78G%aR>@5yM4ff#{jwN16NSR*}p;G2Nh(x8Svme zezGG=3i=Oex@hIUvEFp6TuAOpICDh9TV#_CZ@?RNZ1(qZci@z{+SLZ-Layly043On`d$^X+B?Uq|0M{ z7N(|VY~wOV9-NrblM}hc=1U#pIsQwLj6byg{^F|CMXGnl-8t-+^z4#iA9zLdq#haA zmCCfjE_hPXF8M}JQ2uzNa4Yv7pb_ z5X5^z4&WY-y}5=I9mn`fpHD?QX0g>@-GO6(Z|GzgEt5%_!)1g9==qrN$*}t1nApo| 
zhr0vAZqDSFv)UcRP<0;|%JH#u5tg>zO$XTDp&v>(Xo4huG;D1RCYXqv-Mm zA3QZ*e7R3EqE9KgS9J5NmvUuwplKyXp#7yMaUWJROixHw1bh+xD{WRWe?#lo;@Loc ziGD+sy#Bb7!8@<>vO=T7NB44v2MbC&@(oXZsM$Syf5~E3MR#4#;3DbzdFzH`(sBk# zL;rv$P6ldbXs4tm3r zGY)4TDLvs3a(ftaGXo-pGFEaw*$|P68Vfa}&+!;?=Lo;H;}$NarswM`VD_NQ$?V{V z-sXVTQMy-->@8%DM=pFm*>ZWyz;$stJ)PZfA4|_7M{#{|u@HyBjqE%&VgcR~QaW-! z94X{Pv*p|?^GRZo@zb^8{x(@YHU(PLvdFA;pR>4*u%0x;{d9+P3n+F53KM1iFM{MCh+BxyPb$^oi%wset&Rxgom~2qVOz* zrokdfRGo@7qfY+q)m`3gH0)AaWq(hG4~3$g~R>F-mT$J zr_Xt=w~Yra3ej-UM_cY!y`Sr6bSid5;8~ld*3G;P@2hsQaxW??>8>UZEry!qkDPA& zA`+2%E_>$=*4C^Tg_sf};)cWQtkf|%nbgOWK8uQOw~;T)Itp~Zt(AJ@-QqUgF9T-v z=H^(Thoa_}X3wKXUOj5{c|&9H=DQ)a0&SX;>E2$&IC6%~dp0+Y=K`5dDIQ_u5h8Vj zYD8f2Qc=4o6yKx!E*+tUK{@YQ$ZP&Yx%mwPhlQ#*FY=Z-|a=p~o$?DBSuJvQ7j#bGJN{xeyY#qCq0e zLwwoxEh8{Tm1F_FE*U?&)ihx@bT^E37%fmwOCuEnmD<6sapPan+T<+;8pM zh0@yjZtp@#3*9M|T%)o?$4w38JQ~@>MFFz3CUV!cXI9z`3gsb%TqzO5D;K&)CdUUp z-QKVGV(0mbsu=}&dc+OMTQ8qEm+zH)6X&+>?)2=q;yO3ins)5^U%GVR=@@-A5+6Nx zy(eDAr0!e{&e~cIYbWw9oF_v?%D3x9A^%0Q19pCdhkwbbm6)ZUKIwwJwRP6qwYfk6 z_HZ(2X86mPAyK$^dL;F6C-L<0+?%J)X%RHwNF@?ApRB)T?M*)2_4ya5=4tym~HP=tG9xtIEv?mJGgPE}rT*nN6ff@#kNoAM*(4Eo`p=hG>hK3^ya&(5Fm zs<5kw7f(Ev#K8YH*l^`XYSw*9$R}vjmnc1;K(!Sa8Nz$8DE&qJR=~M=f=Fz&KpnRRM4kNKA@}1?k^zNOi zF28H8?L^O&d+Vi#_kXznG;!m(GD2r=-7m@s*t#iv{%mmV`0RJ_Z%g4lQnMedeve*$ zcXnUwvni^{@N`I>=AB99mHE4Z$ybtZuH|TsvFIcUv{{gxFsQuju`C;UrmeEoL-M_8 zhwzP7(}5{L)v3d4i^q4ctFHPg9|i1gPdT&G!{7K)*XetrDX@6{Le;$1hWqQ&0XQlE zU4m4QJ0CF5?pgV)5n4c%Sv|Jz|7aS$y>VrFZiHH+6xw-^Oepv6&7Nt;C`}&7Vgsr0 zgF8DvXVBri&aV@S*aLsQx3MB{F1)Bf;=^rI_~Um!gmwTaalhFbK=K1d3v4lv5Ph{f z${VhPQLh6UGMl?d*;4~~1+)WazjhqFmgr8R&#+QpVpIWO2vkKgx56yySlX9_bn(Sw zo4TumQe70@8orct<(iha8J1ZovNJ2Kxfy=sT`yZP8yG8oZ zxf_S_w)aZR63ddAEAp>WeWki_eJ+GLohL}h+b56ph!c4VpWfyknJ(@ub#alls7PX2 zzfc!p7sXnx`um!9{@D1=k{we>P*09tj;x`%_b9L7kb-lTV7OAd=Dx)yCXLr%QWUPxxNm0+0pPI zu&dC=EXv$QvRjbLzSMS

{iH%7QigNt3R$>Fw(eX3H1?1F??}u)hmL zajcVl@Q@%qn96OAVU(*cXb{&aVPG#Rcq{LD#`GtjJS842U&_>!zWAPQT3vsWn5wM{ zs#);8IsSQ0Crj(oFBgl5o7TfN8yAdMe2n^5Q$;9x^&#q8E)EU1814AzDBLXB;yiY` z{LZKO{w{;3!T6FySu}qb1>f}+uCCHJn*!loGo(MbQ`fjv61WjxT4AJ~%o-0Q5%5Cj zI6gSwT5G5%F2?-Ulb$MD8SknHYMmjyqTF0jFY=;}D^dMQ@W#nJa)j*;7%LA2BU$xo zBwv{)$i0U2mflDV!$zEt_a~Q|oT6_vBm$Y3uyHo2MfgFJfoW*bO z^yfo9UxpMW(tCXSprwl|-gFY!zNJsM%5A=Qd;9xGHCb)zQ+Goh<*Y(|hiJUp`wE2% zCgtL&lvW&c+6p9U`@RX0v^3NHz)Dim`IsCc^a%sD(dTG+%gf?D?R~l!p z;5YJP-gNdN&-f8!r>_ufprPDMJ?1)z@CZySp&5BppdYt%!!P8 zr}X9+1W_bK!uirMX3@w1OG|%V6NSE7YapE~ECChEf#U6>=%GGnCbcNTJ;kAiq1pS5 z&>a70D@O(nGiZm0Zd3Yw#HTu(q%;Osvo%CQtL+PtL?GnA8{>`^VGH5mzXVz}S9=CX zg%`l#9^^QBtk(dq@yX2ve317|FngQP#q8 z4K?s}WtA*l;-#~F%j-zLOwV^Enfs$l@_N8duB&=M7Bg?*vPaQ2meQGt9`QVDvLvxq zkMKjwbiI2`;<(+w6S+TTP!zPYXf#dTm z_cQvNX)oq928r2**7>fQR*Pfu@{D7f#eTNTUo769ep%XovFukz4 zYUJrhaXeHO&3q*kFW7X6xvcToLxvw03<#qqWoq685<36Qg+!Iy8{VRdM7#hf!6^G` z^|cAEIQYH8eg`%Zh{Omuvw`*F;IIu?FHkbCV89nVe0}v-#DDV3r%$E;Ue)b|{0nA* z)Q}58Zh>VzSaOBKf>sC^ZWwL;oN`#yg~)O-#c&%er*ne^#!~O zP^#RAV=z@fB#FY_17a~ymjQAB?%VN_55OHq2CT6GQyXYv+o}}tfDHv4SKvfM%LAP) zxKQ*1^9_1`n8YK$L{RHN#tLKY+xBJf9F17#5rGo}jO9+@aoubLZ0!jAH+UWOf$9JU zEIvLyjB(Tr=boa#R;4eEH(Empp#TU%BtY%}$)ICkAjUHQn!O4Nf~OEPa=EXrBsF~C3GLjDVAM@%0+;Q5i z!uN#tkHv&uFZ#v(^?V+m=a^+NyRw}WbVE2++H)~7m~c}4(7tPAder06_fhH^a`14| zYGZKkP3o03suT0r+S)7erq_Tu?hux}bxmn*ASGbdws!SPDx))NHxcg(Qqo(vsbRLfXn8y>B)ekLoE)z_{ zdn8`oEm~atDlW55EKQDjfFBz#qx`FyZLioWArp0F+cMIhm?s)zF`U~WeDsWhSVDWNrdg__wV29 znqTX(Kx#HDhFQxf_!JUv{Zzyadw?m|784L?YG}N|1-nSFo`;uC>sQ!HU;t3Rq%tg`kKb~29CbX&Wb9#7B55?-+AJ(4uM-3s1cFua5#;>-pw9^zN!nv|=J3(`z zqfGwP$x2^a|LMb-i1;mUd;&Pu`1fy6cN3my8=Gp!Qqz*19D7&SR=)mjgeLGhh^6%1 zW}wATDHjmQ^b$fM#&d6gAfkry?EKvKd7w?jeV2PnA3Hmj;Ax7yl>Jhhaay*)q?&T~z8l$4du@P42W`2u33W9%&@ zMgmzJz-vpuh%!Ex05T}37Q1Um5-edmft}p?!XH>lVD_<4;(G?IA^^~oe!DM&I6;yM zpmzNFS%IDiu%+gpv?kyaR8m#NwQkCU9ROG$_@C)Mq9srmly|l9r0h2CWS0B-x~1K5 zXw81F_fe<4&q3_35cbtxi;#NV0%z>EUEljl=^Oj#cH&Oyi9BKDTd?HxBSxstc#mt8g_o6%uw<1Fr4DzFyd2le 
zFpzW~D=+kCL;3a~vH8lX!)Rr#pm=YrKr91c7K=;UIv(AZ{ICZO>aw>f^*X2@hj649 zhDVQ#*v!gDyu&jN2UFC+A*~Jt9cDTkOl#m))I?;iT9&EaQf5wr?NdVIRqxx!z}^Jx z7NQg$ctppC>-B^Sw9eXgBQ&g;TG#eSdH76hz+;m-%>kW}{LB zFj;zPYEyvvYYt*y+zV-o0D^czy9>bI3-}1dd<5kdrmI^NeRFd{U^mLk>jhPHYHAE} z0mOj92Kf$1a=J|}d4uPsa9$%AzVHMyY83VQEM}yqW0P{*z=jWl*(7UWANXLz5xrS= z<0Ek{e(X!ys>xb#P%%Ljf`MoI8iqz=mMOi@XTh1KVCrI`FLh&_R zRlDWkQ583jfQI1RC$o=qCb(Nq}B{*uMJoZSRdew z#Rs4XemEhX)ryxdthsU9c122g^Ge|g{~PsPgC_NoS?9G!VC`_;vO+FR@c=X z!bl3(pU#7;At`EwPQXdC?2PU0>hDKzuQBgk<$#BB)Y9|ilWpU?`)#s>wvi(&nuNX& zKmRaL($@lh;;9|LO{Ui5OD z@XrCI4(vuCHa8=~YjZ^Ev;mD8a@!FeKoBSjK21LF#H?wM1No5kG+3}r-fqoX9J?-E z{ZV4(91oG4keH*+B^MI-IrW|K$$EBx{*0=6o(ECAI~LD53&GXgM%_F0K{n|Hboc4< zo;GGpUB8{^7o?-FVj6p%QCxngR*`AUU2F^fc-5r8|GqLWhNLl)?ex@xQ~Wbp-oV2H zD{S6p4NLp>bZ;c?;&Do$RS2kabM+37zKp_+9Kt7$DSvBL{?oyT=-u?|XNmE}8+<4p zxc%W6n$CG6AEm;}hc*>Qt|Z3zvIb&s#ZM~}cbb&0)YI~e2_Bz}`73arF#M9HW&70Y zeUpd2rq{1qiKV=yzP@&MJFc&i^rj)`g>W4csoTPqR6?Q)0-)*=M86S-!a{Ao)$RiM zJkU}HyuxM&t^N?_+mO#UW0zr}^|ubPVuW?=0poag*Jc%5%G1Ce5MDllx6FxhW2o9L z>Pwd=?D2m%qKxn)mOecem!!102d93oX53xQPwxBZ)BQyiMYZukD73b;T!r!RDznkP zVYb<|&6iAoh-KqAV4>@Cx2W1AQ*lQI>=>?Vc<~tf@MIsi7Po|o$Px+55;3VO6c2c2 zwAhO?JqSTDka0bH$z}+ta-zvYhoTVtEwXZMLh78YZ)Rf_9xS&^ z?IR|?VUD89UYVP}#iF~!#SU8Lb-Z`*fwvaj@V&BuAh@WYV-!Y$hd#HJKif62B5pVA zRRF>-WU#)T^uZMer?4H75FD`dfjXL#87SB*;4!teHB(*+(=L{r(YyV+8635*9wB5% zfmVv^##ai$BKb2(`VS|Xa&X1dvhHwhjMcg9USKhn+Ol0fH7`Ct=f5TH`0E5aA>}n` z*2P$Fd&uXx22&wE$DK1#2d@?lT?XO%BRJit;_78<@l>?kcFZ?fz(h+ zM1?{HH^uh}+J5MkX7Pr!wY9b41es2m zyto`M_W?Kupd0#HbrqFL9&^k8^ycKZ)HP{hPBeQu%l8C19^Ii1uAXXI@Jq2ui&Lq~ z|FPS^JHtPA6tlr%@s+yAWVf)!isiGv%ej=krtVerfNdNc!#0s3jn0z|*FR+vgEO3z zXsJ`jw0j+t6AK6{iEfai8W3G24ZKxQVefB7`wb zqn-mY+#V6}HNAo???-wyD6L2T>6~_Gp8S1Ws?GRIGAkd~zval+gf4+@fBkrzn#PD# z7jmmqt8`D+N_-qL{i0svX&PIvWr*seHlwH2c<>X& zx$upc=bz!S9A5ie=S{Xt(mM9PwXkR#T$bSN{Uhr~%xCW31R2wHci_{91S z(Az;9P&(!UTRDkROy4R5GvND${Hs(7BueWbq~)GeD>imt)O$L3^UaT{5AnS|Q|%ut zBi|`8pJVfEF>A7t{UQ2umDJ$I+xRRb#H#i0?3Ey0VZWb*LD8qO$yl`4ec3<%NPYv2 
zxGGC@;n|!-3?qw$gfqGKZA$TpOPPsJVGbL11-+MHv}EW-v+M+zUwc;yXKyZ6aCZVQ z2(uI)2yug8VBK^eZGgDqE(-NEVN}l+jF*#q5kPI{a4B%EzyW9;&fDbetf6(2CKpq% zX@(y>xgzT=l-<$Mad8GKqcLvtNh}PJzLqD;7eaO0;rsV}B6f)QB3!zm%mn|z&DqAv zwFqGWQrwe&Of@vqn&s#cX*9H06a_JOp8qDP*Sq=_p`eP z#_dd%ch$DIu&J4{aLrck+vyvdh8Cm@%_)A9QBxysAYzO%Vb&ls(xo4Kq%lS%0~(pi zAHcSN`wo;`*w~>CfiBH~x232E^~)YO^>&K)q-H}MNG9l24m0JXBg6_fXxN72Ll}1L zbAEzR;h(wYWl;W^nRS{~qXm6rS7b_&r%i+xK$1+i3*Byrr*vg<+m66c5m?D-irFGB z+KY=}95XOKZ)EB+*U%MDEdd(}P?-kmR$)v40eK&u{h>&)>w;Jk2huQ&Xsu7Yf zL#A;@<-j`F+k=M{%TVP%0o;y+;dJiXd|ZlM>QS}gE7Kn3!f@A>1Fn^(oT}s0IPHkR z{ec(@)=0dVGYl;rK+n>i&i@VUe4tWEfurGWr_y6XoprDkVWshG1)XkY8^uU-(du1@ zq%SS?<{y?rHx!vX9HFnKwQFbkXvnijTWnKhkK zNqGmYa0=vyGDZ^Ia|%~(R30=p$a@Hi0sv)f+SHj+h#(>ZjJrR7F0E;P^JW&><~Oc< zd&k+(;y|H95MZFCbp-Zp^>HD6tEKAwy}dTTxC3dR^?QN{NiaTz7aBE1oVBxa2iV6o zlr(Jo`UOEFju2H2JWr?z%>odZ7<3V(Q#z2=vhkx$&`@y8 z)T~{#1*QvB^Xfh7GEIDs8Lblok}Ro!%lx^>A-0F|Xgk5kYTL}T^zltENicuAOEEuw zv%nb=Vg{5JBjs8Z8L?Z8soe2aida+YlT!oaHp3rZh}}F?J^j;XA;EYrSz)YffFdf) zUgx%TV9v8Z`*)L!nRsTj_umq}rYA@}>lVejO_^xZjV_z_>JE8ns)J-nLhTD%)wD-! 
z?lOycgA1iLuk)w@MotG6;LO`l(pe}+0(L*$&Is1CtkwmGbgImbRS&TS?4RjqE?|e_^dpi6nlz=J@PZs|In|cVFfo?gFmv z2BQxYR))$0Lw0nm6B@0dDjWzwPfTXmC43;4Zo`t+M_RGs5Wq}G6pS02ry}Ei{jA-4 z^wkB1+g4?eM3kq}^CKF=*HJI{J1yL(oVQ~Asx|Y4H&=TvJzu`NIHMD!pjs)G4$=60 z@@cgUa&rKl|0!$H5|;R5ItfTjy-JJ`kHr&>NvGhW`>+S=JcsE|TP65bR*+XbNtQ0ak> z<9S zh}!M4B~%*q%;gl>Q7{@fHu&z!#8zz_8YF&DhbfV~St!;_Pf;^JZ;g`6S= zgRszHfj2a0KO7u_gis3~gA`mLrelin)??4rPnv2E5JpS6=HjWAa$cnb3b#l+%wAk+ zMvy3ESsd(}Y*ya$L;&Yk`YB9`FqmhYYz4hZiIYK_+nJ>ZMvrd z4Y^g^Uz^EPUU>g3ov0K)yAmDjzxq%fYUIi$S0zwaa2gcuaU^-5)^=!lpHh84*yOjr zX3&S??;i`h__=NyP$R|DFgCLARDxP5H5by%QuTx!J^X~`vwMv88k&P~&19Ld&1)E{ z)x~HzvxC`jXx#BCI@#GQHtPK`zM^#o#V68^(2C#ZDi|f)Y-zB$Mws=E(CNMI2gH!c zBNM_f`XSqsfdPa}t1kSYRsh5Z$c3dXE8MT)$p;_*L|RV|4t1#)ksxD4MPmJ#HJ_BA z7UNwwMlmOkejuyP{_N>ZjfCw7c>|B!BY% z_+8FqF+4tAxRV%2gp9m^@F@y33%6dJaU(G6S6SHKZRAcKG^2mrs&~oXbiv=;j2m%s zG&G#n(`mUZup=F=|O16;X{r5@Sj%0sq05yp&XePQn>1vr08 zefaPp17-9U()bO9f65-!!(5|(T5|R8pSds?S1RtjP{@Z%SR*9vMabpI^#$c%L^E}Z z9|N;zSd=e2#Ae52i?)}FLwk4P+q9TK?fI|l7n-%_W_bh}3vL0UCHn?!cGh>;`F&{? 
z-D;#iUtoR_6TlK3?dvd%Cs>oAwO;Hz;k7OdLXkM4#G@Hg=z7}7;=baA`oXwmY+`~_ zB&ZX)jx_i1CgPGI7reVyrkmKdTf-D9&h5GA{HEYTyhC?vD8V=`+clk5ZH;#=+;ioD zjUD@kWQCQ@$1E6rAqBMf6JLtv_XF=s<&v%L9ibEBVd)r-$RZ5(NBu!*vd2^M1if6+J zbi{N@=)v^=RIgV&P6X+vu=jqjJ`Otiq6@!^mq9q3nNT8W^Z~aXcx{d^{;DLlx-Lsg zN_Im(fQf+tbO*2leD}`U#bp<;wkE?znYp~O;^LhRj-IA0Mao%cgMxoP#^Kpe7px1* zjE6AIOIlMnrsk`*^>&&qOK6wir%S;POmvRuj$&c{>-D`Xd2s)9wOghc+bp9LKSydp zC!AKzvyBAv$NF3Pqzjz)fJIL^Oe7Ink;W@ff8?YEbcv15>GKXe8+A6_OZpZh(EC9yI4(;E8B=3g|5; z=vQH{193Nd+uZN9T?9&7WAPQ3yXnjzD+Bro4u#MZY_bqK>I#y6m!KKaO5?V-ab>_h zgq6pNl5iC7_g?)_?C5^T7T=$}Sf&RGcPnBZcavUNws^0p8dxXn5#r;^s`EYmG}mC8 zSW7}c6d5PIkrh{lFE`HTV)ZLS)~;{iCv_9?J!*?6BvG=zH>8M|i}O@)?F>Uq35rCv zo>PVW!Hbefq}3)2G2Rg!!Qa>*T7{GvPJ02*EtGlWI-H-6Pnf@)SE~iqOv8DKKnOQB1?ZUNWRi*rWuG)EyetrvE((@l|0Bf zKB{l^q_CD-V43kG!8K7A z&67XZiwnh!k%2|=!&LgUb0|)USTGv=1_9@fq&ZP{TZGxoYp2nWxN}GsS)`Gv7-=|E zyEMf#EevU2Gs!lvN5~?l57aa`GnD+AdT^=6*+IGC=a>Llz2;wV(#z5t8ymB-$USg| z<#q5kkaL^U($fn&5NRGeh0}h*2>F=0?fkh4)Jy;urY0vt_$gclsCdBqZKkIur&=K#ZaExoae5qpXowe%aQxf!c{4dGF z2+4~Nef+;Yam~p}<^zirr~Q?V>V8*YU`p|kc|D|2VmAFnT<=!fkzGEt3Ol;ZZKEeQ zrOl`W3~;$Pa4Txr+bh^u9Mh!2MQJ0VL%;oD5GczNqr^0;vD?&EPK5G+liC>@8yh$o z;V%oOEI)7cA@x)K?DA-1khWbU>QnGn6B z(G+xJMqFl6l-G(?t~a#G@h2axhAzD-W??NFf#Gz)5^9cTi|D$&G6*%HiG-;R+1!Gh z8%*SIiOFonPdPnq42$u@c(3gO2COh$*J_Uj_Y z3Ss>jH7ai!O>-XW+xYn&5WhYB`0n6y-up*ZXI4I$A;QN$CTgT{vHv**@zJOY{sen2 z|yd)A)!igzpNNIKtc! 
zus8(M4*Z0fpm1KB4S><@?|CvG^x+4vAm6Z?BA8yW6j}~Sb1%!8H)Zeh$Bx!+z)&Al zu<*=*eGyD9SINwXi&mr`rO4YZh=jy`qLl+a00bfJ|6>^g8FNNP^H${I-rnAzK!bq< zl=Auc`Cu=b^ah-PKn_cI?;aU3F^Kya_M~Xhf~e4}TJgbU-9K66en?hHiiGw#H=_&# z0Yap&56TIhM4{fUE_0aq0ntPfO-n}yq;O+>&RxQe~9tZr9U#M9Nh6U=jz@dQ71(0NBHgh#B$hBBg z#H~VNO#)jPJiM2zD4n1@!_acs78!5A@fd zaD~|^!l+SDu+pIMeNByeKoZQO-S{*8&$kwaLFcL_Lz`q_+|EVl6;2vfer#)(2&}lX zrCDghS4r9dkWgeBHPx%*#Q%qK0|W?INZ`iwJu%m=~m3*t1K zLFM7Hi0}<-%Du^2h#)ON>ypTod3)(cQoTrp7V`xwKx6iwZXJgT9Vh!YNqn63@!Ika z=|;i7i$>k=N&eaWmFuh<*3r+woyuFYxLDTJO{lDs%2;@R4$A`%ZrM;~i;VnYUB4^I z8C1b;c~AV&9X#{98A0>KhxhTVsKxNVU0|&46zk}+`;To(Y6A6oSk(TJ=%<-+Sxq=xj1ydK*ZqNpt_#}M&R|} zFz63;ObB6USXfk#XQlrSkln56fKvjTG>CaANaHOFl0b!q7%#(gT0ZEs!|#-noqZQ< z)RD~9g1b_`{(BJ@n1rmSPKF`X~u=3Bg zcKB8&%s@e14N1SX(cHF1bAwt_SC`*mstOoR2ym_c2+lo__9GxDKG)=RSgkZNu@AxGvOav3X9!QNq(Of1$13y#nJyN{Snha0#LZg0*I z+b=UF^}s=B+p{g3ZL+rI=^4sSBpOAibm3_Z0}Vqd6*z)t zUDQ3c)IUV5R_b>C#Y@%u@7 zZHeq6<@xB;Sw|&T-7f}Bwi<4d|8B*3{%j_jNfqjg(+PX#qv3XOab!8!ZjMiF;-=jV zIIrgF4#LA1SsG$rPIwaCQz+NrQ=r4?L&}A5V)pQ%M)%GqG(QD)zo2l`udmY9SZTWy zisf{zdR^ZsHKGqMmOcgW8}PLidwo#e!ve#nt^?m4;D~e3P$LD-+!yrf9~p52;0P>a z%8HBQzncITUl1q(aBw3;aO64`a!N`mvb0dK%mdR8=KVPL`PuEp7}mD;K*VF7ia@D= zj#07-S4LY)>#L@&s;Ue4f7yA1Q&zt!Bf!MD50eZZJp*|*1h7uRCm{G7PV-y-Y#^1W zP)#`j!#Nn5ZN$NibCQ+qg(*Mec?lr$h`2w4F93K0NZf0IO$hL8*Zbf_H3bwEXe~TF zJ%Ppd{CN|I)wJjZ1%2T+^!pE_aV*0K;q&LC%J=PH-jWZS{1YF|BjWoRlr=N&N(hH7 zXsLpMD?xPco(wA)85tr)7x;L%Z9FTSl`zO7Umh*|)w?Vil3NJj|-4RypFr@(P2_~j}|1utu zK0*Ij-c?+m{U>toSe>egZ{&BbUS8U=6jnUd{!LzS9dW^mMh(xQbR=%)d_cE{gQ;!< z&J)2^`)XbynK#}~WD`%8$c^}AM5Es3_enIoiQnXwT#69gS~MWs>Sael$DickWn6GExTU{)Gsxi3%9%)Sci{ zP#FI;d>1(>-SQL|ly$emM5*G_UEs=cC1F?U)viH^u{DuyN5B(Dw4a-sjCbEZ06R)p z2xI%eO0r}j0CxA$-WN}bBS6tII4v>i4fXZGg+xiO#+5(w#r5SgC~FkIGxPJGz$zR2 zf{;BAWT2Yf13(jgS@ClQ;!wzx_5dw)KQg$101Y1v2j@qFLGiw#2Z=rD9hj1BKS%W=TmhEWXBDMYVjAR%1ZPXH5iT(lmUmwPJ zPpAfmh7M*K+{*=)Uw|$+L5ISIEty!5m1SgXOd%frWf|b{g}yRj)j~)|fy^_@$DONg z(B=cb!t#S9%(r0HXLLy$Oa;51pT9pDa|iHiK#(APX zI?#0_pGHkUSrGNCg>>q=D 
z3)oP=xYEi>+ERy|kMEs=avJ-~fI-Ca0!Gf(^774B%|`KwChpt6%jQP(Ll#IktKXr{ zpfoicJ6i)$4~|OpY%iiOd*J*4e9TWDKjyyrkpqMs+FC?%C2#}|FXD&RxjLZ36UZ_& z`k@kR42FM@fW+^Eu)Vf5`2>G*t)d3UXJ;GB%A#F{yR7w-q}#DUs+>$FoR9CLf&RD! zd|jwt<3z5C*|QN^H$T)&-~^8fvjO_jFH5H{)+I5q%UzGW6@wBCiL`$irje=ZASJoT zL9B#Oz-VQdT;D;l7hNl!}) zY{A|2>dJl=$#c$s{tIhRrcv((reG6%lLvh?(45iYnMCr_XRJG*X7p{jexUc!_q`ci zJLNzwfi)TQ4kUFS0ujhz0}e&;DE`>0&Pp8_36?0aO7(*3-6~u*r0H_}=1gg?Uw*Hw z8uda#e5MZe_cNd+fwVTS-*Y7wk861#NoNX5Nzl?GJn=x>&1T&C9Ee@Oh~Gaxwqj!8 zq@|~Sq+hoIMEN#w95YX64r~xj5yf|-v7bGy`wV_}s)eF8*H*_xyDLjfYW58~+tal$ zwFR+kLi&;#7Nzh7fIdTgec)G?`ts$Ct+e0b;ES^V)e?+TE{U?J!aF;~*BW8606wP$ zsetr+Q}h53#s4}WYxD#dX6e~HP!t{RpX^~Me)<=tSi#gNC@LE1i&>?xcfz*`#%+_a zds{Z&KE4~zBcxOj=mXCi(8_6iyn)fR&vSluHq!Y87)G<@PHN(y==LMEU7atkDB^u~ z!trMD&mXpqop3ywpT{>_dthIE-iXJ#l{_{mnL45xyfwnR+}3Hp@SG_(^JY}N5S42~ zk(2NThm~o<*3W@_FJm!l4C3YnsalT0?*+QJhjJ*G>v%osK9EVpO!$Sq(sVODX4&m% zW>UOn%3I=fOfW{RcqkC)vP>?cF%Lgsq$ zBMpY`^-oPbhqf6S6wbQ`Fp@=}H$aL6;lNv+8o{4>+apr0EIvhHA%3X{zY~PXJsLF> zeQ*aMt4{S7INZy%7=E=#GPRP;dsw(1lb6W@*pK7`Ja;Q)gS%A0(5H;?kHA11c04d^ zDq?$}F*E!jqYQSofDHW`JkrSG#kskG#$K+jYQ!OZqoYZY8j!sd|(0fi)nG?5_8U{JF=+{0aULzYl-1Mvpl|vCA-c3wa>&%(Ya3OM zKb)aj2}xkmn3M6|Mp@#O)Y2LUe?RE5fwZqGyE7;KJWV}dOU$@1cY3gZql6It<2swh z+A{X(IY1qOq=*05xUcn4ccGQC7Jp#QBvD(4Clc1XXh= z64D}`X9%qj$zK$h@!3sj)`wBnlI*{ihFBApgphs0(nP9d;k2&xVQS}il`k8TmrOl% z>)Eer`T3)0@U(gOgfTH`UKrSXUKXn{$gTLu#l23FAt6%72Ap=V&a^Z)ze`NaZ`z5X zqN1{q#3$vNVly(?Sqx>esh!qFc4b5er$)0Xn>VjRBCl*J! 
zw@RM75DdG)yw9B-t8ZxN8cz0mEV@-tOm1!2Z6s++Gti4WSyW&vjHsIC6%)udWT~8= zn)yVb+Cj1!La}cq53)w**@m6vI1vg#FBTr(3<#y;I$#NB83@c40>dZhw5UV(klzfX z%5fhQ0i=W=gh{=qtF6Vv!rC6nHP04gLczN=sxCp@|C6d88-$hvHL{u%%^wJzgzWeT zLn)DvG9=_96Mo(*O?~<#ASiRd@di0gz!m2FoeK<~$4^lO^fe504PViF5+%~kJAS9V zw_K4Ff?0$e>V5+$(Cgg*^wyaIo~3@DwQ>v3GO3(f8H`;wLLwJ+G#~D19nDk~RQXow zGpH&!IN4e(0@2o9hi?5VX`gPZLIrb?MQXOGJp%5}UdcOs+6XxTd z84Ll#!EL<3+Io}fgl}}%i!gZ{#s?c0;9DU)3x^3=2M)@=btN?01)Hd4k+I1;wKC|j z7t~hPR(sv7-+kT+vwVbyT#wligwpHVnwsK~lKKGL0CEN{a;lGsh2F#TSO73L`yAQ% z`L*`@(1UTup_`ReQ76VGe=a~yoO^5z4h|cZbc<4!&%qv`o*fSHQ${C+{6*@<`-l?K za3A<6XWd_EaM7UgP+&DDo_ME1IUs|4;1JnkHa@gQCYDrW$9ZAndYc*$DOQtQLh*6< zAw*OPu@4fc*Ibc4Acm7Rw@BYKAkDmOl0Q7?IW*GYG|)87n|{INdoZ1xd_h_<+3K<= zo{sepOkt**T=Nyf(IN8q3owMA3qcJ>`}L0SNnrLzjo-SQO-)V1eTS`CkYt7L$i29| z&2mry{Y4wg-R?A)4}=feNQn6o@}_DRNqygqHz2UmJ^)t(9xLW*s{zt)lQ`4_#KgVH zF=G_yXw>?DB(dD%8Y1YG#w|`bSOYhZ_!J+|&@AGkAUQuNV8BfIvTXdq)PZIL8wqLV zj81`ZSOkfR;I8RkV|b3_{l-hHo#6y&%u>N@P8fgtQlIpuWa=6?0P@?86nx-%{YHTm z;0}d6sn4*oHjWT*2Q;PW3;U-RcSJ7Y+poTL9Nz&+Ynv@_2m`jKIMR_p_;hq|0HXqG z4K{YDG>M=nC7?j&QhP7dKPF;FD?0(dDH%WrcVp}CQDF1R@A`nXNHtvYEdrbXu}j{h zAN(`=BRmZsVV8*q!?X^rAbpceph$^mQ)WA3>9voPVgEqLANI7{#D1#WRK z&Ug5lP;Pvj6)|T-@9HOd`Z-`2g2d|>(Sm>lKlXy-W{D2+HsHmvW%EH7>3@w57F*LhStwfXY4SxPy zR8X)2=MZ!w(Eo!`CN3HZ)IO##-=Kc+;u@+BGHMUdAAnU!g;7gtHakr)bQXtTg2nh) zpeSwx>R$oZB}uF<;qz@SLX9oE=e?}*K;b}0S%HwRN(m;~K&}#TUFwGa&!riF<2uOM zwCbHjsFBh+rI_LA?p}36?5r>zksm;~ZUGON55l7j3}%7m1(qfV=W?)ujNEyUWYd=N z{huBJ;2xmJs|t&O$rmt47V2!1fJUgv+noN&Q#iH zXwi_n*wZabG)sS~ODEFGso!$B{|l8#4_B4 z8+N_KH^)TZAn7DcOw$}s=++>dc|oc$RwGK4yL34M8I?feQ}2Tb;5+wE?0R4acjK7L>iQr5J#MrSfps9qpX+gC%ArTQM-g(mp5knuKmc2GHfnoqi{eV%gzNytM z@9pgctdBYv)?E%@Uc)Av8yg&JBW6cgMkXva;{vSc%8lDdot`4?HZZaT5d`BFtz#J7K>GNm8G}Q}pCKefpI%OV zjpj0^S@cE0_+2Jjnoux?lD_|CGo@}Ys0~&5rFI&-sdz0uDlk2NL0D^Wz$iwK*4tW+ zQ+*YR0>bTnk9$~~4$eihBZA0Bb<(@7%g0~76|4%Y5q>*m2#MXsuWeWjWN&my zNlA^N7p&iGy&U^aLp3gxbCU_lDb zxq`&T46JXy{V_%C@v*Vm@2j|<(W)kPL)&wgfS{lt8rXx-NTt>EkcsUS+3{bbvgnQI 
z{-*ZzJ%>xvb#xo)$G%{BHZubpDy^XY+W(bj=nH}GMzQOeTKhQEJL=d}C=>QID%ADw#u*y;mVeGMtZoPRFx z#G&|u6$JT#3FuDyP;kKX9Sg!&KY#f$J?#x8J51=lk@>+uI!^d3JbkOM8F67MYW+ae z3p`u|C!1@L+mjcbeG)u87+l+0T{$>HpBSH&H4oq53Xn0Na9Q1XN=!sd+yopqFeap= z%xPfn9d|0F@4EA44I9e!c1)<*~+a(_?Wl?>ij#Y((vD%&-9h(;;xRa5~l5urLN2AYqvC+;g=# zx0m|tCv=UZFkJ$&?Ejc9J$ce*HLm*qiDsI1PcOib9bkNT1GK@j)6*r79OG&AjE$AO zt+`W8-x1<8OF^jyC<^#&qKKuvdDGI|{PlTyJp)Ln5iXp!Zb7vIEM6tj!B(*`d-$1g zRN+7cg^`sj|9#-@K@HRti1>X-!y^x(6n&`l=@MS+Fhm3!OMJ1*%l5J<31N~xN68D# z7EAtxdpCUKE#XI_VxF&z74t8+T4I7tY>A|Ow*R8ect5rgc|-kgOwB)^=P7_0YMOjI zKDr95G})tLL;DD{Z3v}5U%f7kQCBHO<_yBru)Yx@WdP240%$$m3B{SbzwQ@2c;bM) zhErF|zG7Fc-23SN1ndRcG*T7)5L>-z#zc=IR;9}#A0<{9qros8LZl~~Xi=?OSJgR} zd4|DJu1~eaI@}(Qzp~SvSmo>O{HdC@z0{a2m3yqq?`=*!j*OuA!~gL;(KtA7d~bLN zZpSsodAS5chXPTm=1XX-C90I^rPy45ac!7ITb8}GR~LKd#iqy*6MA+>9%I)_)sRkrH2uv<>6zn`kC%M4Do=V1IOjf% zXS|d9FOD&=C>|}M$E+gkahFSGU#K7tthw1BUQaQ!uLT;(Y^WHRfGZK)wKm^}64(1z zT%9`Aw3qO}P|tJtQ8O>~e}0FG1PO_I)lBQ^8`qbEsK`14*4x&XJf^>Kt@$=g|G6pp zl#F7+@?}$S?|7cs<@Sx3_Jzve$%k+0+y3IFqOX5IU%zCOqfFDrGLANe{O`}Qk&LW5 z%c$!Aoae6Y6g$Ce@_e)M3uZg}$Y@vqD&kXfwQ^kc?E^Ye$1xUlusrmBd!hcok!~mw zI9~ISmtRXpOWd~`!8|Rwb&+W28r87n=didFJW}$K_r`nrff?L?gQV`ryz z8zon5Q!F3iFA<%ih>863@C^%SqSKoGHL3UTv2wquyzsv0)!abz(PJhjGw1g*W$c80 zd+>>0NC=xPJBjQ}|HVT@RHB6McyW$tXbIi;HxP2{T|p_Qn&B&#{;~f4vEi!nQu$vk zQzYc4Vnd=hDAxZZROYVYMMfwser2`(7-Zf%IsM#2MPXH%S;?$?h-BTKXK=DKh)wv2 z>W`ZKEwKjKSMCb-d?62Ca5a}j3SbIVBR(SVY>us$uHPjXchD-1n0w<7^L2Q)egK0U z6YJUf*ZgPxHw_M&WD}!(Q$Lzj`*?fl=u$EB{m}||Ma<0~yLWiRGN8!H+E0M$Lo2uM zdhsHh_WXGZ^MY@HsjI%WvauMRTwpQ6=o_(-(|r)PJ3nr9(sac?zs7 z?xiFjZERUMX#U;ltBW3~`S$s^*R&R#DL+0(+%5sYJLY_OYC{N!$9V_=y_4tMmOIo+4X2f1zylQ&~J1ox1U4l8rcSxkf-iOs*$!=N+B;X2d|${C!8# z*`u#7@*64J`u)V0YMcZ3ZHF^C{5K61Z1o=Zd1XtXU0(HAbQovvr`*3<|K8IwbnM3? 
zI9#i2F@10Er?#QGxgZb6aLX0hKaa3zn6!L+hw-3I9__#yYNjPki;Yd&TNHh}$M@68 zaw`1$%;c^01bDRQWLSd6(H-pMH2=H%3BE$z{mUz5WpG_JZG8J> zc-40ZJ(0FxrN?*ZwG!EcF2|#=@#Z9wdX@zFgGgpc{LSXxceD``0qolJ*N5Y`*$Plz znogY-Ka7cB8B(u#cI}x(g{Sg*c5+tgb)Vb->lxb<=b^8^(kmnRSnj{0H7I2w-t%Bp zBG7FoCfoS*-!0h;xky#jb-_}?t|M8q{f-g#w0!9Fd7Zh3{OL=BX?;H12!)}?UaxgI zK14Kf{>T0k2&YXn=kXp1T_ZL0Z+jm*h|c(RW`44)G}8G2n==Kcb{<*mgxug)5%(t( zW2%+q@-F-OhlhB=L`9LyTY|HC6w^)dPy*FxGsr9V&ABpd8LlMlGhM||s;s(fZNIGD zkCb3r;Nuu$t6a)yJ349miKO7pl|C~lBRo&xqh}I!e$=mad2Du2OV`iE^r^7=`Yr%jrE~=<5k`I*6!D0)Hw3up;|zG*l`{dWw^1+G`CM;v35VQ zvut^%hK7Zj9$}~62%v}WBzjxs*Mq32lq(hYc>_JuJobroKd!XznYXE0gvXq-_K*vsoPpOHg{G+dXC4`{R+w;lsw13&UNBY zKu-!qt4N_W>m8sTZ-(N^Gro7!#?CUiUBX&feh|KfY3KZfo;o;_)k7NPIaP4YwE3PrnWN7^G^=TmEk z8?jZcX>$<&WzW`9%g>)&IXCWac|!Y|bZg7dXu$|7d0J+61g=@_uG3)bzUP@Y|IWGD zz7E&zE_ftzE2!}!03m;D`7IgZE$C=z>+(~R!WH{{@!sGWX&B76pJmd11rRGja-=;2 zVw@o51QERpoiXoJD-1ALCnku1bm-cyZMVVFqh;B0?Z3;?qIy;4)-HS6fSZKK2Y&vB z8qMs>o`Pk{g%b6;Pm3o9@9?C2{WXbOyyPA$Q5XB9lM!SU<=6{yV!q5S78@C@&=u*lwe{R|Ha&M zb!Kgo&YKB^H)!R6hSC8@Ibhz5EG#Q-5XklnjC}uscYu(PfSJ{o|CeR!ZxH7JWj(z6 zF;G#(ZD7dhQ6Np%3R#T+`PjaDGUqzTf>f|V@FP-598GItEW3?_?|PnU;8!xPyTd)C$$)Oe12~ zb98YV(LYY~(w^RaiM1FmSY*?U%5)@z8+-Os9qTkXC&w-YIcwH{kauvBihobWR?=NfkTzwA{X0>aTfR{cZ3@YcRT zP#^diUi^7+O$M+t5PnUaofm-E-O!LaLSDSu_?#&Kz6G!Z)cZiQqGOo^aR38SseI#I z&&|R2Im`z@6!sJ}pCod#`W4 zQVYMPR9aPy73>{!1sr?pKKU0ny0g_D+#NXxJ#YEDJ(JF4Yw5JTYIpso>)u_|g!Co|Hao^Mpd=1@xqHxkrI&-krEIoX%PYG?gj+} z32CIeq&ozpmF^B{>F!bxkS^&C;Xa)6p7-0mYwR($ANCg3T66yA^Zb(Mv*IBM1%Y1u z?H|l-dW#u8u1R}2wN=i)0-if^*K>}q!tw=4!y%w9dQk!E7%Zwc0VzN}gDTV)hf<;} zl>H55B#`ZkK&PQMwKFpSoxsiQ3^JqyIr=HIx17huB}&s~(`t)vIg{OUndc&y?t~Az z+p)1o(Z52D%nXCgv>zPCfjV=5^MOv7NxPm14{t0>(;c8(kYy|MLjJ99W+ru#3_|o4 zLjHHS+c_m;+*MaGzE)i{yJemzrBto(uBG`ilb!_PRu(lvqrzYYRKM6O8|M-wwL4F? 
z>Z&8CYA(jGFk217&~xx7jIP(WCa76>El;{Po7_0mI`TB+{1@lVgc!@!zsG+QsAf_X zm}T<2%~Lz@dQem{W5ntG#a)r37r(SEl2bF6{&L&Bzdk?7NmMtSRZY`gx?u)!bA z+Yt^lfYyLymcP*gvB1xOYkUvxPb-qa!9kBjA!=6E8(9MY2#}YrFzYY)%?d8~|0EPu z?{q~zfi}|Vqrs*0$gqy{B+lu2xlhTB+*f47^1cs(Ot0#X1cOIN0>JcR3+>GhecM5< z_Ul)CyB=`zduMi>A~7*BHv$=D<cxks7COo z!&xRL(R|Rp{qzY*YGJ)&YO|tN6-$#CC#$MxmpB z#vOfKKb$=L-TQ)+fgqjScvg${047-VNB6*)^J7}l-26s9Y4Q7mcI*qxR>&U#y@wq@ zayNGde4Kfw8KzpIMocVhU9*Os&5BIA}w5XX((R zdT>crRMj$LkQ%oez-a4*w!11j;jUM}zRsS?gD4Py{YAjP*I9;^X!J_CKv4uQ0J=cX zVb6GQIfQ;%M^rccuwt2jc+)29_Eb`Rs+il@BJLfF*U=KBsFcA3IOx96!}1lM00f>l zh6T7E#_(#hvJ63u2S;l5(gtv=Ko$Iflk)Kr#UQk;0+@20+PP>KL7||nT@=(cQsnae zeTv4{bcI&Jw;oi93Tw>=6&1hrj9y_JZAcf;2FI!9)0VA0Bs$!gS>0abDkPhl{azPb zckj=k%6db{#0WF*$g22q@qpZko&OLaI2UKSs{?F~V%#I)sN7oDm zF_aKNo9m|=p>%0)b#9@7Se(U|8^X2=!2ulJz;eEqmgy) zYGEN)y_zSZ$-*&?E6iOe0uyuSMGta*2SFJ^)4UX6Iy_XW{ID%dI zE^01P(Mf6hU8f`uBYmm5|J{#tYRC859$kH4e*|O*Jk)7|GCyTPT+XS-+)_PVFrdpcvfat!?{a+Fbum^+*QYVJ#E8fI4u^r;(DESX08%XbiNZrfB&$L^FXw0Yc=qBYNq}ZpQFFJDDNL3JSXOX zLjXG14ixhQn{CVtpr(dS2~fDDx*h)l;QLLBbFF-m$KSodhbni8x?7k@o+RpWwd&2A z5imvK5c%N*;sD7(6B|Tk#OoaOFTnS4;-dj$nTUXQDKx0hHvPW$?x z+nT~cjfOOfh=^T~%eXq+`mshjPPQ#>n_YG0@`#a0CXLl)V z;r;`)24v)PFyX+B2=XLXzB!;$XJ@s~#-opwTRtXLk@ofX%ScI$j*o-t5fXEfghR=} z8}Q|u{tfD`Akp=sr>6({xIpd>r42w4&}d!ZampQZ&{IKS7j%DBRZW0V0a6iaLX%pw zjEsz+{RpHuPL7Th351w4xR|Z#p-qqCBr|PuY+z`eZIuS28#LmAj1hj~agcAyUdA6+c#m<9KTx!{HxqQi5rTmSn&_Rm%#ELDlBMJ7Y6NWrd;{lo0nA zt$|{e;mIjqrUr2z9P6^mk}CGN>xt3!#^6E`Lv0Do?)N^Txtd0SL=^hd-D4A)0wucD zOcPp<3!=->t%G}CIj{<$g+BVZI)6}M$`V*U`LEP={|^OU3R*Ok5uw@Zj~Env{Blpf z)@41Tz>vz{=2dyu9=iC&@?K|EG!iPp^97m!4#-3y|81zKr3L2Q6%Ysq2L${Cj_lJX zPF@DdTfTQv%$?fhTA4|5kJ3`wJE(DyBLmvRh4qL;DRG5}1gWAZyZ2luRJ_Z+S_s+J zXvX?OMA=R0X|D2A7#@K)IkcVBL;4xSNyg^r(BfH4RpdgeB9vSd67=))(wu9U(iW4{ zT}V2)8vwqMwl)^DHirI6DLL28Jkj9iK5Mb_f}Gj|>t4-oFRaE&C8VSt1a85S z{sjev0N}~(3*gxFMVEr+)&HjKkZP*ExV$W)qVnbq{p;47ifE|Q1}SV@+$1Dt0B;2D^BXTi;hRZyPusH< zP&upcat!HaZ_kw)@TeUrh(TrXV7GG7e!I54`xl 
zp8Bb2=4EG#;U63ytCsq&_d=o86<881O-we{*IzNq!$d|H0<_$TcTCYqE=p?>N2>F?S9*Ediv~}B*XbykMTEg z(_+2t2JvPVvw0r@bl=-R(WbDF2(2{-+m3Hl@9@dA;AnE`;>7hTsITtl3Q0s=ZjBvr zGk)>4)aEvxwelzx-tTX0y2Cij3Qef@8hO>VwJxq+35apli}`kq`{ZTqT%WWL zdW`Sh+MR|qyT0BPWEM9T(3ifxjkozct}gS78+&?2WGB=EgYEKfyx;0WfVr7cW@@c}k=O>x~^ zm&-o~)zEbUaUEvntMp@(#0jNIF3KfmW|Gn97}393=Le+LS4nfn4q{yQ%24jjG;;mo zM#n&wij9gomwzEMWlQTZPxjyen?VoWS$>8*Ef2I&!o%w(kOwrM%Oigw`m{#B8+YYM zgf8Yr?np}Q#30D{0Z~04g+Ox%CHNpf<^szK=z)Mdt)@xN)n0_71GM)Huivj8 zoB=ua8l)nyaX^{G=bdvfH3Snd>TPQ>5I|G&$n2~;oPvzpK2|s0j9HK`@Xt`dV#0Ov zYw69;&+C2E;v0jpQRnC!st{j7zxR!-pT_~jv$GH$0n>z)RmsG?LdJW>2>F4dwKhrj z!hP0B(rkL^f73(`87kKj<4Hb=8k*cw4zk^SL)*G3;;!7^r6xt(&W$sxUQY~&q3st7iDN{W1WE7Q29or|)tPv-4o?vKQhueWF3TI^h)Q)FbPbIElkob41Oybz6h(=;Ay z5KV=fRvH^YW&E`mqvc%Og5QS+z9mDGOoZt^8Ig)k+i5LF9K598qPk9%*({P~}vRP$}~ znIs}XD9la9C9I1OJhb&82mWd?ptb$|s}pu1W3%a9vp00XL&Xy`FqT#PTM-pHw^x~y z{5R>rx=@}!HhtW(*wUspF6H-%R)YAk^^@iFPm#FKekD8}85SY9zxenoP0c2B%tJ(5 z@S}U?;y7&%b8bc!7FXR*Q`uElVE%!x4;}w4U+Qb?HeEdr= z$9(_Jb+n;Yw}%_34~OQ*kD*z!v~+Zk&KDLQu7-Q?4BUA0MGf`!K#KoHPycXrdG@)> z8wVW&Ly5M`$JbW@gMq*lD%0sEpmXZR@)3e*@Sq&B4q;{hchggHa{k~u;~oC&2-;N* z&C%k?6spyTj38t02ziFar(<1nofEau6C%K%KOe51J2_q~sf@r;#t&?cP}@N+%oikf zL2=_fI_@&>7k(;1vcbv03=1l!}B)D;%Adblj9YPNl83TifSkC zzxIQ&IR!>DP}C7|M1wsNip-Gwbmim*A0w%oIu+Mvtp*}wQ9+} zAPACF+Dbs@VF3dd*z{nDhwu(i2oPj}V-s8oS)Zi9G<9%x35kQSt+&H(!DV$*&nudq z40%$fu7uD+ejD2?wCz4vMzY`6{Dcfm=vG7zhknUSlU(t35N5VLjD@@CMsooQ4LF;i z)39jm8BFIhlap%0+QTSy?ARo90s+Z&xUB_Jv`G>8r?7 z5a~~<%ngXQlW~K~#)>u+-VxR#PWaMFw9o6uLzerEQtiuF#+D|IRG9pAVoKyN(TUZSj`0@XB? z)7iz4Gp19jCjzwoJ!1lw2mRQX65RY1B$N#Mp3MYiHLBz(inBjR%Q-0i#P!a!lpwyn zJM?f;23b*aiEgV;C+KzH2Fu3v9h;m?jE}$DiG~~aIU<5tzYAx$n1O&ED&3kt-D!D! 
zQ{M$S*AlMkgw?~R&?5>8ihFx49%svhkuUw?;?&+y2ZN06=FwqSm7Kh_vNEB~3wJQc zU0Eg!009B>D)==K2zVbMP8p6zS#qgNtex@_%L;gv6%~MBgXrKx1;iFvT3QYlgWCu~ zUGh5s|y$fccPHY>WtCfXE>+ApzZ2nH-H9u0#k}Z417^cfb}56JeFb zOi(;Q*I)d{uYYZCjc{4n-bV9wtvp-G|4Zq_YHBPm`>#=*wq}Y2na@W5J`Ew80 z0E7jqpy+P>;|T5tXg<_Zu^>+4_ch=fwB&Ks(Y=$y@=v%)<6k)OB~7dja=ls)07zmHJ|1I6AlErMT2Xc0bhN|rbnlkDVg zlA)QOL8)w_xHS(%7O_3Af*AQX*Sw~truTI~)H3lir=GVyEk;gS8jqC$;R)DJ5F3M_ zo&h+&C|j0cYkmCo97dOzwF%||oYpjNk%7Z?V&X#8J)#yt9zfJ?(rv9{A3l0SAC;F3 z7509hXy$r#Q;!VFWSQkh&l@tO7Jm#AJbSsLWHZ+rAK$#t_S=b??GfR9yt_Y>ixcY- z6KUep_TR`dxD*SXy&<{gK$HL6yBd7Gsnrr~-W*;3m?`y|Y)-z7Vo~VFyF+#k9L_{H zfT~ahxVx0J)ZG5A&-c_4+m{oo*`QKll6$)2G~VKO{GI28A-_9Zf9oqU>dQp?*zjEg zSyou?UGQ2dpF*_qY`S3~vezF_Ch~rx={ZB!a@WY6GWji@&v&+#JRcRkVW_@@BwLn) z0A%OEV?Y2J&2Phdg0rzIo&^mPlPa<#OBEHF0xCWoL&yWTZ3jf`9_%yp;HL>M>olBy zuZo92`)O8iI1A_R!6Vw2X{P-~Vg*XWT`oSp#&7fK)^^ZCt*98adt)fszxa!X=gRJq z_)_V-$}{2eo3l$?$6o5KfIC11HcP7LNR!Z^)wP;0si%tcj7{$>oZi(x)2 zur@tZbY9$%d%&F%1|K!)6%XmjuSt)f>sRhurIM1O3Ke--+cE>wzr86rvo{@oJT%3A zw@u5)cE@9gi+e4Hw?y?VQ}v=k^6UsE0$l*d|J&^NThokhs(#BT)%z4ban9%Od3w_j zr2Dq&?dj9D1>38;Jf6bnmYJ7tYjc+8dc@w@8e3kLtY2HvvFoAOy*(T(B1QFL=x0&Fau#CJUbVm z?#UQ5{Vj(!s{?_JbLj2(xYual3SZPBzJnd_c*u~MBUswcf*qH-%W4@|Xy@{9cK^%C zuOpm-1dW2w^iLuQ5*nd&H6nQ4Ra7$=l{Tb?J84;dg?XmVS6SqBIcWu1rHz5j%Xcf- zpG_G0d8J@8RTLVbi%g7^*{$O12e1v($$_bM|>{5+WjU$%9$bxt7zT2BqBzBIWGmD9U62u_W2Yv3~ zPasU!@jT%rNhLx>Ll(f{S0GLPVSYctctcYi7mYaxO)Jn--ID?j8HW!2CF)lK?EL!D z6x3*ggq9=i-ZE14gA{pcB`pJKTQbA^zR!W8UKZJ_MsX($)3ixoRcmAHoGMrP8X%(k zjx`=#A|goc+vn+gVTD6DJs_cZ-T7ZIHl2==(v$KT;WhHU(4JD8Rat5VVL>4wz%6_} z4?+H>2vu;G8W5gRh723W=lH&Y%{Px^)<~@m`GtbRmVIpZhCvH{U6t(z35kh$v4DH+ zBF-ld?g(uqsciFn(b2?DJ)~LDwtj*l)!gL%hWO%?15;q$D@8~4p`n4E=Lx?PUnjjo zvWL#IW@pS3hdE*nnU$KSmQ#lC~+;eE>-2!MBlZw9eR(uFiD2|z~Q=@CM3m< zHEB@7j&oR1jrwqo4db$#A7@d>nO+3B_%{6 zTiE+Xi;h6m6v9UGL`LG5BsRH)CfMReb>+f5ruZ{tc{@>47{5U!8Z(%j`*ifr`*2ee=8t#`GSXwOAd)Mdrev8i_q6QvBC>Vz0YhKJtR@(ki2&`|RWDudd2#(#GQq#@2n7r$MvQ z&TIzbjZRIE!|JXv37aC$GEdDA6A6nVh7{={eyJW(7#GLo(t(z=I#$m$stEhiKfjd@ 
zN{0BV$w|jo~{%>vR?JA$IOtZWy^nYd8@p-AsKM$X6TAvj(+WGb zQZW2&6;X(K5$3+=;)Vgk65Ndlgcmvx zoqt;)p_5P_?{n5_Tn3)%HJ1^U%Kg!*YhuBFGvRMt7#ag7f49i?)8AcRood!7ww_a) z&`qXaOvwr-+mQM&HzG-n!z0tlJ?(ePF-qJd^@@X8bIN@=-i&h=$;#Ck!w{1HfsO+8 zwfdVkZ1J$g`F#2E<@fKml_ugs{A-m@GF}hGawB_r%OBw!X~}t#Gkdn}yj@}+`;q4G zvHTS`_w8G^u)sF{3BohdUJff*V_!i&M(JuW6m| zlPNVSa{m1J$9}s`tQhNhlPVa1s5LIYG8g+GleDM5A3rP`st0gCjxR`K`L`My4fNa( zZHEK_EvImR!3jxm(0rgLD&SBbMe0U0L1t3<~9YTZpBiH~`pt2Jiti#gYu;%n5iI(a%M;7ih1t$J{e%4tJSCQSRJ^!KoTWEk60K4zfVq z01f7D3CGyvmN)vnQaeo!X504*kCp9k={PlO-}fnhr91zlGBCHgBVkN|kjAuQ46zHf ztJFnX*$0Zp?v??E`BO=`(f#VjO${r-!;XWW(TQWYe@-4PXa)Y9zo%p!Crvyt`;3I? zWV^SZYMqf``_t0zXRZIHUX|u#NR!vpIe*eVD>+fc@whFQf7^^z^=$!Bu@LYa_4s zD~hi?-hIs>{tx-*jOd6Q^YvQ9Pl$N|OrRYoQDzx#sJ(@08WwDO9NP3p^UzTR<=|Z0 z+*SXcQBs20Ob_xKK=&P?=<;aB-2q9~8GYGOkFJ|`eB@CPRTC7lx>UP3Pq*1AofjvL z)b3V?bE_%$mau-6=zmzv`qmLASskZl73R+_U$`c(AsV4&5#$eW3s#SBh~dBhL`^vy z3uPVeW=V$k00I;XA@X|33(FLJzMb>ZpFczGFS_1UXzFWdY98mr0vAa57z|6U0DGnK zJKR(`Kr9M?*YbE;CGWhLhat8Y4vruK>H%&*%)&_-4cZ}<>b<^x#{T^(e+OeYXvF)w zWcz5>^eHMsLr(9=8OesMd84;7UrLFe?uDpPV~+R9=;F=n{c?Q5`%gC3=33=AujzS3 zsMnUhd6~g4d z`K3lPGgDyT;sYcv{?nC2OXy3V*rSQg&)d zv9Q~w1rim%!WT53T0I<8{>SAtnNkIXa85z-F<8Q=awkS{P<>&1zkRy{Sb(-Rs0BGf zB_l!h!hshALz9wPyc^2H8eBcV2s+5$$h&=bHzVR1!V_Y>pgZdtYykiVfE!gUCpF+g z|BzrQmY%`RKP~BfIO}Nxo$BqYW)y%`I0DD=f}pMr%AEDNwe;vlhlZE|*@Ghj?HXW! 
zYe>#%V}ov0N%k&MXubk#(n1q!d>?4<0=0=^rlSJrpGt)K(L3iWDP~NMykKXy9b~X+ zL*s2w-L`=@4uEc(eok@mjn@a-Je?IBMkXdw*o~uhzZVopJK{WinCnw0CM@i;CS(n( z2pEmtv(~%+o0^-u4u0PFVbhPUT+KG*Kl?XrP{k#B%;#vO89VR$`jjkPk_Pju7YUl8 zT+X&-IN1Jmi_)%d{Cu+nW-Iii-}v1^{P|#TelZy>cIUlYo;>1;+bywwWg%IK&a4kJ z7Z2AkP+nuk0;gf2tmoLBjv?Pzx5_kUuB&-FGydU*{c5Sv@kM$QM9LklV1B#rbJE5N-#JBJ%MV=1u}Fet{u{;L2&a2Ol!70ekyiQ*%GC4RVIzX@7nmK1={#4ui9b&(P>- zB9D``tt~skb8r=)Wbn1UfKVPKC5SBs)Z`jJz6cWFZrq8LM}!V^XAm<27b%*+5Flm3 zBUSLo0aep|Me&;g+;YRvfGMyMfxrnK2ivi+PoulxfrOWE|6|Q>fRAsA*da;=+@dlv zGNAAa3=4z6D}tVc0?o?2UbZm^qlmz%X9h;q>_)TAq+ex#`2 z_}zm#o0sc!&o=6igt&`2F1YEd3$pINYWk*lT#&T<&UB_ZP^tr~_JlRDJ(3y0l$TSt zPixl^faG{>hVg{goHxM`U(Fk-I4{@cV@0%VS;NORjFP=K}XCE+f<6c42R-V{CH1n(C3U^0vnM2Fjeyf{6bEIC(fYz@$PNeuLYA zsIa=f3i2rc_8C`o?~0&ETLkYCfP07^kWww&yAB`}ngjvj2asa+JF{>Cn(OPg&0B%T zw9Wx>4z~t4vKaB01QZSdGJ%jG^TaKP<$=`KsB#-ZjTFd8Q%f;}@lR1A7HC^Qs(~4~ zd;J&A5=d|RoPPrz+X2cB4#W9W;Ev&CUJg*O`+wDZ`zPq}YaK9K_zOBJsh_hRBufU- z6e-i(=sXC{wQ%FsN%c|l{eq{(7}_xBwzW<5o=@M{ z!umAMcW#GANrAqZ_-~z~hx%(VrS?kM`>4idX$Pk~L^NwArjgwl19Ms!(e3lrHE&4+ zg~x_R6bqT2Cl9x*SRoPzAzNMOCnH`(t#v(TtyZNGOQi&bGAP-!d=A zxKdUQs|+h0`$qTilNo1r`~W}F&{&4aXl+2&yj2aOG(mW0x;!mAR$kW{(VZ|0f1WoV ziE=Vggai#yz9NvxatM&_4bca38@hhwO!4*KMMDUZSNL`kXqJ2hgNh(6YOa@&!hmSj z(}0+O2SV8?Bp%wQA>q5sG4(0V9NCmE#y_pm1CbFCFBP)oX<T5TcM*RT(Fyq+~OiB?CQhiXI#+ zEH#jRlpzs43xPeerpIvDKxee_a?Hue2_zvBvIv{sL^%PiFb@Vzv#m-JS@4M78}?08 z%moi`0?T_pc?MNF7*=mkeFMy3Nxs{F-e@(sH|)wS_o@2~O-uk^1xYrVE=Af11Sq3H zY8lAJuz&c!>*ey#{#jypo3*2>3jXOyO`Vsv>;Pw$mb{bAlf^lIVTKyJ(&**B!~G}G z+@>AgX@5RG(n^f3Ax{_2 zZrMkU9*>HkEzD~=_}jnipNnrNgBB!+L*9Za^`K!r73&Mhl+N9_=oNyA=!$BV6Ya)l zv|%ted z%`y?yE%USDeU1@H(NPyE{VPalIA|q-DPgJpGf4qR*s+F>Q(czMACT$@63ibccsbWj zJ=UqHigahXI@>ae=LogsBNISE7sBzC-dHr-TK@2og0f3mC`MsCTAQ%Gt79bCSu>$r z^cL>&Z0rxO^hAO^!QnT%0{#SFaS*hBD)`?L2&=sLBgPkGg~KQJgOD5zvl;P~)>GaS z8Lc9(^3R7)QRjM?t^2ya5Rw}~x6sHCzn(yL)HHD4&q!0!;#%Z`l{+J@mPb5muC*Im zEna^XkWhJo2xYR*a>C4cdh#Mk-}siV;7cfz1PNS~<(&J$iVVSKx_>!DF#_=lCe_5S 
znP+ZpZs5y>TniA#OYGS_^A&vf%Kk45x4@l_E1v=NPz3T}ZthxMyBG*Hgi-D$&=0OQ zd_r$)E31H@AW$?W@VeMLRMux^F2g)B|fr zc%VGK(n?ZBa<9uQuC0Cf{Mi;Voxt_m=uCY-w56ki21KhMh+zYYHBGHc_=P=;&r8A! zo0kP?lK2)YXgG&s{cUzqwc^Gf{9KwC1066DUr~qPr{#N>d_SX(u?zjFMeJb1cqX-L zT~Wb2K_6$!)kB+Wxv4{X{kOljAT3-7J&?=oJDIPvb41j?gT~$k3o%;s?*V_boMu)Q zU*M(~iT0x%SSx!zaonYInfYcagBk01PXOu9{w-=1th#|2-o#y2>m|>JsaPKdOETjJ zxbDXV)W}eD@AL3l_FvIs*{*c9D0q^4Cx~`Tb@7TwP?KCr&Qw|~B9#$){|pJeaR(m~|@ zREKH!NYu!I$C=*mug8_4XUI!qp;VYi+!R0aQKB*%OV@V}PIel4bW5mg*>BPI=-WiI zqDQf!7x7A77;3H=q421%vb?kT?vYaCTY5tAV>czH+{$mc=@Y3Q_E+U`g&~txN|I>= z73L}?A}&`n>K5V$ft8c1Kt4VOdY}oikfXmMVSPix3$vd?C=X-3>9W#d8TdH4Ps&SN z5bUhq|1F8wE7b5%FX5{oA>Jkz+B`b_D%RM-)lKOo@Cjl{*&&z=jKN?eP@=_y=P2-I zFC7LU68a|13Pd#!B>-MvnrKM%&^0ii4honh03$i8NXE^C7ZnnT4loNWjN8A;vr9`O zTjvqR_8|aY!6-%&-3?ka^A8_@v&ETB*c}~#Sb>j6%8E`m*p@nkUjm`LX6p>Ovnnbv zkV^`wD54jl!&3A^_Vsd@8ZHG+gINlqHj|Fy`7OwZ~m%%!}G7J@MH3lW|tLrQK6YseH0dzDCFAzOCky zw9j6y{_{Q(TMfJSmYbzu(Lap5t0DWjMRvW%=Y*#rWWQ&7@@l;r7o}Sk$ExX{2qphD zU((bpal%*OAl%N@{+`&UZ<9%YEihEKKLxO^yTa*qZ~TZ*Zw>LgTOWeyOce0U-@FqQ za6(ypqPDQ8f3*Dho79U`T;9U!_83JuX;-ZuglKJ}T3dZfcNGT~tWP?B2fH2CrWNHj zOynEu7yKmU{n4wt_4~eI7ESVBcOJfLIK0z8O6<^Yz8cT~wD`b_{`#$Zh5m0(g-pa>GJmIxHmHiOULh{ z^pCj73~H8Q=ZTbyv_)*|?a;v>8RO8Qr+E6!60rl`!jw*s>fr25_wu(u1SE+0`iiLC zE62$we*VNg8^DQ?b8;=(rR|IVX#o&0!@=UeH#ET2_N}=1`N4hqVR~n?xDGF&jyoaZ zB=}4Jj(=;76VUSytttC89=sbTcvSEtS@fad4tH12m3AqscYq;r#lI)n_U=+To?kzP zk1_{phs9>m1pVsbdZ_o}8l%_b6xmBU-P2%N_TW|k$&qlFwk-R^MDmq}SpMc|wz26| z&E0Mz5ljJj>w$|ikEAIA4&I?@1;eWZ(eu?uVUrx)Vf=x}R)_W`>H$sBF?c&{YENIG zMWMyWYpM*c?6T+lv{NWpN{?qAd-0>Sx#RaAHxYU=1&u9iZl%O=Y6@gub^bkEcobTrMZmFP4^=dpc3E=Vj-q(@N5`Na>_q!;& z9Fc5bo~PFG1jYOBI(TJ-WbIDt4iaQP{)?1Yx_caR^q)XQt>@^R(U+_wLq`fRdes zhO{#KRbDjjBw5E>&(^a0^$#W<*H1~u@$Q+J=-&J{+nZhEor)eKS;Nf3pW*m*dvq6{ z(?2m%hmLQR^pEj9frzGyPKYRPB(D~i3|Z}KvF>wi2rsX6E&Rs+cdPAi;I_1cTeQuI zM(miFXeN%`2jw(&Q$6hP>n4w4c4p1bb!)z2mdwLF3V4qnjjV1yYuzn$Bxx|f!hVP6 zEsu6PLY>P9$(xK;WnjZ-MWSQ8wWL=%43Eku(iM4m>7t!-+Z&%G^E6Snna=wm+p1GC!_5z36NJDN88BPs*tA~Q)=N^ 
zjAh^gwYFD!wV+(^@n%cu+zz+D#&bK|!>N<{4!x+9&^+|9%&$TW8VK>#PljAh7EWiL zp|qUcRzn=8%{m+_oM+VImz-E@JG#XmUaAsgnR6j)Ysh^1mQux&{bzp-gvaoMw#myoax-#fCwvSnpwV;LFy z%1YGdEjf{xHqvQq+1{PJar-=X^Q1A1)4PLZ z%v+ADA`?rxBL8^*GG|A3XEgtNJ4juRBj@@hY0=P#DJ3da?Xq#<$$_4^;XA{OI|!o3 zzn`vXm#t||l4W9VuTr{C?$DcJ^il~XPA5F7ou=bYQs`KP9h$FRu(B%NtvM;WX+md|5}y^lS4Zrll6-<{)A7j{=Tf3o6T;s*PUCJ z+ca->E$`mkS%22r(Y99?bS)zlzDiC%z!RWTTr|7=92Iq3TJFgO4yN5SV^9Btg+!BA zT~sQq@`^H{<>Q1{$-Jken_}#CFJ7o}QKlDR$4YyV=M-cOcWQ8IiHwr(X!89USMUz` zg8AdG0!rL9z7}=vmZq}8$*I$R8(P7e&LSI!#Z;ppIb8}gR}QD2-XAFz0$aqp>jpc> zfV_Oo%eVY*GSYe`H#vU389HqbsiYOM(v^}5NVn>&bdp$c$k!@6bIory@&NvOnBDud zTU&$={Bhedp1db(B1omJovxB3J?DD!Xv1Wjvoc|p$3sb7Ws+)1$Hy`1oPezrbA3}V zy*uA${he{W)4INxDZ{e|Bz%AJEggd9$i36x+IS{YMvk`6S}wk@B{WQy?T>nBb6<{R zVa}mMXYRpAIfrB!9FNb&PVJT$b9FC!UF<|LZ1%F0cBX`@syNu*@;cxRl>AvU=SMAzbPc&(}zTbT_C_n)RTazhWj`Jz(km^eIZuvDW0R~o>AoLSqNrzmWRDo^Z0!2@9lZ6aLiQ8<%1nMm zri9oUPdBCB{I6Optgi2-PvMS%6^$8a42?<0rYIz?h3$W^(2gN|-{};r3;^RhX>e8WqBd5t z#MwTnrgbW(+S_dX{EjcQg_*F0X}W$f&#?s|KAa)IWy8b_{{V2?!cP{s&f)*mF-3#+yu6~mOqx|QI(k@)xVO7Q{WAOK zQ?jFvXRhuekM8E;$!fh_ANcsc!D|jkQYrTSNX-zqlB0-QRB37)TmID z2ju7)wVwB7KH-wCE$`?)zX}(O?fs1@GVHSy;!(uYl2JF6K&s6){c8&EKVK3O+WEUK z4xSC4X%&J`JwsPG)_g|C3IOE&#d#ajaom3N%f~e1rsE$X_!sG-Qp3Jy#u4 z=fXQTkHv(uM{mSD|aM7>U zRX1eb(Eg{$Fm??N`;fqXyE6?|#mvr3bW=hW+F>b48-GNTr;8JC9N# zP)1hYbN_KucXy|FxpU3S;4AB)=$y0Q0?J4$OD7eX2g%e|`I)7SNj+Q$gjRo8ZESSq z-FRAEJ8J$2=h`T$U?v%q2d;U(?QUn>-J|Qm!#S^(*oNXk>B@p5E z3wnN2Ve{9BUj$}ByQlmsLXZOO77jm7MPuyvL0OxseImo|dju{GE*x zex=O9GyKZXFXd*81{G0}DX}S4ZEZ!3HJMeBv+;RR0gAsHPhHm^m^$3S26~>?@w(8sDn`ly7)905)YuP-}QsKqGjr7!QLq+!KSFj#c*YWpr z0w2(mYDMBiBD%k?RWPrL%I})T`RPa#{au{*4o~HBrsqS&_kLGV8rHfjZlp+rsV%`G zRER&n>x6@Pw-qyYhYswBp!2-(ID(#)g-1$Sx)0hvA=*$?UVaFLOxe!v2tH*32waBT zzQ~iIr+x*B&JUEj?|FrPGCpsTqD02SPFpg!jJn#1s^VlI`m!;4rOnQ6dT{zqtd+?W z=j{T8q$G`9qu)wa2h3@hEPL3E{POaej73t4nxA5n zp1J5)ErwxYV1zvMMcfiut`v3FTDoWXwa{3%;5ie|Wl??Ax7=S{{A37uc{<%kubm^p zzdO|=MmOA!@4_t??CGJmvM;%Xz{69~Wzv6NH2Gd|x#eeP 
z_f&V+WKY^ud$X#V*qilg7mq#^ps~zlHWF^7w_ft@zN6)GX6yDwe}Y-6iE5@QMuUrm zWtNiZDX5r;%oJ4SL?XkgQaP7(d}9U$+bz(CKo4_raNL+AK$p1sD3BlLCioq6@L&fX z>F?iquo;4e&cu^{N~zpk5@L8;^W!6L%iHVMSa@7>Hl zM&0uMIwcy|@T*poA=Ua=fz$DG;ZG_egf!KYN^gF0E_LF-Xa=+(g2w0gQ4E&BudS(> zQA5cn=Lyq`vquN}nOOe4w{%jQl>Gjgtuz?Rh1WXV&4w_9^fM%pD2zneyIG0mB#4|6I{DXwaFT;NS5Qz8I1zwnoV_~=FKJNLD=jBF8R|#V| z(M@$M?bnXQPPZXUIi4Jz>z4fD&>i{WveY8=OX|5m5tzZ#aV-(C5x z!8`|i!anp^fCVuoW;}b?IG4)tpoTDU<=%$=OSo`1`N*cc`XpEPdJp+=DStY%<$Dz~ zn&Y<*Std5Y>41)hQ(O00e<`{wXe5lk{qCm74vqPG}zw5Jb8XTN}qub!T1 zYe|GGyD_xp0-cQWtDq{)Pc=C^Q7Mtcax4%b9)5ZyPw4j@LALgs9TJ#>1#wq)&o@l~k1zk__ z4@S0vdL}e{)(i3#t6vi6`ZP_9bAPsdk{pI8z41LK5+|qm6NAux+atf3d9+h~+?qH( z)p(~a{qZ?XUEO(zwjp48>j+98?YzKz5nSQUj?@5`K0-7EBGTjS_L&`N={Y;FAP8!e z(65bCA{e)Ff}U8-i$?tiL0Ej?Rr;;D%((RhN#pHY&L2oYBlOmow#!&+C`h)t9^Q>% zQROTz*bXAGi&kyenAhE87&);X6^7Z5W-6T5P4mjS(5IXp9=#z*EEG`r@hdyTOKVRv zVQrLM`MtLCT*whM%TNs3pft^WmJSAtPJeYe3$fA8U$ykF3KOGwX)?c;dCj)eUa)R1 zqKr)Z-LZIXfGtK9hP9rkAWw5y?%VDZsr7gHoyN=%L*qNN_00y>F^a@GRjG6Ci#3*A)D*vvrEilI=+lNt|gsCs9^m6!dUxEA{ND!(e_ zsE}emTs##*{uE6xJ})gTllX=}T*Gei>#?GQnZ4kp0X1`CkUqrI0 zn~~SmEKQKu+-B0YCO+~aLD?EZiZa~a_jrhWj&$}tpe-MW^*_E!PPpytZfxgf6DMX^n zzAqENaXp_X*O4CPB&qvn6q8mhfGDRwsb!A$v6iTxOtpk*r8H!`eC?JTq#K*h!Lp zysTDHn3tYk?`DQ&-b8$PH2kttkk`Mr?=Kks!7T`z*Thuq#hH$q+dmK)foa60E#N*Y z+r{bsq3bKds_MG6F)2|R1xX1BK{^FNkOt`n>F#c&L=X^=mQ=c1T0y$ILAty98~c6F zIsd+WJ=b%Y+qKu8YtAw5amOhHvdjTL;~4bVk!}RZy|d6=!#oEfMIhU0m{kYa=YuQT z^7++b-b2E>i^Rl3Yrnh*}2l9V>&F%1Y+~H4m0d1%o^w~3!hK05s4=%YS`yltS8I|X!q+k zA-jFPX`kC8rC#}*$@K)AH-&s^lVAR&qx6fDNc(2i2Zv4r>88hh#l^MDhuxHW-eC#{ z6KW~g$Y&=StloLa$jbOUH;ESXrBpwzq`y2sLdu{Ha98PO9tu~bWL5o+@u|?*prILA z_UTrB6T<%<$f@Fy)U>Tdf$#P7uz^d0gHx*4jD~@M_dGCMd=XU;=V7-%iE)n=sd<(1 zHAc4-kAh(b1LbW~%?4X{ir?*U6`qs8Y1}z<5jpS-0U80q ze*)%u5raJKkRk#(lo0T;lcSXTB_w26j``#3ukrEKpuGkKJ`7i!it*iBQ^ExuEG|FB zy3{kgV&L})JztXwn^&XTc)}kHUM5&U^p(E|hg95&FnOO1QPgPw4-ew$c|q(0s#SQ8 zswy>b90z`MZ?E-YTR1QwPCwA&Jh-n99GHC8Yj7nyU(dZ!M^AZ<@Ff9PYp^nh2_CTS 
zEiI=(tPktT-EIE5*Cni3hwZ7Eu#R!?;&+PS=ypTq*rmAm3YIP%;s<1s3V4BE-nyM# zpKV_Ax;;Ze66(j)FaP9nSi$aouHLAgSynw~q}LMretho>38k@qEfDCJ-&P-UMB8~X z85p|ge418ZvBEhU`-_>de{N}t9{fxJ_r6H~d=8HW>yPdAf}~^oyN|KU1P)3oHpL#b zO{F}Wj`OnoRhr~GpL4!tOxaaY_w)tt@l-K0yT|o%q6=Cc6K`$xudCpy@R^g+rqS&h z%JHeb>7m=!&c+_Qf64ppQ9$fRjgY{VvrvFM6TUAF78Z4gxQfZW-Fp|sr#A2WlH2nl z^i;q)V>UYO##5v&#@BGA&^*Juf@hw_8U4``n=nx7ydXmiD2@ofv<MgPQ&!ZJSGIy$a29fD7?XnPDA8(*rV(~G%{!UGBX zFd_?!AuFAbx|_`~1!v~Z_j$^WBwG{~gPUy*Osg{!U0uFb4#k}(e6Ot3HqfoQL&MZ2V(qPv_y`k*BK* zhm_LQRQ~t<7q_SS9}LeFX}!V24#(Ww8q=RuAkbiafx+@dDvWx^bE%F%E%g@3R%pR@ z4#5mef1LxYkJ^tE4yQNomm~}n8%>i2BtOS zayr7M0U13#1Y+cJUMMNPvM5N)S_^_+;%Ar#&|_G-QWh2oP8}^Up*U7w-Y9Zed{| zGBT2s-=j^hSW#P-Kbe$>2-pC;KoVgjjP6cnleBbPi~7_qF*bCFnM74rL}#%?>;D*^ zd_6wSv0KVq;T5%7Zs?1C*1CK{i%fB;yYQF6%AoB4z@X#;1s+7xvoYpY^1&{x`*7Q$_{{a@5E06*ki*wy=(0NBvOY?Z1J18jN1bA0@ zoIwgmskD?7@c9A+3>~|%pKi7UWNH8L>s@%+Vb2>|*S$;5^Ejt#pS!F7Ra?i5kb=4O z=}*R&Srx@>IyXzg*jTx)w!nlM3T1U|j*6{f3Q7}MVowNd$rr%yos&?F|BZQgp@QQ% zy}4BDa3`_K=_dVxsdR$FbaSDsS)+HDQ=ho}_H;2(r?yoOmaoTO1u_(gcG_#bgX0UP zpG@zeJoTFKcAYJkd`fboEWaE*=v>{*NaT~Uduxl(<=k~_W5e-pi(AcP`AMXIcv7lh zuc2M{0|(K2;(RQOI=h_$F|+s*vkdu`KlQ)lBO&46CzN12ooD{|cqs!Pf2AgIC9*Vl zcMnGtY(7BI3ML16`~m4lcrRHIMv9=q1%hTyZtm?k8;bD9n?$t?3}z=o(=r*vSb4zk zdYo?KszZw8$$Z)=txzF@{(V?HK%|x!|Ao2(#Ae~qD`znkg%%WS1D6^y6Zt_zpd>Ps z(Qg8o7ni`4blx0;M3Jmev!<7un^TT&op7-?t@es-+1_ur=W;R%xTKejMtn6M_dFAE zuIq7@N4xNLe1{q!;un$AQXL_0t%dZL%hOr%Nin*B%*3h3^;ogKy}f`8sW@c>M0*4G zBTlLS0$at!vkMCeGS%HAJ!qe5eTJZ&E>K)+vTA}Z{m-9|uO+8e`Tv&-K(+|UF>(1? zwN+KFU~~w3(eeI%BN4_YMbLO4xSK9~5RSlpR-bbR!dit`mViNJz@ldGpca-TIFV@_ z;{W~o7kDnWy!*_A3|wAM4LWedWKQU*VHj@ZtChF^o|SJ-ZhntG`Df&qdZJou&_s}5 z=AiZdjr|(=@u9q~zoJ}yNO?a! 
zXP#b;amVwi^pT?pAudUntZ@kp-mc-_ePa=~_nc~{aXMzO@v^4A=GeAaP)>2Z%O7PSe# z_2LF!!C~qjp#yBdulSU*Y5bQmKXgi26XXE6&gG5L~1OY6&5Dfi2 ze*^X9%a<-E1;hPyoqPfU!)6NEiddCCww3tI3@;xf#KyjS{6NVv^PdT8Gu!8QtctC_ z(!=r8Y%2JVnR8IpspXedqqlfi=|h;NE;crt5~)CqkKXF9ILxW-x=YB)a&S)!8aX$w z?9FISR!x4(KsX>St4q7I3N~gH)@OAM4T<{; z^|@3^*b$Wu)b7R(jRmWBr7_U_g>lJH5}NewU1J-iAC$eypQ=f(QGdJ<_heO({rwNG zRWnl{pit}Yy&ilzG|GJ)hpT1j9-P!8%{v(@O)_($=@I0jCn2-cAnuS97srxG<>kdAm#4Xq?$2Divb}ngRQ8TG#33X7 zV{HooE`_7at`FI#KZ-0$o*_t0HZZ~f{ILM&;{Sf%;?THvyAJa%)IfG1Dq4@GeaX&# z1>UG#U0uKf&&MvG00B~=W|chmaBNRXa(>Sa4e5+*kGy3ur}kok)S3C{=p5^t8a?k< zbwy^Eg)2ch@roK2Z>@?yRJPSo2CA`9nb;I^+<|xL=HhV(lXer}7y}>kAqEC8<3P;t-y;pi1%DF?CT}$rB_UG{nhauHBncO4PTZ|9B6I*!45mh*dJ_rkQ+!VcKqH8ORzaQI3 zj^y?II4)D;02|IqT;#4Hs&v{_&!kYTaV++%?M>dz7 zDY>~d5W|iu0{QDTCp8wPrd5C~K-hR%8ikT8oqzQwS3-9x6&f8^TdGQ-#_6aYgJF(A z48!rw*wxtTIQIu-PuJfMirkikM;0pYxR2% zy?JwUzQN@2u7y0x0E-wxg+;#WVxQT~n%udO^8<{2jc6zf*AjIhz)@Ok(lC##)NOrtMiMPmS zd0H=(h63<^lUQ<#ItSfh7}+k9}T!!4j= zElMqc5q|EvJVV6J%&@Wi@XWF2y{laXKOavbV}$Vbpr~P}6ZGRiKeXVO1j1@llkb%7 zw_D+1s1KUbeu;pw4Ez=dO(7nVLI+7s2E|91!^@H!6GfP{;wpC29`R-_w-EG^B@t7Bk7fLZ;-ggQ9n zfMcU%r&AL$+K!y&_@~|#^c#x{#!fX$kqh~iNgO0h@ZBROevuRr5m5qS2*@#;JD4c= zvs)jkLXwf~oyyvdtrKY{EYfvpR+Jr#RJ~OYMcMPQ}YOVEq?nzM;fMK0msJ zq&Pe>Ji_TxeMm^E@i?IAzRN_y+LT*(f`a1Fxh1M^yVc~E=Rw%-$ND?C+FDZ!^L5{Q zQxVj~D|W~pymg;^tS!^UNLPrbRvs8h=6lYCVZ-(qkB~w3Bl(r)MU|?KjEfsn-Kmz^ zi^TS59%>W$_T%NZzIDhl*Ig~Ib*gtU7}P)$Uw`)&igHea8sYC)98u>osY1v1BWhsGBEsSjJdQC{CwP0M-7a22h4`|iP#8QKf!jzXO;=*MLtvOS%& zISjd0uhVr1RS7p9?N;nqX9`%ntD(5($KgCAc-Y-fUSDX}xn0+RCF>EVRAx4Mh9TKv zZrZ;qhufNY3oUN{EX6h1)$7>4Yr9^RZ9Sle4 zxwA7e4N9}});Z?Kaid}ycj+70%>?he)!E-~@+zyQUH8k{5AAd2e}k5^C4&qr)< z05A~)$`1rXmIIJZs;Z+ zI2(Hd1-tWKzA$g46lyDxz%0~jJMhgx5)*p^bxZ&JOFq6!2-Z%_z6I`1K=+2(Hlzyk z%cQcggVRG5?9}M!!4;W9BO^hIFW|8Af>RMl;rZHi>IOOx)JH8He+XzHtZne)`W6+1 zszK(4HaifW12!m1CfG)5N z6fo)ZSRFJI=zyJ?>fgQ3no@?!VXw)+N)AyKPEKl6N!~%?dfJ1KT(1w&%JmX7fBSEL`0NG23#$0eOtFjQuxl~t!! 
zg!e4(1KE`x#}jD7?&dfetj)KjJgU}CJM$>^zNtOC(~y8SZ-l-beCD`TWE>*|;`0%hE)z$mRCO3hLpmch(;=J;ij4(b9kDxq5iT z{cJ^h&;;f(#4nLOZpo|rAH6GOX8qQ#S@}|pJp_BwjgsQefbh&CTl54i1*SPmVM^@0 zMXtJn;<%n~C@gv@Kk|QwZ2i06X}*qC{F&kI8>9QxxGbm#QB3((zlDF6u#M1!8x)e- z1<{dYE=MVjs*G^hx_Z2j4*FbgCBR8!Y_4Ez9{JE|)DPrNE0Sok@lVzrQVfI1#7vc5 z4YYji5Z2L9@C^z}jkLaEJT* zbzeS4Lc+iZ>FSblcXwZ+lhe|A1ukUAX$R1;%+|V=<|mUIy<*k-8wmO6m3YS2P=ViE z9`r);q{T#;c&r_0ia~{6R@Vp5IgD@^_`vab0|L^=qS(KAx94aYA|b)i1l!LxR}iqp zk6~Xbf^63>wi3oYTSx#l0RoLp0&Zx#P!~%$FTnspCXOZj*Duxe!j}&UVOIj%(ir-Hq_1E9t*ym<`^M&WXp+kFz^L{D_6!(xj@gpFb3MzUQk%$40z(V% z_XA!*!QXV)f}opBijE$c@x2gmKOx3%-fOw#vHfpV>$Mxs-`hbgKF?Fc?#jLHPZ8uQryXPl^wQBBr~b++AU}9h<4EYgO-s<%cqhp=n+MARxA{ zDRnPhk~yx6b1o|{2c72zIN-)mQc<0qp9cwZkAuAB&6_t6aXum}7~L{0Igs27hbwpm zE>{MFVo71B8UOC<`B#N_VUL2k*j>cm2*W!#Y^b6 z5FYh|D18De9-f|=QgM3k-}jlxx)>QP``kkx6R_^=>||0eYnz;`iJGOmV46y(q+%&e z$$K)>nUR?5;4Db4yS)7^W2WX(^Qlv%q~o$Kj& zshNm~e@)E=SUC{&qkyp|=-$qS?)!cH{{1NvQz85xmA4A4;NQPbIc@q%|J`cT0x^cnZOb4cKH|*LBj*aiI=o)cjS%E~<(` z4nKxznvOKVgiv5Rv#&kgULpNQp>OLOyX0wv%8~4E`WY&H^fUyBm0fW!F`b2nyu!Q*d_+a_L z*{=;jGEgkQe}GoUo(Y#jF9ercTwLt;6&m9}<3sr7s$KSuFj+u;>r@fAY^r;Qy_y-6 z`D|zzsj1A~*B-zyg5(=$?%;!hJ`Ln(pIq0`QBl`s8yfN|EJ*I!!2kki7!!2$gMV$THLNo;*L6f**syG;NBteYDs?C3~Pba(dj@PjWkjJHg8tpL;j zJ4Dw^&z?Be71(qS(*g4g3kxeQw$;|whE@SK+PLSFS#m$Bop)q%6Zt*ezQg3SnSX zi(WKVHk?vnE3+@kuT&_BFG&lC-)|4D^p0J|!6yzg2c!S|Pfl_4`4bd}i;D)*(%oP~JT@^=k+dg>T?W#9u*U``P9-HJ5CDd3 z_YYn!F8z{ht?AmmUd$EfAqjNb{o~D6t-Ai{imRhBWsf!6-o#8b^&KR`tA;-f`34VU zX=y3wbd!^HJhOTHgi_(YzkPfE-~$#BQKyX3J2)S0{=KHQNl8ih8Wq)?bSr6^H5s!6 zTG4<1-k-~pVNz03!aeq7)D622gS@ad2Qe^~0)Z`uf>9WaSr2}_Bd#x7h#CM;K+cKX zhtW!NO&naWB{<1aQ7wU|0FD|}cSQtn5+6O@dLoM;hP3Ld?ZXPECa(w z(og)@#pfNf1ejpiS_>{X)zetu+UI z0!$9!7)3)x#kz6chKdA^a|&I5n9%}U%3|6DV`H$GM+mwb0Jdcsf)S%pT@^&_UiB6ZT;tgpa6ZQoui}B~#!W^-LSlQSZqnx> zCPM5W_PDQy5U7$KnR9foff-RS_BI&~hxH-x+J%7ZY+B;; zn53j0%0SD+lZdWf8a4qYf{Q<%n+6xjqJ^Pvr1l-kg}u*zIP=DIf*(ZJ=-v=kxV!xWldm_77AJ~<)qE(Po~HZG3&3=4wp;S5{1hy=g?u-3>a 
zE2vyyA%ekIGA1?_mfX8{uQS&qBqjCVVq*z`KhD^zd-0|I)>P8^t3PVqJ^vCEw10SL zae5&EUxt{BOopM3j?Nw$?9*@+Ms65I^qh-?F#L>5NML;<5X&=9^bkbPGWWm1!y$hW zBcIp}gHJ$1ChOce5#<9oL_d@Wq4GOss#jiN**4vN=t%54#yLqNANeHp5Wo(f!zne8*y@S?6G*URMAbw${I;rnzeJu z`ltm?M>q!^sn@c02-Q?o`&@2C#wgx(_Y>3Bo~fC+Q!>%;ZQY#YVmdfDn5h2E#6}S2 zz_099FlQee7=XuNjMc5=4blAIM0tdJHf;|Lq$U1#|5=uHIOSW~ANC{Xv=BLrwbNB| zhBBtNM&r<9g9P+??z~*bxQEp@``W6Fw((h1nQSM2w{V2TYHKivrW{e`8FtW>4E-@! zU#)zx`Cf5+Sf$avvnJmjoLz~ zy92PNFlwFmI!47fJ2`3pvPrF}sc9?tIbQLks8w^wSsfP-Z~SPM7KhHAOEytwev|Ra zv4?uF*1Lq?=(V18yBgkH zIldFyl&*1cCN;{@KbG<4b!6U{sL$Wprsv)HNmZ-%hdk3dxr?duWSvJ*R2q5Z(oxB!W1l4+8mzj(v^&O{YfBy!yqzqQNoLt>swXbagEsL3fA7`|wI%n&d^ zS@UjMv8#exWZVH`jIOp|@Y(?Hc_)>oBc=v0vz%y5?R@l8!kRM($-@C(qjA>iQTK@|p|*6u*B@7SRZ%KS;GYIXSs$=s%7%$LscHb;hvplb#m( z%4N3K)yHn*AIf0iONh&v+BML21{B&<*eWe&zeM*JwmDi$UrG(;e4AIZ_pjPz0?nlS zd?GFb%GyDx-jnn{rJs(Z&go6QBzP(as|GG6F*wG9DLj71Yy?cxU}z|%1HE@be0+(ot;2vhd}<_< zSK!FytTSQDq~#EO|9}I3@LbmVM*;oPH2~3*bcA2lctZ!HG16=lp&>;*+S5IzUNd<%sf9)6hJ#6hx76fj4LSM`QZ1B*lSewc8BCYc zWs_m7B%a%st||wz!W`lqZf>U&UOU@O0p2VnKTB4VCo>&~${pDcZMW3F=ae*F`+Es^ zZ!gyzzB5l)9)GQIv^vmhkmMy|B#0kR3ZJcrRQ~}6Mw}2#?ZEun?-nf!tSCzAHURsg z0s~Q~&)=*Q(hteM$dgrIdn+T;1FZ_Yy1To_#$sJ1V!r-SQJ$>7rdQJdJL&jwzbBPc z5jdX^HvX`U#F2}@+S_yu#dz?*gqw=0d88t=h^;$%0{j4-rPKO5iuIoy8QwEn|7a0m zt+G5n;;Qv`y2}0&?dj=hpr~s6Wy0Y?eGHQN3lNxmhcaL!yE?;dXL#DSof9Ru*ON#| zE9SUIS-m1UBic@rdt4xnJ{x0gS>MnAC+6|R^8;N$wOfNr4e>hV3-ia?_|EssNDcIM zt0g;2hm1$%^)qVjYq{7lvU5^ZvKgmrrTnlJMRwdP8ZiGvKSrqf*kpPf|JO}povQ|g zhIm@^e^G(~VPD_Bb*>3OSbU*Vf0-Z06&Vr1s8iqd4P2PVO!>zLNBVS%65<;g8X)nC zT=+Sz7YG5ngaAr|9xF)2Dja`Aw4()xh#EpOI1psTlU#86i_)|r>dPy+ZQuxPNZq5u zM7e$Ej_IAZp-P4zo-ygmosxO&Rai%W68JZ=paSN z_5*%f_v}k2%w6DSIgWtgCDxkfn@OD&VM*z&XALokhj&Klw9b28yY_RD4N7K>YuH)R z?4xy_zY|l{v=j{1B%u>d-@(GEWKJMtgH=%O`lty-=XaPZN$J^si`n%?=uIWb)YJY+ z9noBAjE0HMKQ+HGBXoduiRI5hm0 zq`xjf`vxot%?b-OV2Sl6z0%Ox0-nqRVxDhC7KQ?HI6~m-BK+wg!WQVFa5W?;IavTE znP70%xr;P3JiHbgb9gw)`Y{Be1A0i)6H5KrWCT$lJ@=cXJ~lS?cYtMxi2;_>Ua9#6 
z%?7HiNJNfU>6V7)*-{LfC|K%u1!6rz9N2i!PeVZl-wf$;I>3Ms_cM-n{}G8);WTs) zK<6DZ2Pi31I^k{dVnTzxFe_XGLCCu<2o#?Y?_9ShFyd7hv@+k%XxzK z8HYDQ3R&d*iAnAUmsoC{Y`V71<4ICc;IPstMGJ?mP!x(*dXb51(IO8^;+5qCx z0395_rbLkO@hzmir-y{YLKWgmyAwDebPf5z0NPmaRJu+ABcn#5usvcwwBfqGBAF`=aOxhD)~!21u553m!}&FqOnI>Ck| z@wzEWnSem4_S!PGb56!S&d72oRYZNDdmrZ&##NoljYPZmABOghPAg+8X%yNQbG!FE z?xm#^rtVDW9u`PWmfDr#4ugfqSIxqxfkFA{hQ6JP5A_SmNhV3cFYZs|c=sF}uUii4 z^;0e1(*OP87AVToBq0C&z{*v#$V~%t-!!SNokzeqW*RC~3kEG4BGHd(8esSVZLW zrEu@^($dnuf3J0*$+>e685!92!N;HA@SLc2G*3jpZIev(r$QcQ!1V-+&6vo@Hz<&F ziog)YaXu-KlaudE^e@-_cF@tYdxRqdJFOJ*DznLMK^y`@g2e(nPws<#a3F(m9#)|f zm@7i493(>si9wl$V?gc)jkOVcfl&NcS62hF0q<@t&A70Bp5d+w>OTt$26D>h&$B)X z;!jB;Hb{&Zjmu?g);rQ~;)GQtIwd6~g6+}w>F7Fv7>b978x{4foUN>k-SHc46s3j9 zKucM2zNEzFhG=a^&M5I#jl-(l{Icy%LH&L8x6$Xyb$J)?%~0|M6B(?0q#uhP*Qh|W zqjSNJ01N2cM=d~aJv8t6BRK!d4-*eB3Oc`lPoE|yCpis>0L6pz7uuseCD^mT>lDDG z$fBzJ{4IC@4e)fy5r_ydwfka@9Q94F@fkHLz9hCPDu=+sS_MN5fcyvPc_fY$$`BDXg^Lsf(rP#vjx6d zpFSzxXnR5V^Pe?dEShcKroqCBAA^5`U!}X%U_NqHLp`tk%f&x$zDwXoB_q%A);_D` zb>MW-#>jEQ&h`{ZgX=<6d8Z81HMzsw=U>m-$9reX!z+}Dd7e1VV4j&H`^wz@_`AJ& zwk~fn=~%nsZKBOR4Q^NNZ8(K~Hu%Ipmy?7JbX;_qk$7jI1fXCUkBzxGTevgeMArl9 zV=(pLTgZlw1D_S9t5LDBkjiv(er#f40Y?dfK?hh@v2){`4g3@&jc$;WdrqBbS_Wze z=AGt+y_t!vhH>>VD3zhH{Z&&_I(Vn^C-W<}!@#gGNW%Ei&>-M`dGC#&;2$u92UZje zPYq?Ko?TU7Ji-B%`c+31JyfMoP;Dx~KZaP-K>y?fFLzY;RS>NR2{i)TDMAtkl{F~A z5FjeTy-_Q2zg`q@X&^847%glVGS@cGEz*-0@-~84ol}9?e-lP01082is*5kK_NY>y9-}fA1;eM3+c~U z_xPw&O8Px@`3LSwj?zbar$6YMd=_;6Dvc@!-=sD#*#*-F_wsXHbT;HN@>lN`SZ}@W zw=C%#Vs)0e_SkRTl1m`&s)<{RUkIvYQjEfj4s{lvVePC-NY6VSZ!D8`BHU7G37rEizC>Flg(Y7S7RXR1>uJjU@UFe8RQewcW*+x)a zb%cwl&gQJt8S5lcCI5{$J}oB7=_MgI_nEV&u>Z)yfBWHZqrF!`)vS_vhZ(8Hl1_v=K82TrL=bV={mE_Khd7X*Bd<*zu_<5d$7t-KT?;l z1lVCZ@o9BoqA9%q<^uvm=MOK_Y`VI*?`1(caS6$}8EMq1UhcjRCHT_myzl^x%W6iw zD6mexiZ(>HjXLQ@t?J=ewSxB_5*F+^nt7AC%;c z=wxPP;T+&pR2l9Y>N7B|R8o*fC#lbg){#Vk66_D^T@Lg+`6u)hLga1#b3Fqr{p=gF 
zwy*zQ*(@8%h#xn%5}=S%#$XC@`Ta;s(aW=(Q2vphpK2+J!;5zh&r!+oJLlC97kq$GY4N9Rc@!IY`Z;`*U~uRTNxk8nhh!c!uw>hrrddSV5kw*?-7y@ zaB~nZG9qQ8t-P)YRR6}V#xK0ImGTD)@(1#ElJ;wV-%t#o4-TRa`U|n7jf#DLb&B+V zmstWH$Pdax;6LcvFV}-|_S1sWRQ%Uyx!a$JJYllaRvcC?*_icqbiVsU5Cu8&;f(YD zU92J<9lg~|Lwna>3o8pw|9P5_viW4)D|W7}GNz8x|3v6^F8#7QQc_lu+Zd^G=Nx!d z!?p2tA44ecD+)_63l=;-NCcVx{n$u9|3t3z+i9Cx$=J&Hl8AC!lZY+-a^doSbZcAx zzklRw(8r|Iv|0*jh|8-@2SBa*^~5&2n#D5Fr~;y1x?Mof&!q< z4@1`hfn9)~|MOuNhAsNtJ4^+{L=WbK{6o(|K7Sssuq0AAhN#WGz3jqe7)HS;LIy>x z%p^P{MA~y$xlsFRwf|WLg2xW$LVeh1K~d2z9No}@G11b(zRyQu4VWY8WZi$Zy>2Pd z{O-rCw!Dm1c-Z8neoTuEZIWK$F$~&q#Jc1{18SM*GGrc))g`~C=jAbQak;>B3Sec5 zT(~F#eEbR+MnQ$oq+0YI#^{GM%=DyM> zYXj+>&`WOQRsd7Kww6x_F>QnM_=sP-267v%t#>XjJ*tIaQh#5KKgrz}Q(jIEsF*P4 zq#DXDC@9Fw8|v%(eR3bZ#}9|q8pn^76bY~9g^cEM1BO)QX&qU2X0#e&wA^{&p?Fyq zWFlf^wM~=kQ3BQ89t!#-qKye7MZ|_2t|@48lldX0F0!&=v=C4^q7u@(0_J_;w8@~T ztv7=%MGANUP(kb`|2ub2%g8VS&D$nQXDgH?o}g=jvIK4=v^RQ%IaygK7#PHyR^RLE zujARxZ3+-8&7gTW^#(iHFxWGiJJxyStJ_VQvdVx>iS4yj+dS$Ozo_3me##fKk^az# zxx3R)Qx8z_k@U{Y%nS_33ks4$jZRMf@v83>0)(GMQOG{ZZt!k>49 zi<6MhUg2nPSd$0LGprS-5FZEUO()bZI}77nAWTK1GhPm+7xXOI!GX-*g9FarzAIkUIRurzwCb7*xKs)z&s3+1$cZJpjF9S0;((w)J`@=Worp*n7%+9 z#)S(91XIX%<|^l(Lo3a{?EVIpREshUwgjIsXfmqk=rlkmG<^DGYPjd8u}{nh_ZMB* z%h~D6XbJ!^*3{7w`4GUKlR<|^Q-FudR|HT z*-N$DoUD=dY8*z)S|)WanqZif1qbVQ!0=`s3!j6Nvl86v@kSqvRXc6vq^CE38(;9u z0-9vF!wU*#g12&VScxiGIt^YsrgnxOU+*rLhFtdf0Sh4%vYZ(h7|6&z)YSrK0}Bfa zgyj(L2CsM85BYP@tf?`w@J8Og;%{JDE>suH7whiYe4wtZpd8a)V~lHH!F@B?(uT2# z0vB|+gO`$(5}12uCm4UbT^?R?a)QaxhUJsMynydoMOnGRbT9+DEi|-<``~)&asEys zSrP_Dpf`o*9Cis~9$wV^_Bory#q|)FIgg{q0}BSYF(6WaEFzeV!A*w#TUkK?o>7D=zJjD8}C*ZLGNGJwPq0yX!!#(Nnl zfFOXY8v>>PU1y0HyAxCFId%!8Au?%IL$uTYTr~*Q;oV@|2`UcYf5FVtG|BwrbWa}; zFQZS;?g3KUwGUUn?@|l!=3qQLQ&QzE_x{Zsy@z-Y(Xc~c0Xw#sGrV}=$_HF_d7#g` z_yfhH`jf1{m7=n<4CKdyo*-@?;0-7Met(N$P_tWT!MG$sEa%@N{j~~xDTOLuR4r4@ z+ZpC;xSFFxMw40tGLyCKb(eaLy{Fu*Pda@@z3b`P_Rh!IyuP^^$t*M7;PE~nNf0bItH8>7W)kVNg!a{-xs*K z9Q^ZVZn-R-_Z 
z{ba!~lYv8!;+hdsEMTFaKYS=7F0RW|n3Ka}w-^YUEL^eaqYUb5C*b11`QyMbPt5D& z;^M;Aj9iL^ys@zGrnA4lfBxdj=g;EO(o}u`LqhO$pvrse=9vUGXBkiq0{<17I#hDF zKwwnGL5B(xrZ|i{Fx6MHK-}^Vsi|qq_zt*~L0m#5u%xV5#V0_oN$acrj8TEI>G!{aOz13Sj&pRuM%i0$Lx zcj3&00fR-gdF#r7z{O@MWRxr~0oxG-U1PY(9WBIp&8>LgFUdaU_QDADgYS! z!Gmeo`-~N=y}~h?eG$?f@5{Ybpc7SEjL)yGX659ZLxri{0Ogz%I~7&>{JdUx9qcay z!q@=(z8DsBx&nDn`|+1zcMuhu0A;haxfvL3uYmauSR}xZp#=qurN=QkUg7Nl81RMm zSmfsCzraLU-G?F0c?Nu+W!4WrHMMtlrw;?&|4lBSqQN`fFu5)B@$qRS(DASZ5EFu0 zfNfj>(aR1|@c07&h@({lc}0Cn@H+&ibHI?anyzMt6l!&2;OGG~1sfKEI5M*~V{-Us zAd4cVv%9y~xN!d942rqU)tt5pq13*qsqbEQ+Kzk10kDGSb6gTwm%9)zw0q`3j#$OD z7!A?hOk3MC^xiLcE~jffO9@EgZ;XdVVq8r(1!{?&Q>`>tdyn4TzVtXRpxwMV*1D=u zRt;9bApH>is08Ki?eNb6V^UI5FkFF*h@_Yp8r?_0ypZw)Yn2-3osv=TSn1FD=;I@V z7>pSIgG#RyarQx)S8+c`%@dfweQ`JdD#PbZJ|?EH z)&*vBS3Ye>baJ-@$OhKw+A{1{_6nLcz^e+G?)*`|`WuX3Ov=H;e-F*Nj`JtC>M>ag zj2wzoU^)`)Q3l{$cfl?Ia_2qrku%(g8@BtfAD_Pgz-8sWHj}mx0b zbIv_OLqq=#Z~HKmT;}yaQdFZbU)KR>h8Na1CmTr)N<7Z&_RuzU8pDWrspp>js;*YI zQvkgO5V7JqVCaISwz0VQH*cW7KRYMKEcq4^9h|W3fos z&ia<%e+Kw4%V(A};lyw<#*DWV1yOpfC2;XLr@5yeO~T4CMO%UP3n~CuDWS(-*Ws7o zUU()*k{X0Bzn7Gdz{S()E#Nfnd3sF8Q}X#?o}-P;dzCR@7%_xw}8>S-N4m;|T3@hCq*}#e19T;V7LxzOzq*3!LdcoZwcnKTwN^jGTl|4U-R0 z9sdKF(n!D)TybH-Q-5T3`Yi2hb?_5b)37xzRVqqI2C-ofE#!`!Ion>YHW zyQ57HLB$P?(~2=+A*>_?nyiFXj|AS6hx<_nupN;pDaZp`u`(KB{S9A^YZdihtqTu(S>ICeYJr4 zTxy~RJ_(RtWaC);)x1{w$mecWG_aCYvf_wn5p*d};B@_e=~8cR5yE5koQ@8%(=Xs8 z-3&y<__6C!d+W~qmxl3mOR#rBD-TZqH#hg;<28#u3pmHir1WLyM-g%5eqTZR3br}n zm>NF{wV?B@tmF(?0xy=}ht6gc@+5g6XKlE-mP7%~iHxppYIHO{`6BcqK>YN>G&Ecy z^}b}LpqK|hCA71KL6u&X>%T8ndqAO=UH+)uY_O8jivTJ$(V_{+%m{LQf3BBZ@?Fn!ep;Mozb!EkA zY-?|akx@6m;X)gwqOXU}Y&xOdq&GYOnmvH&u}FXa%b@C^2gV%@Ke7!gr#y=(;ljoS z)GB|3>-~i0fIt=7*OH(ip*;hzKZsJ;zLPHnU@XqhH*UTJ>5PDYj;QDgxYBmfK^x4Y zfZ(K5RdKVj?tw!Njdc9qq@%c3L~PATA2R9(o40S|wY9HDN+&Skk3o8c+vO>5_~r=WlzIy%Hw>&pN|D_b<( zjAusn&Qrv?HMwiap2#$9-2AHAmTGCBbThLoMq<%-7rAg-eXU%!N8AknqnW=Fg^9j{ zGW@XXB225}mjG1E$v zMiG~gNMju>GIq1dek(yOt?$C8?cxHT+R^g}%$yY!hf)N6;lN;w#2_Lu&_x3h8t|R` 
zh5;vU^3{qAeaz#${Z#kJV{XsI;W%sL-)jF4#6ixKou6+g-(EH$n+tgY7u1Ta*g|kinrfvaB=kyd`m7k>`!4I1E1mw&*4@Vi>H?L_8GGbj9exiHjgSfl z-M6|e!u-pt&xNA|wRJ|+`Z{nymusY3`1Dg$`BGA_Z5!6`PS=PBb6pf=I< z%xqtPeAU^NB~()k@!Nx+Ekx=+#8-t!G+?Bgeu04SO!cr_hS4sfnZXkBopB-AnSn0|OEFOJiji*{q8*GmR6cjpGk{{)33o zu9>ZRZ{v+AH>LEH<6APsTwL*`EWG9}c-n*KvHNRBPJ`ERT{t-d>a|rn2rS6}4e(6- zT1=u{T7O=C{u*T~EU2^_8ovi5e6fj48TW{2KOTmJ$Hbf*984DJzA-Q`fXWV(e=sUU zsMoirC0R`rRaC&wR!EtsJ_^n;7`_4h&`H04aPUW`hz|ouTmZ;PHT|$5)h2|J`Ua3) z=rbHgQaibyhFUyhN3gV@8G_NI5k%lIoB_8l?pi@w@Z2|j!4o=REk#A)Rx~h7h`9eQ zmhnXwvM3p5ACTt$Uvb|Z&Sn3p^%l4Y*A)LWrgB7FWvX|dq2nVd;WWl=kdpVbljoqx<2pgHO|+0p09)s+fnIt z#2JsXNHEK3voJW9`gDR(<`lz8N#EF{7wt6}AL&JU4KUl#)hXw>cGvfpM~hwr>k1`` z8Y$&Jlqgf`6-pF|^9yEtii(O|r9M&oshQqEq8Udt8h0S>9a#Co6cqy{l4_9e#*!v` z?)o=x1~Aq34MzPe$fLiW1AQ!~j9W*Jt+j`0YX)a5vu>eH+$sscOYZQvg#dm1Ma3N5r@ zD5Q4DdOuMv9OmYb4=gQR{&?1>ZfFW_OoT86-ymYj0PzwjZ-96zPt_t14h*nKNgi@` ze3f+Nu0hS5nItxMfq|~HZ(%F?2Iu)7<_R_zE^rAf?>k%(>G=iFr&T0rad9#E`SY#v zh;!if*Yn}ZfE!yEUA24=_u|DEW^cz0 z_HfW%C#jn6yLmpc4Mn@rkX>iCwMvaM?|B$wE0PLWy|#IN77k zX-`e?@lBzwss^xGcX|Uer8YJ{*o+M9s8rX>T?uVea!yu#>@`5` z!lUU?NN=zHUOkJp3Sq11FdyfI4FuV!@E|sQ3yM@_3=9ncIRKdOLD(iAOo6DV-MX4u zK9nXhz_6K_^Op6>P3tt}Rgt1>SVAE9gGhdOxN3Um7A9t9^+gzMMW~mWmV;$;cW~sX zsq{TxzKzT&Pd~7-vWkj|j&_aXUmh{0y+50;OvoyUf_(k;nd7Y_(=}c?TNf6Xs zi3Hpup$!yF7jO}T(Et~JYa1xH02>BLKwr2 z1+<|EK>M^CEiu?H>KYoi@7#%$u;1n3-3xNJT7F-l1-Nx<~yySjt=MjhP{%K2A=wLu0zDZhWI;m?{z*D$Y>b66NBmRR$?5x zt@o6Y5;es`^+&@`C?xNl3RYsLPrI%Ntn8t2sU%CawdM6)#UDfd+CM zx)4roZq>m#f4pBZvjO#c_gpJ3DoL0B&l9bZJFtG%xX9fBCll>j|D^+BqN1R?#0fyFK>fa5XkM01(G4<@WF)_IA*~!sq(GyJHP0U3*+R^QEDBHa9mn zDQ@MsT=`g*a_2rch7eQg6Ua zUFc6VboSsva5k<3kqoFap~Q2L(Ph?;lA&$DE7hB{|Jdru+sdqbMr1Ni-SQ8}UZlPv z5jCGs0zGEyZ0kn1O)F>R%JiYK<)Jj2y-0b8GJ2;^R}D?MI6FJLxF828HTjgBf>r@@ z21xu7>0m&Czce&;Prk%#fRn0?a%*LNGkw<5_=2yzDF8q8+W=+3JOK9izeT>HZDHP9Cdaaj?(Ix$c$vrxv67`6;{LpSdvAtHr<|-IYtkf7oT#82s~RGpeJE z|NMEUTrlm)>2Oi}&(&6N zRB;|dm4^}-%uzyq z%>b<8jEsyRN`SRndI<$36w&q)-9`iCu4U4U+qZkcd5iAFFU^#cW&l56&&ZCke4^CL 
z{`u!U&mEGFrnfk{4-=-6pZ!5#MSjfB!8imrBy&i97lbS{qNq310;|9>djSLKK zq=NPQp}G0OJJjG(?%$sJ{x&0#2y#lAW316J%%7emC3&^Yy^^LZx26IiW~T?7BrJf7 z6te9Qm9K&;crHkaBvYt1%+2Hd{nwVP6lECNhA4@{K@mLCUPxtqgF$pBGATZrO6 z_58VC_8t6twgKV*!HJUf zsegc&2HOKExHj%9lczbMr?+Y2MyRx!T3XCP4+G$Xk$`0d3oh^$iv&L}uR1U+1ILBw z5e$!wujx%Y^gAA0np{0Rg!_-6P>{ZS;0_j;>f+|sn_+$o0~ml$oPQG_)*NLV8SgF( z3k9fTi$3y9B}@V-x&6dq-sw)XZ(1B5BT~0b9q@AkhQiNKxw-$ zzJf1!YUE(RU7ZV6akzz}15ouO*UdJ++_ za3oS2*IQzKmW_8Pd`zZbx)rrCbQUG32p1GTN|s#x7e2# z?qXKpW+3YD%F43^&cIWXFw%6`pW>iGfffzt2j36GO~&)5wi$PD9ugm1U9J@G0grRf zo;AIHK71-0nhezebh^{4vN7J-suRM^%?(Gy)?ZPg>#f_p)hj!BfS6k#Hmx+V8NX!zD&lF&GvQ#UlivE zyC_x;8f5H|m5nkXx^b`ske!@w6c@`huXXi|;|K_iiXw*0*dfZr6^k^00uV@|C7C?G zxOg_-F}`nq$`%z9_|t-y3U(%T{z${$3BSLfz;&Vvk2?i7g5SAK(n|UNFq!cr#83Fr z#?AXxAFUA-7Y{WyK&!O092agC@H+t|&3u)>Ycz@wm?QFOhQXBv*V5Y9_#NqM#&!l{ z78R9BR3yCo{F-K`936iEh(}wt3?6%Ut~fqAR9yhUr$2kpG<>`&w3*7W{) zXQ(!=F^DVkX0^@fib71uvazyS5Q@=G;0TR8S%bJaj1$gA%$^5uDGw4U0*RXQLWn|pH zu?tg#-G|+SN0QO^@`I=+A9&-&dJ2i=&jS_ooATHvDT$(jay-0dAY&@i(-)6Tna07% ziACtVFFZCesY2C{E~4f|U3GOkeDv?{J*Jow0n#w2FT=;|;cv5hH#X8E1pzcfun|tr z1kUd=*+WSZLC=Mv0}^YLNu+8(#8S?~ax~E)GQ}S-eY5-tcBd)cHnevLo2V_}A6NaZzDmM}QZ{J5d5J%uQ-Z z{8#r5c<3Wq22wQaO>xArd?(MWdMggKC-0xsv4bW$76_cgTn%3cq=%cZt%3{G083lTJgY@&%Q$N6^;q3EA z%#SF42r?QD05YueIKW$2H5|5S6GZH|sx@J>3VdW``yx2&uz1a~}B z?nPh;6J{a<@FRlH$;80Go9PqJG4w_TK~NMZk0n`nv=_Ri0t@sa+uF)tf(1^q-HPVi z9PZ2g_9{*p+Tfh;5Ije9p4V?yT z5YMA()M`}O(ju9rq!%i1mOsM`p9X<|i|`JiF2o!hpON!8K+6#_tE~57>9Vr4^mJHx zBz#Fb!37OGfJ}d$b!*o)(auTNHe1AnFyv=}n|9%X)-#(!J>uS}S zysiZ_lzh*#PiLUX;`9BXrW0j0S zPfb-Bbh2aP<3?&8K*#tbc^jR{(x9Y%HGY|xs8R#`bnMjFDq|33>s&)&V@%F3IeG>} z*jmm<*%$R$SD^4^!^ZL@m@xsEw0zd);NtjT>8wRB<&U^w()1UJ4+daA9`1J%m z8D}j)AT~DkO-NcR8HQ7wrnHih`M8vb@NhbSLbGm`djaP^j`#d7kUxOmkI&~qe{q~^1Ud9TNXMBkJx>KQma|V41YW685c7e&Uv7(E8@;JPn zj(-k1^SYyqN$)G3xoo}R5AVtQzmFdZl~6~6R1PsVkdAOFwzU50ciJs=QMZS0*gWY%O_`L%Lf>{XoGu>{@lW$Iljm*pid zL%SFocAOI=l;iWjjp^*~0^^(Nc6zbS17W!~>=s)OVzpJy+$mjMkFSeO1FV|@*aSkh 
z<1><#Ed{$zafP=r_~i?_7g->P5J-!Xj{#qU;q13Yf+_|Sw|2FF=2cO!1yBHlJA$r^ zWbY*gn5M1aC1%w&G5cJ=Zs);+6GrNgufs4P$o|8|Hq$!*wWYqo4(wh z^TKB5&dJrxbnkc8i6%gCYx&T(grYbjgBkTPg4ak}?%-kQ7KyLjTZwGYcMiFdz69N6A|h0c-(jQ!yVm&*t?Br z#kn016cFOJ;==4~v4guI7^;^-uoIp<-wf=_5ZM6O!~*rurRI1;gV@Rq$Z?%>xv18g5A$UP3;JAbr*L?8~1j^_oCn)gc& z)D`|mzY|73Wtd~rj-J)rqX=^WT<$~9qK&8*7k;$-c_&^Oq3h0k_8L4A1aMeapw4Jd z29Ak5dlByxo~)SB=r&+2m@_CtzLgt5--I3$iW3eYp~jCNbK~nDQ79&OBw+54OHRoL zMo0JIe(Fe90qZgS@Am@Rhgfz~yefv%p7BuwfPESSe zf^&f$8j27!iZIha4VY^OeL0d6qGy%Q5XtUPwDu#=wIU-iBDGr?a5ULS+|cGej&4Cq zOW?o(U631ETfs}TZ#QCCxwcQ`SN?kU~R>!a|*Y(BixxT6b`H1 z&n%8{KP{+Im1Mz@bMc7@0}Joz^a}q$ZBgD0fj2oR)a~X&OOJXdFV$JQ8o0WV3|=)X zrkZw$h&0re*Sbah#s1fSVJF3Z-3HBnktzCB|0n>cs5t)xUak4>!wCErzP06F58(gz zFF&fVJ_AGnkktu(EKtO%zqKz^md+7k!Jpk>9g5sW!vSuE@(M#5XF)Eu`Go_U9nv2D zFPPv;G~tIYWt`+qaS8!Fl#W?+xcFTQ&LyVb8O>g~+zfM6RIFRn6px-%Hz**pPZ`H%nT!dGmSwAmIei7^?zKc9SH)-#?#JV2#buP|QwBe+>vAWU~N}feBb% z5PCJ((jdyZqjLako?@K8b$-o)s=9iC#VpMe)c^hxTBAh^-BsdQYD4*7LCOXSU z*V?b~fh7iA8W=?Qg0nDY*Q6oBb`jdg4Sv^N>KO&Ufu8H_+k|>w6v_AMCU2l+glpbC zO5_nQsh*ZqRHuPTKy;kw^Uh58hGI;^vNl6E@lvi8!q2{a>%YRSR^sB@dZPje0R&&K zUmtbajB=Uo3w3%lxw~KnzpX)oFAE&IQ01^Vo@Bu(T7o8bj&pE#**uDhE~qZQ zASa{sPIy-PLN9qFC_$04aUrH!@HlAV()e*xGFgo&=_GHREx3930xCcZp04vQm!bf~ zu<-fwcSw~0Y5==R>9s~UV=w>!HYLRtq4JWNwaMAa7_`FpB#0SWSVx?)-(X`$_lqMT<;QcFz}7b0}UAx`{Bc4A6fvWm?B<+KULrXVT7mDmK+7zn#HV6Kq6A zMxgPaVZ(!5rNQ=gk9I@4JiV--51gbZ99C6QCqWr>`(QtEOwX`_4ioAOD3Nz$WPDX4T zl68xU@jABM1q7g0luB$f<2N9pm10A=$hFgIUwzE;)S&v!&&#_w*@g&)XA>#u-$ z;c(=TFpLNbPMtp=Gq}Mjy8vNjY+VCpIb!sxB(+%6BsuD#LxTquA@V0 z&z>)6*Al!>5C86sQKDTK2*rBkwD--F81734e+5qqbZspDBrmN0sm^9Pn)yUTIwAvF zF`(7%e)muE($UZW#X*0alE-2dAM2i#lfxK4EneW+oH^W@_>&@ZVXa9(*p1uVE1%AK zL5pCecpca%r`hkqMAxGC+3>}BBxIbk{{KF9|yUT&P;-1SuzKKP!)8 zL3i%7c05f&48eRZJ06}Ay->Y&^zcxgJc0j|!H`8Qy4S3wS?D0-6|VDvL~Q6bFghA> z@7`;{THp;?OEz`@0-LpD!>2{9rNz+~KX||pdz}yS;?bt~pgU<7@KC@+u)WD1bXtwY z*?c&Yk(Kp@SfeII%C@(pJCMBwKuWehR}#2u1!X9h#PmNUEE2TK&@-jpl-rG#d+({j 
z(lJ3VSz|6O?J5<(sDd)gaW%Fy(n}C${ixsLXeMLT!3{;>x1~>hRu}O%1sd88H=J%+ z$11Z7EWyDk(|kauv_4~pSIOIl_N4V0qHQk-{=*srUjT9d(K$IPT3cr!`It5bHg}s| z_}r^2C=+k&G6Tpy_v1%QDUX8>J$BiQK!R*rbP@Tiqo)b#183)*pC>0%XQt$mPR;^y zoeK&>R2j5i>WcI7^vn{k8*WWuDQ3UC)}W}k*hMsc=6$`&Woq4q3x$VL_g9qy708`sjjw}pw z)omv=L~6LP(9=K7FT>1ycmvSfqErSi!+3=AhzZjZyo8}|!m;J0xzfj+F&vlA&>r5S zKr5jjkQZFL-Ccym;hAz6X=C|td;SYWPD!_kC)_L>KvtDJ`}tk&!sZOoFMmI#iY-h` zxn7H|6Hn00dCzsb@jg^{GVeRe=x+*|tXwH47H7g4u5BK#MxyjGOb=W<1JXGB0b0Oj z_<*$ZA|_p+8uPe~0Bsa)zJ2p%%z-S7u5l3Iwt>G#wC{?lS(;9i9np-S-@0SR&s3k^ zUy(B4Z-KcM#0XvP0g*=;TM_3a?7D~)nwNLTFg?)QELS30@uEKnD?ofH2t@2m6^mA|^Je zewfxza6VLj(<3Wt6;7elbv_Z_4uUm{eIr)Kw-Ma9MBiO#mRZ>|UW z%ga(`5)9Z|6oTIb7BPCO`ZL=3pSj2pe&8EPykGk^e;p9OqhZ(%<%x;|_m5NRg~kCu z2Ndjb<;T%t7IU+V5)53dYn$+C0M8d=czpF1d+0xy5?hHLj6zjS=Q4NUjU`_y)w8*`x`ogL}ffXPMtF>Ii`bnJswc5?E8!j_(% z$dB=5n6nIfVAhYh+Pyn*m`uJiG1S#{LK7%c+V5V76u!k?A9p$M^3(WuowaJic>L*o zA>XZ}gWMyEE?n3_0;1)d1fwld4*XLCUOtU&WGoj%u5Z58Kk((t`;yJw-Q7x?GXn2t zZPEAddrM zK1#O1!HTo;$M)0yE`}Lq>$|5g7ZB5@cPx>AC*gI|&fz*Aod^+yso)jOW_E&` zVS2wV_zGxW2}@5pkWEevxdpCdi1^sG?j9afp^l4*r_;~-4Gwlim2TQ~_!-v-D4?;W z{i9t_jfm&+x_EjP4KC&X{`8|qL*sG;@57t3pV{xJ=E)51U^YvlbK5L+UQYfR-(Ml( z{qKM%`P&u3&aN(w;A?-rMo>FYhmQFJU;o4(qjA+TMd(aO3<~U4SEk<(^*A>I;Sp=b z+1ZkE0=#qk#@(R^*&XKP(22xob1T+bUcbhte3-iII^Wvu|2>1#+I|m6n5#`Mee5W| zY%>WPD9-7JmD@`?3S4+ikj(zu}ZUy z&(dMaS$H2jdZfBoVZI+=R7iNZ+Q;ax7?2a7bbd`udU;wR^GE=1oTznAk>u^uc(|I&UvWgr zZk)O-krk7jZYF~hA-c1Uc`_PWwC1;Ol`A(Cs7T>1@B~2%+3SlrZKgW1Ug)fNatA&MzZjk(q-UqlPlmK`@GqhPvJT|EE=cIj&ObouR zL7T2cA`kUf05!x{CHy`$g>`VFl?&5Nb)Wsxv z*e!$Ke13;=iXMizWJFjSK|d2Is}}C?_M%AOt6H&< zx>~L{QOL4pr>D*A9g$7X0PpEnIwRCs>az@9^3>V^q00(Wk*Sx0S`+Tg;32#>Q|%cF z0Y!h%i+qb&?42SRr{=f*;Hk}IMdqL6=O0F4zyfGAb4hk}?V~I;%wcCongb}L1BqaR z5G+W_HbzEeqS+Q+9Kz?`Ub4|rj1l)|`TprzM}Ab&hg?P(1jfql9PstoKFPrmAAXWo zLsN6`&hzqEog~y=5^$bI@;xCgIyo<%{-ci1fZ#4@C}+;ie~CKcg`KdkC{V@ncbF#T zW$CR?(mvfLdM^uLUHV z9C27t17*a1U+1y5@6cz#x`&s;3}+UE1JjRtd^fU)AAg|E-4BP_ArLU%<**h3N5Cn9 z;n9s{#z~f|@!_ed_9PNXN2du~5r73zAt4yws*`ZFsLq{+R 
zqCmO_`fDkv{3gZw3@X~eF$!~ZSCzh+@;`X(VU8RdOvJtp zC;v==b5cDUUe)5BiHpr__wSU|vHvCVu|*;?FP`A)qohW;a7ZdKAz>fNEcA}}l{T{P z@tOu~-VH`#BsWXMS5FO-5{IecT~?vtLVxEoWxjp;{Bs_-Iyu!eG^E0vn1^Q`y8-K; zNX9M(yg-@LJ%uZFFKxgpo3Twj{s)8tQlncXtne}8YHBnQHAb3Ul$uo|yanmy9i5!e zceo#*8CzY|(9lp*gL_H_bpz8V5bwu4JJbW|g&q1UFiuE5`5Z0k9!QzY&C`210PDZ+ ziVg~*rGT!77W0E~z-;Wu@G$pzlSjD2gJ+5!(Q}4W?y1(LW6nFI$MOgk~yasPG>b2RZYPh+hE$um zkkSLkwOCFPK}RljDsr~k5%a3qT(=d|0+`%@f%9r9JR+jN;j?o0W9#23d1zm_xUN&| zI&%)%^SD*4T8KT{-26%_M9;WM%1TF9Bf>)hh43*a>NBOcl%HNwn$Szb{L%F*7d&$o{%Y zfsyslofF-@f8Shdc^#h~*%buBl-Q;;V`F2>%JHH{C5+n7V+7d6;L~Ps{8)MzMljxva>tQyx2-P2(2l!e|zYL^> zh3}8wyhgv3p8k~)X8qc(&PMX8;OQ6+lYjdWB1OEaUGW?I`Wt+fL~MoG`x^!ktczAzW7&`DT{Eob(a4LA3SK$=xx4I0 zC)6`RU0a@TD-Ed`DE;*#sFgzaEZn=5Pj~Wm&-=Z_kCfUf4_tVdr!<^K<-PACE1uO(Xk#1 zHr?STzh&1h>@7*S5cnP_0vZW_Utg$3k-kAG-FxInB>zB81qmHm-6NsTsDsvV#39*% zTZa2Ulyh@(I>5uDcI;Ruwi)4sBDJ-1YjM>v;8~`8)}RTO^&$4F9^~UAD*ma)-ri+A z9N%CN?+M)-F?_af@(o5Y1u{or4h1ZnEUOPUf1gy<&>*JM;GiOgXc@L2mEcgLWuT{j zpP<2#bH@v27IJd0z?S*b!r??>RwCS|HvRQuLu2DTBa=CD|FJ&|-0bPA9vfbn6V=7CuW;xC!a11!Wk-&-VW>zc=q{}f*AD?{nLO&;K9 z8W5ko$3jKtmp7oxS-a&?{>()YT7f$B`!c|PKsE&^S3U6K==HU5yM%QEsOt~aF_UsZ zI!253euljB{IowwpqM(;-i5K-HyB+|oiA7cIY+(NhCRo&yFlX;EK!eQS?}@$f{e8Tlyy)5OksNaA=DXySBkz`<-)=kub^P?OI-tl zr2N0;f~>*vnzcPGX%#0C>W$I3pzq#jXwg_FcpUjRcl+IaXMALCANVS^Jl`6sLKGk0iqF>h0j@$v zNWL%v_NSANEf--V_!f4D6m*c1I zm({0p!R&4GSaag$xxJ^F(@e`bIXQ=;kT^W>W}z2Hl${w7bwGL0wZufnDM-Fr^6N)U zhyN@{sBCj#r(zvCms<1+eWgota^(3i1;elo{?;uy*AzJoa2XQ3R-8DovP(a?4$2MI zQ#!L~)jn^KxK2~sbvZAbI+TRB4l9f%a4IQ_VKoEJqWl~z5yjJtr%OHmc45*L1 z>BpM|IQ*??O&7LbNd~$(vVh*af29gB4_(0E~@+73~7yylt`I7^puG z19UMRK~aTL1Qq|t@Qvo~?%$Zv;%twD3O;A_1}T(3467pdf>*b`gWLfd{tz)ST z!o#)e*WsmMdG18ut5C%yT*voNz3x=R3}lV~fq}FTkeR>(Et>R< zjM3iGWdc(@Thdi@$q6-~$H>8dLhJl|cb)?mm`|NLZD7?zE&m!q$--O}FX*ue$o-Xe zgKwAbUhe=+^*!$dSgHV4IF@BgF2LC%DCij8oqVwmU%>>M#e;)AQ26myVD%ji=eAu4 zZ8qDHkeBypyz`N?8DE%uG=T;HihxE4n|{#VEu9@xkI#7aOf^LhtVEU*xk{={eUvLwH*hOh|@q^mS 
z#ANcyTY!|KnGHvbAS_z6fYruenlL6KNG4OWvu_88@ynHkfI)fUqJGV!`d}%Iw(aT* zGsBIaQEGzQnx{?twsVZns$(z@PR5aRU5LGVq>OqM6@wi~2>27`XHnR?A|^J2+7EPe zWYn2&-*!kkngP(c7rpZ1$~($-5`Vy#+!{;%KeS$O|F12tGCDHv<5yp&N&KCflBVKI I`E%F)7r7A#fB*mh literal 0 HcmV?d00001 From d035ba33dec2a6eb251bffb4d9b48d8fa896b1e4 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 30 May 2022 11:23:18 +0200 Subject: [PATCH 138/140] [system] Update documentation --- docs/usage.md | 156 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 156 insertions(+) create mode 100644 docs/usage.md diff --git a/docs/usage.md b/docs/usage.md new file mode 100644 index 00000000..89c573e4 --- /dev/null +++ b/docs/usage.md @@ -0,0 +1,156 @@ + +SeBS has three basic commands: `benchmark`, `experiment`, and `local`. +For each command you can pass `--verbose` flag to increase the verbosity of the output. +By default, all scripts will create a cache in directory `cache` to store code with +dependencies and information on allocated cloud resources. +Benchmarks will be rebuilt after a change in source code is detected. +To enforce redeployment of code and benchmark input please use flags `--update-code` +and `--update-storage`, respectively. + +**Note:** the cache does not support updating cloud region. If you want to deploy benchmarks +to a new cloud region, then use a new cache directory. + +### Benchmark + +This command is used to build, deploy, and execute serverless benchmark in cloud. +The example below invokes the benchmark `110.dynamic-html` on AWS via the standard HTTP trigger. + +``` +./sebs.py benchmark invoke 110.dynamic-html test --config config/example.json --deployment aws --verbose +``` + +To configure your benchmark, change settings in the config file or use command-line options. +The full list is available by running `./sebs.py benchmark invoke --help`. + +### Regression + +Additionally, we provide a regression option to execute all benchmarks on a given platform. 
+The example below demonstrates how to run the regression suite with `test` input size on AWS. + +``` +./sebs.py benchmark regression test --config config/example.json --deployment aws +``` + +The regression can be executed on a single benchmark as well: + +``` +./sebs.py benchmark regression test --config config/example.json --deployment aws --benchmark-name 120.uploader +``` + +### Experiment + +This command is used to execute benchmarks described in the paper. The example below runs the experiment **perf-cost**: + +``` +./sebs.py experiment invoke perf-cost --config config/example.json --deployment aws +``` + +The configuration specifies that benchmark **110.dynamic-html** is executed 50 times, with 50 concurrent invocations, and both cold and warm invocations are recorded. + +```json +"perf-cost": { + "benchmark": "110.dynamic-html", + "experiments": ["cold", "warm"], + "input-size": "test", + "repetitions": 50, + "concurrent-invocations": 50, + "memory-sizes": [128, 256] +} +``` + +To download cloud metrics and process the invocations into a .csv file with data, run the process construct + +``` +./sebs.py experiment process perf-cost --config example.json --deployment aws +``` + +### Local + +In addition to the cloud deployment, we provide an opportunity to launch benchmarks locally with the help of [minio](https://min.io/) storage. +This allows us to conduct debugging and a local characterization of the benchmarks. + +First, launch a storage instance. The command below is going to deploy a Docker container, +map the container's port to port `9011` on host network, and write storage instance configuration +to file `out_storage.json` + +``` +./sebs.py storage start minio --port 9011 --output-json out_storage.json +``` + +Then, we need to update the configuration of `local` deployment with information on the storage +instance. 
The `.deployment.local` object in the configuration JSON needs to contain a new object
+`storage` with the data provided in the `out_storage.json` file. Fortunately, we can achieve
+this automatically with a single command by using `jq`:
+ } +} +``` + +In our example, we can use `curl` to invoke the function with provided input: + +``` +curl 172.17.0.3:9000 --request POST --data '{"random_len": 10,"username": "testname"}' --header 'Content-Type: application/json' +``` + +To stop containers, you can use the following command: + +``` +./sebs.py local stop out_benchmark.json +./sebs.py storage stop out_storage.json +``` + +The stopped containers won't be automatically removed unless the option `--remove-containers` has been passed to the `start` command. + From cdd8ad2a89bd07432a3254f5178e09449215d205 Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 30 May 2022 11:44:55 +0200 Subject: [PATCH 139/140] [system] Further updates to documentation --- docs/benchmarks.md | 23 +++++++++++++++++++ docs/build.md | 55 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 78 insertions(+) create mode 100644 docs/benchmarks.md create mode 100644 docs/build.md diff --git a/docs/benchmarks.md b/docs/benchmarks.md new file mode 100644 index 00000000..51b3ba5b --- /dev/null +++ b/docs/benchmarks.md @@ -0,0 +1,23 @@ + +## Benchmark Applications + + +| Type | Benchmark | Languages | Description | +| :--- | :---: | :---: | :---: | +| Webapps | 110.dynamic-html | Python, Node.js | Generate dynamic HTML from a template. | +| Webapps | 120.uploader | Python, Node.js | Uploader file from provided URL to cloud storage. | +| Multimedia | 210.thumbnailer | Python, Node.js | Generate a thumbnail of an image. | +| Multimedia | 220.video-processing | Python | Add a watermark and generate gif of a video file. | +| Utilities | 311.compression | Python | Create a .zip file for a group of files in storage and return to user to download. | +| Utilities | 504.dna-visualization | Python | Creates a visualization data for DNA sequence. | +| Inference | 411.image-recognition | Python | Image recognition with ResNet and pytorch. | +| Scientific | 501.graph-pagerank | Python | PageRank implementation with igraph. 
| +| Scientific | 501.graph-mst | Python | Minimum spanning tree (MST) implementation with igraph. | +| Scientific | 501.graph-bfs | Python | Breadth-first search (BFS) implementation with igraph. | + +For details on benchmark selection and their characterization, please refer to [our paper](#paper). + +## Workflow Applications + +**(WiP)** Coming soon! + diff --git a/docs/build.md b/docs/build.md new file mode 100644 index 00000000..c6c293aa --- /dev/null +++ b/docs/build.md @@ -0,0 +1,55 @@ + +SeBS caches built code packages to save time, as installing dependencies can be time and bandwidth consuming, e.g., for ML frameworks such as PyTorch. +Furthermore, some benchmarks require special treatment - for example, PyTorch image recognition benchmark requires additinal stripping and compression steps to fit into the size limits of AWS Lambda code package. + +By default, we deploy benchmark code as package uploaded to the serverless platform. +However, on some platforms we use [Docker images](#docker-image-build) instead. + +```mermaid +sequenceDiagram + participant Benchmark Builder + participant Cache + participant Platform + participant Docker Image Builder + Benchmark Builder->>Cache: Query for an up-to-date build. + Benchmark Builder->>Benchmark Builder: Prepare environment and benchmark code. + Benchmark Builder->>Benchmark Builder: Install platform-specific dependencies. + Benchmark Builder->>Benchmark Builder: Install benchmark dependencies. + Benchmark Builder->>Platform: Package code. + Platform-->>Docker Image Builder: Build Image. + Platform->>Benchmark Builder: Returns zip file or image tag. +``` +## Code Package Build + +**Query Cache** - first, we check if there is an up-to-date build of the benchmark function +that can be used. + +**Prepare Environment** - benchmark code with data is copied to the build location. + +**Add Benchmark Data** - optional step of adding additional, external dependencies. 
An example is downloading `ffmpeg` release into `220.video-processing` benchmark. + +**Add Platform-Specific Wrappers** - we add lightweight shims to implement the cloud-specific API and keep benchmark applications generic and portable. + +**Add Deployment Packages** - some platforms require installing specific dependencies, such as cloud storage SDKs in Azure and Google Cloud, as well as the Minio SDK for OpenWhisk. + +**Install Dependencies** - in this step, we use the Docker builder container. +We mount the working copy as a volume in the container, and execute there +This step is skipped for OpenWhisk. + +**Package Code** - we move files to create the directory structure expected on each cloud platform and +create a final deployment package. An example of a customization is Azure Functions, where additional +JSON configuration files are needed. + +**Build Docker Image** - in this step, we create a new image `function.{platform}.{benchmark}.{language}-{version}`. +Benchmark and all of its dependencies are installed there, and the image can be deployed directly +to the serverless platform. At the moment, this step is used only in OpenWhisk. + +## Docker Image Build + +A different approach is taken in OpenWhisk. +Since OpenWhisk has a very small size limit on code packages, we deploy all functions as Docker images. +There, in this step, we copy the prepared benchmark code into a newly created Docker image where +all dependencies are installed. The image is later pushed to either DockerHub or a user-defined registry. + +In future, we plan to extend Docker image support to other platforms as well. 
+ From e5e7be1caa5079e8398867878ad0badf942e2a0e Mon Sep 17 00:00:00 2001 From: Marcin Copik Date: Mon, 30 May 2022 12:06:27 +0200 Subject: [PATCH 140/140] [system] Update platform documentation --- docs/experiments.md | 31 +++++++++++++++++++ docs/platforms.md | 73 ++++++++++++++++++++++++++++++++++++++++----- 2 files changed, 97 insertions(+), 7 deletions(-) create mode 100644 docs/experiments.md diff --git a/docs/experiments.md b/docs/experiments.md new file mode 100644 index 00000000..cc5907a5 --- /dev/null +++ b/docs/experiments.md @@ -0,0 +1,31 @@ + +## Experiments + +For details on experiments and methodology, please refer to [our paper](#paper). + +#### Performance & cost + +Invokes given benchmark a selected number of times, measuring the time and cost of invocations. +Supports `cold` and `warm` invocations with a selected number of concurrent invocations. +In addition, to accurately measure the overheads of Azure Function Apps, we offer `burst` and `sequential` invocation type that doesn't distinguish +between cold and warm startups. + +#### Network ping-pong + +Measures the distribution of network latency between benchmark driver and function instance. + +#### Invocation overhead + +The experiment performs the clock drift synchronization protocol to accurately measure the startup time of a function by comparing +benchmark driver and function timestamps. + +#### Eviction model + +**(WiP)** Executes test functions multiple times, with varying size, memory and runtime configurations, to test for how long function instances stay alive. +The result helps to estimate the analytical models describing cold startups. +Currently supported only on AWS. + +#### Communication Channels + +**(WiP)** + diff --git a/docs/platforms.md b/docs/platforms.md index ff6d22d6..3f6b9f28 100644 --- a/docs/platforms.md +++ b/docs/platforms.md @@ -1,8 +1,14 @@ -SeBS supports three commercial serverless platforms: AWS Lambda, Azure Functions, and Google Cloud -Functions. 
+SeBS supports three commercial serverless platforms: AWS Lambda, Azure Functions, and Google Cloud Functions. Furthermore, we support the open source FaaS system OpenWhisk. +The file `config/example.json` contains all parameters that users can change +to customize the deployment. +Some of these parameters, such as cloud credentials or storage instance address, +are required. +In the following subsections, we discuss the mandatory and optional customization +points for each platform. + ## AWS Lambda AWS provides one year of free services, including a significant amount of computing time in AWS Lambda. @@ -127,8 +133,7 @@ To use a different Docker Hub repository, change the key Alternatively, OpenWhisk users can configure the FaaS platform to use a custom and private Docker registry and push new images there. -Furthermore, a local Docker registry can speed up development when debugging -a new function. +A local Docker registry can speed up development when debugging a new function. SeBS can use alternative Docker registry - see `dockerRegistry` settings in the example to configure registry endpoint and credentials. When the `registry` URL is not provided, SeBS will use Docker Hub. @@ -141,7 +146,7 @@ for details. **Warning**: this feature is experimental and has not been tested extensively. At the moment, it cannot be used on a `kind` cluster due to issues with -Docker authorization on invoker nodes. +Docker authorization on invoker nodes. [See the OpenWhisk issue for details](https://github.com/apache/openwhisk-deploy-kube/issues/721). ### Code Deployment @@ -150,7 +155,7 @@ when the function's contents have changed, and when the user requests a forced r In OpenWhisk, this setup is changed - SeBS will first attempt to verify if the image exists already in the registry and skip building the Docker image when possible. 
-Then, SeBS tcan deploy seamlessly to OpenWhisk using default images +Then, SeBS can deploy seamlessly to OpenWhisk using default images available on Docker Hub. Furthermore, checking for image existence in the registry helps avoid failing invocations in OpenWhisk. @@ -167,10 +172,64 @@ To use that feature in SeBS, set the `experimentalManifest` flag to true. ### Storage -To provide persistent object storage in OpenWhisk, we deploy an instance +To provide persistent object storage in OpenWhisk, users must first deploy an instance of [`Minio`](https://github.com/minio/minio) storage. The storage instance is deployed as a Docker container, and it can be retained across many experiments. +OpenWhisk functions must be able to reach the storage instance. +Even on a local machine, it's necessary to configure the network address, as OpenWhisk functions +are running isolated from the host network and won't be able to reach other containers running on the Docker bridge. + +Use the following command to deploy the storage instance locally and map the host public port 9011 to Minio instance. + +```bash +./sebs.py storage start minio --port 9011 --output-json out_storage.json +``` + +The output will look similar to the one below. +As we can see, the storage container is running on the default Docker bridge network with address `172.17.0.2` and uses port `9000`. +From the host network, port `9011` is mapped to the container's port `9000` to allow external parties - such as OpenWhisk functions - to reach the storage. + +``` +{ + "address": "172.17.0.2:9000", + "mapped_port": 9011, + "access_key": "XXX", + "secret_key": "XXX", + "instance_id": "XXX", + "input_buckets": [], + "output_buckets": [], + "type": "minio" +} +``` + +The storage configuration found in `out_storage.json` needs to be provided to SeBS, +and the instance address must be updated to not use the internal address. +In this case, the host machine's address is `172.22.20.30`. 
+Note that other parties must use the host network port `9011` to reach the Minio instance.
+Docker's port mapping will take care of the rest.
+
+```
+jq --argfile file1 out_storage.json '.deployment.openwhisk.storage = $file1 | .deployment.openwhisk.storage.address = "172.22.20.30:9011"' config/example.json > config/openwhisk.json
+```