diff --git a/.gitignore b/.gitignore index bdece504a..e110f17bc 100644 --- a/.gitignore +++ b/.gitignore @@ -59,4 +59,5 @@ pom.xml.versionsBackup konduit-serving-codegen/src/main/resources/docker/centos/aarch64/cuda/10.0/nvidia-deb/cuda-repo-l4t-10-0-local-10.0.326_1.0-1_arm64.deb konduit-serving-codegen/src/main/resources/docker/centos/aarch64/cuda/10.0/nvidia-deb/libcudnn7_7.6.3.28-1+cuda10.0_arm64.deb -konduit-serving-core-git.properties \ No newline at end of file +konduit-serving-core-git.properties +builds \ No newline at end of file diff --git a/create-all-binaries.sh b/create-all-binaries.sh new file mode 100755 index 000000000..0fae2b68c --- /dev/null +++ b/create-all-binaries.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set -euxo pipefail + +declare -a platforms=("windows-x86_64" "macosx-x86_64" "linux-x86_64") +declare -a chips=("cpu" "gpu") +declare -a spins=("minimal" "pmml" "python" "all") + +mkdir -p builds + +PROJECT_VERSION="$(mvn help:evaluate -D expression=project.version | grep -e '^[^\[]')" + +## now loop through the above array +for platform in "${platforms[@]}" +do + for chip in "${chips[@]}" + do + for spin in "${spins[@]}" + do + if [[ "${platform}" == 'macosx-x86_64' && "${chip}" == 'gpu' ]]; then + continue + fi + + echo "Compiling for $platform | $chip | $spin" + + python build_jar.py --os "${platform}" --chip "${chip}" --spin "${spin}" \ + --target builds/konduit-serving-uberjar-"${PROJECT_VERSION}"-"${spin}"-"${platform}"-"${chip}".jar + done + done +done + diff --git a/konduit-serving-api/src/main/java/ai/konduit/serving/pipeline/PipelineStep.java b/konduit-serving-api/src/main/java/ai/konduit/serving/pipeline/PipelineStep.java index 18f8d7d17..53b2f8a62 100644 --- a/konduit-serving-api/src/main/java/ai/konduit/serving/pipeline/PipelineStep.java +++ b/konduit-serving-api/src/main/java/ai/konduit/serving/pipeline/PipelineStep.java @@ -30,25 +30,21 @@ @JsonSubTypes({ @JsonSubTypes.Type(value = PmmlStep.class, name = "PMML"), - 
@JsonSubTypes.Type(value = PmmlStep.class, name = "PmmlConfig"), + @JsonSubTypes.Type(value = PmmlStep.class, name = "PmmlStep"), @JsonSubTypes.Type(value = SameDiffStep.class, name = "SAMEDIFF"), - @JsonSubTypes.Type(value = SameDiffStep.class, name = "SameDiffConfig"), + @JsonSubTypes.Type(value = SameDiffStep.class, name = "SameDiffStep"), @JsonSubTypes.Type(value = TensorFlowStep.class, name = "TENSORFLOW"), - @JsonSubTypes.Type(value = TensorFlowStep.class, name = "TensorFlowConfig"), + @JsonSubTypes.Type(value = TensorFlowStep.class, name = "TensorFlowStep"), @JsonSubTypes.Type(value = OnnxStep.class, name = "ONNX"), - @JsonSubTypes.Type(value = OnnxStep.class, name = "OnnxConfig"), + @JsonSubTypes.Type(value = OnnxStep.class, name = "OnnxStep"), @JsonSubTypes.Type(value = KerasStep.class, name = "KERAS"), - @JsonSubTypes.Type(value = KerasStep.class, name = "KerasConfig"), + @JsonSubTypes.Type(value = KerasStep.class, name = "KerasStep"), @JsonSubTypes.Type(value = Dl4jStep.class, name= "DL4J"), - @JsonSubTypes.Type(value = Dl4jStep.class, name= "DL4JConfig"), + @JsonSubTypes.Type(value = Dl4jStep.class, name= "Dl4jStep"), @JsonSubTypes.Type(value = PythonStep.class, name = "PYTHON"), @JsonSubTypes.Type(value = PythonStep.class, name = "PythonStep"), - @JsonSubTypes.Type(value = PmmlStep.class, name = "PMML"), - @JsonSubTypes.Type(value = PmmlStep.class, name = "PmmlStep"), @JsonSubTypes.Type(value = TransformProcessStep.class, name = "TRANSFORM"), @JsonSubTypes.Type(value = TransformProcessStep.class, name = "TransformProcessStep"), - @JsonSubTypes.Type(value = CustomPipelineStep.class, name = "CUSTOM"), - @JsonSubTypes.Type(value = CustomPipelineStep.class, name = "CustomPipelineStep"), @JsonSubTypes.Type(value = ImageLoadingStep.class, name = "IMAGE"), @JsonSubTypes.Type(value = ImageLoadingStep.class, name = "ImageLoadingStep"), @JsonSubTypes.Type(value = JsonExpanderTransformStep.class, name = "JSON_EXPANDER"), @@ -56,7 +52,9 @@ @JsonSubTypes.Type(value 
= ArrayConcatenationStep.class, name = "ARRAY_CONCAT"), @JsonSubTypes.Type(value = ArrayConcatenationStep.class, name = "ArrayConcatenationStep"), @JsonSubTypes.Type(value = WordPieceTokenizerStep.class, name = "WORDPIECE_TOKENIZER"), - @JsonSubTypes.Type(value = WordPieceTokenizerStep.class, name = "WordPieceTokenizerStep") + @JsonSubTypes.Type(value = WordPieceTokenizerStep.class, name = "WordPieceTokenizerStep"), + @JsonSubTypes.Type(value = CustomPipelineStep.class, name = "CUSTOM"), + @JsonSubTypes.Type(value = CustomPipelineStep.class, name = "CustomPipelineStep") }) @JsonTypeInfo(use = NAME, property = "type") @Deprecated diff --git a/konduit-serving-codegen/src/main/java/ai/konduit/serving/codegen/pythoncodegen/CodeGen.java b/konduit-serving-codegen/src/main/java/ai/konduit/serving/codegen/pythoncodegen/CodeGen.java index e38e4f650..1525ff2cf 100644 --- a/konduit-serving-codegen/src/main/java/ai/konduit/serving/codegen/pythoncodegen/CodeGen.java +++ b/konduit-serving-codegen/src/main/java/ai/konduit/serving/codegen/pythoncodegen/CodeGen.java @@ -24,6 +24,8 @@ import ai.konduit.serving.InferenceConfiguration; import ai.konduit.serving.config.*; +import ai.konduit.serving.config.metrics.ColumnDistribution; +import ai.konduit.serving.config.metrics.MetricsConfig; import ai.konduit.serving.config.metrics.NoOpMetricsConfig; import ai.konduit.serving.config.metrics.impl.ClassificationMetricsConfig; import ai.konduit.serving.config.metrics.impl.MultiLabelMetricsConfig; @@ -63,6 +65,8 @@ public static void main( String[] args ) throws Exception { JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator(objectMapper, JsonSchemaConfig.html5EnabledSchema()); Set> clazzes = new LinkedHashSet<>(); + clazzes.add(ColumnDistribution.class); + clazzes.add(MetricsConfig.class); clazzes.add(MultiLabelMetricsConfig.class); clazzes.add(NoOpMetricsConfig.class); clazzes.add(ClassificationMetricsConfig.class); diff --git 
a/konduit-serving-core/src/main/java/ai/konduit/serving/deploy/DeployKonduitServing.java b/konduit-serving-core/src/main/java/ai/konduit/serving/deploy/DeployKonduitServing.java index 8a1d73570..5b40f3269 100644 --- a/konduit-serving-core/src/main/java/ai/konduit/serving/deploy/DeployKonduitServing.java +++ b/konduit-serving-core/src/main/java/ai/konduit/serving/deploy/DeployKonduitServing.java @@ -30,12 +30,15 @@ import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.VertxPrometheusOptions; import io.vertx.micrometer.backends.BackendRegistries; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J; import java.util.concurrent.TimeUnit; @Slf4j +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class DeployKonduitServing { static { diff --git a/konduit-serving-io/konduit-serving-camera/src/test/java/ai/konduit/serving/camera/ManualTest.java b/konduit-serving-io/konduit-serving-camera/src/test/java/ai/konduit/serving/camera/ManualTest.java index 8c23042e2..61a4b8500 100644 --- a/konduit-serving-io/konduit-serving-camera/src/test/java/ai/konduit/serving/camera/ManualTest.java +++ b/konduit-serving-io/konduit-serving-camera/src/test/java/ai/konduit/serving/camera/ManualTest.java @@ -27,7 +27,7 @@ public void manualTest() throws Exception { .width(w) .height(h) .outputKey("myImage") - .build()) + .build()) .add(ShowImagePipelineStep.builder() .displayName("Image Viewer") .width(w) diff --git a/python/cli.py b/python/cli.py index d7c30a3b0..72b450fd5 100644 --- a/python/cli.py +++ b/python/cli.py @@ -4,6 +4,7 @@ import subprocess import click from packaging.version import parse +from hurry.filesize import size USER_PATH = os.path.expanduser("~") KONDUIT_BASE_DIR = os.path.join(USER_PATH, ".konduit-serving") @@ -11,10 +12,11 @@ KONDUIT_JAR_DIR = os.path.join(KONDUIT_BASE_DIR, "jar") KONDUIT_JAR_PATH = os.path.join(KONDUIT_JAR_DIR, 
"konduit.jar") +INCOMPATIBLE_COMPILATION_TAGS = ["cli_base", "cli_base_2", "cli_base_3", "cli_base_4"] DOWNLOAD_TAG = "cli_base" LAST_COMPATIBLE_KONDUIT_VERSION = "0.1.0-SNAPSHOT" -DEFAULT_KONDUIT_TAG = "cli_base_2" +DEFAULT_KONDUIT_TAG = "cli_base_4" KONDUIT_JAR_URL_FORMAT = "https://github.com/KonduitAI/konduit-serving/releases/download/" \ "{tag}/konduit-serving-uberjar-{version}-{spin}-{platform}-{chip}.jar" @@ -54,7 +56,7 @@ def download_if_required(url, save_path): print("The required CLI binary has already been downloaded.") return else: - print("Downloading command line binaries") + print("Downloading command line binaries from " + url) with open(save_path, 'wb') as f: if total is None: @@ -66,7 +68,8 @@ def download_if_required(url, save_path): downloaded += len(data) f.write(data) done = int(50 * downloaded / total) - sys.stdout.write('\r[{}{}]'.format('█' * done, '.' * (50 - done))) + sys.stdout.write('\r[{}{}]'.format('█' * done, '.' * (50 - done)) + + (" ({}/{})".format(size(downloaded), size(total)))) sys.stdout.flush() sys.stdout.write('\n') @@ -154,7 +157,9 @@ def get_jar_url(platform, version, spin, chip): chip=chip) -git_tags = get_git_tags() +git_tags = set(get_git_tags()).difference(INCOMPATIBLE_COMPILATION_TAGS) +if len(git_tags) == 0: + git_tags = [DEFAULT_KONDUIT_TAG] DEFAULT_KONDUIT_TAG = git_tags[0] # Assuming the first one in the response is the most recent one @@ -230,4 +235,4 @@ def cli(): subprocess.call( arguments, shell=sys.platform.startswith("win") - ) \ No newline at end of file + ) diff --git a/python/konduit/__init__.py b/python/konduit/__init__.py index 9d2a942ca..e1e0ac70f 100644 --- a/python/konduit/__init__.py +++ b/python/konduit/__init__.py @@ -1,10 +1,10 @@ import os USER_PATH = os.path.expanduser("~") -KONDUIT_BASE_DIR = os.path.join(USER_PATH, ".konduit") -KONDUIT_DIR = os.path.join(KONDUIT_BASE_DIR, "konduit-serving") +KONDUIT_BASE_DIR = os.path.join(USER_PATH, ".konduit-serving") +KONDUIT_JAR_DIR = 
os.path.join(KONDUIT_BASE_DIR, "jar") -jar = os.getenv("KONDUIT_JAR_PATH", os.path.join(KONDUIT_DIR, "konduit.jar")) +jar = os.getenv("KONDUIT_JAR_PATH", os.path.join(KONDUIT_JAR_DIR, "konduit.jar")) try: import pydl4j diff --git a/python/konduit/base_inference.py b/python/konduit/base_inference.py index 376f5baf9..bc17199c7 100644 --- a/python/konduit/base_inference.py +++ b/python/konduit/base_inference.py @@ -2,6 +2,133 @@ from konduit.json_utils import empty_type_dict, DictWrapper, ListWrapper +class ColumnDistribution(object): + + _normalizerType_enum = enum.Enum( + "_normalizerType_enum", + "STANDARDIZE MIN_MAX IMAGE_MIN_MAX IMAGE_VGG16 MULTI_STANDARDIZE MULTI_MIN_MAX MULTI_HYBRID CUSTOM", + module=__name__, + ) + _types_map = { + "mean": {"type": float, "subtype": None}, + "min": {"type": float, "subtype": None}, + "max": {"type": float, "subtype": None}, + "standardDeviation": {"type": float, "subtype": None}, + "normalizerType": {"type": str, "subtype": None}, + } + _formats_map = {} + + def __init__( + self, + mean=None, + min=None, + max=None, + standard_deviation=None, + normalizer_type=None, + ): + self.__mean = mean + self.__min = min + self.__max = max + self.__standard_deviation = standard_deviation + self.__normalizer_type = normalizer_type + + def _get_mean(self): + return self.__mean + + def _set_mean(self, value): + if not isinstance(value, float): + raise TypeError("mean must be float") + self.__mean = value + + mean = property(_get_mean, _set_mean) + + def _get_min(self): + return self.__min + + def _set_min(self, value): + if not isinstance(value, float): + raise TypeError("min must be float") + self.__min = value + + min = property(_get_min, _set_min) + + def _get_max(self): + return self.__max + + def _set_max(self, value): + if not isinstance(value, float): + raise TypeError("max must be float") + self.__max = value + + max = property(_get_max, _set_max) + + def _get_standard_deviation(self): + return self.__standard_deviation + + def 
_set_standard_deviation(self, value): + if not isinstance(value, float): + raise TypeError("standardDeviation must be float") + self.__standard_deviation = value + + standard_deviation = property(_get_standard_deviation, _set_standard_deviation) + + def _get_normalizer_type(self): + return self.__normalizer_type + + def _set_normalizer_type(self, value): + if not isinstance(value, str): + raise TypeError("normalizerType must be str") + if value in self._normalizerType_enum.__members__: + self.__normalizer_type = value + else: + raise ValueError("Value {} not in _normalizerType_enum list".format(value)) + + normalizer_type = property(_get_normalizer_type, _set_normalizer_type) + + def as_dict(self): + d = empty_type_dict(self) + if self.__mean is not None: + d["mean"] = ( + self.__mean.as_dict() + if hasattr(self.__mean, "as_dict") + else self.__mean + ) + if self.__min is not None: + d["min"] = ( + self.__min.as_dict() if hasattr(self.__min, "as_dict") else self.__min + ) + if self.__max is not None: + d["max"] = ( + self.__max.as_dict() if hasattr(self.__max, "as_dict") else self.__max + ) + if self.__standard_deviation is not None: + d["standardDeviation"] = ( + self.__standard_deviation.as_dict() + if hasattr(self.__standard_deviation, "as_dict") + else self.__standard_deviation + ) + if self.__normalizer_type is not None: + d["normalizerType"] = ( + self.__normalizer_type.as_dict() + if hasattr(self.__normalizer_type, "as_dict") + else self.__normalizer_type + ) + return d + + +class MetricsConfig(object): + + _types_map = {} + _formats_map = {} + + def __init__(self): + pass + + def as_dict(self): + d = empty_type_dict(self) + return d + + class MultiLabelMetricsConfig(object): _types_map = { @@ -888,11 +1015,11 @@ class ServingConfig(object): def __init__( self, http_port=None, - listen_host=None, + listen_host="localhost", output_data_format="NUMPY", uploads_directory="file-uploads/", - log_timings=None, - create_logging_endpoints=None, + log_timings=False, + 
create_logging_endpoints=False, metrics_configurations=None, metric_types=None, ): diff --git a/python/konduit/json_utils.py b/python/konduit/json_utils.py index 02e97a116..d7d5d8db7 100644 --- a/python/konduit/json_utils.py +++ b/python/konduit/json_utils.py @@ -52,7 +52,7 @@ def _ensure_serializable(input_config): def config_to_dict_with_type(inference_config): """ Converts an inference configuration to a Python dictionary - with '@type' key. + with 'type' key. :param inference_config: InferenceConfig object :return: Python dict @@ -60,19 +60,19 @@ def config_to_dict_with_type(inference_config): has_as_dict_attribute(inference_config) _ensure_serializable(inference_config) input_dict = inference_config.as_dict() - input_dict["@type"] = inference_config.__class__.__name__ + input_dict["type"] = inference_config.__class__.__name__ return input_dict def empty_type_dict(input_object): """Generates an otherwise empty Python dict with the correct - "@type" key from Java. + "type" key from Java. 
:param input_class: arbitrary instance of a Python class :return: """ d = dict() - d["@type"] = input_object.__class__.__name__ + d["type"] = input_object.__class__.__name__ return d diff --git a/python/konduit/load.py b/python/konduit/load.py index 2558ffeae..cf0d413b2 100644 --- a/python/konduit/load.py +++ b/python/konduit/load.py @@ -132,7 +132,7 @@ def server_from_file(file_path, start_server=False, use_yaml=True): step_data = data.get("steps", None) steps = [] - for step_config in step_data.values(): + for step_config in step_data: steps.append(get_step(step_config)) server = Server( @@ -211,8 +211,8 @@ def get_python_step(step_config): """ python_step = PythonStep() - for key, python_config in step_config: - python_step.step(key, PythonConfig(**python_config)) + for key, config in step_config["python_configs"].items(): + python_step.step(python_config=PythonConfig(**config), input_name=key) return python_step diff --git a/python/konduit/server.py b/python/konduit/server.py index cc90de025..8145172c6 100644 --- a/python/konduit/server.py +++ b/python/konduit/server.py @@ -1,11 +1,13 @@ import json import logging +import sys import os import re import requests import signal import subprocess -from konduit import KONDUIT_DIR +import uuid +from konduit import KONDUIT_JAR_DIR from konduit.base_inference import PipelineStep from konduit.client import Client from konduit.inference import InferenceConfiguration @@ -56,7 +58,7 @@ def __init__( self.port = -1 if jar_path is None: jar_path = os.getenv( - "KONDUIT_JAR_PATH", os.path.join(KONDUIT_DIR, "konduit.jar") + "KONDUIT_JAR_PATH", os.path.join(KONDUIT_JAR_DIR, "konduit.jar") ) if inference_config: @@ -90,6 +92,8 @@ def __init__( else: self.extra_jar_args = extra_jar_args + self.server_id = uuid.uuid4().hex[:8] + def get_client(self, input_data_format=None, prediction_type=None, @@ -108,8 +112,11 @@ def get_client(self, port = self.port host = serving_config._get_listen_host() - if not host.startswith("http://"): - 
host = "http://" + host + if not host: + host = "http://localhost" + else: + if not host.startswith("http://"): + host = "http://" + host if not input_data_format: input_data_format = "NUMPY" @@ -130,24 +137,16 @@ def get_client(self, output_names=output_names, ) - def start(self, kill_existing_server=True): + def start(self, server_id=None): """Start the Konduit server - :param kill_existing_server: whether to kill any previously started server if it wasn't stop. + :param server_id the server will be started with this id. """ - if kill_existing_server: - if os.path.exists(self.pid_file_path): - with open(self.pid_file_path, "rb") as pid_file: - pid = int(pid_file.readline().strip()) - try: - stop_server_by_pid(pid) - except OSError: - logging.debug( - "Attempt to kill existing process by pid: '{}' failed. The process might not " - "exist. ".format(pid) - ) - - os.remove(self.pid_file_path) + + if not server_id: + server_id = self.server_id + else: + self.server_id = server_id json_config = config_to_dict_with_type(self.config) with open(self.config_path, "w") as f: @@ -155,7 +154,7 @@ def start(self, kill_existing_server=True): logging.info("Wrote config.json to path " + abs_path) json.dump(json_config, f) - args = self._process_extra_args(abs_path) + args = self._process_extra_args(abs_path, server_id) process = subprocess.Popen(args=args, stdout=subprocess.PIPE) self.process = process @@ -201,23 +200,33 @@ def start(self, kill_existing_server=True): return process, port, started - def stop(self): + def stop(self, server_id=None): """Stop the server""" - if self.process is None: - if os.path.exists(self.config_path): - os.remove(self.config_path) - raise Exception("Server is not started!") + if not server_id: + server_id = self.server_id else: - if os.path.exists(self.config_path): - os.remove(self.config_path) - self.process.kill() + self.server_id = server_id + + command = self.get_command("stop " + server_id) + logging.info("Running with args\n" + " 
".join(command)) + subprocess.call(command, shell=sys.platform.startswith("win")) - def _process_extra_args(self, absolute_path): + def _process_extra_args(self, absolute_path, server_id): """Process submitted extra arguments list. :param absolute_path: absolute path of the configuration file :return: concatenated string arguments """ + args = self.get_command("serve -c " + absolute_path) + + if server_id: + args.append("-id") + args.append(server_id) + + logging.info("Running with args\n" + " ".join(args)) + return args + + def get_command(self, command): classpath = [self.jar_path] classpath.extend(self.extra_jar_args) @@ -227,12 +236,7 @@ def _process_extra_args(self, absolute_path): args.extend(self.extra_start_args) args.append("-cp") args.append(os.pathsep.join(classpath)) - args.append("ai.konduit.serving.configprovider.KonduitServingMain") - args.append("--pidFile") - args.append(os.path.abspath(self.pid_file_path)) - args.append("--configPath") - args.append(absolute_path) - args.append("--verticleClassName") - args.append("ai.konduit.serving.verticles.inference.InferenceVerticle") - logging.info("Running with args\n" + " ".join(args)) + args.append("ai.konduit.serving.launcher.KonduitServingLauncher") + if command: + args.extend(command.strip().split(" ")) return args diff --git a/python/setup.py b/python/setup.py index ef4c44f88..b0c86cd2d 100644 --- a/python/setup.py +++ b/python/setup.py @@ -3,7 +3,7 @@ setup( name="konduit", - version="0.1.5", + version="0.1.8", packages=find_packages(), install_requires=[ "requests>=2.22.0", @@ -16,7 +16,8 @@ "pydatavec", "pyyaml", "click", - "packaging" + "packaging", + "hurry.filesize" ], py_modules=["konduit", "cli"], extras_require={ diff --git a/python/tests/test_bert_serving.py b/python/tests/test_bert_serving.py index 1110dfbb7..456be7c73 100644 --- a/python/tests/test_bert_serving.py +++ b/python/tests/test_bert_serving.py @@ -3,10 +3,9 @@ from konduit import ( ParallelInferenceConfig, ServingConfig, - 
TensorFlowConfig, - ModelConfigType, + TensorFlowStep, + InferenceConfiguration ) -from konduit import TensorDataTypesConfig, ModelStep, InferenceConfiguration from konduit.client import Client from konduit.server import Server from konduit.utils import is_port_in_use @@ -14,6 +13,7 @@ @pytest.mark.integration def test_server_start(): + server_id = "tensorflow_server" input_names = ["IteratorGetNext:0", "IteratorGetNext:1", "IteratorGetNext:4"] output_names = ["loss/Softmax"] parallel_inference_config = ParallelInferenceConfig(workers=1) @@ -22,19 +22,13 @@ def test_server_start(): log_timings=True, ) - tensorflow_config = TensorFlowConfig( + model_pipeline_step = TensorFlowStep( path="bert_mrpc_frozen.pb", - tensor_data_types_config=TensorDataTypesConfig( - input_data_types={ - "IteratorGetNext:0": "INT32", - "IteratorGetNext:1": "INT32", - "IteratorGetNext:4": "INT32", - } - ), - ) - - model_pipeline_step = ModelStep( - model_config=tensorflow_config, + input_data_types={ + input_names[0]: "INT32", + input_names[1]: "INT32", + input_names[2]: "INT32", + }, parallel_inference_config=parallel_inference_config, input_names=input_names, output_names=output_names, @@ -45,14 +39,14 @@ def test_server_start(): ) server = Server( - inference_config=inference, extra_start_args="-Xmx8g", jar_path="konduit.jar" + inference_config=inference, extra_start_args="-Xmx8g" ) - _, port, started = server.start() + _, port, started = server.start(server_id) data_input = { - "IteratorGetNext:0": np.load("../data/input-0.npy"), - "IteratorGetNext:1": np.load("../data/input-1.npy"), - "IteratorGetNext:4": np.load("../data/input-4.npy"), + input_names[0]: np.load("../data/input-0.npy"), + input_names[1]: np.load("../data/input-1.npy"), + input_names[2]: np.load("../data/input-4.npy"), } assert started # will be true if the server was started @@ -63,9 +57,7 @@ def test_server_start(): try: predicted = client.predict(data_input) print(predicted) - server.stop() except Exception as e: 
print(e) - server.stop() - -test_server_start() \ No newline at end of file + finally: + server.stop(server_id) diff --git a/python/tests/test_client.py b/python/tests/test_client.py index 159e19ce3..a1617a064 100644 --- a/python/tests/test_client.py +++ b/python/tests/test_client.py @@ -4,6 +4,7 @@ @pytest.mark.integration def test_client_from_server(): + server_id = "python_server" python_config = PythonConfig( python_code="first += 2", @@ -13,12 +14,12 @@ def test_client_from_server(): step = PythonStep().step(python_config) server = Server(steps=step, serving_config=ServingConfig()) - server.start() + server.start(server_id) try: server.get_client() finally: - server.stop() + server.stop(server_id) @pytest.mark.unit diff --git a/python/tests/test_json.py b/python/tests/test_json.py index 3a43b2e9e..37536b988 100644 --- a/python/tests/test_json.py +++ b/python/tests/test_json.py @@ -18,10 +18,8 @@ def test_json_compare(): http_port=1300, output_data_format="NUMPY" ) - tensorflow_config = TensorFlowConfig(path="model.pb") - - model_pipeline_step = ModelStep( - model_config=tensorflow_config, + model_pipeline_step = TensorFlowStep( + path="model.pb", parallel_inference_config=parallel_inference_config, input_names=["IteratorGetNext:0", "IteratorGetNext:1", "IteratorGetNext:4"], output_names=["loss/Softmax"], diff --git a/python/tests/test_json_utils.py b/python/tests/test_json_utils.py index ee5fc38c0..63fbff452 100644 --- a/python/tests/test_json_utils.py +++ b/python/tests/test_json_utils.py @@ -6,7 +6,7 @@ @pytest.mark.unit def test_empty_dict_type(): d1 = empty_type_dict(InferenceConfiguration()) - d2 = {"@type": "InferenceConfiguration"} + d2 = {"type": "InferenceConfiguration"} assert d1 == d2 diff --git a/python/tests/test_load_yaml.py b/python/tests/test_load_yaml.py index 3a000ac38..fefe6890c 100644 --- a/python/tests/test_load_yaml.py +++ b/python/tests/test_load_yaml.py @@ -23,58 +23,82 @@ def test_yaml_client_loading(): @pytest.mark.integration def 
test_yaml_minimal_loading(): file_path = "yaml/konduit_minimal.yaml" - server = server_from_file(file_path, use_yaml=True, start_server=True) - client = client_from_file(file_path, use_yaml=True) + try: + server = server_from_file(file_path, use_yaml=True, start_server=True) + client = client_from_file(file_path, use_yaml=True) + finally: + server.stop() del server, client @pytest.mark.integration def test_json_minimal_loading(): file_path = "yaml/konduit_minimal.json" - server = server_from_file(file_path, use_yaml=False, start_server=True) - client = client_from_file(file_path, use_yaml=False) + try: + server = server_from_file(file_path, use_yaml=False, start_server=True) + client = client_from_file(file_path, use_yaml=False) + finally: + server.stop() del server, client @pytest.mark.unit def test_keras_serving(): file_path = "yaml/konduit_keras.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @pytest.mark.unit def test_tf_simple_serving(): file_path = "yaml/konduit_tf_simple.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @pytest.mark.unit def test_dl4j_mln_serving(): file_path = "yaml/konduit_dl4j_mln.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @pytest.mark.unit def test_dl4j_cg_serving(): file_path = "yaml/konduit_dl4j_cg.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @pytest.mark.unit def test_dl4j_samediff_serving(): file_path = "yaml/konduit_samediff.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @pytest.mark.unit def test_tensor_flow_serving(): file_path 
= "yaml/konduit_tensorflow.yaml" - server = server_from_file(file_path=file_path) + try: + server = server_from_file(file_path=file_path) + finally: + server.stop() del server @@ -85,4 +109,4 @@ def test_yaml_server_python_prediction(): server, client = konduit.load.from_file(file_path, start_server=True) client.predict(np.load("../data/input-0.npy")) finally: - server.stop() + server.stop() \ No newline at end of file