diff --git a/.github/in-solidarity.yml b/.github/in-solidarity.yml
index 6fc13d49a..fdbd72ad8 100644
--- a/.github/in-solidarity.yml
+++ b/.github/in-solidarity.yml
@@ -14,3 +14,4 @@ ignore:
   - "tools/hybrid-quickstart/steps.sh" # because the GKE cli uses 'master'
+  - "tools/target-server-validator/callout/build_java_callout.sh" # because github.com/apigee/api-platform-samples uses a non-inclusive branch name
diff --git a/.github/workflows/devrel-static-checks.yml b/.github/workflows/devrel-static-checks.yml
index 5b308779f..7f2fe3be2 100644
--- a/.github/workflows/devrel-static-checks.yml
+++ b/.github/workflows/devrel-static-checks.yml
@@ -74,6 +74,7 @@ jobs:
       LINTER_RULES_PATH: "."
       GROOVY_NPM_GROOVY_LINT_FILTER_REGEX_EXCLUDE: "Jenkinsfile"
       MARKDOWN_MARKDOWN_LINK_CHECK_DISABLE_ERRORS: true
+      PYTHON_MYPY_DISABLE_ERRORS: true

   commit-messages:
     name: Conventional Commits Lint
diff --git a/CODEOWNERS b/CODEOWNERS
index 8e5385587..8c37d5c10 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -38,3 +38,4 @@
 /tools/pipeline-runner @seymen @danistrebel
 /tools/sf-dependency-list @yuriylesyuk
 /tools/proxy-endpoint-unifier @anaik91
+/tools/target-server-validator @anaik91
diff --git a/README.md b/README.md
index 4c707765c..ee662dee6 100644
--- a/README.md
+++ b/README.md
@@ -93,6 +93,8 @@ Apigee products.
   A tool to set up the sample deployments of Apigee Envoy.
 - [Apigee API Proxy Endpoint Unifier](tools/proxy-endpoint-unifier) -
   A tool to unify/split proxy endpoints based on API basepath.
+- [Apigee Target Server Validator](tools/target-server-validator) -
+  A tool to validate all targets in Target Servers & Apigee API Proxy Bundles.

 ## Labs
diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py
index c03240151..447c7beb2 100644
--- a/tools/proxy-endpoint-unifier/main.py
+++ b/tools/proxy-endpoint-unifier/main.py
@@ -16,8 +16,8 @@

 import os
 import sys
-from apigee import Apigee
-import utils
+from apigee import Apigee  # pylint: disable=import-error
+import utils  # pylint: disable=import-error


 def main():
diff --git a/tools/target-server-validator/README.md b/tools/target-server-validator/README.md
new file mode 100644
index 000000000..8b7b24b0d
--- /dev/null
+++ b/tools/target-server-validator/README.md
@@ -0,0 +1,91 @@
+# Apigee Target Server Validator
+
+The objective of this tool is to validate targets in Target Servers & Apigee API Proxy Bundles exported from Apigee.
+Validation is done by deploying a sample proxy which checks whether the HOST & PORT are reachable from Apigee.
+
+> **NOTE**: Discovery of targets in API Proxies & Sharedflows is limited to parsing the URL from the `TargetEndpoint` & `ServiceCallout` policies.
+
+> **NOTE**: Dynamic targets are **NOT** supported, e.g. `https://host.{request.formparam.region}.example.com`
+
+## Prerequisites
+* Python 3.x
+* Java
+* Maven
+* Please install the required Python dependencies
+```
+python3 -m pip install -r requirements.txt
+```
+* Please build the Java callout JAR by running the command below
+
+```
+bash callout/build_java_callout.sh
+```
+
+* Please fill in `input.properties`
+
+```
+[source]
+baseurl=https://x.x.x.x/v1 # Apigee Base URL, e.g. http://management-api.apigee-opdk.corp:8080
+org=xxx-xxxx-xxx-xxxxx # Apigee Org ID
+auth_type=basic # API Auth type basic | oauth
+
+[target]
+baseurl=https://apigee.googleapis.com/v1 # Apigee Base URL
+org=xxx-xxxx-xxx-xxxxx # Apigee Org ID
+auth_type=oauth # API Auth type basic | oauth
+
+[csv]
+file=input.csv # Path to input CSV. Note: the CSV needs HOST & PORT columns
+default_port=443 # default port if port is not provided in the CSV
+
+[validation]
+check_csv=true # 'true' to validate targets in the input CSV
+check_proxies=true # 'true' to validate proxy targets, else 'false'
+skip_proxy_list=mock1,stream # Comma separated list of proxies to skip validation
+proxy_export_dir=export # Export directory needed when check_proxies='true'
+api_env=dev # Target environment to deploy the validation API proxy
+api_name=target_server_validator # API name of the validation API proxy
+api_force_redeploy=false # set 'true' to redeploy the validation API proxy
+api_hostname=example.apigee.com # Target VirtualHost or EnvGroup domain name
+api_ip= # IP address corresponding to api_hostname. Use if a DNS record doesn't exist
+report_format=csv # Report format. Choose csv or md (defaults to md)
+allow_insecure=false # set 'true' to skip TLS certificate verification when calling the validation proxy
+```
+
+* Sample input CSV with target servers
+> **NOTE:** You need to set `check_csv=true` in the `validation` section of `input.properties`
+
+> **NOTE:** You need to set `file=` (the path to your CSV) in the `csv` section of `input.properties`
+> If PORT is omitted from the CSV, the `default_port` value from `input.properties` will be used.
+```
+HOST,PORT
+httpbin.org
+mocktarget.apigee.net,80
+smtp.gmail.com,465
+```
+
+* Please run the commands below to authenticate, based on the Apigee flavour you are using.
+
+```
+export APIGEE_OPDK_ACCESS_TOKEN=$(echo -n "<user>:<password>" | base64)  # Access token for Apigee OPDK
+export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)             # Access token for Apigee X/Hybrid
+```
+
+## High-Level Working
+* Export Target Server details
+* Export proxy bundles
+* Parse each proxy bundle for targets
+* Run the validation API against each target (optional)
+* Generate a csv/md report
+
+## Usage
+
+Run the script as shown below
+```
+python3 main.py
+```
+
+## Report
+The validation report (`report.md` or `report.csv`) can be found in the same directory as the script.
+
+Please check a [Sample report](report.md)
diff --git a/tools/target-server-validator/apigee_utils.py b/tools/target-server-validator/apigee_utils.py
new file mode 100644
index 000000000..89edd3ebe
--- /dev/null
+++ b/tools/target-server-validator/apigee_utils.py
@@ -0,0 +1,286 @@
+#!/usr/bin/python
+
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
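The `Apigee` class added below wraps the management API calls the validator needs (listing environments, Target Servers, proxies, and importing/deploying the validation proxy). As an illustrative sketch only (not something this PR adds), here is how the class is typically driven, mirroring what `main.py` later in this diff does; the org name is a placeholder and the token environment variable is the one described in the README above:

```python
# Illustrative sketch only. Assumes APIGEE_ACCESS_TOKEN has been exported as
# described in the README, and that "my-org" is replaced with a real Apigee X org.
from apigee_utils import Apigee

apigee = Apigee(
    apigee_type="x",
    base_url="https://apigee.googleapis.com/v1",
    auth_type="oauth",
    org="my-org",  # placeholder
)

# Walk every environment and print each Target Server definition --
# the same data main.py collects into all_target_servers.
for env in apigee.list_environments():
    for ts_name in apigee.list_target_servers(env):
        ts = apigee.get_target_server(env, ts_name)
        print(env, ts["name"], ts["host"], ts["port"])
```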
+ + +import os +import sys +import requests +import shutil +from time import sleep + + +class Apigee: + def __init__( + self, + apigee_type="x", + base_url="https://apigee.googleapis.com/v1", + auth_type="oauth", + org="validate", + ): + self.org = org + self.baseurl = f"{base_url}/organizations/{org}" + self.apigee_type = apigee_type + self.auth_type = auth_type + access_token = self.get_access_token() + self.auth_header = { + "Authorization": "Bearer {}".format(access_token) + if self.auth_type == "oauth" + else "Basic {}".format(access_token) # noqa + } + + def is_token_valid(self, token): + url = f"https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={token}" # noqa + response = requests.get(url) + if response.status_code == 200: + print(f"Token Validated for user {response.json()['email']}") + return True + return False + + def get_access_token(self): + token = os.getenv( + "APIGEE_ACCESS_TOKEN" + if self.apigee_type == "x" + else "APIGEE_OPDK_ACCESS_TOKEN" + ) + if token is not None: + if self.apigee_type == "x": + if self.is_token_valid(token): + return token + else: + print( + 'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa type: ignore + ) + sys.exit(1) + else: + return token + else: + if self.apigee_type == "x": + print( + 'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa + ) + else: + print("please export APIGEE_OPDK_ACCESS_TOKEN") + sys.exit(1) + + def set_auth_header(self): + access_token = self.get_access_token() + self.auth_header = { + "Authorization": "Bearer {}".format(access_token) + if self.auth_type == "oauth" + else "Basic {}".format(access_token) + } + + def list_environments(self): + url = f"{self.baseurl}/environments" + headers = self.auth_header.copy() + response = requests.request("GET", url, headers=headers) + if response.status_code == 200: + return response.json() + else: + return [] + + def list_target_servers(self, env): + url = f"{self.baseurl}/environments/{env}/targetservers" + headers = self.auth_header.copy() + response = requests.request("GET", url, headers=headers) + if response.status_code == 200: + return response.json() + else: + return [] + + def get_target_server(self, env, target_server): + url = f"{self.baseurl}/environments/{env}/targetservers/{target_server}" # noqa + headers = self.auth_header.copy() + response = requests.request("GET", url, headers=headers) + if response.status_code == 200: + return response.json() + else: + return [] + + def get_api(self, api_name): + url = f"{self.baseurl}/apis/{api_name}" + headers = self.auth_header.copy() + response = requests.request("GET", url, headers=headers) + if response.status_code == 200: + return True + else: + return False + + def create_api(self, api_name, proxy_bundle_path): + url = f"{self.baseurl}/apis?action=import&name={api_name}&validate=true" # noqa + proxy_bundle_name = os.path.basename(proxy_bundle_path) + files = [ + ( + "data", + (proxy_bundle_name, open(proxy_bundle_path, "rb"), "application/zip"), # noqa + ) + ] + headers = self.auth_header.copy() + response = requests.request( + "POST", url, headers=headers, data={}, files=files + ) + if response.status_code == 200: + revision = response.json().get('revision', "1") + return True, revision + print(response.text) + return False, None + + def get_api_revisions_deployment(self, env, api_name, api_rev): # noqa + url = ( + url + ) = f"{self.baseurl}/environments/{env}/apis/{api_name}/revisions/{api_rev}/deployments" # noqa + headers = 
self.auth_header.copy() + response = requests.request("GET", url, headers=headers, data={}) + if response.status_code == 200: + resp = response.json() + api_deployment_status = resp.get("state", "") + if self.apigee_type == "x": + if api_deployment_status == "READY": + return True + if self.apigee_type == "opdk": + if api_deployment_status == "deployed": + return True + print(f"API {api_name} is in Status: {api_deployment_status} !") # noqa + return False + else: + print(response.text) + return False + + def deploy_api(self, env, api_name, api_rev): + url = ( + url + ) = f"{self.baseurl}/environments/{env}/apis/{api_name}/revisions/{api_rev}/deployments?override=true" # noqa + headers = self.auth_header.copy() + response = requests.request("POST", url, headers=headers, data={}) + if response.status_code == 200: + return True + else: + resp = response.json() + if "already deployed" in resp["error"]["message"]: + print("Proxy {} is already Deployed".format(api_name)) + return True + print(response.text) + return False + + def deploy_api_bundle(self, env, api_name, proxy_bundle_path, api_rev=1, api_force_redeploy=False): # noqa + api_deployment_retry = 60 + api_deployment_sleep = 5 + api_deployment_retry_count = 0 + api_exists = False + if self.get_api(api_name): + print( + f"Proxy with name {api_name} already exists in Apigee Org {self.org}" # noqa + ) + api_exists = True + if api_force_redeploy: + api_exists = False + if not api_exists: + api_created, api_rev = self.create_api(api_name, proxy_bundle_path) + if api_created: + print( + f"Proxy has been imported with name {api_name} in Apigee Org {self.org}" # noqa + ) + api_exists = True + else: + print(f"ERROR : Proxy {api_name} import failed !!! ") + return False + if api_exists: + if self.deploy_api(env, api_name, api_rev): + print( + f"Proxy with name {api_name} has been deployed to {env} in Apigee Org {self.org}" # noqa + ) + while api_deployment_retry_count < api_deployment_retry: + if self.get_api_revisions_deployment( + env, api_name, api_rev + ): + print( + f"Proxy {api_name} active in runtime after {api_deployment_retry_count*api_deployment_sleep} seconds " # noqa + ) + return True + else: + print( + f"Checking API deployment status in {api_deployment_sleep} seconds" # noqa + ) + sleep(api_deployment_sleep) + api_deployment_retry_count += 1 + else: + print( + f"ERROR : Proxy deployment to {env} in Apigee Org {self.org} Failed !!" 
# noqa + ) + return False + + def get_api_vhost(self, vhost_name, env): + if self.apigee_type == "opdk": + url = f"{self.baseurl}/environments/{env}/virtualhosts/{vhost_name}" # noqa + else: + url = f"{self.baseurl}/envgroups/{vhost_name}" + headers = self.auth_header.copy() + response = requests.request("GET", url, headers=headers) + if response.status_code == 200: + if self.apigee_type == "opdk": + hosts = response.json()["hostAliases"] + else: + hosts = response.json()["hostnames"] + if len(hosts) == 0: + print( + f"ERROR: Vhost/Env Group {vhost_name} contains no domains" # noqa + ) + return None + return hosts + else: + print(f"ERROR: Vhost/Env Group {vhost_name} contains no domains") # noqa + return None + + def list_apis(self, api_type): + url = f"{self.baseurl}/{api_type}" + headers = self.auth_header.copy() + response = requests.get(url, headers=headers) + if response.status_code == 200: + if self.apigee_type == "x": + if len(response.json()) == 0: + return [] + return [ + p["name"] + for p in response.json()[ + "proxies" if api_type == "apis" else "sharedFlows" + ] + ] # noqa + return response.json() + else: + return [] + + def list_api_revisions(self, api_type, api_name): + url = f"{self.baseurl}/{api_type}/{api_name}/revisions" + headers = self.auth_header.copy() + response = requests.get(url, headers=headers) + if response.status_code == 200: + return response.json() + else: + return [] + + def fetch_api_revision(self, api_type, api_name, revision, export_dir): # noqa + url = f"{self.baseurl}/{api_type}/{api_name}/revisions/{revision}?format=bundle" # noqa + headers = self.auth_header.copy() + response = requests.get(url, headers=headers, stream=True) + if response.status_code == 200: + self.write_proxy_bundle(export_dir, api_name, response.raw) + return True + return False + + def write_proxy_bundle(self, export_dir, file_name, data): + file_path = f"./{export_dir}/{file_name}.zip" + with open(file_path, "wb") as fl: + shutil.copyfileobj(data, fl) diff --git a/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml b/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml new file mode 100644 index 000000000..df4a122b0 --- /dev/null +++ b/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml @@ -0,0 +1,23 @@ + + + + + { + "host":"{request.header.host_name}", + "port": "{request.header.port_number}", + "status":"{flow.reachableStatus}" +} + + + \ No newline at end of file diff --git a/tools/target-server-validator/apiproxy/policies/JC-Port-Open-Check.xml b/tools/target-server-validator/apiproxy/policies/JC-Port-Open-Check.xml new file mode 100644 index 000000000..cd2061782 --- /dev/null +++ b/tools/target-server-validator/apiproxy/policies/JC-Port-Open-Check.xml @@ -0,0 +1,17 @@ + + + + com.apigee.devrel.apigee_target_server_validator.PortOpenCheck + java://jc-target-server-validator.jar + \ No newline at end of file diff --git a/tools/target-server-validator/apiproxy/proxies/default.xml b/tools/target-server-validator/apiproxy/proxies/default.xml new file mode 100644 index 000000000..10d49b91f --- /dev/null +++ b/tools/target-server-validator/apiproxy/proxies/default.xml @@ -0,0 +1,36 @@ + + + + + + + JC-Port-Open-Check + + + + + AM-Set-Json-Response + + + + + + + + + + /validate-target-server + + + \ No newline at end of file diff --git a/tools/target-server-validator/apiproxy/target-server-validator.xml b/tools/target-server-validator/apiproxy/target-server-validator.xml new file mode 100644 index 000000000..f7457d683 --- 
/dev/null +++ b/tools/target-server-validator/apiproxy/target-server-validator.xml @@ -0,0 +1,14 @@ + + + diff --git a/tools/target-server-validator/callout/build_java_callout.sh b/tools/target-server-validator/callout/build_java_callout.sh new file mode 100644 index 000000000..4b8fb85e5 --- /dev/null +++ b/tools/target-server-validator/callout/build_java_callout.sh @@ -0,0 +1,52 @@ +#!/bin/sh + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +JAVA_SRC_PATH="$( cd "$(dirname "$0")" || exit >/dev/null 2>&1 ; pwd -P )" +echo +echo "This script downloads JAR files and installs them into the local Maven repo." +echo + +curl -O https://raw.githubusercontent.com/apigee/api-platform-samples/master/doc-samples/java-cookbook/lib/expressions-1.0.0.jar + + mvn install:install-file \ + -Dfile=expressions-1.0.0.jar \ + -DgroupId=com.apigee.edge \ + -DartifactId=expressions \ + -Dversion=1.0.0 \ + -Dpackaging=jar \ + -DgeneratePom=true + +rm expressions-1.0.0.jar + +curl -O https://raw.githubusercontent.com/apigee/api-platform-samples/master/doc-samples/java-cookbook/lib/message-flow-1.0.0.jar + + mvn install:install-file \ + -Dfile=message-flow-1.0.0.jar \ + -DgroupId=com.apigee.edge \ + -DartifactId=message-flow \ + -Dversion=1.0.0 \ + -Dpackaging=jar \ + -DgeneratePom=true + +rm message-flow-1.0.0.jar + +echo +echo done. +echo + +cd "$JAVA_SRC_PATH" || exit 1 +mvn clean install diff --git a/tools/target-server-validator/callout/pom.xml b/tools/target-server-validator/callout/pom.xml new file mode 100644 index 000000000..90b665844 --- /dev/null +++ b/tools/target-server-validator/callout/pom.xml @@ -0,0 +1,159 @@ + + + + 4.0.0 + com.apigee.callout + jc-target-server-validator + 1.0-SNAPSHOT + JavaTargetServerValidator + http://maven.apache.org + jar + + UTF-8 + UTF-8 + 1.7 + ../apiproxy/resources/java + 6.8.7 + 1.7 + 1.6.1 + + + + + + + + com.apigee.edge + message-flow + 1.0.0 + + + com.apigee.edge + expressions + 1.0.0 + + + + + + ${project.artifactId} + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-dependencies + prepare-package + + copy-dependencies + + + ${project.build.directory}/lib + false + false + true + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + ${java.version} + ${java.version} + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.6 + + + package + + jar + + + + + + + + false + + + + + + + maven-antrun-plugin + + + package + + + + + + + + + + + + + + + run + + + + + + + + diff --git a/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java b/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java new file mode 100644 index 000000000..a31b6da86 --- /dev/null +++ b/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java @@ -0,0 +1,93 @@ +// Copyright 2023 Google LLC + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at + +// http://www.apache.org/licenses/LICENSE-2.0 + +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +package com.apigee.devrel.apigee_target_server_validator; + +import com.apigee.flow.execution.ExecutionContext; +import com.apigee.flow.execution.ExecutionResult; +import com.apigee.flow.execution.spi.Execution; +import com.apigee.flow.message.MessageContext; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.net.SocketTimeoutException; +import java.net.UnknownHostException; +import com.apigee.flow.execution.Action; + + +/** + * A callout that checks if a particular port is open on a specified host. + */ +public class PortOpenCheck implements Execution { + + /** + * Checks if the specified host and port are available. + * + * @param host The host name or IP address to check. + * @param port The port number to check. + * @return A string indicating whether the host and port are available + */ + private static String available(final String host, final int port) { + Socket socket = new Socket(); + final int sockettimeout = 1000; + try { + socket.connect(new InetSocketAddress(host, port), sockettimeout); + return "REACHABLE"; + } catch (SocketTimeoutException e) { + return "NOT_REACHABLE"; + } catch (UnknownHostException e) { + return "UNKNOWN_HOST"; + } catch (IOException e) { + return "NOT_REACHABLE"; + } finally { + if (socket != null) { + try { + socket.close(); + } catch (IOException e) { + throw new RuntimeException("Exception occured", e); + } + } + } + } + + /** + * Executes the callout. + * + * @param messageContext The message context. + * @param executionContext The execution context. + * @return The execution result. + */ + public ExecutionResult execute(final MessageContext messageContext, + final ExecutionContext executionContext) { + try { + String hostname = messageContext.getMessage().getHeader("host_name"); + String port = messageContext.getMessage().getHeader("port_number"); + int portnumber = Integer.parseInt(port); + String status = available(hostname, portnumber); + messageContext.setVariable("flow.reachableStatus", status); + return ExecutionResult.SUCCESS; + } catch (Exception e) { + ExecutionResult executionResult = new ExecutionResult(false, + Action.ABORT); + //--Returns custom error message and header + executionResult.setErrorResponse(e.getMessage()); + executionResult.addErrorResponseHeader("ExceptionClass", + e.getClass().getName()); + //--Set flow variables -- may be useful for debugging. + messageContext.setVariable("JAVA_ERROR", e.getMessage()); + return executionResult; + } + } +} diff --git a/tools/target-server-validator/callout/src/main/java/package-info.java b/tools/target-server-validator/callout/src/main/java/package-info.java new file mode 100644 index 000000000..d1e2a1829 --- /dev/null +++ b/tools/target-server-validator/callout/src/main/java/package-info.java @@ -0,0 +1,22 @@ +// Copyright 2023 Google LLC + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at + +// http://www.apache.org/licenses/LICENSE-2.0 + +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +/** + * A callout that checks if a particular port is open on a specified host. + * + * @author anaik91 + * @version .01 + */ +package com.apigee.devrel.apigee_target_server_validator; diff --git a/tools/target-server-validator/input.properties b/tools/target-server-validator/input.properties new file mode 100644 index 000000000..e15ba1439 --- /dev/null +++ b/tools/target-server-validator/input.properties @@ -0,0 +1,26 @@ +[source] +baseurl=https://apigee.googleapis.com/v1 +org=xxx-xxx-xxx +auth_type=oauth + +[target] +baseurl=https://apigee.googleapis.com/v1 +org=xxx-xxx-xxx +auth_type=oauth + +[csv] +file=input.csv +default_port=443 + +[validation] +check_csv=true +check_proxies=true +proxy_export_dir=export +skip_proxy_list=mock1,stream +api_env=dev +api_name=target-server-validator +api_force_redeploy=true +api_hostname=example.apigee.com +api_ip= +report_format=md +allow_insecure=false \ No newline at end of file diff --git a/tools/target-server-validator/main.py b/tools/target-server-validator/main.py new file mode 100644 index 000000000..72410441d --- /dev/null +++ b/tools/target-server-validator/main.py @@ -0,0 +1,272 @@ +#!/usr/bin/python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
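The `main.py` script added below accumulates its results into a `final_report` list, one row per target, which `write_csv_report()` / `write_md_report()` in `utilities.py` turn into the report. As a rough sketch (hosts and names below are made up, not from the PR), each row follows the report header NAME, TARGET_SOURCE, HOST, PORT, ENV, STATUS, INFO:

```python
# Illustrative sketch only; values are hypothetical.
# Each row: [NAME, TARGET_SOURCE, HOST, PORT, ENV, STATUS, INFO]
final_report = [
    # A Target Server defined in the 'dev' environment, referenced by one proxy
    ["ts-backend", "TargetServer", "backend.example.com", "443", "dev",
     "REACHABLE", "apis - orders-v1"],
    # A host parsed out of a proxy bundle's TargetEndpoint
    ["orders-v1", "APIProxy", "api.example.com", "443", "_ORG_API_",
     "NOT_REACHABLE", "TargetEndpoint : default"],
    # A row taken from the optional input CSV
    ["httpbin.org", "Input CSV", "httpbin.org", "443", "_NA_",
     "REACHABLE", "_NA_"],
]
```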
+ + +import os +import sys +from utilities import ( # pylint: disable=import-error + parse_config, + create_proxy_bundle, + run_validator_proxy, + delete_file, + read_csv, + write_csv_report, + write_md_report, + create_dir, + unzip_file, + parse_proxy_hosts, + has_templating, + get_tes, + get_row_host_port, +) +from apigee_utils import Apigee # pylint: disable=import-error + + +def main(): + # Parse Inputs + cfg = parse_config("input.properties") + check_proxies = cfg["validation"].getboolean("check_proxies") + proxy_export_dir = cfg["validation"]["proxy_export_dir"] + report_format = cfg["validation"]["report_format"] + allow_insecure = cfg["validation"].getboolean("allow_insecure") + if report_format not in ["csv", "md"]: + report_format = "md" + + # Intialize Source & Target Apigee + SourceApigee = Apigee( + "x" if "apigee.googleapis.com" in cfg["source"]["baseurl"] else "opdk", + cfg["source"]["baseurl"], + cfg["source"]["auth_type"], + cfg["source"]["org"], + ) + + TargetApigee = Apigee( + "x" if "apigee.googleapis.com" in cfg["source"]["baseurl"] else "opdk", + cfg["target"]["baseurl"], + cfg["target"]["auth_type"], + cfg["target"]["org"], + ) + + environments = SourceApigee.list_environments() + all_target_servers = [] + # Fetch Target Servers from Source Apigee@ + print("INFO: exporting Target Servers !") + for each_env in environments: + target_servers = SourceApigee.list_target_servers(each_env) + for each_ts in target_servers: + ts_info = SourceApigee.get_target_server(each_env, each_ts) + ts_info["env"] = each_env + all_target_servers.append(ts_info) + + # Fetch Targets in APIs & Shared Flows from Source Apigee + proxy_hosts = {} + proxy_targets = {} + if check_proxies: + skip_proxy_list = ( + cfg["validation"].get("skip_proxy_list", "").split(",") + ) + print( + "INFO: exporting proxies to be analyzed ! this may take a while !" 
+ ) + api_types = ["apis", "sharedflows"] + api_revision_map = {} + for each_api_type in api_types: + api_revision_map[each_api_type] = {} + api_revision_map[each_api_type]["proxies"] = {} + api_revision_map[each_api_type]["export_dir"] = ( + proxy_export_dir + f"/{each_api_type}" + ) + create_dir(proxy_export_dir + f"/{each_api_type}") + + for each_api in SourceApigee.list_apis(each_api_type): + if each_api not in skip_proxy_list: + api_revision_map[each_api_type]["proxies"][ + each_api + ] = SourceApigee.list_api_revisions(each_api_type, each_api)[ # noqa + -1 + ] + else: + print(f"INFO : Skipping API {each_api}") + for each_api_type, each_api_type_data in api_revision_map.items(): + proxy_hosts[each_api_type] = {} + for each_api, each_api_rev in each_api_type_data["proxies"].items(): # noqa + print( + f"Exporting API : {each_api} with revision : {each_api_rev} " # noqa + ) + SourceApigee.fetch_api_revision( + each_api_type, + each_api, + each_api_rev, + api_revision_map[each_api_type]["export_dir"], + ) + print( + f"Unzipping API : {each_api} with revision : {each_api_rev} " # noqa + ) + unzip_file( + f"{api_revision_map[each_api_type]['export_dir']}/{each_api}.zip", # noqa + f"{api_revision_map[each_api_type]['export_dir']}/{each_api}", # noqa + ) + parsed_proxy_hosts = parse_proxy_hosts( + f"{api_revision_map[each_api_type]['export_dir']}/{each_api}/apiproxy" # noqa + ) + proxy_hosts[each_api_type][each_api] = parsed_proxy_hosts + proxy_tes = get_tes(parsed_proxy_hosts) + for each_te in proxy_tes: + if each_te in proxy_targets: + proxy_targets[each_te].append( + f"{each_api_type} - {each_api}" + ) + else: + proxy_targets[each_te] = [ + f"{each_api_type} - {each_api}" + ] + # Validate Targets against Target Apigee + + bundle_path = os.path.dirname(os.path.abspath(__file__)) + + # Create Validation Proxy Bundle + print("INFO: Creating proxy bundle !") + create_proxy_bundle(bundle_path, cfg["validation"]["api_name"], "apiproxy") + + # Deploy Validation Proxy Bundle + print("INFO: Deploying proxy bundle !") + if not TargetApigee.deploy_api_bundle( + cfg["validation"]["api_env"], + cfg["validation"]["api_name"], + f"{bundle_path}/{cfg['validation']['api_name']}.zip", + 1, + cfg["validation"].getboolean("api_force_redeploy", False) + ): + print(f"Proxy: {cfg['validation']['api_name']} deployment failed.") + sys.exit(1) + # CleanUp Validation Proxy Bundle + print("INFO: Cleaning Up local proxy bundle !") + delete_file(f"{bundle_path}/{cfg['validation']['api_name']}.zip") + + # Fetch API Northbound Endpoint + print( + f"INFO: Fetching VHost with name {cfg['validation']['api_hostname']} !" 
# noqa + ) + vhost_domain_name = cfg["validation"]["api_hostname"] + vhost_ip = cfg["validation"].get("api_ip", "").strip() + api_url = f"https://{vhost_domain_name}/validate_target_server" + final_report = [] + _cached_hosts = {} + + # Run Target Server Validation + print("INFO: Running validation against All Target Servers") + for each_ts in all_target_servers: + status = run_validator_proxy( + api_url, vhost_domain_name, vhost_ip, each_ts["host"], each_ts["port"], allow_insecure # noqa + ) + final_report.append( + [ + each_ts["name"], + "TargetServer", + each_ts["host"], + str(each_ts["port"]), + each_ts["env"], + status, + " & ".join(list(set(proxy_targets[each_ts["name"]]))) + if each_ts["name"] in proxy_targets + else "No References in any API", + ] + ) + + # Run Validation on Targets configured in Proxies + print("INFO: Running validation against All Targets discovered in Proxies") + for each_api_type, apis in proxy_hosts.items(): + for each_api, each_targets in apis.items(): + for each_target in each_targets: + if ( + not has_templating(each_target["host"]) + and not each_target["target_server"] + ): + if ( + f"{each_target['host']}:{each_target['port']}" in _cached_hosts # noqa + ): + print( + "INFO: Fetching validation status from cached hosts" # noqa + ) + status = _cached_hosts[ + f"{each_target['host']}:{each_target['port']}" # noqa + ] + else: + status = run_validator_proxy( + api_url, + vhost_domain_name, + vhost_ip, + each_target["host"], + each_target["port"], + allow_insecure, + ) + _cached_hosts[ + f"{each_target['host']}:{each_target['port']}" + ] = status + final_report.append( + [ + each_api, + "APIProxy" + if each_api_type == "apis" + else "SharedFlow", + each_target["host"], + str(each_target["port"]), + "_ORG_API_", + status, + each_target["source"], + ] + ) + if cfg["validation"].getboolean("check_csv"): + csv_file = cfg["csv"]["file"] + default_port = cfg["csv"]["default_port"] + csv_rows = read_csv(csv_file) + for each_row in csv_rows: + each_host, each_port = get_row_host_port(each_row, default_port) + if f"{each_host}:{each_port}" in _cached_hosts: + print("INFO: Fetching validation status from cached hosts") + status = _cached_hosts[f"{each_host}:{each_port}"] + else: + status = run_validator_proxy( + api_url, vhost_domain_name, vhost_ip, each_host, each_port, allow_insecure # noqa + ) + _cached_hosts[f"{each_host}:{each_port}"] = status + final_report.append( + [ + each_host, + "Input CSV", + each_host, + each_port, + "_NA_", + status, + "_NA_", + ] + ) + + # Write CSV Report + # TODO: support relative report path + if report_format == "csv": + report_file = "report.csv" + print(f"INFO: Dumping report to file {report_file}") + write_csv_report(report_file, final_report) + + if report_format == "md": + report_file = "report.md" + print(f"INFO: Dumping report to file {report_file}") + write_md_report(report_file, final_report) + + +if __name__ == "__main__": + main() diff --git a/tools/target-server-validator/pipeline.sh b/tools/target-server-validator/pipeline.sh new file mode 100755 index 000000000..2e1fe7923 --- /dev/null +++ b/tools/target-server-validator/pipeline.sh @@ -0,0 +1,80 @@ +#!/bin/sh + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+SCRIPTPATH="$( cd "$(dirname "$0")" || exit >/dev/null 2>&1 ; pwd -P )"
+
+bash "$SCRIPTPATH/callout/build_java_callout.sh"
+
+# Clean up previously generated files
+rm -rf "$SCRIPTPATH/input.properties"
+rm -rf "$SCRIPTPATH/export"
+rm -rf "$SCRIPTPATH"/report*
+
+# Generate input file
+cat > "$SCRIPTPATH/input.properties" << EOF
+[source]
+baseurl=https://apigee.googleapis.com/v1
+org=$APIGEE_X_ORG
+auth_type=oauth
+
+[target]
+baseurl=https://apigee.googleapis.com/v1
+org=$APIGEE_X_ORG
+auth_type=oauth
+
+[csv]
+file=input.csv
+default_port=443
+
+[validation]
+check_csv=true
+check_proxies=true
+proxy_export_dir=export
+skip_proxy_list=
+api_env=$APIGEE_X_ENV
+api_name=target_server_validator
+api_force_redeploy=true
+api_hostname=$APIGEE_X_HOSTNAME
+api_ip=
+report_format=md
+allow_insecure=false
+EOF
+
+# Generate optional input csv file
+cat > "$SCRIPTPATH/input.csv" << EOF
+HOST,PORT
+httpbin.org
+httpbin.org,443
+domaindoesntexist.apigee.tom
+smtp.gmail.com,465
+EOF
+
+# Install Dependencies
+python3 -m pip install -r "$SCRIPTPATH/requirements.txt"
+
+# Generate Gcloud Access Token
+APIGEE_ACCESS_TOKEN="$(gcloud config config-helper --force-auth-refresh --format json | jq -r '.credential.access_token')"
+export APIGEE_ACCESS_TOKEN
+
+# Running the Target Server Validator
+cd "$SCRIPTPATH"
+
+python3 main.py
+
+# Display Report
+cat "$SCRIPTPATH/report.md"
diff --git a/tools/target-server-validator/report.md b/tools/target-server-validator/report.md
new file mode 100644
index 000000000..6ff20241c
--- /dev/null
+++ b/tools/target-server-validator/report.md
@@ -0,0 +1,9 @@
+
+# Apigee Target Server Health Report
+
+NAME | TARGET_SOURCE | HOST | PORT | ENV | STATUS | INFO
+--- | --- | --- | --- | --- | --- | ---
+git | APIProxy | api.github.com | 443 | _ORG_API_ | REACHABLE | TargetEndpoint : default
+mock | APIProxy | mocktarget.apigee.net | 443 | _ORG_API_ | REACHABLE | TargetEndpoint : default
+mock_base | APIProxy | mocktarget.apigee.net | 443 | _ORG_API_ | REACHABLE | TargetEndpoint : default
+
\ No newline at end of file
diff --git a/tools/target-server-validator/requirements.txt b/tools/target-server-validator/requirements.txt
new file mode 100644
index 000000000..52f3e381b
--- /dev/null
+++ b/tools/target-server-validator/requirements.txt
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ + +xmltodict==0.13.0 +requests==2.28.1 +forcediphttpsadapter==1.0.2 diff --git a/tools/target-server-validator/utilities.py b/tools/target-server-validator/utilities.py new file mode 100644 index 000000000..c96feb169 --- /dev/null +++ b/tools/target-server-validator/utilities.py @@ -0,0 +1,244 @@ +#!/usr/bin/python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import os +import sys +import configparser +import zipfile +import csv +from urllib.parse import urlparse +import requests +import xmltodict +import urllib3 +from forcediphttpsadapter.adapters import ForcedIPHTTPSAdapter + + +def parse_config(config_file): + config = configparser.ConfigParser() + config.read(config_file) + return config + + +def zipdir(path, ziph): + # ziph is zipfile handle + for root, _, files in os.walk(path): + for file in files: + ziph.write( + os.path.join(root, file), + os.path.relpath( + os.path.join(root, file), os.path.join(path, "..") # noqa + ), + ) # noqa + + +def create_proxy_bundle(proxy_bundle_directory, api_name, target_dir): # noqa + with zipfile.ZipFile( + f"{proxy_bundle_directory}/{api_name}.zip", "w", zipfile.ZIP_DEFLATED + ) as zipf: # noqa + zipdir(target_dir, zipf) + + +def run_validator_proxy( + url, dns_host, vhost_ip, target_host, target_port="443", allow_insecure=False): # noqa + headers = { + "host_name": target_host, + "port_number": str(target_port), + "Host": dns_host, + } + if allow_insecure: + print("INFO: Skipping Certificate Verification & disabling warnings because 'allow_insecure' is set to true") # noqa + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + session = requests.Session() + if len(vhost_ip) > 0: + session.mount( + f"https://{dns_host}", ForcedIPHTTPSAdapter(dest_ip=vhost_ip) + ) # noqa + r = session.get(url, headers=headers, verify=(not allow_insecure)) + if r.status_code == 200: + return r.json()["status"] + return "STATUS_UNKNOWN" + + +def delete_file(file_name): + try: + os.remove(file_name) + except FileNotFoundError: + print(f"File {file_name} doesnt exist") + + +def write_csv_report( + file_name, + rows, + header=["NAME", "TARGET_SOURCE", "HOST", "PORT", "ENV", "STATUS", "INFO"], +): # noqa + with open(file_name, "w", newline="") as file: + writer = csv.writer(file) + writer.writerow(header) + for each_row in rows: + writer.writerow(each_row) + + +def read_csv(file_name): + read_rows = [] + try: + with open(file_name) as file: + rows = csv.reader(file) + for each_row in rows: + read_rows.append(each_row) + except FileNotFoundError: + print(f"WARN: {file_name} not found ! 
") + return read_rows + + +def write_md_report( + file_name, + rows, + header=["NAME", "TARGET_SOURCE", "HOST", "PORT", "ENV", "STATUS", "INFO"], +): # noqa + mded_rows = [] + for each_row in rows: + mded_rows.append(" | ".join(each_row)) + mded_rows = "\n".join(mded_rows) + data = f""" +# Apigee Target Server Health Report + +{" | ".join(header)} +{" | ".join(['---' for i in range(len(header))])} +{mded_rows} + """ + with open(file_name, "w") as file: + file.write(data) + + +def create_dir(dir): + try: + os.makedirs(dir) + except FileExistsError: + print(f"INFO: {dir} already exists") + + +def list_dir(dir, soft=False): + try: + return os.listdir(dir) + except FileNotFoundError: + if soft: + return [] + print(f'ERROR: Directory "{dir}" not found') + sys.exit(1) + + +def unzip_file(path_to_zip_file, directory_to_extract_to): + with zipfile.ZipFile(path_to_zip_file, "r") as zip_ref: + zip_ref.extractall(directory_to_extract_to) + + +def parse_xml(file): + try: + with open(file) as fl: + doc = xmltodict.parse(fl.read()) + return doc + except FileNotFoundError: + print(f'ERROR: File "{file}" not found') + return {} + + +def parse_http_target_connection(http_placement, http_placement_data): + hosts = None + if ( + "HTTPTargetConnection" in http_placement_data[http_placement] + and "URL" in http_placement_data[http_placement]["HTTPTargetConnection"] # noqa + ): # noqa + url_data = urlparse( + http_placement_data[http_placement]["HTTPTargetConnection"]["URL"] + ) # noqa + hosts = { + "host": url_data.hostname, + "port": str(url_data.port) + if url_data.port is not None + else ("443" if url_data.scheme == "https" else "80"), # noqa + "source": f"{http_placement} : {http_placement_data[http_placement]['@name']}", # noqa + "target_server": False, + } + if ( + "HTTPTargetConnection" in http_placement_data[http_placement] + and "LoadBalancer" # noqa + in http_placement_data[http_placement]["HTTPTargetConnection"] + ): # noqa + servers = http_placement_data[http_placement]["HTTPTargetConnection"][ + "LoadBalancer" + ][ + "Server" + ] # noqa + servers_list = servers if type(servers) is list else [servers] # noqa + target_servers = [ts["@name"] for ts in servers_list] # noqa + hosts = { + "host": target_servers, + "port": "", + "source": f"{http_placement} : {http_placement_data[http_placement]['@name']}", # noqa + "target_server": True, + } + return hosts + + +def parse_proxy_hosts(proxy_path): + policies_path = f"{proxy_path}/policies" + targets_path = f"{proxy_path}/targets" + policies = [i for i in list_dir(policies_path, True) if i.endswith(".xml")] # noqa + targets = [i for i in list_dir(targets_path, True) if i.endswith(".xml")] # noqa + hosts = [] + for each_policy in policies: + each_policy_info = parse_xml(f"{policies_path}/{each_policy}") # noqa + if "ServiceCallout" in each_policy_info: + host_data = parse_http_target_connection( + "ServiceCallout", each_policy_info + ) # noqa + if host_data is not None: + hosts.append(host_data) + for each_target in targets: + each_target_info = parse_xml(f"{targets_path}/{each_target}") # noqa + host_data = parse_http_target_connection( + "TargetEndpoint", each_target_info + ) # noqa + if host_data is not None: + hosts.append(host_data) + return hosts + + +def has_templating(data): + if "{" in data and "}" in data: + return True + else: + return False + + +def get_tes(data): + tes = [] + for each_host in data: + if each_host["target_server"]: + tes.extend(each_host["host"]) + return tes + + +def get_row_host_port(row, default_port=443): + host, port = 
None, None
+    if len(row) == 0:
+        print("WARN: Input row has no host")
+    if len(row) == 1:
+        host, port = row[0], default_port
+    if len(row) > 1:
+        host, port = row[0], row[1]
+    return host, port
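For anyone smoke-testing the deployed validation proxy outside of `main.py`, a minimal sketch of a direct call, mirroring `run_validator_proxy()` above: the hostname is a placeholder for the `api_hostname` configured in `input.properties`, and the path must match the BasePath the proxy is actually deployed with (see `apiproxy/proxies/default.xml`):

```python
# Illustrative sketch only. Replace the hostname with the api_hostname from
# input.properties; the path below assumes the BasePath from the bundled
# proxy endpoint definition.
import requests

api_hostname = "example.apigee.com"                      # placeholder
url = f"https://{api_hostname}/validate-target-server"   # deployed BasePath

response = requests.get(
    url,
    headers={
        "host_name": "httpbin.org",  # target host the Java callout will probe
        "port_number": "443",        # target port the Java callout will probe
    },
)
# The AM-Set-Json-Response policy returns e.g.
# {"host": "httpbin.org", "port": "443", "status": "REACHABLE"}
print(response.json())
```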