From 5fef79c42e00d778a7ae4344c508c9bbf979f8e9 Mon Sep 17 00:00:00 2001
From: apigee-devrel-helper <109337440+apigee-devrel-helper@users.noreply.github.com>
Date: Tue, 8 Aug 2023 17:04:41 +0200
Subject: [PATCH 01/28] chore(main): release 1.12.0

---
 CHANGELOG.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8ffdfec19..7bc095160 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,16 @@
 # Changelog
 
+## [1.12.0](https://github.com/apigee/devrel/compare/v1.11.0...v1.12.0) (2023-08-08)
+
+
+### Features
+
+* cloud build job to delete stale image artifacts ([2e1e923](https://github.com/apigee/devrel/commit/2e1e923e46d5e8405edd513253c9ac83804c4d55))
+* fix semantic changes from httpbin to mocktarget ([87a376f](https://github.com/apigee/devrel/commit/87a376f93f041ec7d91c36ffc854250890d278cd))
+* remove hackish fix since mocktarget now supports query param args ([e4c2280](https://github.com/apigee/devrel/commit/e4c2280f3dc736bba4a05f9ba6f97c4c2afadb3f))
+* replace httpbin with mocktarget for identity facade and envoy quickstart ([b3fcfb9](https://github.com/apigee/devrel/commit/b3fcfb91a8afd126fca0588cd5c2e1375d243c24))
+* replace httpbin.org with mocktarget.apigee.net ([58a8185](https://github.com/apigee/devrel/commit/58a81857cb4fdff4edaa6542aeb32707eefb3d48))
+
 ## [1.11.0](https://github.com/apigee/devrel/compare/v1.10.1...v1.11.0) (2023-07-05)

From d9489f281a150604a2ebff532e7517e6fa6491e1 Mon Sep 17 00:00:00 2001
From: anaik91
Date: Mon, 14 Aug 2023 19:10:27 +0530
Subject: [PATCH 02/28] feat: Added proxy-endpoint-unifier source

---
 tools/proxy-endpoint-unifier/README.md        |  47 ++
 tools/proxy-endpoint-unifier/input.properties |  10 +
 tools/proxy-endpoint-unifier/main.py          | 105 ++++
 tools/proxy-endpoint-unifier/requirements.txt |   2 +
 tools/proxy-endpoint-unifier/utils.py         | 558 ++++++++++++++++++
 tools/proxy-endpoint-unifier/xorhybrid.py     |  40 ++
 6 files changed, 762 insertions(+)
 create mode 100644 tools/proxy-endpoint-unifier/README.md
 create mode 100644 tools/proxy-endpoint-unifier/input.properties
 create mode 100644 tools/proxy-endpoint-unifier/main.py
 create mode 100644 tools/proxy-endpoint-unifier/requirements.txt
 create mode 100644 tools/proxy-endpoint-unifier/utils.py
 create mode 100644 tools/proxy-endpoint-unifier/xorhybrid.py

diff --git a/tools/proxy-endpoint-unifier/README.md b/tools/proxy-endpoint-unifier/README.md
new file mode 100644
index 000000000..85709c3d2
--- /dev/null
+++ b/tools/proxy-endpoint-unifier/README.md
@@ -0,0 +1,47 @@
# Apigee OPDK to Apigee X/Hybrid API Proxy Endpoint Unifier


## Objective
Apigee X has a limitation of hosting only 5 Proxy Endpoints per proxy. Apigee OPDK/Edge has no such limitation.
The objective is to take a proxy bundle and smartly convert its proxy endpoints into conditional flows, grouping them with other proxy endpoints.

## Disclaimer
This is not an Officially Supported Google Product!
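
## How it works (illustration)

The tool groups proxy endpoints by the first segment of their base path and merges each group into a single proxy endpoint, carrying the original flow steps and route rules over as conditionally executed ones. As a minimal sketch of the idea (the endpoint, path and target names below are invented for illustration, not taken from a real bundle), an original endpoint such as:

```
<ProxyEndpoint name="PE-Orders">
    <HTTPProxyConnection>
        <BasePath>/orders</BasePath>
    </HTTPProxyConnection>
    <RouteRule name="default">
        <TargetEndpoint>TE-Orders</TargetEndpoint>
    </RouteRule>
</ProxyEndpoint>
```

is folded into the merged endpoint with its route rule (and each of its flow steps) guarded by a condition derived from the old base path:

```
<RouteRule name="default">
    <Condition>(request.path Matches "/orders*")</Condition>
    <TargetEndpoint>TE-Orders</TargetEndpoint>
</RouteRule>
```
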
## Pre-Requisites
* python3.x
* Please install the required Python libraries
```
    python3 -m pip install -r requirements.txt
```
* Please fill in `input.properties`
```
    [common]
    input_apis=apis                              # Folder Containing Extracted Proxy Bundles
    processed_apis=transformed                   # Folder to export transformed Proxies to
    proxy_bundle_directory=transformed_bundles   # Folder to export transformed Proxy Bundles (zip) to
    proxy_endpoint_count=4                       # Number of Proxy Endpoints to retain while transforming
    debug=false                                  # Flag to export debug logs

    [validate]
    enabled=true                                 # Flag to enable Validation
    gcp_project_id=apigee-payg-377208            # Apigee X/Hybrid Project to run Validation
```

* Please run the below command to authenticate against the Apigee X/Hybrid APIs if Validation is enabled

```
    export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)
```


## Running
Run the script as below
```
python3 main.py
```


## Copyright

Copyright 2023 Google LLC. This software is provided as-is, without warranty or representation for any use or purpose. Your use of it is subject to your agreement with Google.
diff --git a/tools/proxy-endpoint-unifier/input.properties b/tools/proxy-endpoint-unifier/input.properties
new file mode 100644
index 000000000..c99155489
--- /dev/null
+++ b/tools/proxy-endpoint-unifier/input.properties
@@ -0,0 +1,10 @@
+[common]
+input_apis=apis
+processed_apis=transformed
+proxy_bundle_directory=transformed_bundles
+proxy_endpoint_count=4
+debug=false
+
+[validate]
+enabled=true
+gcp_project_id=apigee-payg-377208
diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py
new file mode 100644
index 000000000..a7b5385d9
--- /dev/null
+++ b/tools/proxy-endpoint-unifier/main.py
@@ -0,0 +1,105 @@
import os
from xorhybrid import ApigeeXorHybrid
import utils

def main():
    cfg = utils.parse_config('input.properties')
    proxy_dir = cfg['common']['input_apis']
    proxy_dest_dir = cfg['common']['processed_apis']
    proxy_bundle_directory = cfg['common']['proxy_bundle_directory']
    export_debug_file=cfg.getboolean('common','debug')
    validation_enabled=cfg.getboolean('validate','enabled')
    utils.delete_folder(proxy_dest_dir)
    utils.delete_folder(proxy_bundle_directory)
    utils.create_dir(proxy_bundle_directory)
    proxy_endpoint_count = utils.get_proxy_endpoint_count(cfg)
    proxies = utils.list_dir(proxy_dir)

    final_dict = {}
    processed_dict = {}

    for each_dir in proxies:
        each_proxy_dict = utils.read_proxy_artifacts(
                f"{proxy_dir}/{each_dir}",
                utils.parse_proxy_root(f"{proxy_dir}/{each_dir}")
            )
        if len(each_proxy_dict) > 0:
            each_proxy_rel=utils.get_proxy_objects_relationships(each_proxy_dict)
            final_dict[each_dir]=each_proxy_dict
            processed_dict[each_dir]=each_proxy_rel

    processing_final_dict = final_dict.copy()

    path_group_map = {}
    for each_api,each_api_info in processed_dict.items():
        path_group_map[each_api] = utils.get_api_path_groups(each_api_info)

    grouped_apis = {}
    for each_api,base_path_info in path_group_map.items():
        grouped_apis[each_api]=utils.group_paths_by_path(base_path_info,proxy_endpoint_count)

    bundled_group = {}
    for each_api,grouped_api in grouped_apis.items():
        bundled_group[each_api]=utils.bundle_path(grouped_api)

    merged_pes = {}
    merged_objects = {}
    for each_api,grouped_api in bundled_group.items():
        print(f'Processing API ====> {each_api} with {len(grouped_api)} groups')
        for index,each_group in enumerate(grouped_api):
            merged_objects[f"{each_api}_{index}"]={
                'Policies':[],
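                # Policies/TargetEndpoints collect artifact names from every
                # endpoint merged into this group (de-duplicated further below).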
'TargetEndpoints':[], + 'ProxyEndpoints' :[] + } + for each_path,pes in each_group.items(): + each_pe = '-'.join(pes) + merged_pes[each_pe] = utils.merge_proxy_endpoints( + processing_final_dict[each_api], + each_path, + pes + ) + merged_objects[f"{each_api}_{index}"]['Name'] = f"{final_dict[each_api]['proxyName']}_{index}" + merged_objects[f"{each_api}_{index}"]['Policies'].extend([ item for pe in pes for item in processed_dict[each_api][pe]['Policies']]) + merged_objects[f"{each_api}_{index}"]['TargetEndpoints'].extend([ item for pe in pes for item in processed_dict[each_api][pe]['TargetEndpoints']]) + merged_objects[f"{each_api}_{index}"]['Policies'] = list(set(merged_objects[f"{each_api}_{index}"]['Policies'])) + merged_objects[f"{each_api}_{index}"]['TargetEndpoints'] = list(set(merged_objects[f"{each_api}_{index}"]['TargetEndpoints'])) + merged_objects[f"{each_api}_{index}"]['ProxyEndpoints'].append(each_pe) + + + + for each_api,grouped_api in bundled_group.items(): + for index,each_group in enumerate(grouped_api): + utils.clone_proxies( + f"{proxy_dir}/{each_api}", + f"{proxy_dest_dir}/{each_api}_{index}", + merged_objects[f"{each_api}_{index}"], + merged_pes, + proxy_bundle_directory + ) + + files = { + 'final_dict' : final_dict, + 'processed_dict' : processed_dict, + 'path_group_map' : path_group_map, + 'grouped_apis' : grouped_apis, + 'bundled_group' : bundled_group, + 'merged_pes' : merged_pes, + 'merged_objects' : merged_objects, + } + if export_debug_file: + utils.export_debug_log(files) + + if validation_enabled: + gcp_project_id=cfg['validate']['gcp_project_id'] + x=ApigeeXorHybrid(gcp_project_id) + x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN')) + result = {} + bundled_proxies=utils.list_dir(proxy_bundle_directory) + for each_bundle in bundled_proxies: + validation=x.validate_api('apis',f"{proxy_bundle_directory}/{each_bundle}") + result[each_bundle]=validation + print(f"{each_bundle} ==> Validation : {validation}") + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/requirements.txt b/tools/proxy-endpoint-unifier/requirements.txt new file mode 100644 index 000000000..c0ac1c8c2 --- /dev/null +++ b/tools/proxy-endpoint-unifier/requirements.txt @@ -0,0 +1,2 @@ +xmltodict==0.13.0 +requests==2.28.1 \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py new file mode 100644 index 000000000..636318f28 --- /dev/null +++ b/tools/proxy-endpoint-unifier/utils.py @@ -0,0 +1,558 @@ +#!/usr/bin/python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
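# Helper functions for the unifier: parse a proxy bundle's XML artifacts
# into dicts (via xmltodict), regroup its proxy endpoints by the first
# segment of their base path, and write the merged results back out.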
import configparser
import os
import sys
import xmltodict
import json
import shutil
import zipfile

def parse_config(config_file):
    config = configparser.ConfigParser()
    config.read(config_file)
    return config

def get_proxy_endpoint_count(cfg):
    try:
        proxy_endpoint_count=cfg.getint('common','proxy_endpoint_count')
        if not (proxy_endpoint_count > 0 and proxy_endpoint_count <= 5):
            print('ERRROR: proxy_endpoint_count should be > Zero(0) & < Five(5)')
            sys.exit(1)
    except ValueError:
        print('proxy_endpoint_count should be Numeric')
        sys.exit(1)
    return proxy_endpoint_count

def create_dir(dir):
    try:
        os.makedirs(dir)
    except FileExistsError:
        print(f"INFO: {dir} already exists")

def list_dir(dir,isok=False):
    try:
        return os.listdir(dir)
    except FileNotFoundError:
        if isok:
            print(f"Ignoring : Directory \"{dir}\" not found")
            return []
        print(f"ERROR: Directory \"{dir}\" not found")
        sys.exit(1)

def get_proxy_entrypoint(dir):
    files=list_dir(dir)
    ent = []
    for eachfile in files:
        if eachfile.endswith(".xml"):
            ent.append(eachfile)
    if len(ent)==1:
        return os.path.join(dir,ent[0])
    else:
        if len(ent)>1:
            print(f"ERROR: Directory \"{dir}\" contains multiple xml files at root")
        else:
            print(f"ERROR: Directory \"{dir}\" has no xml file at root")
    return None

def parse_json(file):
    try:
        with open(file) as fl:
            doc = json.loads(fl.read())
        return doc
    except FileNotFoundError:
        print(f"ERROR: File \"{file}\" not found")
        return {}

def parse_xml(file):
    try:
        with open(file) as fl:
            doc = xmltodict.parse(fl.read())
        return doc
    except FileNotFoundError:
        print(f"ERROR: File \"{file}\" not found")
        return {}


def write_json(file,data):
    try:
        with open(file,'w') as fl:
            fl.write(json.dumps(data,indent=2))
    except FileNotFoundError:
        print(f"ERROR: File \"{file}\" not found")
        return False
    return True

def write_xml_from_dict(file,data):
    try:
        with open(file,'w') as fl:
            fl.write(xmltodict.unparse(data,pretty=True))
    except FileNotFoundError:
        print(f"ERROR: File \"{file}\" not found")
        return False
    return True

def parse_proxy_root(dir):
    file=get_proxy_entrypoint(dir)
    if file is None:
        return {}
    doc=parse_xml(file)
    return doc

def read_proxy_artifacts(dir,entrypoint):
    APIProxy=entrypoint['APIProxy']
    # Check if proxy has multiple endpoints
    if isinstance(APIProxy['ProxyEndpoints']['ProxyEndpoint'], list):
        # print(f"Processing Proxy ==> {entrypoint['APIProxy']['@name']}")
        proxyName = entrypoint['APIProxy']['@name']
        proxy_dict = {
            # 'BasePaths':[],
            # 'Policies':{},
            'ProxyEndpoints':{},
            'TargetEndpoints':{},
            'proxyName':proxyName
        }
        ProxyEndpoints= APIProxy['ProxyEndpoints']['ProxyEndpoint']
        ProxyEndpoints = ( [ProxyEndpoints] if isinstance(ProxyEndpoints,str) else ProxyEndpoints)
        for each_pe in ProxyEndpoints:
            proxy_dict['ProxyEndpoints'][each_pe]=parse_xml(os.path.join(dir,'proxies',f"{each_pe}.xml"))

        """
        proxy_dict['BasePaths']=APIProxy['BasePaths']

        for each_policy in APIProxy['Policies']['Policy']:
            proxy_dict['Policies'][each_policy]=parse_xml(os.path.join(dir,'policies',f"{each_policy}.xml"))
        """
        # print(APIProxy['TargetEndpoints']['TargetEndpoint'])
        TargetEndpoints =APIProxy['TargetEndpoints']['TargetEndpoint']
        TargetEndpoints = ([TargetEndpoints] if isinstance(TargetEndpoints,str) else TargetEndpoints)
        for each_te in TargetEndpoints:
proxy_dict['TargetEndpoints'][each_te]=parse_xml(os.path.join(dir,'targets',f"{each_te}.xml")) + + # Skip when proxy has one endpoints + else: + print(f"Skipping Proxy ==> {entrypoint['APIProxy']['@name']}") + return {} + return proxy_dict + + +def get_all_policies_from_step(Step): + policies=[] + StepData=([Step] if isinstance(Step,dict) else Step) + for eachStep in StepData: + policies.append(eachStep['Name']) + return policies + +def get_all_policies_from_flow(Flow,fault_rule=False): + policies=[] + if not fault_rule: + Request=([] if Flow['Request'] is None else + ( + [Flow['Request']['Step']] if isinstance(Flow['Request']['Step'],dict) + else Flow['Request']['Step'] + ) + ) + Response=([] if Flow['Response'] is None else + ( + [Flow['Response']['Step']] if isinstance(Flow['Response']['Step'],dict) + else Flow['Response']['Step'] + ) + ) + for each_flow in Request: + policies.extend(get_all_policies_from_step(each_flow)) + for each_flow in Response: + policies.extend(get_all_policies_from_step(each_flow)) + else: + FaultRules = ([] if Flow is None else + ( + [Flow['Step']] if isinstance(Flow['Step'],dict) + else Flow['Step'] + ) + ) + for each_step in FaultRules: + policies.extend(get_all_policies_from_step(each_step)) + return policies + +def get_all_policies_from_endpoint(endpointData,endpointType): + policies=[] + policies.extend( + get_all_policies_from_flow( + endpointData[endpointType]['PreFlow'] + ) + ) + policies.extend( + get_all_policies_from_flow( + endpointData[endpointType]['PostFlow'] + ) + ) + + Flows = ( + [] + if endpointData[endpointType]['Flows'] is None else + ( + [endpointData[endpointType]['Flows']['Flow']] + if isinstance( + endpointData[endpointType]['Flows']['Flow'],dict) + else + endpointData[endpointType]['Flows']['Flow'] + )) + + for eachFlow in Flows: + policies.extend( + get_all_policies_from_flow( + eachFlow + ) + ) + if 'DefaultFaultRule' in endpointData[endpointType]: + policies.extend( + get_all_policies_from_flow(endpointData[endpointType]['DefaultFaultRule'],True) + ) + return policies + +def get_target_endpoints(ProxyEndpointData): + target_endpoints=[] + routes = ( + [ProxyEndpointData['RouteRule']] + if isinstance(ProxyEndpointData['RouteRule'],dict) + else ProxyEndpointData['RouteRule'] + ) + for eachRoute in routes: + if 'TargetEndpoint' in eachRoute: + target_endpoints.append(eachRoute['TargetEndpoint']) + return target_endpoints + +def get_proxy_objects_relationships(proxy_dict): + proxy_object_map = {} + ProxyEndpoints = proxy_dict['ProxyEndpoints'] + for ProxyEndpoint,ProxyEndpointData in ProxyEndpoints.items(): + proxy_object_map[ProxyEndpoint]={} + + target_endpoints = get_target_endpoints(ProxyEndpointData['ProxyEndpoint']) + TargetEndpointsData = { te :proxy_dict['TargetEndpoints'][te] for te in target_endpoints} + policies = [] + policies.extend(get_all_policies_from_endpoint(ProxyEndpointData,'ProxyEndpoint')) + for _,each_te in TargetEndpointsData.items(): + policies.extend(get_all_policies_from_endpoint(each_te,'TargetEndpoint')) + proxy_object_map[ProxyEndpoint]={ + # 'Policies' : get_all_policies_from_endpoint(ProxyEndpointData,'ProxyEndpoint'), + 'Policies' : policies, + 'BasePath' : ProxyEndpointData['ProxyEndpoint']['HTTPProxyConnection']['BasePath'], + 'TargetEndpoints' : target_endpoints, + # 'Resources' : [] + } + + return proxy_object_map + +def get_api_path_groups(each_api_info): + api_path_group_map={} + for pe,pe_info in each_api_info.items(): + if pe_info['BasePath'] is None: + if '_null_' in api_path_group_map: + # 
api_path_group_map['_null_'].append({ pe :pe_info['BasePath']}) + api_path_group_map['_null_'].append({ pe :None}) + else: + # api_path_group_map['_null_']=[{ pe :pe_info['BasePath']}] + api_path_group_map['_null_']=[{ pe :None}] + else: + base_path_split=[ i for i in pe_info['BasePath'].split('/') if i!= ""] + if base_path_split[0] in api_path_group_map: + api_path_group_map[base_path_split[0]].append( + # { pe :pe_info['BasePath']} + { pe :base_path_split[0]} + ) + else: + # api_path_group_map[base_path_split[0]]=[{ pe :pe_info['BasePath']}] + api_path_group_map[base_path_split[0]]=[{ pe :base_path_split[0]}] + return api_path_group_map + + +def group_paths_by_path(api_info,pe_count_limit): + + """ + { + "AMCatVehiculos": [ + { + "PE-AMCatalogosVehiculos": "/AMCatVehiculos" + } + ], + "cotizadorflexibleapi": [ + { + "PE-Acceso": "/cotizadorflexibleapi/api/acceso" + }, + { + "PE-CotizadorExterno": "/cotizadorflexibleapi/api/CotizadorExterno" + }, + { + "PE-Token": "/cotizadorflexibleapi/Token" + } + ], + "CotFlex": [ + { + "PE-CotFlex": "/CotFlex" + } + ], + "WebConecta": [ + { + "PE-WebConecta": "/WebConecta" + } + ] + } + """ + result = [] + #['AMCatVehiculos', 'cotizadorflexibleapi', 'CotFlex', 'WebConecta'] + paths = list(api_info.keys()) + path_count=len(paths) + if path_count > pe_count_limit: + # count=0 + for i in range(0,path_count,pe_count_limit): + each_result=[] + if i+pe_count_limit > path_count: + for k in paths[i:path_count]: + each_result.extend(api_info[k]) + else: + for k in paths[i:i+pe_count_limit]: + each_result.extend(api_info[k]) + result.append(each_result) + else: + each_result=[] + for _,v in api_info.items(): + each_result.extend(v) + result.append(each_result) + return result + # print(json.dumps(result,indent=2)) + + +def bundle_path(each_group_bundle): + """ + [ + { + "PE-AMCatalogosVehiculos": "AMCatVehiculos" + }, + { + "PE-Acceso": "cotizadorflexibleapi" + }, + { + "PE-CotizadorExterno": "cotizadorflexibleapi" + }, + { + "PE-Token": "cotizadorflexibleapi" + }, + { + "PE-CotFlex": "CotFlex" + } + ] + """ + outer_group = [] + for each_group in each_group_bundle: + subgroups = {} + for each_pe in each_group: + path=list(each_pe.values())[0] + proxy_ep=list(each_pe.keys())[0] + if path in subgroups: + subgroups[path].append(proxy_ep) + else: + subgroups[path]=[proxy_ep] + outer_group.append(subgroups) + return outer_group + +def apply_condition(step,condition): + step_or_rule = step.copy() + if 'Condition' in step_or_rule: + if step_or_rule['Condition'] is None: + step_or_rule['Condition']=condition + elif len(step_or_rule['Condition'].strip()) > 0: + if step_or_rule['Condition'].strip().startswith('('): + step_or_rule['Condition']= f"{condition} and {step_or_rule['Condition']}" + else: + step_or_rule['Condition']= f"{condition} and {step_or_rule['Condition']}" + else: + step_or_rule['Condition']=condition + else: + step_or_rule['Condition']=condition + return step_or_rule + +def process_steps(step,condition): + processed_step = [] + if step is None: + return processed_step + elif isinstance(step['Step'],dict): + processed_step = [ apply_condition(step['Step'],condition) ] + # processed_step = [ {'Step': apply_condition(step['Step'],condition)} ] + elif isinstance(step['Step'],list): + processed_step = [ apply_condition(i,condition) for i in step['Step'] ] + # processed_step = [ {'Step':apply_condition(i,condition)} for i in step['Step'] ] + else: + return processed_step + return processed_step + +def process_flow(flow,condition): + processed_flow=flow.copy() 
    if flow['Request'] is not None:
        processed_flow['Request']['Step'] = process_steps(flow['Request'],condition)
    if flow['Response'] is not None:
        processed_flow['Response']['Step'] = process_steps(flow['Response'],condition)
    processed_flow_with_condition = apply_condition(processed_flow,condition)
    return processed_flow_with_condition

def process_route_rules(route_rules,condition):
    processed_rr =[]
    for each_rr in (route_rules if isinstance(route_rules,list) else [route_rules]):
        each_processed_rr=apply_condition(each_rr,condition)
        processed_rr.append(each_processed_rr)
    return processed_rr

def merge_proxy_endpoints(api_dict,basepath,pes):
    merged_pe = {'ProxyEndpoint' : {}}
    for each_pe,each_pe_info in api_dict['ProxyEndpoints'].items():
        if each_pe in pes :
            original_basepath = each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['BasePath']
            # TODO : Build full Request path
            condition=(original_basepath if original_basepath is None else f'(request.path Matches "{original_basepath}*")')
            copied_flows = (
                None if each_pe_info['ProxyEndpoint']['Flows'] is None else each_pe_info['ProxyEndpoint']['Flows'].copy()
            )
            original_flows = ([] if copied_flows is None else ([copied_flows['Flow']] if isinstance(copied_flows['Flow'],dict) else copied_flows['Flow']))

            if len(merged_pe['ProxyEndpoint'])==0:
                merged_pe['ProxyEndpoint']={
                    '@name': [],
                    'Description': None,
                    'FaultRules': None,
                    'PreFlow': {
                        '@name': 'PreFlow',
                        'Request': {'Step':[]},
                        'Response': {'Step':[]},
                    },
                    'PostFlow': {
                        '@name': 'PostFlow',
                        'Request': {'Step':[]},
                        'Response': {'Step':[]},
                    },
                    'Flows': {'Flow':[]},
                    'HTTPProxyConnection': {'BasePath': '', 'Properties': {}, 'VirtualHost': ''},
                    'RouteRule': []
                }

            merged_pe['ProxyEndpoint']['Description'] = each_pe_info['ProxyEndpoint']['Description']
            merged_pe['ProxyEndpoint']['FaultRules'] = each_pe_info['ProxyEndpoint']['FaultRules']
            merged_pe['ProxyEndpoint']['HTTPProxyConnection']['BasePath']=(basepath if basepath is None else f'/{basepath}')
            merged_pe['ProxyEndpoint']['HTTPProxyConnection']['Properties']=each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['Properties']
            merged_pe['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost']=each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost']

            # Fold this endpoint's route rules and flow steps into the merged
            # endpoint, each guarded by the path condition derived above.
            merged_pe['ProxyEndpoint']['@name'].append(each_pe_info['ProxyEndpoint']['@name'])
            merged_pe['ProxyEndpoint']['RouteRule'].extend(
                process_route_rules(each_pe_info['ProxyEndpoint']['RouteRule'],condition)
            )
            merged_pe['ProxyEndpoint']['PreFlow']['Request']['Step'].extend(
                process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition)
            )
            merged_pe['ProxyEndpoint']['PreFlow']['Response']['Step'].extend(
                process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Response'],condition)
            )
            merged_pe['ProxyEndpoint']['PostFlow']['Request']['Step'].extend(
                process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition)
            )
            merged_pe['ProxyEndpoint']['PostFlow']['Response']['Step'].extend(
                process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Response'],condition)
            )
            for each_flow in original_flows:
                merged_pe['ProxyEndpoint']['Flows']['Flow'].append(
                    process_flow(each_flow,condition)
                )
    merged_pe['ProxyEndpoint']['@name'] = "-".join(merged_pe['ProxyEndpoint']['@name'])

    return merged_pe

def copy_folder(src,dst):
    try:
        shutil.copytree(src, dst)
    except FileNotFoundError as e:
        print(e)
        sys.exit(1)

def delete_folder(src):
    try:
shutil.rmtree(src) + except FileNotFoundError as e: + print(f'Ignoring : {e}') + return + +def delete_file(src): + try: + os.remove(src) + except FileNotFoundError as e: + print(f'Ignoring : {e}') + return + +def clean_up_artifacts(target_dir,artifacts_to_retains): + for file in list_dir(target_dir,True): + each_policy_file=file.split('.xml')[0] + if each_policy_file not in artifacts_to_retains: + delete_file(f"{target_dir}/{file}") + +def filter_objects(obj_data,obj_type,targets): + result = None + if obj_data is None: + return result + elif isinstance(obj_data[obj_type],str): + result = ({ obj_type: obj_data[obj_type] } if obj_data[obj_type] in targets else None ) + elif isinstance(obj_data[obj_type],list): + result = { obj_type: [ v for v in obj_data[obj_type] if v in targets ] } + else: + return result + return result + + +def zipdir(path, ziph): + # ziph is zipfile handle + for root, dirs, files in os.walk(path): + for file in files: + ziph.write(os.path.join(root, file), + os.path.relpath(os.path.join(root, file), + os.path.join(path, '..'))) + +def clone_proxies(source_dir,target_dir,objects,merged_pes,proxy_bundle_directory): + target_dir=f"{target_dir}/apiproxy" + copy_folder(source_dir,target_dir) + file=get_proxy_entrypoint(target_dir) + root=parse_xml(file) + delete_file(file) + root['APIProxy']['@name']=objects['Name'] + root['APIProxy']['Policies']= filter_objects(root['APIProxy']['Policies'],'Policy',objects['Policies']) + # root['APIProxy']['ProxyEndpoints']=filter_objects(root['APIProxy']['ProxyEndpoints'],'Policies',objects['ProxyEndpoints']) + root['APIProxy']['TargetEndpoints']=filter_objects(root['APIProxy']['TargetEndpoints'],'TargetEndpoint',objects['TargetEndpoints']) + # root['APIProxy']['Resources']=filter_objects(root['APIProxy']['Policies'],'Policies',objects['Policies']) + clean_up_artifacts(f"{target_dir}/policies",objects['Policies']) + clean_up_artifacts(f"{target_dir}/targets",objects['TargetEndpoints']) + for pe in objects['ProxyEndpoints']: + write_xml_from_dict(f"{target_dir}/proxies/{pe}.xml",merged_pes[pe]) + clean_up_artifacts(f"{target_dir}/proxies",objects['ProxyEndpoints']) + # root['APIProxy']['ProxyEndpoints']=filter_objects(root['APIProxy']['ProxyEndpoints'],'ProxyEndpoint',objects['ProxyEndpoints']) + root['APIProxy']['ProxyEndpoints']= {'ProxyEndpoint' : ( objects['ProxyEndpoints'] if len(objects['ProxyEndpoints']) > 1 else objects['ProxyEndpoints'][0] )} + transformed_file = file.split('/') + transformed_file[-1]=f"{objects['Name']}.xml" + write_xml_from_dict("/".join(transformed_file),root) + delete_folder(f"{target_dir}/manifests") + with zipfile.ZipFile(f"{proxy_bundle_directory}/{objects['Name']}.zip", 'w', zipfile.ZIP_DEFLATED) as zipf: + zipdir(target_dir, zipf) + + +def export_debug_log(files,log_path='logs'): + create_dir(log_path) + for file,data in files.items(): + file_name=f'{log_path}/{file}.json' + write_json(file_name,data) \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/xorhybrid.py b/tools/proxy-endpoint-unifier/xorhybrid.py new file mode 100644 index 000000000..6a80cd239 --- /dev/null +++ b/tools/proxy-endpoint-unifier/xorhybrid.py @@ -0,0 +1,40 @@ +#!/usr/bin/python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests +import os + +class ApigeeXorHybrid: + def __init__(self,org): + self.baseurl=f"https://apigee.googleapis.com/v1/organizations/{org}" + self.auth_header = {} + + def set_auth_header(self,token): + self.auth_header = { + 'Authorization' : f"Bearer {token}" + } + + def validate_api(self,api_type,proxy_bundle_path): + api_name = os.path.basename(proxy_bundle_path).split('.zip')[0] + url = f"{self.baseurl}/{api_type}?name={api_name}&action=validate&validate=true" + files=[ + ('data',(api_name,open(proxy_bundle_path,'rb'),'application/zip')) + ] + response = requests.request("POST", url, headers=self.auth_header, data={}, files=files) + if response.status_code == 200 : + return True + else: + return response.json() \ No newline at end of file From a7c8a030e40e55aede631d5df438265012ac7ddd Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 19:12:26 +0530 Subject: [PATCH 03/28] feat: updated Licenses in the proxy-endpoint-unifier wrapper --- tools/proxy-endpoint-unifier/input.properties | 14 ++++++++++++++ tools/proxy-endpoint-unifier/main.py | 16 ++++++++++++++++ tools/proxy-endpoint-unifier/requirements.txt | 14 ++++++++++++++ tools/proxy-endpoint-unifier/xorhybrid.py | 2 +- 4 files changed, 45 insertions(+), 1 deletion(-) diff --git a/tools/proxy-endpoint-unifier/input.properties b/tools/proxy-endpoint-unifier/input.properties index c99155489..f2d6ab305 100644 --- a/tools/proxy-endpoint-unifier/input.properties +++ b/tools/proxy-endpoint-unifier/input.properties @@ -1,3 +1,17 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + [common] input_apis=apis processed_apis=transformed diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index a7b5385d9..08a607b46 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -1,3 +1,19 @@ +#!/usr/bin/python + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import os from xorhybrid import ApigeeXorHybrid import utils diff --git a/tools/proxy-endpoint-unifier/requirements.txt b/tools/proxy-endpoint-unifier/requirements.txt index c0ac1c8c2..cb37891bf 100644 --- a/tools/proxy-endpoint-unifier/requirements.txt +++ b/tools/proxy-endpoint-unifier/requirements.txt @@ -1,2 +1,16 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + xmltodict==0.13.0 requests==2.28.1 \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/xorhybrid.py b/tools/proxy-endpoint-unifier/xorhybrid.py index 6a80cd239..68ac38dad 100644 --- a/tools/proxy-endpoint-unifier/xorhybrid.py +++ b/tools/proxy-endpoint-unifier/xorhybrid.py @@ -1,6 +1,6 @@ #!/usr/bin/python -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From b39a38fed5bbb05f22c3e84771aca68cc0d6b938 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 19:15:17 +0530 Subject: [PATCH 04/28] feat: removed comments & added newlines in the proxy-endpoint-unifier wrapper --- tools/proxy-endpoint-unifier/main.py | 2 +- tools/proxy-endpoint-unifier/requirements.txt | 2 +- tools/proxy-endpoint-unifier/utils.py | 66 +------------------ tools/proxy-endpoint-unifier/xorhybrid.py | 2 +- 4 files changed, 4 insertions(+), 68 deletions(-) diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index 08a607b46..3f3a6dd46 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -118,4 +118,4 @@ def main(): print(f"{each_bundle} ==> Validation : {validation}") if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/tools/proxy-endpoint-unifier/requirements.txt b/tools/proxy-endpoint-unifier/requirements.txt index cb37891bf..7fd3dd334 100644 --- a/tools/proxy-endpoint-unifier/requirements.txt +++ b/tools/proxy-endpoint-unifier/requirements.txt @@ -13,4 +13,4 @@ # limitations under the License. 
xmltodict==0.13.0 -requests==2.28.1 \ No newline at end of file +requests==2.28.1 diff --git a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py index 636318f28..ee71c356f 100644 --- a/tools/proxy-endpoint-unifier/utils.py +++ b/tools/proxy-endpoint-unifier/utils.py @@ -117,11 +117,8 @@ def read_proxy_artifacts(dir,entrypoint): APIProxy=entrypoint['APIProxy'] # Check if proxy has multiple endpoints if isinstance(APIProxy['ProxyEndpoints']['ProxyEndpoint'], list): - # print(f"Processing Proxy ==> {entrypoint['APIProxy']['@name']}") proxyName = entrypoint['APIProxy']['@name'] proxy_dict = { - # 'BasePaths':[], - # 'Policies':{}, 'ProxyEndpoints':{}, 'TargetEndpoints':{}, 'proxyName':proxyName @@ -131,13 +128,6 @@ def read_proxy_artifacts(dir,entrypoint): for each_pe in ProxyEndpoints: proxy_dict['ProxyEndpoints'][each_pe]=parse_xml(os.path.join(dir,'proxies',f"{each_pe}.xml")) - """ - proxy_dict['BasePaths']=APIProxy['BasePaths'] - - for each_policy in APIProxy['Policies']['Policy']: - proxy_dict['Policies'][each_policy]=parse_xml(os.path.join(dir,'policies',f"{each_policy}.xml")) - """ - # print(APIProxy['TargetEndpoints']['TargetEndpoint']) TargetEndpoints =APIProxy['TargetEndpoints']['TargetEndpoint'] TargetEndpoints = ([TargetEndpoints] if isinstance(TargetEndpoints,str) else TargetEndpoints) for each_te in TargetEndpoints: @@ -248,11 +238,9 @@ def get_proxy_objects_relationships(proxy_dict): for _,each_te in TargetEndpointsData.items(): policies.extend(get_all_policies_from_endpoint(each_te,'TargetEndpoint')) proxy_object_map[ProxyEndpoint]={ - # 'Policies' : get_all_policies_from_endpoint(ProxyEndpointData,'ProxyEndpoint'), 'Policies' : policies, 'BasePath' : ProxyEndpointData['ProxyEndpoint']['HTTPProxyConnection']['BasePath'], 'TargetEndpoints' : target_endpoints, - # 'Resources' : [] } return proxy_object_map @@ -281,39 +269,7 @@ def get_api_path_groups(each_api_info): def group_paths_by_path(api_info,pe_count_limit): - - """ - { - "AMCatVehiculos": [ - { - "PE-AMCatalogosVehiculos": "/AMCatVehiculos" - } - ], - "cotizadorflexibleapi": [ - { - "PE-Acceso": "/cotizadorflexibleapi/api/acceso" - }, - { - "PE-CotizadorExterno": "/cotizadorflexibleapi/api/CotizadorExterno" - }, - { - "PE-Token": "/cotizadorflexibleapi/Token" - } - ], - "CotFlex": [ - { - "PE-CotFlex": "/CotFlex" - } - ], - "WebConecta": [ - { - "PE-WebConecta": "/WebConecta" - } - ] - } - """ result = [] - #['AMCatVehiculos', 'cotizadorflexibleapi', 'CotFlex', 'WebConecta'] paths = list(api_info.keys()) path_count=len(paths) if path_count > pe_count_limit: @@ -333,29 +289,9 @@ def group_paths_by_path(api_info,pe_count_limit): each_result.extend(v) result.append(each_result) return result - # print(json.dumps(result,indent=2)) def bundle_path(each_group_bundle): - """ - [ - { - "PE-AMCatalogosVehiculos": "AMCatVehiculos" - }, - { - "PE-Acceso": "cotizadorflexibleapi" - }, - { - "PE-CotizadorExterno": "cotizadorflexibleapi" - }, - { - "PE-Token": "cotizadorflexibleapi" - }, - { - "PE-CotFlex": "CotFlex" - } - ] - """ outer_group = [] for each_group in each_group_bundle: subgroups = {} @@ -555,4 +491,4 @@ def export_debug_log(files,log_path='logs'): create_dir(log_path) for file,data in files.items(): file_name=f'{log_path}/{file}.json' - write_json(file_name,data) \ No newline at end of file + write_json(file_name,data) diff --git a/tools/proxy-endpoint-unifier/xorhybrid.py b/tools/proxy-endpoint-unifier/xorhybrid.py index 68ac38dad..a2e096766 100644 --- 
a/tools/proxy-endpoint-unifier/xorhybrid.py +++ b/tools/proxy-endpoint-unifier/xorhybrid.py @@ -37,4 +37,4 @@ def validate_api(self,api_type,proxy_bundle_path): if response.status_code == 200 : return True else: - return response.json() \ No newline at end of file + return response.json() From 18badeef1adad545e94bec8374cd22ca554990a9 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 20:03:54 +0530 Subject: [PATCH 05/28] feat: addressed flake8 for main proxy-endpoint-unifier wrapper --- tools/proxy-endpoint-unifier/main.py | 94 +++++++++++++++------------- 1 file changed, 49 insertions(+), 45 deletions(-) diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index 3f3a6dd46..94db6799a 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -18,13 +18,14 @@ from xorhybrid import ApigeeXorHybrid import utils + def main(): cfg = utils.parse_config('input.properties') proxy_dir = cfg['common']['input_apis'] proxy_dest_dir = cfg['common']['processed_apis'] proxy_bundle_directory = cfg['common']['proxy_bundle_directory'] - export_debug_file=cfg.getboolean('common','debug') - validation_enabled=cfg.getboolean('validate','enabled') + export_debug_file = cfg.getboolean('common', 'debug') + validation_enabled = cfg.getboolean('validate', 'enabled') utils.delete_folder(proxy_dest_dir) utils.delete_folder(proxy_bundle_directory) utils.create_dir(proxy_bundle_directory) @@ -36,56 +37,58 @@ def main(): for each_dir in proxies: each_proxy_dict = utils.read_proxy_artifacts( - f"{proxy_dir}/{each_dir}", - utils.parse_proxy_root(f"{proxy_dir}/{each_dir}") + f"{proxy_dir}/{each_dir}", + utils.parse_proxy_root(f"{proxy_dir}/{each_dir}") ) if len(each_proxy_dict) > 0: - each_proxy_rel=utils.get_proxy_objects_relationships(each_proxy_dict) - final_dict[each_dir]=each_proxy_dict - processed_dict[each_dir]=each_proxy_rel + each_proxy_rel = utils.get_proxy_objects_relationships( + each_proxy_dict) + final_dict[each_dir] = each_proxy_dict + processed_dict[each_dir] = each_proxy_rel processing_final_dict = final_dict.copy() - + path_group_map = {} - for each_api,each_api_info in processed_dict.items(): + for each_api, each_api_info in processed_dict.items(): path_group_map[each_api] = utils.get_api_path_groups(each_api_info) grouped_apis = {} - for each_api,base_path_info in path_group_map.items(): - grouped_apis[each_api]=utils.group_paths_by_path(base_path_info,proxy_endpoint_count) + for each_api, base_path_info in path_group_map.items(): + grouped_apis[each_api] = utils.group_paths_by_path( + base_path_info, proxy_endpoint_count) bundled_group = {} - for each_api,grouped_api in grouped_apis.items(): - bundled_group[each_api]=utils.bundle_path(grouped_api) + for each_api, grouped_api in grouped_apis.items(): + bundled_group[each_api] = utils.bundle_path(grouped_api) merged_pes = {} merged_objects = {} - for each_api,grouped_api in bundled_group.items(): - print(f'Processing API ====> {each_api} with {len(grouped_api)} groups') - for index,each_group in enumerate(grouped_api): - merged_objects[f"{each_api}_{index}"]={ - 'Policies':[], - 'TargetEndpoints':[], - 'ProxyEndpoints' :[] + for each_api, grouped_api in bundled_group.items(): + print(f'Processing API => {each_api} with {len(grouped_api)} groups') + for index, each_group in enumerate(grouped_api): + merged_objects[f"{each_api}_{index}"] = { + 'Policies': [], + 'TargetEndpoints': [], + 'ProxyEndpoints': [] } - for each_path,pes in each_group.items(): + for each_path, pes in 
each_group.items(): each_pe = '-'.join(pes) merged_pes[each_pe] = utils.merge_proxy_endpoints( processing_final_dict[each_api], each_path, pes ) - merged_objects[f"{each_api}_{index}"]['Name'] = f"{final_dict[each_api]['proxyName']}_{index}" - merged_objects[f"{each_api}_{index}"]['Policies'].extend([ item for pe in pes for item in processed_dict[each_api][pe]['Policies']]) - merged_objects[f"{each_api}_{index}"]['TargetEndpoints'].extend([ item for pe in pes for item in processed_dict[each_api][pe]['TargetEndpoints']]) - merged_objects[f"{each_api}_{index}"]['Policies'] = list(set(merged_objects[f"{each_api}_{index}"]['Policies'])) - merged_objects[f"{each_api}_{index}"]['TargetEndpoints'] = list(set(merged_objects[f"{each_api}_{index}"]['TargetEndpoints'])) - merged_objects[f"{each_api}_{index}"]['ProxyEndpoints'].append(each_pe) - - - - for each_api,grouped_api in bundled_group.items(): - for index,each_group in enumerate(grouped_api): + merged_objects[f"{each_api}_{index}"]['Name'] = f"{final_dict[each_api]['proxyName']}_{index}" # noqa + merged_objects[f"{each_api}_{index}"]['Policies'].extend( # noqa + [ item for pe in pes for item in processed_dict[each_api][pe]['Policies']]) # noqa + merged_objects[f"{each_api}_{index}"]['TargetEndpoints'].extend( # noqa + [ item for pe in pes for item in processed_dict[each_api][pe]['TargetEndpoints']]) # noqa + merged_objects[f"{each_api}_{index}"]['Policies'] = list(set(merged_objects[f"{each_api}_{index}"]['Policies'])) # noqa + merged_objects[f"{each_api}_{index}"]['TargetEndpoints'] = list(set(merged_objects[f"{each_api}_{index}"]['TargetEndpoints'])) # noqa + merged_objects[f"{each_api}_{index}"]['ProxyEndpoints'].append(each_pe) # noqa + + for each_api, grouped_api in bundled_group.items(): + for index, each_group in enumerate(grouped_api): utils.clone_proxies( f"{proxy_dir}/{each_api}", f"{proxy_dest_dir}/{each_api}_{index}", @@ -95,27 +98,28 @@ def main(): ) files = { - 'final_dict' : final_dict, - 'processed_dict' : processed_dict, - 'path_group_map' : path_group_map, - 'grouped_apis' : grouped_apis, - 'bundled_group' : bundled_group, - 'merged_pes' : merged_pes, - 'merged_objects' : merged_objects, + 'final_dict': final_dict, + 'processed_dict': processed_dict, + 'path_group_map': path_group_map, + 'grouped_apis': grouped_apis, + 'bundled_group': bundled_group, + 'merged_pes': merged_pes, + 'merged_objects': merged_objects, } if export_debug_file: utils.export_debug_log(files) if validation_enabled: - gcp_project_id=cfg['validate']['gcp_project_id'] - x=ApigeeXorHybrid(gcp_project_id) + gcp_project_id = cfg['validate']['gcp_project_id'] + x = ApigeeXorHybrid(gcp_project_id) x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN')) result = {} - bundled_proxies=utils.list_dir(proxy_bundle_directory) + bundled_proxies = utils.list_dir(proxy_bundle_directory) for each_bundle in bundled_proxies: - validation=x.validate_api('apis',f"{proxy_bundle_directory}/{each_bundle}") - result[each_bundle]=validation - print(f"{each_bundle} ==> Validation : {validation}") + validation = x.validate_api('apis',f"{proxy_bundle_directory}/{each_bundle}") # noqa + result[each_bundle] = validation + print(f"{each_bundle} ==> Validation : {validation}") + if __name__ == '__main__': main() From 7ccef07a097041fc323388433dce7f8abca32a8a Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 20:08:43 +0530 Subject: [PATCH 06/28] feat: addressed flake8 for xorhybrid proxy-endpoint-unifier wrapper --- tools/proxy-endpoint-unifier/xorhybrid.py | 24 
++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/tools/proxy-endpoint-unifier/xorhybrid.py b/tools/proxy-endpoint-unifier/xorhybrid.py index a2e096766..11ecadd55 100644 --- a/tools/proxy-endpoint-unifier/xorhybrid.py +++ b/tools/proxy-endpoint-unifier/xorhybrid.py @@ -17,24 +17,26 @@ import requests import os + class ApigeeXorHybrid: - def __init__(self,org): - self.baseurl=f"https://apigee.googleapis.com/v1/organizations/{org}" + def __init__(self, org): + self.baseurl = f"https://apigee.googleapis.com/v1/organizations/{org}" self.auth_header = {} - def set_auth_header(self,token): + def set_auth_header(self, token): self.auth_header = { - 'Authorization' : f"Bearer {token}" + 'Authorization': f"Bearer {token}" } - - def validate_api(self,api_type,proxy_bundle_path): + + def validate_api(self, api_type, proxy_bundle_path): api_name = os.path.basename(proxy_bundle_path).split('.zip')[0] - url = f"{self.baseurl}/{api_type}?name={api_name}&action=validate&validate=true" - files=[ - ('data',(api_name,open(proxy_bundle_path,'rb'),'application/zip')) + url = f"{self.baseurl}/{api_type}?name={api_name}&action=validate&validate=true" # noqa + files = [ + ('data', (api_name, open(proxy_bundle_path,'rb'), 'application/zip')) # noqa ] - response = requests.request("POST", url, headers=self.auth_header, data={}, files=files) - if response.status_code == 200 : + response = requests.request("POST", url, headers=self.auth_header, + data={}, files=files) + if response.status_code == 200: return True else: return response.json() From b81245213b2653b7595652dfa79326e9d96f0355 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 20:49:47 +0530 Subject: [PATCH 07/28] feat: addressed flake8 for utils proxy-endpoint-unifier wrapper --- tools/proxy-endpoint-unifier/utils.py | 414 +++++++++++++------------- 1 file changed, 213 insertions(+), 201 deletions(-) diff --git a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py index ee71c356f..b3b4dcc35 100644 --- a/tools/proxy-endpoint-unifier/utils.py +++ b/tools/proxy-endpoint-unifier/utils.py @@ -22,29 +22,33 @@ import shutil import zipfile + def parse_config(config_file): config = configparser.ConfigParser() config.read(config_file) return config + def get_proxy_endpoint_count(cfg): try: - proxy_endpoint_count=cfg.getint('common','proxy_endpoint_count') + proxy_endpoint_count = cfg.getint('common', 'proxy_endpoint_count') if not (proxy_endpoint_count > 0 and proxy_endpoint_count <= 5): - print('ERRROR: proxy_endpoint_count should be > Zero(0) & < Five(5)') + print('ERROR: Proxy Endpoints should be > Zero(0) & < Five(5)') sys.exit(1) except ValueError: print('proxy_endpoint_count should be Numberic') sys.exit(1) return proxy_endpoint_count + def create_dir(dir): try: os.makedirs(dir) except FileExistsError: print(f"INFO: {dir} already exists") -def list_dir(dir,isok=False): + +def list_dir(dir, isok=False): try: return os.listdir(dir) except FileNotFoundError: @@ -54,21 +58,23 @@ def list_dir(dir,isok=False): print(f"ERROR: Directory \"{dir}\" not found") sys.exit(1) + def get_proxy_entrypoint(dir): - files=list_dir(dir) - ent = [] + files = list_dir(dir) + ent = [] for eachfile in files: if eachfile.endswith(".xml"): ent.append(eachfile) - if len(ent)==1: - return os.path.join(dir,ent[0]) + if len(ent) == 1: + return os.path.join(dir, ent[0]) else: - if len(ent)>1: - print(f"ERROR: Directory \"{dir}\" contains multiple xml files at root") + if len(ent) > 1: + print(f"ERROR: Directory 
\"{dir}\" contains multiple xml files at root") # noqa else: - print(f"ERROR: Directory \"{dir}\" has no xml file at root") + print(f"ERROR: Directory \"{dir}\" has no xml file at root") # noqa return None + def parse_json(file): try: with open(file) as fl: @@ -78,6 +84,7 @@ def parse_json(file): print(f"ERROR: File \"{file}\" not found") return {} + def parse_xml(file): try: with open(file) as fl: @@ -88,51 +95,54 @@ def parse_xml(file): return {} -def write_json(file,data): +def write_json(file, data): try: - with open(file,'w') as fl: - fl.write(json.dumps(data,indent=2)) + with open(file, 'w') as fl: + fl.write(json.dumps(data, indent=2)) except FileNotFoundError: print(f"ERROR: File \"{file}\" not found") return False return True -def write_xml_from_dict(file,data): + +def write_xml_from_dict(file, data): try: - with open(file,'w') as fl: - fl.write(xmltodict.unparse(data,pretty=True)) + with open(file, 'w') as fl: + fl.write(xmltodict.unparse(data, pretty=True)) except FileNotFoundError: print(f"ERROR: File \"{file}\" not found") return False return True + def parse_proxy_root(dir): - file=get_proxy_entrypoint(dir) + file = get_proxy_entrypoint(dir) if file is None: return {} - doc=parse_xml(file) + doc = parse_xml(file) return doc -def read_proxy_artifacts(dir,entrypoint): - APIProxy=entrypoint['APIProxy'] + +def read_proxy_artifacts(dir, entrypoint): + APIProxy = entrypoint['APIProxy'] # Check if proxy has multiple endpoints if isinstance(APIProxy['ProxyEndpoints']['ProxyEndpoint'], list): proxyName = entrypoint['APIProxy']['@name'] proxy_dict = { - 'ProxyEndpoints':{}, - 'TargetEndpoints':{}, - 'proxyName':proxyName + 'ProxyEndpoints': {}, + 'TargetEndpoints': {}, + 'proxyName': proxyName } - ProxyEndpoints= APIProxy['ProxyEndpoints']['ProxyEndpoint'] - ProxyEndpoints = ( [ProxyEndpoints] if isinstance(ProxyEndpoints,str) else ProxyEndpoints) + ProxyEndpoints = APIProxy['ProxyEndpoints']['ProxyEndpoint'] + ProxyEndpoints = ([ProxyEndpoints] if isinstance(ProxyEndpoints,str) else ProxyEndpoints) # noqa for each_pe in ProxyEndpoints: - proxy_dict['ProxyEndpoints'][each_pe]=parse_xml(os.path.join(dir,'proxies',f"{each_pe}.xml")) + proxy_dict['ProxyEndpoints'][each_pe] = parse_xml(os.path.join(dir,'proxies',f"{each_pe}.xml")) # noqa - TargetEndpoints =APIProxy['TargetEndpoints']['TargetEndpoint'] - TargetEndpoints = ([TargetEndpoints] if isinstance(TargetEndpoints,str) else TargetEndpoints) + TargetEndpoints = APIProxy['TargetEndpoints']['TargetEndpoint'] + TargetEndpoints = ([TargetEndpoints] if isinstance(TargetEndpoints,str) else TargetEndpoints) # noqa for each_te in TargetEndpoints: - proxy_dict['TargetEndpoints'][each_te]=parse_xml(os.path.join(dir,'targets',f"{each_te}.xml")) - + proxy_dict['TargetEndpoints'][each_te]=parse_xml(os.path.join(dir,'targets',f"{each_te}.xml")) # noqa + # Skip when proxy has one endpoints else: print(f"Skipping Proxy ==> {entrypoint['APIProxy']['@name']}") @@ -141,44 +151,37 @@ def read_proxy_artifacts(dir,entrypoint): def get_all_policies_from_step(Step): - policies=[] - StepData=([Step] if isinstance(Step,dict) else Step) + policies = [] + StepData = ([Step] if isinstance(Step, dict) else Step) for eachStep in StepData: policies.append(eachStep['Name']) return policies -def get_all_policies_from_flow(Flow,fault_rule=False): - policies=[] + +def get_all_policies_from_flow(Flow, fault_rule=False): + policies = [] if not fault_rule: - Request=([] if Flow['Request'] is None else - ( - [Flow['Request']['Step']] if 
isinstance(Flow['Request']['Step'],dict) - else Flow['Request']['Step'] - ) - ) - Response=([] if Flow['Response'] is None else - ( - [Flow['Response']['Step']] if isinstance(Flow['Response']['Step'],dict) - else Flow['Response']['Step'] - ) - ) + Request = ([] if Flow['Request'] is None else ( + [Flow['Request']['Step']] if isinstance(Flow['Request']['Step'], dict) # noqa + else Flow['Request']['Step'])) + Response = ([] if Flow['Response'] is None else ( + [Flow['Response']['Step']] if isinstance(Flow['Response']['Step'], dict) # noqa + else Flow['Response']['Step'])) for each_flow in Request: policies.extend(get_all_policies_from_step(each_flow)) for each_flow in Response: policies.extend(get_all_policies_from_step(each_flow)) else: - FaultRules = ([] if Flow is None else - ( - [Flow['Step']] if isinstance(Flow['Step'],dict) - else Flow['Step'] - ) - ) + FaultRules = ([] if Flow is None else ( + [Flow['Step']] if isinstance(Flow['Step'], dict) + else Flow['Step'])) for each_step in FaultRules: policies.extend(get_all_policies_from_step(each_step)) return policies -def get_all_policies_from_endpoint(endpointData,endpointType): - policies=[] + +def get_all_policies_from_endpoint(endpointData, endpointType): + policies = [] policies.extend( get_all_policies_from_flow( endpointData[endpointType]['PreFlow'] @@ -191,13 +194,11 @@ def get_all_policies_from_endpoint(endpointData,endpointType): ) Flows = ( - [] - if endpointData[endpointType]['Flows'] is None else - ( - [endpointData[endpointType]['Flows']['Flow']] - if isinstance( - endpointData[endpointType]['Flows']['Flow'],dict) - else + [] if endpointData[endpointType]['Flows'] is None else ( + [endpointData[endpointType]['Flows']['Flow']] if isinstance( + endpointData[endpointType]['Flows']['Flow'], + dict) + else endpointData[endpointType]['Flows']['Flow'] )) @@ -207,17 +208,18 @@ def get_all_policies_from_endpoint(endpointData,endpointType): eachFlow ) ) - if 'DefaultFaultRule' in endpointData[endpointType]: + if 'DefaultFaultRule' in endpointData[endpointType]: policies.extend( - get_all_policies_from_flow(endpointData[endpointType]['DefaultFaultRule'],True) + get_all_policies_from_flow(endpointData[endpointType]['DefaultFaultRule'], True) # noqa ) return policies + def get_target_endpoints(ProxyEndpointData): - target_endpoints=[] - routes = ( - [ProxyEndpointData['RouteRule']] - if isinstance(ProxyEndpointData['RouteRule'],dict) + target_endpoints = [] + routes = ( + [ProxyEndpointData['RouteRule']] + if isinstance(ProxyEndpointData['RouteRule'], dict) else ProxyEndpointData['RouteRule'] ) for eachRoute in routes: @@ -225,57 +227,51 @@ def get_target_endpoints(ProxyEndpointData): target_endpoints.append(eachRoute['TargetEndpoint']) return target_endpoints + def get_proxy_objects_relationships(proxy_dict): proxy_object_map = {} ProxyEndpoints = proxy_dict['ProxyEndpoints'] - for ProxyEndpoint,ProxyEndpointData in ProxyEndpoints.items(): - proxy_object_map[ProxyEndpoint]={} - - target_endpoints = get_target_endpoints(ProxyEndpointData['ProxyEndpoint']) - TargetEndpointsData = { te :proxy_dict['TargetEndpoints'][te] for te in target_endpoints} + for ProxyEndpoint, ProxyEndpointData in ProxyEndpoints.items(): + proxy_object_map[ProxyEndpoint] = {} + target_endpoints = get_target_endpoints(ProxyEndpointData['ProxyEndpoint']) # noqa + TargetEndpointsData = {te: proxy_dict['TargetEndpoints'][te] for te in target_endpoints} # noqa policies = [] - policies.extend(get_all_policies_from_endpoint(ProxyEndpointData,'ProxyEndpoint')) - for 
_,each_te in TargetEndpointsData.items(): - policies.extend(get_all_policies_from_endpoint(each_te,'TargetEndpoint')) - proxy_object_map[ProxyEndpoint]={ - 'Policies' : policies, - 'BasePath' : ProxyEndpointData['ProxyEndpoint']['HTTPProxyConnection']['BasePath'], - 'TargetEndpoints' : target_endpoints, + policies.extend(get_all_policies_from_endpoint(ProxyEndpointData, 'ProxyEndpoint')) # noqa + for _, each_te in TargetEndpointsData.items(): + policies.extend(get_all_policies_from_endpoint(each_te, 'TargetEndpoint')) # noqa + proxy_object_map[ProxyEndpoint] = { + 'Policies': policies, + 'BasePath': ProxyEndpointData['ProxyEndpoint']['HTTPProxyConnection']['BasePath'], # noqa + 'TargetEndpoints': target_endpoints, } - return proxy_object_map + def get_api_path_groups(each_api_info): - api_path_group_map={} - for pe,pe_info in each_api_info.items(): + api_path_group_map = {} + for pe, pe_info in each_api_info.items(): if pe_info['BasePath'] is None: if '_null_' in api_path_group_map: - # api_path_group_map['_null_'].append({ pe :pe_info['BasePath']}) - api_path_group_map['_null_'].append({ pe :None}) + api_path_group_map['_null_'].append({pe: None}) else: - # api_path_group_map['_null_']=[{ pe :pe_info['BasePath']}] - api_path_group_map['_null_']=[{ pe :None}] + api_path_group_map['_null_'] = [{pe: None}] else: - base_path_split=[ i for i in pe_info['BasePath'].split('/') if i!= ""] + base_path_split = [ i for i in pe_info['BasePath'].split('/') if i != ""] # noqa if base_path_split[0] in api_path_group_map: api_path_group_map[base_path_split[0]].append( - # { pe :pe_info['BasePath']} - { pe :base_path_split[0]} - ) + {pe: base_path_split[0]}) else: - # api_path_group_map[base_path_split[0]]=[{ pe :pe_info['BasePath']}] - api_path_group_map[base_path_split[0]]=[{ pe :base_path_split[0]}] + api_path_group_map[base_path_split[0]] = [{pe: base_path_split[0]}] # noqa return api_path_group_map -def group_paths_by_path(api_info,pe_count_limit): +def group_paths_by_path(api_info, pe_count_limit): result = [] paths = list(api_info.keys()) - path_count=len(paths) + path_count = len(paths) if path_count > pe_count_limit: - # count=0 - for i in range(0,path_count,pe_count_limit): - each_result=[] + for i in range(0, path_count, pe_count_limit): + each_result = [] if i+pe_count_limit > path_count: for k in paths[i:path_count]: each_result.extend(api_info[k]) @@ -284,9 +280,9 @@ def group_paths_by_path(api_info,pe_count_limit): each_result.extend(api_info[k]) result.append(each_result) else: - each_result=[] - for _,v in api_info.items(): - each_result.extend(v) + each_result = [] + for _, v in api_info.items(): + each_result.extend(v) result.append(each_result) return result @@ -296,130 +292,141 @@ def bundle_path(each_group_bundle): for each_group in each_group_bundle: subgroups = {} for each_pe in each_group: - path=list(each_pe.values())[0] - proxy_ep=list(each_pe.keys())[0] + path = list(each_pe.values())[0] + proxy_ep = list(each_pe.keys())[0] if path in subgroups: subgroups[path].append(proxy_ep) else: - subgroups[path]=[proxy_ep] + subgroups[path] = [proxy_ep] outer_group.append(subgroups) return outer_group -def apply_condition(step,condition): - step_or_rule = step.copy() + +def apply_condition(step, condition): + step_or_rule = step.copy() if 'Condition' in step_or_rule: if step_or_rule['Condition'] is None: - step_or_rule['Condition']=condition + step_or_rule['Condition'] = condition elif len(step_or_rule['Condition'].strip()) > 0: if step_or_rule['Condition'].strip().startswith('('): - 
step_or_rule['Condition']= f"{condition} and {step_or_rule['Condition']}" + step_or_rule['Condition'] = f"{condition} and {step_or_rule['Condition']}" # noqa else: - step_or_rule['Condition']= f"{condition} and {step_or_rule['Condition']}" + step_or_rule['Condition'] = f"{condition} and {step_or_rule['Condition']}" # noqa else: - step_or_rule['Condition']=condition + step_or_rule['Condition'] = condition else: - step_or_rule['Condition']=condition + step_or_rule['Condition'] = condition return step_or_rule -def process_steps(step,condition): + +def process_steps(step, condition): processed_step = [] if step is None: return processed_step - elif isinstance(step['Step'],dict): - processed_step = [ apply_condition(step['Step'],condition) ] - # processed_step = [ {'Step': apply_condition(step['Step'],condition)} ] - elif isinstance(step['Step'],list): - processed_step = [ apply_condition(i,condition) for i in step['Step'] ] - # processed_step = [ {'Step':apply_condition(i,condition)} for i in step['Step'] ] + elif isinstance(step['Step'], dict): + processed_step = [apply_condition(step['Step'], condition)] + elif isinstance(step['Step'], list): + processed_step = [apply_condition(i, condition) for i in step['Step']] else: return processed_step return processed_step -def process_flow(flow,condition): - processed_flow=flow.copy() + +def process_flow(flow, condition): + processed_flow = flow.copy() if flow['Request'] is not None: - processed_flow['Request']['Step'] = process_steps(flow['Request'],condition) + processed_flow['Request']['Step'] = process_steps(flow['Request'], + condition) if flow['Response'] is not None: - processed_flow['Response']['Step'] = process_steps(flow['Response'],condition) - processed_flow_with_condition = apply_condition(processed_flow,condition) + processed_flow['Response']['Step'] = process_steps(flow['Response'], + condition) + processed_flow_with_condition = apply_condition(processed_flow, + condition) return processed_flow_with_condition -def process_route_rules(route_rules,condition): - processed_rr =[] - for each_rr in (route_rules if isinstance(route_rules,list) else [route_rules]): - each_processed_rr=apply_condition(each_rr,condition) + +def process_route_rules(route_rules, condition): + processed_rr = [] + for each_rr in (route_rules if isinstance(route_rules, list) + else [route_rules]): + each_processed_rr = apply_condition(each_rr, condition) processed_rr.append(each_processed_rr) return processed_rr -def merge_proxy_endpoints(api_dict,basepath,pes): - merged_pe = {'ProxyEndpoint' : {}} - for each_pe,each_pe_info in api_dict['ProxyEndpoints'].items(): - if each_pe in pes : - original_basepath = each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['BasePath'] + +def merge_proxy_endpoints(api_dict, basepath, pes): + merged_pe = {'ProxyEndpoint': {}} + for each_pe, each_pe_info in api_dict['ProxyEndpoints'].items(): + if each_pe in pes: + original_basepath = each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['BasePath'] # noqa # TODO : Build full Request path - condition=(original_basepath if original_basepath is None else f'(request.path Matches "{original_basepath}*")') + condition=(original_basepath if original_basepath is None else f'(request.path Matches "{original_basepath}*")') # noqa copied_flows = ( - None if each_pe_info['ProxyEndpoint']['Flows'] is None else each_pe_info['ProxyEndpoint']['Flows'].copy() + None if each_pe_info['ProxyEndpoint']['Flows'] is None else each_pe_info['ProxyEndpoint']['Flows'].copy() # noqa ) - original_flows = ([] if 
copied_flows is None else ([copied_flows['Flow']] if isinstance(copied_flows['Flow'],dict) else copied_flows['Flow'])) - - if len(merged_pe['ProxyEndpoint'])==0: - merged_pe['ProxyEndpoint']={ - '@name': [], - 'Description': None, - 'FaultRules': None, + original_flows = ([] if copied_flows is None else + ([copied_flows['Flow']] if isinstance(copied_flows['Flow'],dict) else copied_flows['Flow'])) # noqa + + if len(merged_pe['ProxyEndpoint']) == 0: + merged_pe['ProxyEndpoint'] = { + '@name': [], + 'Description': None, + 'FaultRules': None, 'PreFlow': { - '@name': 'PreFlow', - 'Request': {'Step':[]}, - 'Response': {'Step':[]}, - }, + '@name': 'PreFlow', + 'Request': {'Step': []}, + 'Response': {'Step': []}, + }, 'PostFlow': { - '@name': 'PostFlow', - 'Request': {'Step':[]}, - 'Response': {'Step':[]}, + '@name': 'PostFlow', + 'Request': {'Step': []}, + 'Response': {'Step': []}, }, - 'Flows': {'Flow':[]}, - 'HTTPProxyConnection': {'BasePath': '', 'Properties': {}, 'VirtualHost': ''}, + 'Flows': {'Flow': []}, + 'HTTPProxyConnection': {'BasePath': '', + 'Properties': {}, + 'VirtualHost': ''}, 'RouteRule': [] } - - merged_pe['ProxyEndpoint']['Description'] = each_pe_info['ProxyEndpoint']['Description'] - merged_pe['ProxyEndpoint']['FaultRules'] = each_pe_info['ProxyEndpoint']['FaultRules'] - merged_pe['ProxyEndpoint']['HTTPProxyConnection']['BasePath']=(basepath if basepath is None else f'/{basepath}') - merged_pe['ProxyEndpoint']['HTTPProxyConnection']['Properties']=each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['Properties'] - merged_pe['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost']=each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost'] - - merged_pe['ProxyEndpoint']['@name'].append(each_pe_info['ProxyEndpoint']['@name']) + + merged_pe['ProxyEndpoint']['Description'] = each_pe_info['ProxyEndpoint']['Description'] # noqa + merged_pe['ProxyEndpoint']['FaultRules'] = each_pe_info['ProxyEndpoint']['FaultRules'] # noqa + merged_pe['ProxyEndpoint']['HTTPProxyConnection']['BasePath'] = (basepath if basepath is None else f'/{basepath}') # noqa + merged_pe['ProxyEndpoint']['HTTPProxyConnection']['Properties'] = each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['Properties'] # noqa + merged_pe['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost'] = each_pe_info['ProxyEndpoint']['HTTPProxyConnection']['VirtualHost'] # noqa + + merged_pe['ProxyEndpoint']['@name'].append(each_pe_info['ProxyEndpoint']['@name']) # noqa merged_pe['ProxyEndpoint']['RouteRule'].extend( - process_route_rules(each_pe_info['ProxyEndpoint']['RouteRule'],condition) + process_route_rules(each_pe_info['ProxyEndpoint']['RouteRule'],condition) # noqa ) merged_pe['ProxyEndpoint']['PreFlow']['Request']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) + process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) # noqa ) merged_pe['ProxyEndpoint']['PreFlow']['Response']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) + process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) # noqa ) merged_pe['ProxyEndpoint']['PostFlow']['Request']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) + process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) # noqa ) merged_pe['ProxyEndpoint']['PostFlow']['Response']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) + 
process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) # noqa ) for each_flow in original_flows: merged_pe['ProxyEndpoint']['Flows']['Flow'].append( - process_flow(each_flow,condition) + process_flow(each_flow, condition) ) - merged_pe['ProxyEndpoint']['@name'] = "-".join(merged_pe['ProxyEndpoint']['@name']) - + merged_pe['ProxyEndpoint']['@name'] = "-".join(merged_pe['ProxyEndpoint']['@name']) # noqa return merged_pe -def copy_folder(src,dst): + +def copy_folder(src, dst): try: - shutil.copytree(src, dst) + shutil.copytree(src, dst) except FileNotFoundError as e: print(e) sys.exit(1) + def delete_folder(src): try: shutil.rmtree(src) @@ -427,6 +434,7 @@ def delete_folder(src): print(f'Ignoring : {e}') return + def delete_file(src): try: os.remove(src) @@ -434,20 +442,22 @@ def delete_file(src): print(f'Ignoring : {e}') return -def clean_up_artifacts(target_dir,artifacts_to_retains): - for file in list_dir(target_dir,True): - each_policy_file=file.split('.xml')[0] + +def clean_up_artifacts(target_dir, artifacts_to_retains): + for file in list_dir(target_dir, True): + each_policy_file = file.split('.xml')[0] if each_policy_file not in artifacts_to_retains: delete_file(f"{target_dir}/{file}") -def filter_objects(obj_data,obj_type,targets): + +def filter_objects(obj_data, obj_type, targets): result = None if obj_data is None: return result - elif isinstance(obj_data[obj_type],str): - result = ({ obj_type: obj_data[obj_type] } if obj_data[obj_type] in targets else None ) - elif isinstance(obj_data[obj_type],list): - result = { obj_type: [ v for v in obj_data[obj_type] if v in targets ] } + elif isinstance(obj_data[obj_type], str): + result = ({ obj_type: obj_data[obj_type] } if obj_data[obj_type] in targets else None ) # noqa + elif isinstance(obj_data[obj_type], list): + result = {obj_type: [v for v in obj_data[obj_type] if v in targets]} else: return result return result @@ -457,38 +467,40 @@ def zipdir(path, ziph): # ziph is zipfile handle for root, dirs, files in os.walk(path): for file in files: - ziph.write(os.path.join(root, file), - os.path.relpath(os.path.join(root, file), + ziph.write(os.path.join(root, file), + os.path.relpath(os.path.join(root, file), os.path.join(path, '..'))) - -def clone_proxies(source_dir,target_dir,objects,merged_pes,proxy_bundle_directory): - target_dir=f"{target_dir}/apiproxy" - copy_folder(source_dir,target_dir) - file=get_proxy_entrypoint(target_dir) - root=parse_xml(file) + + +def clone_proxies(source_dir, target_dir, + objects, merged_pes, proxy_bundle_directory): + target_dir = f"{target_dir}/apiproxy" + copy_folder(source_dir, target_dir) + file = get_proxy_entrypoint(target_dir) + root = parse_xml(file) delete_file(file) - root['APIProxy']['@name']=objects['Name'] - root['APIProxy']['Policies']= filter_objects(root['APIProxy']['Policies'],'Policy',objects['Policies']) - # root['APIProxy']['ProxyEndpoints']=filter_objects(root['APIProxy']['ProxyEndpoints'],'Policies',objects['ProxyEndpoints']) - root['APIProxy']['TargetEndpoints']=filter_objects(root['APIProxy']['TargetEndpoints'],'TargetEndpoint',objects['TargetEndpoints']) - # root['APIProxy']['Resources']=filter_objects(root['APIProxy']['Policies'],'Policies',objects['Policies']) - clean_up_artifacts(f"{target_dir}/policies",objects['Policies']) - clean_up_artifacts(f"{target_dir}/targets",objects['TargetEndpoints']) + root['APIProxy']['@name'] = objects['Name'] + root['APIProxy']['Policies'] = filter_objects( + root['APIProxy']['Policies'], 'Policy', objects['Policies']) + 
root['APIProxy']['TargetEndpoints'] = filter_objects( + root['APIProxy']['TargetEndpoints'], 'TargetEndpoint', objects['TargetEndpoints']) # noqa + clean_up_artifacts(f"{target_dir}/policies", objects['Policies']) + clean_up_artifacts(f"{target_dir}/targets", objects['TargetEndpoints']) for pe in objects['ProxyEndpoints']: - write_xml_from_dict(f"{target_dir}/proxies/{pe}.xml",merged_pes[pe]) - clean_up_artifacts(f"{target_dir}/proxies",objects['ProxyEndpoints']) - # root['APIProxy']['ProxyEndpoints']=filter_objects(root['APIProxy']['ProxyEndpoints'],'ProxyEndpoint',objects['ProxyEndpoints']) - root['APIProxy']['ProxyEndpoints']= {'ProxyEndpoint' : ( objects['ProxyEndpoints'] if len(objects['ProxyEndpoints']) > 1 else objects['ProxyEndpoints'][0] )} + write_xml_from_dict(f"{target_dir}/proxies/{pe}.xml", merged_pes[pe]) + clean_up_artifacts(f"{target_dir}/proxies", objects['ProxyEndpoints']) + root['APIProxy']['ProxyEndpoints'] = {'ProxyEndpoint': ( + objects['ProxyEndpoints'] if len(objects['ProxyEndpoints']) > 1 else objects['ProxyEndpoints'][0] )} # noqa transformed_file = file.split('/') - transformed_file[-1]=f"{objects['Name']}.xml" - write_xml_from_dict("/".join(transformed_file),root) + transformed_file[-1] = f"{objects['Name']}.xml" + write_xml_from_dict("/".join(transformed_file), root) delete_folder(f"{target_dir}/manifests") - with zipfile.ZipFile(f"{proxy_bundle_directory}/{objects['Name']}.zip", 'w', zipfile.ZIP_DEFLATED) as zipf: + with zipfile.ZipFile(f"{proxy_bundle_directory}/{objects['Name']}.zip", 'w', zipfile.ZIP_DEFLATED) as zipf: # noqa zipdir(target_dir, zipf) -def export_debug_log(files,log_path='logs'): +def export_debug_log(files, log_path='logs'): create_dir(log_path) - for file,data in files.items(): - file_name=f'{log_path}/{file}.json' - write_json(file_name,data) + for file, data in files.items(): + file_name = f'{log_path}/{file}.json' + write_json(file_name, data) From 740316e0c2a9413a47484a18c3a0d9c0fcd343a8 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 21:35:37 +0530 Subject: [PATCH 08/28] feat: added test scripts for proxy-endpoint-unifier --- tools/proxy-endpoint-unifier/input.properties | 8 ++-- tools/proxy-endpoint-unifier/pipeline.sh | 47 +++++++++++++++++++ .../apiproxy/manifests/manifest.xml | 25 ++++++++++ .../apiproxy/policies/ExtractVariables-3.xml | 13 +++++ .../apiproxy/policies/Populate-Cache-1.xml | 15 ++++++ .../api_bundles/apiproxy/policies/Quota-1.xml | 15 ++++++ .../policies/Statistics-Collector-1.xml | 8 ++++ .../apiproxy/proxies/ProxyEndpoint-1.xml | 22 +++++++++ .../apiproxy/proxies/ProxyEndpoint-2.xml | 22 +++++++++ .../apiproxy/proxies/ProxyEndpoint-3.xml | 22 +++++++++ .../apiproxy/proxies/ProxyEndpoint-4.xml | 22 +++++++++ .../apiproxy/proxies/ProxyEndpoint-5.xml | 22 +++++++++ .../api_bundles/apiproxy/proxies/default.xml | 29 ++++++++++++ .../api_bundles/apiproxy/targets/default.xml | 22 +++++++++ .../test/api_bundles/apiproxy/test-proxy.xml | 35 ++++++++++++++ 15 files changed, 323 insertions(+), 4 deletions(-) create mode 100644 tools/proxy-endpoint-unifier/pipeline.sh create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml create mode 100644 
tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml diff --git a/tools/proxy-endpoint-unifier/input.properties b/tools/proxy-endpoint-unifier/input.properties index f2d6ab305..0e51cf008 100644 --- a/tools/proxy-endpoint-unifier/input.properties +++ b/tools/proxy-endpoint-unifier/input.properties @@ -13,12 +13,12 @@ # limitations under the License. [common] -input_apis=apis -processed_apis=transformed -proxy_bundle_directory=transformed_bundles +input_apis=test/api_bundles +processed_apis=test/transformed +proxy_bundle_directory=test/transformed_bundles proxy_endpoint_count=4 debug=false [validate] enabled=true -gcp_project_id=apigee-payg-377208 +gcp_project_id=xxx-xxx-xxx diff --git a/tools/proxy-endpoint-unifier/pipeline.sh b/tools/proxy-endpoint-unifier/pipeline.sh new file mode 100644 index 000000000..e0a1a128a --- /dev/null +++ b/tools/proxy-endpoint-unifier/pipeline.sh @@ -0,0 +1,47 @@ +#!/bin/sh + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
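+
+# Assumptions for running this pipeline: `python3` and `gcloud` are available
+# on the PATH, and the active gcloud account can call the Apigee APIs used by
+# the validation step. The script regenerates input.properties below, so any
+# local edits to that file are overwritten on each run.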
+ +set -e + +SCRIPTPATH="$( cd "$(dirname "$0")" || exit >/dev/null 2>&1 ; pwd -P )" + +# Clean up previously generated files +rm -rf "$SCRIPTPATH/input.properties" +rm -rf "$SCRIPTPATH/transformed" +rm -rf "$SCRIPTPATH/transformed_bundles" + +# Generate input file +cat > "$SCRIPTPATH/input.properties" << EOF +[common] +input_apis=$SCRIPTPATH/test/api_bundles +processed_apis=$SCRIPTPATH/transformed +proxy_bundle_directory=$SCRIPTPATH/transformed_bundles +proxy_endpoint_count=4 +debug=true + +[validate] +enabled=true +gcp_project_id=$APIGEE_ORG +EOF + +# Install Dependencies +python3 -m pip install -r "$SCRIPTPATH/requirements.txt" + +# Generate Gcloud Acccess Token +export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token) + +# Execute Utility +python3 "$SCRIPTPATH/main.py" diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml new file mode 100644 index 000000000..8a782da6d --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml new file mode 100644 index 000000000..1160c4786 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml @@ -0,0 +1,13 @@ + + + response + + + $.results[0].geometry.location.lat + + + $.results[0].geometry.location.lng + + + geocoderesponse + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml new file mode 100644 index 000000000..ef28107bf --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml @@ -0,0 +1,15 @@ + + + Populate Cache-1 + + + my + test + + test cache + Exclusive + + 3600 + + request.content + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml new file mode 100644 index 000000000..04a895218 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml @@ -0,0 +1,15 @@ + + + Quota-1 + + + 1 + false + false + month + 2023-8-7 12:00:00 + + 20 + 5 + + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml new file mode 100644 index 000000000..1f6187574 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml @@ -0,0 +1,8 @@ + + + Statistics Collector-1 + + + value + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml new file mode 100644 index 000000000..fb90c67bf --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + /test-policy-path2 + + default + + + default + + \ No newline at end of file diff --git 
a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml new file mode 100644 index 000000000..81e309c1c --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + /test-policy-3 + + default + + + default + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml new file mode 100644 index 000000000..463f953cc --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + /test-policy-4 + + default + + + default + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml new file mode 100644 index 000000000..baed69e01 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + /test-policy-5 + + default + + + default + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml new file mode 100644 index 000000000..63ee19793 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + /test-policy-6 + + default + + + default + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml new file mode 100644 index 000000000..bb1e99411 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml @@ -0,0 +1,29 @@ + + + + + + + + Quota-1 + + + Populate-Cache-1 + + + + + + + + + + + /test-policy + + default + + + default + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml new file mode 100644 index 000000000..b131e9d78 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + ExtractVariables-3 + + + + + + + http://mocktarget.apigee.net/ + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml new file mode 100644 index 000000000..64bb36a97 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml @@ -0,0 +1,35 @@ + + + /test-proxy + + 1691042549827 + opdk@google.com + + test-proxy + 1691559297323 + opdk@google.com + SHA-512:b3eda74a317897b887e184350e869b373215daca9325d2884f6f95da113decbb86f4f168760354e81f32aa1826ab7e7db3015562f5e655673b0ed017db143770:dc-1 + + ExtractVariables-3 + Populate-Cache-1 + Populate-Cache-without-expiry + Populate-Cache + Quota-1 + Response-Cache + Statistics-Collector-1 + + + ProxyEndpoint-1 + ProxyEndpoint-2 + ProxyEndpoint-3 + ProxyEndpoint-4 + ProxyEndpoint-5 + default + + + + + + default + + From 
cf6d1ff5cdb940de93b9fa9c7a97aaaaacd3dffa Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 21:39:58 +0530 Subject: [PATCH 09/28] feat: added missing licenses --- .../test/api_bundles/apiproxy/manifests/manifest.xml | 12 ++++++++++++ .../apiproxy/policies/ExtractVariables-3.xml | 12 ++++++++++++ .../apiproxy/policies/Populate-Cache-1.xml | 12 ++++++++++++ .../test/api_bundles/apiproxy/policies/Quota-1.xml | 12 ++++++++++++ .../apiproxy/policies/Statistics-Collector-1.xml | 12 ++++++++++++ .../api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml | 12 ++++++++++++ .../api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml | 12 ++++++++++++ .../api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml | 12 ++++++++++++ .../api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml | 12 ++++++++++++ .../api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml | 12 ++++++++++++ .../test/api_bundles/apiproxy/proxies/default.xml | 12 ++++++++++++ .../test/api_bundles/apiproxy/targets/default.xml | 12 ++++++++++++ .../test/api_bundles/apiproxy/test-proxy.xml | 12 ++++++++++++ 13 files changed, 156 insertions(+) diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml index 8a782da6d..3022d76cf 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml index 1160c4786..d6dc23438 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/ExtractVariables-3.xml @@ -1,4 +1,16 @@ + response diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml index ef28107bf..1fece47bc 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml @@ -1,4 +1,16 @@ + Populate Cache-1 diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml index 04a895218..1d85c2be4 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Quota-1.xml @@ -1,4 +1,16 @@ + Quota-1 diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml index 1f6187574..693be0e90 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Statistics-Collector-1.xml @@ -1,4 +1,16 @@ + Statistics Collector-1 diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml index fb90c67bf..6d6742543 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml +++ 
b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-1.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml index 81e309c1c..dd7958e0c 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-2.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml index 463f953cc..2eaffbc11 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-3.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml index baed69e01..fd6ff8929 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml index 63ee19793..f9f51ccb4 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-5.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml index bb1e99411..fe26e2c40 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml index b131e9d78..a8286b6d2 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml @@ -1,4 +1,16 @@ + diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml index 64bb36a97..f9a5201c7 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml @@ -1,4 +1,16 @@ + /test-proxy From f3ff9a01f28e9c015b16da09df61f98bb9b0f127 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 22:02:18 +0530 Subject: [PATCH 10/28] feat: fixed shell lint and updated README.md --- README.md | 2 ++ tools/proxy-endpoint-unifier/README.md | 4 +--- tools/proxy-endpoint-unifier/pipeline.sh | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 144532a37..4c707765c 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,8 @@ Apigee products. A tool to generate topologically sorted Shared Flow dependencies. 
- [Apigee Envoy Quickstart Toolkit](tools/apigee-envoy-quickstart) - A tool to set up the sample deployments of Apigee Envoy. +- [Apigee API Proxy Endpoint Unifier](tools/proxy-endpoint-unifier) - + A tool to unify/split proxy endpoints based on API basepath. ## Labs diff --git a/tools/proxy-endpoint-unifier/README.md b/tools/proxy-endpoint-unifier/README.md index 85709c3d2..e7256920b 100644 --- a/tools/proxy-endpoint-unifier/README.md +++ b/tools/proxy-endpoint-unifier/README.md @@ -1,7 +1,5 @@ -# Apigee OPDK to Apigee X/Hybrid API Proxy Endpoint Unifier +# Apigee API Proxy Endpoint Unifier - -## Objective Apigee X has a limitation of hosting only 5 Proxy Endpoints per proxy.Apigee OPDK /Edge has no such limitaion. Objective is take a proxy bundle and smartly convert them into conditional flows and group them with other proxy endpoints. diff --git a/tools/proxy-endpoint-unifier/pipeline.sh b/tools/proxy-endpoint-unifier/pipeline.sh index e0a1a128a..94feb5cf7 100644 --- a/tools/proxy-endpoint-unifier/pipeline.sh +++ b/tools/proxy-endpoint-unifier/pipeline.sh @@ -41,7 +41,8 @@ EOF python3 -m pip install -r "$SCRIPTPATH/requirements.txt" # Generate Gcloud Acccess Token -export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token) +APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token) +export APIGEE_ACCESS_TOKEN # Execute Utility python3 "$SCRIPTPATH/main.py" From 655e5aff29d6a0273fefbf87b919b5dc3a9cce09 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Mon, 14 Aug 2023 22:05:05 +0530 Subject: [PATCH 11/28] feat: updated CODEOWNERS --- CODEOWNERS | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index caf8da8f1..8e5385587 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -36,4 +36,5 @@ /tools/oas-configurable-proxy @danistrebel /tools/pipeline-linter @seymen @danistrebel /tools/pipeline-runner @seymen @danistrebel -/tools/sf-dependency-list @yuriylesyuk \ No newline at end of file +/tools/sf-dependency-list @yuriylesyuk +/tools/proxy-endpoint-unifier @anaik91 From cbae7326cf2475efddc21a018034cc85a40d0512 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Tue, 15 Aug 2023 15:39:10 +0530 Subject: [PATCH 12/28] fix: added execute permissions on pipeline.sh --- tools/proxy-endpoint-unifier/pipeline.sh | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 tools/proxy-endpoint-unifier/pipeline.sh diff --git a/tools/proxy-endpoint-unifier/pipeline.sh b/tools/proxy-endpoint-unifier/pipeline.sh old mode 100644 new mode 100755 From 06fd5fa70278c2b20c1defb6240389aa0811fcc0 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Tue, 15 Aug 2023 17:02:41 +0530 Subject: [PATCH 13/28] feat: added pip package install to pipeline-runner --- tools/pipeline-runner/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/pipeline-runner/Dockerfile b/tools/pipeline-runner/Dockerfile index 9d2fc55c2..bcbdb2742 100644 --- a/tools/pipeline-runner/Dockerfile +++ b/tools/pipeline-runner/Dockerfile @@ -33,7 +33,8 @@ RUN apk add --no-cache \ freetype-dev \ harfbuzz \ ca-certificates \ - ttf-freefont + ttf-freefont \ + py-pip # Reduce nighly log (note: -ntp requires maven 3.6.1+) RUN mv /usr/bin/mvn /usr/bin/_mvn &&\ From b245415ceac9fce9b5247d6261497058faf9fe79 Mon Sep 17 00:00:00 2001 From: Omid Tahouri Date: Tue, 15 Aug 2023 14:49:36 +0100 Subject: [PATCH 14/28] fix: changes to proxy endpoint unifier readme --- tools/proxy-endpoint-unifier/README.md | 29 +++++++++++++------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git 
a/tools/proxy-endpoint-unifier/README.md b/tools/proxy-endpoint-unifier/README.md index e7256920b..882837fb8 100644 --- a/tools/proxy-endpoint-unifier/README.md +++ b/tools/proxy-endpoint-unifier/README.md @@ -1,32 +1,33 @@ # Apigee API Proxy Endpoint Unifier -Apigee X has a limitation of hosting only 5 Proxy Endpoints per proxy.Apigee OPDK /Edge has no such limitaion. -Objective is take a proxy bundle and smartly convert them into conditional flows and group them with other proxy endpoints. +Apigee X and hybrid have a limitation of hosting up to 5 Proxy Endpoints per API Proxy. Apigee Edge has no such limitation. +The objective of this tool is to take a proxy bundle and intelligently convert its proxy endpoints into logically +grouped conditional flows, in order to stay within the Proxy Endpoint limit. ## Disclaimer -This is not an Officially Supported Google Product! +This is not an officially supported Google product. -## Pre-Requisites -* python3.x -* Please Install required Python Libs +## Prerequisites +* `python3` +* Please install the required Python dependencies ``` - python3 -m pip install requirements.txt + python3 -m pip install -r requirements.txt ``` * Please fill in `input.properties` ``` [common] input_apis=apis # Folder Containing Extracted Proxy Bundles - processed_apis=transformed # Folder to export transfored Proxies to - proxy_bundle_directory=transformed_bundles # Folder to export transfored Proxies Bundles (zip) to - proxy_endpoint_count=4 # Number of Proxy Endpoint to retain while transforming + processed_apis=transformed # Folder to export transformed Proxies to + proxy_bundle_directory=transformed_bundles # Folder to export transformed Proxies Bundles (zip) to + proxy_endpoint_count=4 # Number of Proxy Endpoints to retain while transforming (1-5) debug=false # Flag to export debug logs [validate] - enabled=true # Flag to enable Validation - gcp_project_id=apigee-payg-377208 # Apigee X/Hybrid Project to run Validation + enabled=true # Flag to enable proxy validation + gcp_project_id=xxx-xxx-xxx # Apigee Project for proxy validation ``` -* Please run below command to authenticate against Apigee X/Hybrid APIS if Validation is enabled +* If enabling validation, please run the following command to authenticate against Apigee APIs: ``` export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token) @@ -34,7 +35,7 @@ This is not an Officially Supported Google Product! 
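+
+For intuition, the `proxy_endpoint_count` grouping behaves roughly like the
+sketch below (illustrative only; the real logic is `group_paths_by_path` in
+`utils.py`, which first buckets proxy endpoints by the leading segment of
+their basepath and then packs the buckets):
+
+```
+# Hypothetical basepath segments, packed into groups of at most
+# proxy_endpoint_count (4 here):
+paths = ['v1', 'v2', 'orders', 'items', 'users']
+limit = 4
+groups = [paths[i:i + limit] for i in range(0, len(paths), limit)]
+print(groups)  # [['v1', 'v2', 'orders', 'items'], ['users']]
+```
+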
## Running -Run the Script as below +Run the script as below ``` python3 main.py ``` From 5f92893d1fe0fdcc164feea9ff41d7595e6477d9 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Tue, 15 Aug 2023 21:00:11 +0530 Subject: [PATCH 15/28] fix: removing manifests , fixing Cache Policy & target endpoint --- .../apiproxy/manifests/manifest.xml | 37 ------------ .../apiproxy/policies/Populate-Cache-1.xml | 4 +- .../api_bundles/apiproxy/targets/default.xml | 2 +- .../test/api_bundles/apiproxy/test-proxy.xml | 58 +++++-------------- 4 files changed, 17 insertions(+), 84 deletions(-) delete mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml deleted file mode 100644 index 3022d76cf..000000000 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/manifests/manifest.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml index 1fece47bc..589cb0edf 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Populate-Cache-1.xml @@ -15,10 +15,10 @@ Populate Cache-1 - my + my test - test cache + test-cache Exclusive 3600 diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml index a8286b6d2..ff5aa88a7 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/targets/default.xml @@ -29,6 +29,6 @@ - http://mocktarget.apigee.net/ + https://mocktarget.apigee.net/ \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml index f9a5201c7..ae884bfc1 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/test-proxy.xml @@ -1,47 +1,17 @@ - - /test-proxy - - 1691042549827 - opdk@google.com - - test-proxy - 1691559297323 - opdk@google.com - SHA-512:b3eda74a317897b887e184350e869b373215daca9325d2884f6f95da113decbb86f4f168760354e81f32aa1826ab7e7db3015562f5e655673b0ed017db143770:dc-1 - - ExtractVariables-3 - Populate-Cache-1 - Populate-Cache-without-expiry - Populate-Cache - Quota-1 - Response-Cache - Statistics-Collector-1 - - - ProxyEndpoint-1 - ProxyEndpoint-2 - ProxyEndpoint-3 - ProxyEndpoint-4 - ProxyEndpoint-5 - default - - - - - - default - - + \ No newline at end of file From f82175625f80d44fdeb78f25788d5846c56042a2 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Wed, 16 Aug 2023 14:17:36 +0530 Subject: [PATCH 16/28] fix: added support for FS read when proxy root xml is empty --- tools/proxy-endpoint-unifier/main.py | 7 +++++ tools/proxy-endpoint-unifier/utils.py | 42 ++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index 94db6799a..a16d78561 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -15,6 +15,7 @@ # limitations 
under the License. import os +import sys from xorhybrid import ApigeeXorHybrid import utils @@ -110,6 +111,7 @@ def main(): utils.export_debug_log(files) if validation_enabled: + errors = {} gcp_project_id = cfg['validate']['gcp_project_id'] x = ApigeeXorHybrid(gcp_project_id) x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN')) @@ -117,8 +119,13 @@ def main(): bundled_proxies = utils.list_dir(proxy_bundle_directory) for each_bundle in bundled_proxies: validation = x.validate_api('apis',f"{proxy_bundle_directory}/{each_bundle}") # noqa + if not validation: + errors[each_bundle] = validation result[each_bundle] = validation print(f"{each_bundle} ==> Validation : {validation}") + if len(errors) > 0: + print('ERROR: Some Validations have failed') + sys.exit(1) if __name__ == '__main__': diff --git a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py index b3b4dcc35..d34c122c2 100644 --- a/tools/proxy-endpoint-unifier/utils.py +++ b/tools/proxy-endpoint-unifier/utils.py @@ -75,6 +75,20 @@ def get_proxy_entrypoint(dir): return None +def get_proxy_files(dir, file_type='proxies'): + target_dir = os.path.join(dir, file_type) + files = list_dir(target_dir) + xml_files = [] + for eachfile in files: + if eachfile.endswith(".xml"): + xml_files.append(os.path.splitext(eachfile)[0]) + if len(xml_files) == 0: + print(f"ERROR: Directory \"{target_dir}\" has no xml files") # noqa + return [] + else: + return xml_files + + def parse_json(file): try: with open(file) as fl: @@ -120,6 +134,31 @@ def parse_proxy_root(dir): if file is None: return {} doc = parse_xml(file) + api_proxy = doc.get('APIProxy', {}) + proxy_endpoints = api_proxy.get('ProxyEndpoints', {}).get('ProxyEndpoint', {}) # noqa + target_endpoints = api_proxy.get('TargetEndpoints', {}).get('TargetEndpoint', {}) # noqa + policies = api_proxy.get('Policies', {}).get('Policy', {}) + if len(proxy_endpoints) == 0: + print('Proceeding with Filesystem parse of ProxyEndpoints') + doc['APIProxy']['ProxyEndpoints'] = {} + proxies = get_proxy_files(dir) + doc['APIProxy']['ProxyEndpoints']['ProxyEndpoint'] = proxies + else: + print('Skipping with Filesystem parse of ProxyEndpoints') + if len(target_endpoints) == 0: + print('Proceeding with Filesystem parse of TargetEndpoints') + doc['APIProxy']['TargetEndpoints'] = {} + targets = get_proxy_files(dir, 'targets') + doc['APIProxy']['TargetEndpoints']['TargetEndpoint'] = targets + else: + print('Skipping with Filesystem parse of TargetEndpoints') + if len(policies) == 0: + print('Proceeding with Filesystem parse of Policies') + doc['APIProxy']['Policies'] = {} + policies_list = get_proxy_files(dir, 'policies') + doc['APIProxy']['Policies']['Policy'] = policies_list + else: + print('Skipping with Filesystem parse of Policies') return doc @@ -477,7 +516,8 @@ def clone_proxies(source_dir, target_dir, target_dir = f"{target_dir}/apiproxy" copy_folder(source_dir, target_dir) file = get_proxy_entrypoint(target_dir) - root = parse_xml(file) + # root = parse_xml(file) + root = parse_proxy_root(target_dir) delete_file(file) root['APIProxy']['@name'] = objects['Name'] root['APIProxy']['Policies'] = filter_objects( From d2db7181d950b742641b36ad9f2e55a63900cc92 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Wed, 16 Aug 2023 17:11:03 +0530 Subject: [PATCH 17/28] fix: addressed PR comments --- tools/proxy-endpoint-unifier/README.md | 8 +++++--- tools/proxy-endpoint-unifier/{xorhybrid.py => apigee.py} | 2 +- tools/proxy-endpoint-unifier/input.properties | 2 +- tools/proxy-endpoint-unifier/main.py | 4 
++-- tools/proxy-endpoint-unifier/pipeline.sh | 4 ++-- tools/proxy-endpoint-unifier/utils.py | 4 ++-- 6 files changed, 13 insertions(+), 11 deletions(-) rename tools/proxy-endpoint-unifier/{xorhybrid.py => apigee.py} (98%) diff --git a/tools/proxy-endpoint-unifier/README.md b/tools/proxy-endpoint-unifier/README.md index 882837fb8..2cc1b1ed8 100644 --- a/tools/proxy-endpoint-unifier/README.md +++ b/tools/proxy-endpoint-unifier/README.md @@ -16,9 +16,9 @@ This is not an officially supported Google product. * Please fill in `input.properties` ``` [common] - input_apis=apis # Folder Containing Extracted Proxy Bundles + input_apis=apis # Folder Containing exported & unzipped Proxy Bundles processed_apis=transformed # Folder to export transformed Proxies to - proxy_bundle_directory=transformed_bundles # Folder to export transformed Proxies Bundles (zip) to + proxy_bundle_directory=transformed_zipped_bundles # Folder to export transformed Proxies Bundles (zip) to proxy_endpoint_count=4 # Number of Proxy Endpoints to retain while transforming (1-5) debug=false # Flag to export debug logs @@ -34,12 +34,14 @@ This is not an officially supported Google product. ``` -## Running +## Usage Run the script as below ``` python3 main.py ``` +## Limitations +* This tool does not currently handle the resources within API proxies. ## Copyright diff --git a/tools/proxy-endpoint-unifier/xorhybrid.py b/tools/proxy-endpoint-unifier/apigee.py similarity index 98% rename from tools/proxy-endpoint-unifier/xorhybrid.py rename to tools/proxy-endpoint-unifier/apigee.py index 11ecadd55..e3f5f0b41 100644 --- a/tools/proxy-endpoint-unifier/xorhybrid.py +++ b/tools/proxy-endpoint-unifier/apigee.py @@ -18,7 +18,7 @@ import os -class ApigeeXorHybrid: +class Apigee: def __init__(self, org): self.baseurl = f"https://apigee.googleapis.com/v1/organizations/{org}" self.auth_header = {} diff --git a/tools/proxy-endpoint-unifier/input.properties b/tools/proxy-endpoint-unifier/input.properties index 0e51cf008..ae04281de 100644 --- a/tools/proxy-endpoint-unifier/input.properties +++ b/tools/proxy-endpoint-unifier/input.properties @@ -15,7 +15,7 @@ [common] input_apis=test/api_bundles processed_apis=test/transformed -proxy_bundle_directory=test/transformed_bundles +proxy_bundle_directory=test/transformed_zipped_bundles proxy_endpoint_count=4 debug=false diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index a16d78561..303378129 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -16,7 +16,7 @@ import os import sys -from xorhybrid import ApigeeXorHybrid +from apigee import Apigee import utils @@ -113,7 +113,7 @@ def main(): if validation_enabled: errors = {} gcp_project_id = cfg['validate']['gcp_project_id'] - x = ApigeeXorHybrid(gcp_project_id) + x = Apigee(gcp_project_id) x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN')) result = {} bundled_proxies = utils.list_dir(proxy_bundle_directory) diff --git a/tools/proxy-endpoint-unifier/pipeline.sh b/tools/proxy-endpoint-unifier/pipeline.sh index 94feb5cf7..63df2e9f0 100755 --- a/tools/proxy-endpoint-unifier/pipeline.sh +++ b/tools/proxy-endpoint-unifier/pipeline.sh @@ -34,14 +34,14 @@ debug=true [validate] enabled=true -gcp_project_id=$APIGEE_ORG +gcp_project_id=$APIGEE_X_ORG EOF # Install Dependencies python3 -m pip install -r "$SCRIPTPATH/requirements.txt" # Generate Gcloud Acccess Token -APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token) +APIGEE_ACCESS_TOKEN="$(gcloud config config-helper 
--force-auth-refresh --format json | jq -r '.credential.access_token')" export APIGEE_ACCESS_TOKEN # Execute Utility diff --git a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py index d34c122c2..951073661 100644 --- a/tools/proxy-endpoint-unifier/utils.py +++ b/tools/proxy-endpoint-unifier/utils.py @@ -36,7 +36,7 @@ def get_proxy_endpoint_count(cfg): print('ERROR: Proxy Endpoints should be > Zero(0) & < Five(5)') sys.exit(1) except ValueError: - print('proxy_endpoint_count should be Numberic') + print('proxy_endpoint_count should be a Number') sys.exit(1) return proxy_endpoint_count @@ -53,7 +53,7 @@ def list_dir(dir, isok=False): return os.listdir(dir) except FileNotFoundError: if isok: - print(f"Ignoring : Directory \"{dir}\" not found") + print(f"Ignoring: Directory \"{dir}\" not found") return [] print(f"ERROR: Directory \"{dir}\" not found") sys.exit(1) From d80c5b4b7e255ae7f0bba5d7221fa0f5c72495ab Mon Sep 17 00:00:00 2001 From: anaik91 Date: Wed, 16 Aug 2023 18:38:42 +0530 Subject: [PATCH 18/28] feat: added support for PostClientFlow --- tools/proxy-endpoint-unifier/main.py | 2 +- .../apiproxy/policies/Message-Logging-1.xml | 21 +++++++++++++++++++ .../apiproxy/proxies/ProxyEndpoint-4.xml | 8 +++++++ .../api_bundles/apiproxy/proxies/default.xml | 14 ++++++++++++- tools/proxy-endpoint-unifier/utils.py | 21 +++++++++++++++++-- 5 files changed, 62 insertions(+), 4 deletions(-) create mode 100644 tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Message-Logging-1.xml diff --git a/tools/proxy-endpoint-unifier/main.py b/tools/proxy-endpoint-unifier/main.py index 303378129..c03240151 100644 --- a/tools/proxy-endpoint-unifier/main.py +++ b/tools/proxy-endpoint-unifier/main.py @@ -119,7 +119,7 @@ def main(): bundled_proxies = utils.list_dir(proxy_bundle_directory) for each_bundle in bundled_proxies: validation = x.validate_api('apis',f"{proxy_bundle_directory}/{each_bundle}") # noqa - if not validation: + if not isinstance(validation, bool): errors[each_bundle] = validation result[each_bundle] = validation print(f"{each_bundle} ==> Validation : {validation}") diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Message-Logging-1.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Message-Logging-1.xml new file mode 100644 index 000000000..c092f9fc5 --- /dev/null +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/policies/Message-Logging-1.xml @@ -0,0 +1,21 @@ + + + + Message-Logging-1 + + Message.id = {request.header.id} + IP + 556 + + \ No newline at end of file diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml index fd6ff8929..bb4b2afec 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/ProxyEndpoint-4.xml @@ -22,6 +22,14 @@ + + + + + Message-Logging-1 + + + /test-policy-5 diff --git a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml index fe26e2c40..137f89681 100644 --- a/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml +++ b/tools/proxy-endpoint-unifier/test/api_bundles/apiproxy/proxies/default.xml @@ -27,8 +27,20 @@ - + + + Populate-Cache-1 + + + + + + + Message-Logging-1 + + + /test-policy diff --git 
a/tools/proxy-endpoint-unifier/utils.py b/tools/proxy-endpoint-unifier/utils.py index 951073661..2dd439a0c 100644 --- a/tools/proxy-endpoint-unifier/utils.py +++ b/tools/proxy-endpoint-unifier/utils.py @@ -232,6 +232,14 @@ def get_all_policies_from_endpoint(endpointData, endpointType): ) ) + if (endpointType == 'ProxyEndpoint' and + 'PostClientFlow' in endpointData[endpointType]): + policies.extend( + get_all_policies_from_flow( + endpointData[endpointType]['PostClientFlow'] + ) + ) + Flows = ( [] if endpointData[endpointType]['Flows'] is None else ( [endpointData[endpointType]['Flows']['Flow']] if isinstance( @@ -442,14 +450,23 @@ def merge_proxy_endpoints(api_dict, basepath, pes): process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) # noqa ) merged_pe['ProxyEndpoint']['PreFlow']['Response']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Request'],condition) # noqa + process_steps(each_pe_info['ProxyEndpoint']['PreFlow']['Response'],condition) # noqa ) merged_pe['ProxyEndpoint']['PostFlow']['Request']['Step'].extend( process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) # noqa ) merged_pe['ProxyEndpoint']['PostFlow']['Response']['Step'].extend( - process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Request'],condition) # noqa + process_steps(each_pe_info['ProxyEndpoint']['PostFlow']['Response'],condition) # noqa ) + if 'PostClientFlow' in each_pe_info['ProxyEndpoint']: + merged_pe['ProxyEndpoint']['PostClientFlow'] = { + '@name': 'PostClientFlow', + 'Request': {'Step': []}, + 'Response': {'Step': []}, + } + merged_pe['ProxyEndpoint']['PostClientFlow']['Response']['Step'].extend( # noqa + process_steps(each_pe_info['ProxyEndpoint']['PostClientFlow']['Response'], None) # noqa + ) for each_flow in original_flows: merged_pe['ProxyEndpoint']['Flows']['Flow'].append( process_flow(each_flow, condition) From fd3ba097b36aa6f311160f1af690b57cde934470 Mon Sep 17 00:00:00 2001 From: anaik91 Date: Wed, 16 Aug 2023 21:00:31 +0530 Subject: [PATCH 19/28] feat: modified pipeline.sh & README --- tools/proxy-endpoint-unifier/README.md | 4 ---- tools/proxy-endpoint-unifier/pipeline.sh | 19 ++++++++++++++++++- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/tools/proxy-endpoint-unifier/README.md b/tools/proxy-endpoint-unifier/README.md index 2cc1b1ed8..9a43155e2 100644 --- a/tools/proxy-endpoint-unifier/README.md +++ b/tools/proxy-endpoint-unifier/README.md @@ -42,7 +42,3 @@ python3 main.py ## Limitations * This tool does not currently handle the resources within API proxies. - -## Copyright - -Copyright 2023 Google LLC. This software is provided as-is, without warranty or representation for any use or purpose. Your use of it is subject to your agreement with Google. 
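The pipeline.sh change below validates the original, oversized test bundle with an inline `python3 -c` one-liner before exercising the transformation. Expanded for readability, that one-liner is roughly equivalent to the following sketch (assuming `APIGEE_X_ORG` and `APIGEE_ACCESS_TOKEN` are exported as in the script, and using the `Apigee` wrapper renamed from `xorhybrid.py` earlier in this series):

```
import json
import os

from apigee import Apigee

# Validate a zipped proxy bundle against a live Apigee X/hybrid org.
x = Apigee(os.getenv('APIGEE_X_ORG'))
x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN'))
result = x.validate_api('apis', 'test/api_bundles/test.zip')
print(json.dumps(result, indent=2))
```
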
diff --git a/tools/proxy-endpoint-unifier/pipeline.sh b/tools/proxy-endpoint-unifier/pipeline.sh index 63df2e9f0..dd83d7c43 100755 --- a/tools/proxy-endpoint-unifier/pipeline.sh +++ b/tools/proxy-endpoint-unifier/pipeline.sh @@ -44,5 +44,22 @@ python3 -m pip install -r "$SCRIPTPATH/requirements.txt" APIGEE_ACCESS_TOKEN="$(gcloud config config-helper --force-auth-refresh --format json | jq -r '.credential.access_token')" export APIGEE_ACCESS_TOKEN -# Execute Utility +# Building API Proxy Bundle for Proxy containing more than 5 Proxy Endpoints +cd "$SCRIPTPATH/test/api_bundles" +rm -rf "$SCRIPTPATH/test/api_bundles/test.zip" +echo "Building original proxy bundle" +zip -q -r test.zip apiproxy/ +cd "$SCRIPTPATH" + +# Validating API Proxy Bundle for Proxy containing more than 5 Proxy Endpoints +echo "Validating the original proxy bundle" +python3 -c "import os, sys ,json; \ + from apigee import Apigee; \ + x = Apigee(os.getenv('APIGEE_X_ORG')); \ + x.set_auth_header(os.getenv('APIGEE_ACCESS_TOKEN')); \ + r=x.validate_api('apis','test/api_bundles/test.zip'); \ + print(json.dumps(r,indent=2))" +rm -rf "$SCRIPTPATH/test/api_bundles/test.zip" + +# Running and Validating API Proxy Bundle after splitting the proxies python3 "$SCRIPTPATH/main.py" From af3e94ecb7a5c3851e478c0099c5a48d69c2c2dd Mon Sep 17 00:00:00 2001 From: anaik91 Date: Wed, 16 Aug 2023 21:43:22 +0530 Subject: [PATCH 20/28] feat: added zip packaged to pipeline runner --- tools/pipeline-runner/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/pipeline-runner/Dockerfile b/tools/pipeline-runner/Dockerfile index bcbdb2742..9e454b160 100644 --- a/tools/pipeline-runner/Dockerfile +++ b/tools/pipeline-runner/Dockerfile @@ -34,7 +34,8 @@ RUN apk add --no-cache \ harfbuzz \ ca-certificates \ ttf-freefont \ - py-pip + py-pip \ + zip # Reduce nighly log (note: -ntp requires maven 3.6.1+) RUN mv /usr/bin/mvn /usr/bin/_mvn &&\ From c9f27365a4e4b99a09b5042c01123687e88c95c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:27 +0000 Subject: [PATCH 21/28] chore(deps-dev): bump word-wrap Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.5. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.5) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- .../data-converters-shared-flow/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/references/data-converters-shared-flow/package-lock.json b/references/data-converters-shared-flow/package-lock.json index 5a7e47c18..41b3bbb28 100644 --- a/references/data-converters-shared-flow/package-lock.json +++ b/references/data-converters-shared-flow/package-lock.json @@ -1738,9 +1738,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -3263,9 +3263,9 @@ } }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, "workerpool": { From ba499707654cda57ab0c3b7f8bd05fe1aa2587b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:28 +0000 Subject: [PATCH 22/28] chore(deps): bump requests in /tools/proxy-endpoint-unifier Bumps [requests](https://github.com/psf/requests) from 2.28.1 to 2.31.0. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.28.1...v2.31.0) --- updated-dependencies: - dependency-name: requests dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- tools/proxy-endpoint-unifier/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/proxy-endpoint-unifier/requirements.txt b/tools/proxy-endpoint-unifier/requirements.txt index 7fd3dd334..ef1bdd28b 100644 --- a/tools/proxy-endpoint-unifier/requirements.txt +++ b/tools/proxy-endpoint-unifier/requirements.txt @@ -13,4 +13,4 @@ # limitations under the License. xmltodict==0.13.0 -requests==2.28.1 +requests==2.31.0 From eecbbb1cbecd18e90e66befeb3df0fb1f08358c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:29 +0000 Subject: [PATCH 23/28] chore(deps-dev): bump word-wrap in /references/cicd-pipeline Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.5. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.5) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- references/cicd-pipeline/package-lock.json | 28 +++++----------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/references/cicd-pipeline/package-lock.json b/references/cicd-pipeline/package-lock.json index 2107fd9ae..8d033e269 100644 --- a/references/cicd-pipeline/package-lock.json +++ b/references/cicd-pipeline/package-lock.json @@ -12,7 +12,6 @@ "apickli": "^3.0.1", "apigeelint": "^2.13.0", "eslint": "^7.0.0", - "eslint-config-google": "^0.14.0", "eslint-config-prettier": "^6.11.0", "mocha": "^7.2.0", "nyc": "^15.1.0", @@ -2564,15 +2563,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-config-google": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/eslint-config-google/-/eslint-config-google-0.14.0.tgz", - "integrity": "sha512-WsbX4WbjuMvTdeVL6+J3rK1RGhCTqjsFjX7UMSMgZiyxxaNLkoJENbrGExzERFeoTpGw3F3FypTiWAP9ZXzkEw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/eslint-config-prettier": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", @@ -7179,9 +7169,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -9642,12 +9632,6 @@ } } }, - "eslint-config-google": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/eslint-config-google/-/eslint-config-google-0.14.0.tgz", - "integrity": "sha512-WsbX4WbjuMvTdeVL6+J3rK1RGhCTqjsFjX7UMSMgZiyxxaNLkoJENbrGExzERFeoTpGw3F3FypTiWAP9ZXzkEw==", - "dev": true - }, "eslint-config-prettier": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.11.0.tgz", @@ -13474,9 +13458,9 @@ } }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, "wrap-ansi": { From eb987067bb4a35aa56bfbab92c13c79f01f436e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:29 +0000 Subject: [PATCH 24/28] chore(deps-dev): bump word-wrap in /references/cicd-sharedflow-pipeline Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.5. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.5) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- .../cicd-sharedflow-pipeline/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/references/cicd-sharedflow-pipeline/package-lock.json b/references/cicd-sharedflow-pipeline/package-lock.json index 84c2a126f..b8fcaf0e2 100644 --- a/references/cicd-sharedflow-pipeline/package-lock.json +++ b/references/cicd-sharedflow-pipeline/package-lock.json @@ -5625,9 +5625,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -10426,9 +10426,9 @@ } }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, "wrappy": { From a8bca60bca548ca9133f0deaaf100b5548820f14 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:46 +0000 Subject: [PATCH 25/28] chore(deps): bump tough-cookie and jsdom in /references/js-callout Bumps [tough-cookie](https://github.com/salesforce/tough-cookie) and [jsdom](https://github.com/jsdom/jsdom). These dependencies needed to be updated together. Updates `tough-cookie` from 2.5.0 to 4.1.3 - [Release notes](https://github.com/salesforce/tough-cookie/releases) - [Changelog](https://github.com/salesforce/tough-cookie/blob/master/CHANGELOG.md) - [Commits](https://github.com/salesforce/tough-cookie/compare/v2.5.0...v4.1.3) Updates `jsdom` from 16.3.0 to 16.7.0 - [Release notes](https://github.com/jsdom/jsdom/releases) - [Changelog](https://github.com/jsdom/jsdom/blob/master/Changelog.md) - [Commits](https://github.com/jsdom/jsdom/compare/16.3.0...16.7.0) --- updated-dependencies: - dependency-name: tough-cookie dependency-type: indirect - dependency-name: jsdom dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- references/js-callout/package-lock.json | 669 +++++++++++------------- 1 file changed, 305 insertions(+), 364 deletions(-) diff --git a/references/js-callout/package-lock.json b/references/js-callout/package-lock.json index ca307121a..2ee09ddee 100644 --- a/references/js-callout/package-lock.json +++ b/references/js-callout/package-lock.json @@ -908,6 +908,15 @@ "@sinonjs/commons": "^1.7.0" } }, + "node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, "node_modules/@types/babel__core": { "version": "7.1.9", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.9.tgz", @@ -1043,9 +1052,9 @@ } }, "node_modules/abab": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.3.tgz", - "integrity": "sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "dev": true }, "node_modules/acorn": { @@ -1079,6 +1088,18 @@ "node": ">=0.4.0" } }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, "node_modules/ajv": { "version": "6.12.3", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.3.tgz", @@ -1974,9 +1995,9 @@ } }, "node_modules/decimal.js": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.0.tgz", - "integrity": "sha512-vDPw+rDgn3bZe1+F/pyEwb1oMG2XTlRVgAa6B4KccTEpYgF8w6eQllVbQcfIJnZyvzFtFpxnpGtx8dd7DJp/Rw==", + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", "dev": true }, "node_modules/decode-uri-component": { @@ -1988,12 +2009,6 @@ "node": ">=0.10" } }, - "node_modules/deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", - "dev": true - }, "node_modules/deepmerge": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", @@ -2282,23 +2297,24 @@ } }, "node_modules/escodegen": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", - "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", "dev": true, "dependencies": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", - "esutils": "^2.0.2", - "optionator": "^0.8.1", - "source-map": "~0.6.1" + "estraverse": "^5.2.0", + "esutils": "^2.0.2" }, "bin": { "escodegen": "bin/escodegen.js", "esgenerate": "bin/esgenerate.js" }, "engines": { 
- "node": ">=4.0" + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" } }, "node_modules/esprima": { @@ -2315,9 +2331,9 @@ } }, "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true, "engines": { "node": ">=4.0" @@ -2596,12 +2612,6 @@ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, "node_modules/fb-watchman": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", @@ -3007,6 +3017,20 @@ "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", "dev": true }, + "node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", @@ -3022,6 +3046,19 @@ "npm": ">=1.3.7" } }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/human-signals": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", @@ -3093,15 +3130,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, - "node_modules/ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/is-accessor-descriptor": { "version": "0.1.6", "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", @@ -3299,9 +3327,9 @@ } }, "node_modules/is-potential-custom-element-name": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz", - "integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "node_modules/is-property": { @@ -4104,40 +4132,75 @@ "dev": true }, "node_modules/jsdom": { - "version": "16.3.0", - 
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.3.0.tgz", - "integrity": "sha512-zggeX5UuEknpdZzv15+MS1dPYG0J/TftiiNunOeNxSl3qr8Z6cIlQpN0IdJa44z9aFxZRIVqRncvEhQ7X5DtZg==", + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", "dev": true, "dependencies": { - "abab": "^2.0.3", - "acorn": "^7.1.1", + "abab": "^2.0.5", + "acorn": "^8.2.4", "acorn-globals": "^6.0.0", "cssom": "^0.4.4", - "cssstyle": "^2.2.0", + "cssstyle": "^2.3.0", "data-urls": "^2.0.0", - "decimal.js": "^10.2.0", + "decimal.js": "^10.2.1", "domexception": "^2.0.1", - "escodegen": "^1.14.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", "html-encoding-sniffer": "^2.0.1", - "is-potential-custom-element-name": "^1.0.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.0", - "parse5": "5.1.1", - "request": "^2.88.2", - "request-promise-native": "^1.0.8", - "saxes": "^5.0.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", "symbol-tree": "^3.2.4", - "tough-cookie": "^3.0.1", + "tough-cookie": "^4.0.0", "w3c-hr-time": "^1.0.2", "w3c-xmlserializer": "^2.0.0", "webidl-conversions": "^6.1.0", "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", - "whatwg-url": "^8.0.0", - "ws": "^7.2.3", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", "xml-name-validator": "^3.0.0" }, "engines": { "node": ">=10" + }, + "peerDependencies": { + "canvas": "^2.5.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "node_modules/jsdom/node_modules/acorn": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/jsdom/node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" } }, "node_modules/jsesc": { @@ -4291,19 +4354,6 @@ "node": ">=6" } }, - "node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", @@ -4582,12 +4632,6 @@ "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=", "dev": true }, - "node_modules/lodash.sortby": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", - "dev": true - }, "node_modules/lodash.support": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", @@ -5076,23 +5120,6 @@ "node": ">=6" } }, - "node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": 
"sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dev": true, - "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - }, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/p-each-series": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.1.0.tgz", @@ -5172,9 +5199,9 @@ } }, "node_modules/parse5": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", - "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, "node_modules/parseurl": { @@ -5302,15 +5329,6 @@ "node": ">=0.10.0" } }, - "node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/pretty-format": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz", @@ -5419,6 +5437,12 @@ "node": ">=0.6" } }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -5546,45 +5570,6 @@ "node": ">= 6" } }, - "node_modules/request-promise-core": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.3.tgz", - "integrity": "sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ==", - "dev": true, - "dependencies": { - "lodash": "^4.17.15" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/request-promise-native": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.8.tgz", - "integrity": "sha512-dapwLGqkHtwL5AEbfenuzjTYg35Jd6KPytsC2/TLkVMz8rm+tNt72MGUWT1RP/aYawMpN6HqbNGBQaRcBtjQMQ==", - "dev": true, - "dependencies": { - "request-promise-core": "1.1.3", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - }, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/request-promise-native/node_modules/tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "dependencies": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=0.8" - } - }, "node_modules/request/node_modules/tough-cookie": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", @@ -5622,6 +5607,12 @@ "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "dev": true }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, "node_modules/resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", @@ -6446,15 +6437,6 @@ "node": ">= 0.6" } }, - "node_modules/stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/streamsearch": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", @@ -6962,23 +6944,24 @@ } }, "node_modules/tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "dependencies": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" }, "engines": { "node": ">=6" } }, "node_modules/tr46": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.0.2.tgz", - "integrity": "sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "dependencies": { "punycode": "^2.1.1" @@ -7017,18 +7000,6 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", "dev": true }, - "node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", - "dev": true, - "dependencies": { - "prelude-ls": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, "node_modules/type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -7090,6 +7061,15 @@ "node": ">=0.10.0" } }, + "node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true, + "engines": { + "node": ">= 4.0.0" + } + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -7171,6 +7151,16 @@ "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", "dev": true }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "node_modules/use": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", @@ -7327,28 
+7317,19 @@ "dev": true }, "node_modules/whatwg-url": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.1.0.tgz", - "integrity": "sha512-vEIkwNi9Hqt4TV9RdnaBPNt+E2Sgmo3gePebCRgZ1R7g6d23+53zCTnuB0amKI4AXq6VM8jj2DUAa0S1vjJxkw==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", "dev": true, "dependencies": { - "lodash.sortby": "^4.7.0", - "tr46": "^2.0.2", - "webidl-conversions": "^5.0.0" + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" }, "engines": { "node": ">=10" } }, - "node_modules/whatwg-url/node_modules/webidl-conversions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", - "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -7370,15 +7351,6 @@ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", @@ -8329,6 +8301,12 @@ "@sinonjs/commons": "^1.7.0" } }, + "@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true + }, "@types/babel__core": { "version": "7.1.9", "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.9.tgz", @@ -8461,9 +8439,9 @@ "dev": true }, "abab": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.3.tgz", - "integrity": "sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "dev": true }, "acorn": { @@ -8488,6 +8466,15 @@ "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", "dev": true }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "requires": { + "debug": "4" + } + }, "ajv": { "version": "6.12.3", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.3.tgz", @@ -9260,9 +9247,9 @@ "dev": true }, "decimal.js": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.2.0.tgz", - "integrity": "sha512-vDPw+rDgn3bZe1+F/pyEwb1oMG2XTlRVgAa6B4KccTEpYgF8w6eQllVbQcfIJnZyvzFtFpxnpGtx8dd7DJp/Rw==", + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": 
"sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", "dev": true }, "decode-uri-component": { @@ -9271,12 +9258,6 @@ "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", "dev": true }, - "deep-is": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", - "dev": true - }, "deepmerge": { "version": "4.2.2", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", @@ -9523,15 +9504,14 @@ "dev": true }, "escodegen": { - "version": "1.14.3", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", - "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", "dev": true, "requires": { "esprima": "^4.0.1", - "estraverse": "^4.2.0", + "estraverse": "^5.2.0", "esutils": "^2.0.2", - "optionator": "^0.8.1", "source-map": "~0.6.1" } }, @@ -9542,9 +9522,9 @@ "dev": true }, "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true }, "esutils": { @@ -9777,12 +9757,6 @@ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", - "dev": true - }, "fb-watchman": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", @@ -10110,6 +10084,17 @@ } } }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + } + }, "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", @@ -10121,6 +10106,16 @@ "sshpk": "^1.7.0" } }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, "human-signals": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", @@ -10174,12 +10169,6 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, - "ip-regex": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", - "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", - "dev": true - }, "is-accessor-descriptor": { "version": "0.1.6", 
"resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", @@ -10335,9 +10324,9 @@ } }, "is-potential-custom-element-name": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.0.tgz", - "integrity": "sha1-DFLlS8yjkbssSUsh6GJtczbG45c=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, "is-property": { @@ -10989,37 +10978,57 @@ "dev": true }, "jsdom": { - "version": "16.3.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.3.0.tgz", - "integrity": "sha512-zggeX5UuEknpdZzv15+MS1dPYG0J/TftiiNunOeNxSl3qr8Z6cIlQpN0IdJa44z9aFxZRIVqRncvEhQ7X5DtZg==", + "version": "16.7.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz", + "integrity": "sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw==", "dev": true, "requires": { - "abab": "^2.0.3", - "acorn": "^7.1.1", + "abab": "^2.0.5", + "acorn": "^8.2.4", "acorn-globals": "^6.0.0", "cssom": "^0.4.4", - "cssstyle": "^2.2.0", + "cssstyle": "^2.3.0", "data-urls": "^2.0.0", - "decimal.js": "^10.2.0", + "decimal.js": "^10.2.1", "domexception": "^2.0.1", - "escodegen": "^1.14.1", + "escodegen": "^2.0.0", + "form-data": "^3.0.0", "html-encoding-sniffer": "^2.0.1", - "is-potential-custom-element-name": "^1.0.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", "nwsapi": "^2.2.0", - "parse5": "5.1.1", - "request": "^2.88.2", - "request-promise-native": "^1.0.8", - "saxes": "^5.0.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", "symbol-tree": "^3.2.4", - "tough-cookie": "^3.0.1", + "tough-cookie": "^4.0.0", "w3c-hr-time": "^1.0.2", "w3c-xmlserializer": "^2.0.0", "webidl-conversions": "^6.1.0", "whatwg-encoding": "^1.0.5", "whatwg-mimetype": "^2.3.0", - "whatwg-url": "^8.0.0", - "ws": "^7.2.3", + "whatwg-url": "^8.5.0", + "ws": "^7.4.6", "xml-name-validator": "^3.0.0" + }, + "dependencies": { + "acorn": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "dev": true + }, + "form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + } } }, "jsesc": { @@ -11136,16 +11145,6 @@ "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", "dev": true }, - "levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", - "dev": true, - "requires": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" - } - }, "lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", @@ -11421,12 +11420,6 @@ "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=", "dev": true }, - "lodash.sortby": { - "version": "4.7.0", - "resolved": 
"https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", - "dev": true - }, "lodash.support": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", @@ -11823,20 +11816,6 @@ "mimic-fn": "^2.1.0" } }, - "optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", - "dev": true, - "requires": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" - } - }, "p-each-series": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.1.0.tgz", @@ -11895,9 +11874,9 @@ } }, "parse5": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", - "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", "dev": true }, "parseurl": { @@ -11998,12 +11977,6 @@ "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", "dev": true }, - "prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", - "dev": true - }, "pretty-format": { "version": "26.6.2", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-26.6.2.tgz", @@ -12090,6 +12063,12 @@ "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", "dev": true }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", + "dev": true + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -12213,38 +12192,6 @@ } } }, - "request-promise-core": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.3.tgz", - "integrity": "sha512-QIs2+ArIGQVp5ZYbWD5ZLCY29D5CfWizP8eWnm8FoGD1TX61veauETVQbrV60662V0oFBkrDOuaBI8XgtuyYAQ==", - "dev": true, - "requires": { - "lodash": "^4.17.15" - } - }, - "request-promise-native": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.8.tgz", - "integrity": "sha512-dapwLGqkHtwL5AEbfenuzjTYg35Jd6KPytsC2/TLkVMz8rm+tNt72MGUWT1RP/aYawMpN6HqbNGBQaRcBtjQMQ==", - "dev": true, - "requires": { - "request-promise-core": "1.1.3", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - }, - "dependencies": { - "tough-cookie": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", - "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", - "dev": true, - "requires": { - "psl": "^1.1.28", - "punycode": "^2.1.1" - } - } - } - }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -12257,6 +12204,12 @@ "integrity": 
"sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "dev": true }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==", + "dev": true + }, "resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", @@ -12961,12 +12914,6 @@ "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", "dev": true }, - "stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true - }, "streamsearch": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", @@ -13399,20 +13346,21 @@ "dev": true }, "tough-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", - "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "dev": true, "requires": { - "ip-regex": "^2.1.0", - "psl": "^1.1.28", - "punycode": "^2.1.1" + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" } }, "tr46": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.0.2.tgz", - "integrity": "sha512-3n1qG+/5kg+jrbTzwAykB5yRYtQCTqOGKq5U5PE3b0a1/mzo6snDhjGS0zJVJunO0NrT3Dg1MLy5TjWP/UJppg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-2.1.0.tgz", + "integrity": "sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw==", "dev": true, "requires": { "punycode": "^2.1.1" @@ -13445,15 +13393,6 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", "dev": true }, - "type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", - "dev": true, - "requires": { - "prelude-ls": "~1.1.2" - } - }, "type-detect": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", @@ -13503,6 +13442,12 @@ "set-value": "^2.0.1" } }, + "universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "dev": true + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -13573,6 +13518,16 @@ "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", "dev": true }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dev": true, + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "use": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", @@ -13709,22 +13664,14 @@ "dev": true }, "whatwg-url": { - "version": "8.1.0", - 
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.1.0.tgz", - "integrity": "sha512-vEIkwNi9Hqt4TV9RdnaBPNt+E2Sgmo3gePebCRgZ1R7g6d23+53zCTnuB0amKI4AXq6VM8jj2DUAa0S1vjJxkw==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-8.7.0.tgz", + "integrity": "sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg==", "dev": true, "requires": { - "lodash.sortby": "^4.7.0", - "tr46": "^2.0.2", - "webidl-conversions": "^5.0.0" - }, - "dependencies": { - "webidl-conversions": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-5.0.0.tgz", - "integrity": "sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA==", - "dev": true - } + "lodash": "^4.7.0", + "tr46": "^2.1.0", + "webidl-conversions": "^6.1.0" } }, "which": { @@ -13742,12 +13689,6 @@ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true - }, "wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", From 6257abdf696e92162ba0e60be034aa028200ea3d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:30:48 +0000 Subject: [PATCH 26/28] chore(deps-dev): bump word-wrap in /references/product-recommendations Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.5. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.5) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... 
Signed-off-by: dependabot[bot] --- references/product-recommendations/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/references/product-recommendations/package-lock.json b/references/product-recommendations/package-lock.json index 8596946b2..1de450bc9 100644 --- a/references/product-recommendations/package-lock.json +++ b/references/product-recommendations/package-lock.json @@ -5120,9 +5120,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -9429,9 +9429,9 @@ } }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, "wrappy": { From ae53334c0f1f7cf22ac7c42cce146bfa553bcb96 Mon Sep 17 00:00:00 2001 From: apigee-devrel-helper <109337440+apigee-devrel-helper@users.noreply.github.com> Date: Thu, 17 Aug 2023 08:38:57 +0200 Subject: [PATCH 27/28] chore(main): release 1.13.0 --- CHANGELOG.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7bc095160..802bb245d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [1.13.0](https://github.com/apigee/devrel/compare/v1.12.0...v1.13.0) (2023-08-17) + + +### Features + +* added missing licenses ([cf6d1ff](https://github.com/apigee/devrel/commit/cf6d1ff5cdb940de93b9fa9c7a97aaaaacd3dffa)) +* added pip package install to pipeline-runner ([06fd5fa](https://github.com/apigee/devrel/commit/06fd5fa70278c2b20c1defb6240389aa0811fcc0)) +* Added proxy-endpoint-unifier source ([d9489f2](https://github.com/apigee/devrel/commit/d9489f281a150604a2ebff532e7517e6fa6491e1)) +* added support for PostClientFlow ([d80c5b4](https://github.com/apigee/devrel/commit/d80c5b4b7e255ae7f0bba5d7221fa0f5c72495ab)) +* added test scripts for proxy-endpoint-unifier ([740316e](https://github.com/apigee/devrel/commit/740316e0c2a9413a47484a18c3a0d9c0fcd343a8)) +* added zip package to pipeline runner ([af3e94e](https://github.com/apigee/devrel/commit/af3e94ecb7a5c3851e478c0099c5a48d69c2c2dd)) +* addressed flake8 for main proxy-endpoint-unifier wrapper ([18badee](https://github.com/apigee/devrel/commit/18badeef1adad545e94bec8374cd22ca554990a9)) +* addressed flake8 for utils proxy-endpoint-unifier wrapper ([b812452](https://github.com/apigee/devrel/commit/b81245213b2653b7595652dfa79326e9d96f0355)) +* addressed flake8 for xorhybrid proxy-endpoint-unifier wrapper ([7ccef07](https://github.com/apigee/devrel/commit/7ccef07a097041fc323388433dce7f8abca32a8a)) +* fixed shell lint and updated README.md ([f3ff9a0](https://github.com/apigee/devrel/commit/f3ff9a01f28e9c015b16da09df61f98bb9b0f127)) +* modified pipeline.sh & README
([fd3ba09](https://github.com/apigee/devrel/commit/fd3ba097b36aa6f311160f1af690b57cde934470)) +* removed comments & added newlines in the proxy-endpoint-unifier wrapper ([b39a38f](https://github.com/apigee/devrel/commit/b39a38fed5bbb05f22c3e84771aca68cc0d6b938)) +* updated CODEOWNERS ([655e5af](https://github.com/apigee/devrel/commit/655e5aff29d6a0273fefbf87b919b5dc3a9cce09)) +* updated Licenses in the proxy-endpoint-unifier wrapper ([a7c8a03](https://github.com/apigee/devrel/commit/a7c8a030e40e55aede631d5df438265012ac7ddd)) + + +### Bug Fixes + +* added execute permissions on pipeline.sh ([cbae732](https://github.com/apigee/devrel/commit/cbae7326cf2475efddc21a018034cc85a40d0512)) +* added support for FS read when proxy root xml is empty ([f821756](https://github.com/apigee/devrel/commit/f82175625f80d44fdeb78f25788d5846c56042a2)) +* addressed PR comments ([d2db718](https://github.com/apigee/devrel/commit/d2db7181d950b742641b36ad9f2e55a63900cc92)) +* changes to proxy endpoint unifier readme ([b245415](https://github.com/apigee/devrel/commit/b245415ceac9fce9b5247d6261497058faf9fe79)) +* removing manifests, fixing Cache Policy & target endpoint ([5f92893](https://github.com/apigee/devrel/commit/5f92893d1fe0fdcc164feea9ff41d7595e6477d9)) + ## [1.12.0](https://github.com/apigee/devrel/compare/v1.11.0...v1.12.0) (2023-08-08) From 308a108ff749ad23b95ff8a16ec95f40e7007615 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Aug 2023 06:41:08 +0000 Subject: [PATCH 28/28] chore(deps-dev): bump word-wrap in /references/js-callout Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.5. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.5) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... Signed-off-by: dependabot[bot] --- references/js-callout/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/references/js-callout/package-lock.json b/references/js-callout/package-lock.json index ca307121a..09772989d 100644 --- a/references/js-callout/package-lock.json +++ b/references/js-callout/package-lock.json @@ -7371,9 +7371,9 @@ "dev": true }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -13743,9 +13743,9 @@ "dev": true }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", "dev": true }, "wrap-ansi": {