From 5368868733fd4d8d5080df7efc2cb7a2c655c434 Mon Sep 17 00:00:00 2001 From: rheins Date: Wed, 7 Jun 2023 10:05:31 -0400 Subject: [PATCH] Version 0.1.0 --- CHANGELOG.md | 10 +++ LICENSE.MD | 13 ++++ README.md | 34 +++++++++ clarity_view_importer/README.md | 63 +++++++++++++++ .../clarity_view_importer.py | 74 ++++++++++++++++++ clarity_view_importer/requirements.txt | 1 + .../views/view_bqdataset.txt | 11 +++ clarity_view_importer/views/view_bqtable.txt | 11 +++ clarity_view_importer/views/view_bucket.txt | 23 ++++++ clarity_view_importer/views/view_cloudsql.txt | 18 +++++ clarity_view_importer/views/view_cluster.txt | 16 ++++ .../views/view_container.txt | 54 +++++++++++++ .../views/view_container_latest.txt | 13 ++++ .../views/view_external_ip.txt | 52 +++++++++++++ .../views/view_iam_policy.txt | 8 ++ clarity_view_importer/views/view_instance.txt | 22 ++++++ .../views/view_instancegroup.txt | 14 ++++ .../views/view_k8s_deployments.txt | 76 +++++++++++++++++++ clarity_view_importer/views/view_k8s_jobs.txt | 54 +++++++++++++ .../views/view_k8s_namespace.txt | 27 +++++++ .../views/view_k8s_network_policy.txt | 27 +++++++ clarity_view_importer/views/view_k8s_pod.txt | 42 ++++++++++ ..._rbac_authorization_ClusterRoleBinding.txt | 29 +++++++ ...ew_k8s_rbac_authorization_cluster_role.txt | 28 +++++++ .../view_k8s_rbac_authorization_role.txt | 30 ++++++++ ...ew_k8s_rbac_authorization_role_binding.txt | 32 ++++++++ .../views/view_k8s_replica_set.txt | 74 ++++++++++++++++++ .../views/view_k8s_service.txt | 38 ++++++++++ .../views/view_loadbalancer.txt | 59 ++++++++++++++ .../views/view_logbucket.txt | 25 ++++++ clarity_view_importer/views/view_logsink.txt | 12 +++ .../views/view_logsink_logbuckets.txt | 17 +++++ .../views/view_logsink_pubsubs.txt | 27 +++++++ clarity_view_importer/views/view_network.txt | 28 +++++++ clarity_view_importer/views/view_node.txt | 24 ++++++ clarity_view_importer/views/view_nodepool.txt | 21 +++++ clarity_view_importer/views/view_pod.txt | 48 
++++++++++++ clarity_view_importer/views/view_project.txt | 13 ++++ .../views/view_project_logging_summary.txt | 23 ++++++ .../views/view_project_summary.txt | 20 +++++ clarity_view_importer/views/view_redis.txt | 23 ++++++ clarity_view_importer/views/view_resource.txt | 11 +++ .../views/view_ssl_certs.txt | 17 +++++ 43 files changed, 1262 insertions(+) create mode 100644 CHANGELOG.md create mode 100644 LICENSE.MD create mode 100644 README.md create mode 100644 clarity_view_importer/README.md create mode 100755 clarity_view_importer/clarity_view_importer.py create mode 100644 clarity_view_importer/requirements.txt create mode 100644 clarity_view_importer/views/view_bqdataset.txt create mode 100644 clarity_view_importer/views/view_bqtable.txt create mode 100644 clarity_view_importer/views/view_bucket.txt create mode 100644 clarity_view_importer/views/view_cloudsql.txt create mode 100644 clarity_view_importer/views/view_cluster.txt create mode 100644 clarity_view_importer/views/view_container.txt create mode 100644 clarity_view_importer/views/view_container_latest.txt create mode 100644 clarity_view_importer/views/view_external_ip.txt create mode 100644 clarity_view_importer/views/view_iam_policy.txt create mode 100644 clarity_view_importer/views/view_instance.txt create mode 100644 clarity_view_importer/views/view_instancegroup.txt create mode 100644 clarity_view_importer/views/view_k8s_deployments.txt create mode 100644 clarity_view_importer/views/view_k8s_jobs.txt create mode 100644 clarity_view_importer/views/view_k8s_namespace.txt create mode 100644 clarity_view_importer/views/view_k8s_network_policy.txt create mode 100644 clarity_view_importer/views/view_k8s_pod.txt create mode 100644 clarity_view_importer/views/view_k8s_rbac_authorization_ClusterRoleBinding.txt create mode 100644 clarity_view_importer/views/view_k8s_rbac_authorization_cluster_role.txt create mode 100644 clarity_view_importer/views/view_k8s_rbac_authorization_role.txt create mode 100644 
clarity_view_importer/views/view_k8s_rbac_authorization_role_binding.txt create mode 100644 clarity_view_importer/views/view_k8s_replica_set.txt create mode 100644 clarity_view_importer/views/view_k8s_service.txt create mode 100644 clarity_view_importer/views/view_loadbalancer.txt create mode 100644 clarity_view_importer/views/view_logbucket.txt create mode 100644 clarity_view_importer/views/view_logsink.txt create mode 100644 clarity_view_importer/views/view_logsink_logbuckets.txt create mode 100644 clarity_view_importer/views/view_logsink_pubsubs.txt create mode 100644 clarity_view_importer/views/view_network.txt create mode 100644 clarity_view_importer/views/view_node.txt create mode 100644 clarity_view_importer/views/view_nodepool.txt create mode 100644 clarity_view_importer/views/view_pod.txt create mode 100644 clarity_view_importer/views/view_project.txt create mode 100644 clarity_view_importer/views/view_project_logging_summary.txt create mode 100644 clarity_view_importer/views/view_project_summary.txt create mode 100644 clarity_view_importer/views/view_redis.txt create mode 100644 clarity_view_importer/views/view_resource.txt create mode 100644 clarity_view_importer/views/view_ssl_certs.txt diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..f914a2f --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,10 @@ + +# Change Log +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/) +and this project adheres to [Semantic Versioning](http://semver.org/). 
+ +## [0.1.0] - 2023-06-07 +### Added +- Initial release of clarity_view_importer.py and BigQuery view definitions \ No newline at end of file diff --git a/LICENSE.MD b/LICENSE.MD new file mode 100644 index 0000000..ac8fb1c --- /dev/null +++ b/LICENSE.MD @@ -0,0 +1,13 @@ +Copyright 2023 Nuro + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..deff2ef --- /dev/null +++ b/README.md @@ -0,0 +1,34 @@ +# CLARITY + +The **CL**oud **A**sset **R**epositry and **I**nventory **T**ool for **Y**ou (CLARITY) is a tool to extend Google Cloud Platform's [Cloud Asset Inventory](https://cloud.google.com/asset-inventory/docs/overview) (CAI) for better usability and functionality. It consists of two components: + +- **BigQuery Data Views** - a set of helpful views that make it easier to retrieve useful information from the 250+ tables created by the CAI data export process. +- **CLARITY GUI** - a web interface for querying and pivoting among common data types. + +## Getting Started + +### Enable GCP Cloud Asset Inventory BigQuery Export +Follow [this guide](https://cloud.google.com/asset-inventory/docs/exporting-to-bigquery) to export asset metadata for your GCP environment to BigQuery table. 
You may want to start with a single snapshot to get started and then move to a regularly scheduled snapshot using a Google Cloud Function, such as what is described in this [Medium.com]( https://medium.com/google-cloud/using-gcp-cloud-asset-inventory-export-to-keep-track-of-your-gcp-resources-over-time-20fb6fa63c68) guide. + +### Import the CLARITY Views +These 37 views make it easier to access common data types from your CAI BigQuery tables and are needed for the CLARITY web interface to function. + +Run the latest version of `clarity_view_importer.py` from this repository, specifying the GCP project and BigQuery dataset for your existing CAI data. By default, the views will be created within your CAI dataset but this can be overridden with the `-P` and `-D` options at runtime. + +```` +# Create views in the cai_project project and cai_dataset dataset +./clarity_view_importer.py -p cai_project -d cai_dataset + +Adding cai_project.cai_dataset.view_nodepool +Adding cai_project.cai_dataset.view_k8s_rbac_authorization_cluster_role +Adding cai_project.cai_dataset.view_bucket +Adding cai_project.cai_dataset.view_k8s_deployments +... +```` + +### Install the CLARITY Web Interface +Once the CLARITY Web Interface is available, instructions will be included here. + +## License + +The software is provided by [Nuro](https://nuro.ai) under the Apache Software License agreement. \ No newline at end of file diff --git a/clarity_view_importer/README.md b/clarity_view_importer/README.md new file mode 100644 index 0000000..4914ba3 --- /dev/null +++ b/clarity_view_importer/README.md @@ -0,0 +1,63 @@ +# CAI View Importer +Script to import BigQuery custom view definitions to facilitate streamlined querying of Google Cloud Asset Inventory data. These views must be created for the CLARITY web interface to function. 
+ +## Requirements: +- Google Cloud Asset Inventory data [exported to BigQuery](https://cloud.google.com/asset-inventory/docs/exporting-to-bigquery) +- Python BigQuery module +- GCP account with `roles/bigquery.jobUser` and `roles/bigquery.dataEditor` predefined IAM roles to create the views. + +## Usage: + +```` +usage: clarity_view_importer.py [-h] -p PROJECT -d DATASET [-P VIEW_PROJECT] [-D VIEW_DATASET] [-v VIEW_DIRECTORY] + +Import BigQuery view definitions for Google Cloud Asset Inventory. + +optional arguments: + -h, --help show this help message and exit + -p PROJECT, --project PROJECT + GCP project name for the BigQuery Cloud Asset Inventory dataset is hosted + -d DATASET, --dataset DATASET + BigQuery dataset name where the BigQuery Cloud Asset Inventory is hosted + -P VIEW_PROJECT, --view_project VIEW_PROJECT + GCP project name where the view will be stored. Default is --project value. + -D VIEW_DATASET, --view_dataset VIEW_DATASET + BigQuery dataset name where the view will be stored. Default is --dataset value. + -v VIEW_DIRECTORY, --view_directory VIEW_DIRECTORY + Directory where view definitions are stored +```` + +## Example Usage: +```` +# Create views in the cai_project project and cai_dataset dataset +./clarity_view_importer.py -p cai_project -d cai_dataset + +Adding cai_project.cai_dataset.view_nodepool +Adding cai_project.cai_dataset.view_k8s_rbac_authorization_cluster_role +Adding cai_project.cai_dataset.view_bucket +Adding cai_project.cai_dataset.view_k8s_deployments +... 
+```` + +## Example View: +GCS Bucket Inventory +```` +SELECT +bucket.name as bucketPath, +bucket.resource.data.kind as bucketKind, +bucket.resource.data.name as bucketName, +bucket.resource.parent as bucketParent, +bucket.resource.data.timeCreated as bucketCreation, +bucket.resource.data.updated as bucketUpdated, +bucket.resource.data.location as bucketLocation, +bucket.resource.data.versioning.enabled as bucketVersioning, +bucket.resource.data.iamConfiguration.publicAccessPrevention as bucketPublicAccessPrevention, +bucket.resource.data.iamConfiguration.bucketPolicyOnly.enabled as bucketIAMPolicy, +bucket.resource.data.iamConfiguration.uniformBucketLevelAccess.enabled as bucketUniformAcess, +bucket.resource.data.locationType as bucketLocationType, +bucket.resource.data.directory.enabled as bucketDirectoryEnabled, +project.projectName as projectName +FROM `$project.$dataset.resource_storage_googleapis_com_Bucket` bucket +JOIN `$project.$dataset.view_project` project ON bucket.resource.parent = project.projectParent +WHERE DATE(bucket.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +```` \ No newline at end of file diff --git a/clarity_view_importer/clarity_view_importer.py b/clarity_view_importer/clarity_view_importer.py new file mode 100755 index 0000000..2fd3d42 --- /dev/null +++ b/clarity_view_importer/clarity_view_importer.py @@ -0,0 +1,74 @@ +#! 
/usr/bin/env python3 + +# Version 0.1.0, 2023-06-01 +# Author: rheins@nuro.ai +# Creates BigQuery views in the specififed dataset to facilitate easier Cloud Asset Inventory querying + +import argparse +import sys +import glob +import re +from google.cloud import bigquery + + +def get_file_contents(file): + f = open(file) + s = f.read() + f.close() + return s + + +def main(): + # Parse arguments + parser = argparse.ArgumentParser( + description='Import BigQuery view definitions for Google Cloud Asset Inventory.') + parser.add_argument('-p', '--project', type=str, + help='GCP project name for the BigQuery Cloud Asset Inventory dataset is hosted', required=True) + parser.add_argument('-d', '--dataset', type=str, + help='BigQuery dataset name where the BigQuery Cloud Asset Inventory is hosted', required=True) + parser.add_argument('-P', '--view_project', type=str, + help='GCP project name where the view will be stored. Default is the value of --project.', required=False) + parser.add_argument('-D', '--view_dataset', type=str, + help='BigQuery dataset name where the view will be stored. 
Default is the value of --dataset.', required=False) + parser.add_argument('-v', '--view_directory', type=str, default="./views", + help='Directory where view definitions are stored', required=False) + args = parser.parse_args() + + # Store the views in the Cloud Asset Inventory BigQuery project/dataset by default + if not args.view_project: + args.view_project = args.project + + if not args.view_dataset: + args.view_dataset = args.dataset + + views = glob.glob(f"{args.view_directory}/view_*.txt") + + if not views: + sys.exit( + f"No view definitions found in '{args.view_directory}', exiting") + + client = bigquery.Client() + + for view_path in views: + view_name = "" + valid_path = re.search(r"/(view_\w+).txt", view_path) + if valid_path: + view_name = valid_path.group(1) + + view_id = f"{args.view_project}.{args.view_dataset}.{view_name}" + print(f"Adding view: {view_id}") + + view_src = get_file_contents(view_path) + view_src = view_src.replace("$project", args.project) + view_src = view_src.replace("$dataset", args.dataset) + + bq_view = bigquery.Table(view_id) + bq_view.view_query = view_src + + bq_view = client.create_table(bq_view) + else: + print(f"Invalid view name {view_path}, skipping") + + +if __name__ == "__main__": + main() diff --git a/clarity_view_importer/requirements.txt b/clarity_view_importer/requirements.txt new file mode 100644 index 0000000..1272036 --- /dev/null +++ b/clarity_view_importer/requirements.txt @@ -0,0 +1 @@ +google-cloud-bigquery \ No newline at end of file diff --git a/clarity_view_importer/views/view_bqdataset.txt b/clarity_view_importer/views/view_bqdataset.txt new file mode 100644 index 0000000..26b5cf4 --- /dev/null +++ b/clarity_view_importer/views/view_bqdataset.txt @@ -0,0 +1,11 @@ +SELECT +bqDataset.name as bqDatasetPath, +bqDataset.resource.data.datasetReference.datasetId as bqDatasetName, +bqDataset.resource.data.datasetReference.projectId as projectName, +bqDataset.resource.data.description.value as 
bqDatasetDescription, +(SELECT COUNT(*) FROM `$project.$dataset.view_bqtable` bqTable WHERE bqTable.bqDatasetName = bqDataset.resource.data.datasetReference.datasetId ) as bqTableCount, +bqDataset.resource.location as bqDatasetLocation, +bqDataset.updateTime as bqDatasetUpdateTime +FROM `$project.$dataset.resource_bigquery_googleapis_com_Dataset` bqDataset +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_bqtable.txt b/clarity_view_importer/views/view_bqtable.txt new file mode 100644 index 0000000..c09bf09 --- /dev/null +++ b/clarity_view_importer/views/view_bqtable.txt @@ -0,0 +1,11 @@ +SELECT +resource.data.tableReference.tableId as bqTableName, +name as bqTablePath, +resource.parent as bqDatasetPath, +resource.data.tableReference.projectId as projectName, +resource.data.tableReference.datasetId as bqDatasetName, +resource.location as bqTableLocation, +updateTime as bqTableUpdateTime, +FROM `$project.$dataset.resource_bigquery_googleapis_com_Table` +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai diff --git a/clarity_view_importer/views/view_bucket.txt b/clarity_view_importer/views/view_bucket.txt new file mode 100644 index 0000000..0a3ee47 --- /dev/null +++ b/clarity_view_importer/views/view_bucket.txt @@ -0,0 +1,23 @@ +SELECT +bucket.name as bucketPath, +bucket.resource.data.kind as bucketKind, +bucket.resource.data.name as bucketName, +--billing.cost_after_credits as billingLastMonthCostAfterCredits, +--billing.cost_before_credits as billingLastMonthCostBeforeCredits, +bucket.resource.parent as bucketParent, +bucket.resource.data.timeCreated as bucketCreation, +bucket.resource.data.updated as bucketUpdated, +bucket.resource.data.location as bucketLocation, +bucket.resource.data.versioning.enabled as bucketVersioning, +bucket.resource.data.iamConfiguration.publicAccessPrevention as 
bucketPublicAccessPrevention, +bucket.resource.data.iamConfiguration.bucketPolicyOnly.enabled as bucketIAMPolicy, +bucket.resource.data.iamConfiguration.uniformBucketLevelAccess.enabled as bucketUniformAcess, +bucket.resource.data.locationType as bucketLocationType, +bucket.resource.data.directory.enabled as bucketDirectoryEnabled, +project.projectName as projectName +FROM `$project.$dataset.resource_storage_googleapis_com_Bucket` bucket +JOIN `$project.$dataset.view_project` project ON bucket.resource.parent = project.projectParent +--LEFT JOIN `$project.$dataset.billing_snapshot` billing ON (billing.service = 'Cloud Storage' AND billing.resourceName = bucket.resource.data.name AND billing.month = FORMAT_DATE('%G%m', DATE_SUB(current_date(), INTERVAL 1 MONTH))) + +WHERE DATE(bucket.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_cloudsql.txt b/clarity_view_importer/views/view_cloudsql.txt new file mode 100644 index 0000000..caf1456 --- /dev/null +++ b/clarity_view_importer/views/view_cloudsql.txt @@ -0,0 +1,18 @@ +SELECT +resource.data.name as sqlName, +resource.data.project as projectName, +resource.parent as projectParent, +resource.data.databaseVersion as sqlDBVersion, +resource.data.settings.tier as sqlTier, +resource.data.instanceType as sqlInstanceType, +resource.data.serviceAccountEmailAddress as serviceAccount, +resource.data.onPremisesConfiguration. +hostPort as sqlHostPort, +resource.data.region as sqlRegion, +resource.data. 
+createTime as sqlCreateTime, +name as sqlPath, +FROM `$project.$dataset.resource_sqladmin_googleapis_com_Instance` cloudsql +JOIN `$project.$dataset.view_project` project ON project.projectParent = cloudsql.resource.parent +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_cluster.txt b/clarity_view_importer/views/view_cluster.txt new file mode 100644 index 0000000..dc46d41 --- /dev/null +++ b/clarity_view_importer/views/view_cluster.txt @@ -0,0 +1,16 @@ +SELECT +resource.data.name as clusterName, +name as clusterPath, +resource.location as clusterLocation, +resource.data.endpoint as clusterEndpoint, +resource.data.workloadIdentityConfig as clusterWorkload, +resource.data.nodeConfig.workloadMetadataConfig.mode as clusterWorkloadMode, +resource.data.clusterIpv4Cidr as clusterCIDR, +resource.data.masterAuthorizedNetworksConfig.cidrBlocks as authorizedNetworks, +SPLIT(name, '/')[OFFSET(4)] as projectName, +resource.data.currentMasterVersion as clusterMasterVersion, +resource.data.currentNodeVersion as clusterNodeVersion, +resource.data.workloadIdentityConfig.workloadPool as clusterWorkloadIdentity, +FROM `$project.$dataset.resource_container_googleapis_com_Cluster` +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: kbroughton@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_container.txt b/clarity_view_importer/views/view_container.txt new file mode 100644 index 0000000..6748511 --- /dev/null +++ b/clarity_view_importer/views/view_container.txt @@ -0,0 +1,54 @@ +-- We need to UNION ALL the contrainerregistry (deprecated) and artifactregistry tables to get all_containers. 
+-- containerregistry is deprecated in favor of artifactregistry +WITH all_containers as ( +-- BEGIN CONTAINER REGISTRY +SELECT + SPLIT(resource.data.name, '/')[SAFE_OFFSET(0)] as hostname, + SPLIT(resource.data.name, '/')[SAFE_OFFSET(1)] as project, + SPLIT(SPLIT(resource.data.name, '@')[SAFE_OFFSET(0)], '/')[SAFE_OFFSET(2)] as repository, + ARRAY_REVERSE(SPLIT(SPLIT(REPLACE(resource.data.name, CONCAT(SPLIT(resource.data.name, '/')[SAFE_OFFSET(0)], '/', SPLIT(resource.data.name, '/')[SAFE_OFFSET(1)], '/', SPLIT(resource.data.name, '/')[SAFE_OFFSET(2)], '/'), ''), '@')[SAFE_OFFSET(0)], '/'))[SAFE_OFFSET(0)] as container_name, +-- full_resource_name not supported for containerregistry + NULL as full_resource_name, + resource.data.name as uri, +ARRAY_LENGTH(SPLIT(resource.data.name, '/')) as num_parts, + ( + SELECT STRING_AGG(part, '/' ORDER BY index) + FROM UNNEST(SPLIT(SPLIT(resource.data.name, '@')[OFFSET(0)], '/')) part WITH OFFSET index + WHERE index BETWEEN 2 AND ARRAY_LENGTH(SPLIT(resource.data.name, '/')) + ) container_path, + SPLIT(name, '@')[SAFE_OFFSET(1)] as sha, + resource.data.timeUploaded as upload_time, + resource.data.tags as tags, +FROM `$project.$dataset.resource_containerregistry_googleapis_com_Image` +WHERE +DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- END CONTAINER REGISTRY + +-- We destinguish between containerregistry and artifactregistry based on full_resource_path (=NULL for containerregistry) + +-- BEGIN ARTIFACT REGISTRY +UNION ALL +SELECT + SPLIT(resource.data.uri, '/')[SAFE_OFFSET(0)] as hostname, + SPLIT(resource.data.name, '/')[SAFE_OFFSET(1)] as project, + SPLIT(SPLIT(resource.data.name, '@')[SAFE_OFFSET(0)], '/')[SAFE_OFFSET(5)] as repository, + ARRAY_REVERSE(SPLIT(SPLIT(REPLACE(resource.data.name, CONCAT(SPLIT(resource.data.name, '/')[SAFE_OFFSET(0)], '/', SPLIT(resource.data.name, '/')[SAFE_OFFSET(1)], '/', SPLIT(resource.data.name, '/')[SAFE_OFFSET(2)], '/'), ''), '@')[SAFE_OFFSET(0)], '/'))[SAFE_OFFSET(0)] as 
container_name, + CONCAT("//artifactregistry.googleapis.com/", resource.data.uri) as full_resource_name, + resource.data.name as uri, +ARRAY_LENGTH(SPLIT(resource.data.name, '/')) as num_parts, + ( + SELECT STRING_AGG(part, '/' ORDER BY index) + FROM UNNEST(SPLIT(SPLIT(resource.data.name, '@')[OFFSET(0)], '/')) part WITH OFFSET index + WHERE index BETWEEN 2 AND ARRAY_LENGTH(SPLIT(resource.data.name, '/')) + ) container_path, + SPLIT(name, '@')[SAFE_OFFSET(1)] as sha, + resource.data.uploadTime as upload_time, + resource.data.tags as tags, +FROM `$dataset.resource_artifactregistry_googleapis_com_DockerImage` artifact_containers +WHERE +DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +) +-- SELECT DISTINCT(hostname) FROM all_containers +SELECT * FROM all_containers + +-- authors: kbroughton@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_container_latest.txt b/clarity_view_importer/views/view_container_latest.txt new file mode 100644 index 0000000..fb4082f --- /dev/null +++ b/clarity_view_importer/views/view_container_latest.txt @@ -0,0 +1,13 @@ +-- Select just the most recent upload_time, i.e., latest container +with latest_upload_times as ( + select + view_container.container_path, + max(view_container.upload_time) as latest_upload_time + from `$dataset.view_container` view_container + GROUP BY view_container.container_path +) +SELECT * EXCEPT(container_path) from latest_upload_times +JOIN $dataset.view_container as view_container + ON latest_upload_times.latest_upload_time = view_container.upload_time and latest_upload_times.container_path = view_container.container_path +ORDER BY latest_upload_time ASC +-- authors: kbroughton@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_external_ip.txt b/clarity_view_importer/views/view_external_ip.txt new file mode 100644 index 0000000..0137b30 --- /dev/null +++ b/clarity_view_importer/views/view_external_ip.txt @@ -0,0 +1,52 @@ +SELECT +address, 
name, type, project.projectName as projectName, path +FROM +( + SELECT + resource.data.address as address, + resource.data.name as name, + resource.discoveryName as type, + name as path, + resource.parent as parent, + FROM `$project.$dataset.resource_compute_googleapis_com_Address` + WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) + + UNION ALL + + SELECT + resource.data.IPaddress as address, + resource.data.name as name, + resource.discoveryName as type, + name as path, + resource.parent as parent, + FROM `$project.$dataset.resource_compute_googleapis_com_GlobalForwardingRule` + WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) + + UNION ALL + + SELECT + ips.ipAddress as address, + sqladmin.resource.data.name as name, + sqladmin.resource.discoveryName as type, + sqladmin.name as path, + resource.parent as parent, + FROM `$project.$dataset.resource_sqladmin_googleapis_com_Instance` as sqladmin + CROSS JOIN UNNEST(sqladmin.resource.data.ipAddresses) as ips + WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) + + UNION ALL + + SELECT + interfaces.ipAddress as address, + vpn.resource.data.name as name, + vpn.resource.discoveryName as type, + vpn.name as path, + resource.parent as parent, + FROM `$project.$dataset.resource_compute_googleapis_com_VpnGateway` as vpn + CROSS JOIN UNNEST(vpn.resource.data.vpnInterface) as interfaces + WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +) +JOIN `$project.$dataset.view_project` project ON parent = project.projectParent +WHERE NOT (starts_with(address, "10.") OR starts_with(address, "0.") OR starts_with(address, "100.")) +ORDER BY address ASC +-- author rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_iam_policy.txt b/clarity_view_importer/views/view_iam_policy.txt new file mode 100644 index 0000000..7df6410 --- /dev/null +++ b/clarity_view_importer/views/view_iam_policy.txt @@ -0,0 +1,8 @@ +SELECT name as iamResourcePath, account, 
bindings.role as iamRole, asset_type as assetType +FROM `$project.$dataset.iam-policy` policy, +UNNEST(policy.iam_policy.bindings) bindings, +UNNEST(bindings.members) account +WHERE +DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai + diff --git a/clarity_view_importer/views/view_instance.txt b/clarity_view_importer/views/view_instance.txt new file mode 100644 index 0000000..fcd3458 --- /dev/null +++ b/clarity_view_importer/views/view_instance.txt @@ -0,0 +1,22 @@ +SELECT +resource.data.name as instanceName, +instance.name as instancePath, +SPLIT(instance.name, '/')[OFFSET(4)] as projectName, +billing.cost_after_credits as billingLastMonthCostAfterCredits, +billing.cost_before_credits as billingLastMonthCostBeforeCredits, +resource.data.labels[SAFE_OFFSET(0)].key as instanceGKETag, +resource.data.labels[SAFE_OFFSET(0)].value as instanceManagedBy, +SPLIT(resource.data.machineType, '/')[OFFSET(10)] as instanceMachineType, +resource.data.status as instanceStatus, +resource.location as instanceLocation, +resource.data.networkInterfaces[SAFE_OFFSET(0)].networkIP as instanceIPAddr, +resource.data.networkInterfaces[SAFE_OFFSET(0)].network as networkSelfLink, +resource.data.networkInterfaces[SAFE_OFFSET(0)].subnetwork as subnetSelfLink, +resource.data.serviceAccounts[SAFE_OFFSET(0)].email as instanceServiceAccount, +resource.data.disks[SAFE_OFFSET(0)].diskSizeGb as instanceDiskCapacity, +resource.data.disks[SAFE_OFFSET(0)].licenses[SAFE_OFFSET(0)] as instanceLicense, +resource.data.creationTimestamp as instanceCreationTimestamp, +FROM `$project.$dataset.resource_compute_googleapis_com_Instance` instance +LEFT JOIN `$project.$dataset.dev_snapshot_resource_billing` billing ON (billing.resourcePath = instance.name and billing.month = FORMAT_DATE('%G%m', DATE_SUB(current_date(), INTERVAL 1 MONTH))) +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git 
a/clarity_view_importer/views/view_instancegroup.txt b/clarity_view_importer/views/view_instancegroup.txt new file mode 100644 index 0000000..58e6a51 --- /dev/null +++ b/clarity_view_importer/views/view_instancegroup.txt @@ -0,0 +1,14 @@ +SELECT + +ig.resource.data.name as instancegroupName, +ig.name as instancegroupPath, +ig.resource.parent as instancegroupParent, +ig.resource.data.description as instancegroupDescription, +ig.resource.data.network as instancegroupNetworkURL, +ig.resource.data.subnetwork as instancegroupSubnetURL, +ig.resource.location as instancegroupLocation, +project.projectName +FROM `$project.$dataset.resource_compute_googleapis_com_InstanceGroup` ig +JOIN `$project.$dataset.view_project` project ON project.projectParent = ig.resource.parent +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_k8s_deployments.txt b/clarity_view_importer/views/view_k8s_deployments.txt new file mode 100644 index 0000000..75f22cb --- /dev/null +++ b/clarity_view_importer/views/view_k8s_deployments.txt @@ -0,0 +1,76 @@ +-- selected fields for asset_type=apps.k8s.io/Deployment +-- per-asset-export does not yet support most k8s resources including apps.k8s.io/Deployment, so we extract it from "resource" table. 
+ +SELECT +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + +JSON_EXTRACT(resource.data, "$.spec.template.spec.dnsPolicy") as dns_policy, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostIPC") as hostIPC, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostNetwork") as hostNetwork, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostPID") as hostPID, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostname") as hostname, +JSON_EXTRACT(resource.data, "$.spec.template.spec.nodeName") as nodename, +JSON_EXTRACT(resource.data, "$.spec.template.spec.serviceAccount") as serviceAccount, +JSON_EXTRACT(resource.data, "$.spec.template.spec.serviceAccountName") as serviceAccountName, + +JSON_EXTRACT(container_ports, "$.containerPort") as containerPort, +JSON_EXTRACT(container_ports, "$.hostIP") as containerPort_hostIP, +JSON_EXTRACT(container_ports, "$.hostPort") as containerPort_hostPort, +JSON_EXTRACT(container_ports, "$.name") as containerPort_name, +JSON_EXTRACT(container_ports, "$.protocol") as containerPort_protocol, + +JSON_EXTRACT(containers, "$.name") as containers_name, +JSON_EXTRACT(containers, "$.image") as containers_image, +JSON_EXTRACT(containers, "$.livenessProbe") as containers_livenessProbe, +JSON_EXTRACT(containers, "$.command") as containers_command, 
+JSON_EXTRACT(containers, "$.args") as containers_args, + +JSON_EXTRACT(ownerReferences, "$.apiVersion") as ownerReferences_apiVersion, +JSON_EXTRACT(ownerReferences, "$.blockOwnerDeletion") as ownerReferences_blockOwnerDeletion, +JSON_EXTRACT(ownerReferences, "$.controller") as ownerReferences_controller, +JSON_EXTRACT(ownerReferences, "$.name") as ownerReferences_name, +JSON_EXTRACT(ownerReferences, "$.kind") as ownerReferences_kind, + +JSON_EXTRACT(volumes, "$.name") as volumes_name, +JSON_EXTRACT(volumes, "$.volumeSource") as volumes_volumeSource, +JSON_EXTRACT(volumes, "$.volumeSource.projected.sources") as volumes_projected_sources, + +JSON_EXTRACT(container_volumeMounts, "$.mountPath") as volumeMounts_mountPath, +JSON_EXTRACT(container_volumeMounts, "$.name") as volumeMounts_name, +JSON_EXTRACT(container_volumeMounts, "$.readOnly") as volumeMounts_readOnly, +JSON_EXTRACT(container_volumeMounts, "$.subPath") as volumeMounts_subPath, +JSON_EXTRACT(container_volumeMounts, "$.subPathExpr") as volumeMounts_subPathExpr, + +JSON_EXTRACT(conditions, "$.lastTransitionTime") as conditions_lastTransitionTime, +JSON_EXTRACT(conditions, "$.lastUpdateTime") as conditions_lastUpdateTime, +JSON_EXTRACT(conditions, "$.message") as conditions_message, +JSON_EXTRACT(conditions, "$.reason") as conditions_reason, +JSON_EXTRACT(conditions, "$.status") as conditions_status, +JSON_EXTRACT(conditions, "$.type") as conditions_type, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.ownerReferences")) as ownerReferences, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.containers")) as containers, +UNNEST(JSON_EXTRACT_ARRAY(containers, "$.ports")) as container_ports, +UNNEST(JSON_EXTRACT_ARRAY(containers, "$.volumeMounts")) as container_volumeMounts, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.volumes")) as 
volumes, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.status.conditions")) as conditions + +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "apps.k8s.io/Deployment" +LIMIT 100 + +-- author: jezhang@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_k8s_jobs.txt b/clarity_view_importer/views/view_k8s_jobs.txt new file mode 100644 index 0000000..f7f5d16 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_jobs.txt @@ -0,0 +1,54 @@ +SELECT +JSON_EXTRACT(containers, "$.name") as container_name, +JSON_EXTRACT(containers, "$.image") as container_image, +JSON_EXTRACT(containers, "$.resources") as container_resources, +JSON_EXTRACT(containers, "$.volumeMounts") as container_volume_mounts, +JSON_EXTRACT(containers, "$.args") as container_args, +JSON_EXTRACT(containers, "$.env") as container_env, + +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels.app") as app, +JSON_EXTRACT(resource.data, "$.metadata.labels.controller-uid") as controller_uid, +JSON_EXTRACT(resource.data, "$.metadata.labels.job-name") as job_name, +JSON_EXTRACT(resource.data, "$.metadata.labels.name") as name, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + +JSON_EXTRACT(resource.data, "$.spec.template.spec.dnsPolicy") as dnsPolicy, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostIPC") as hostIPC, 
+JSON_EXTRACT(resource.data, "$.spec.template.spec.hostNetwork") as hostNetwork, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostPID") as hostPID, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostname") as hostname, +JSON_EXTRACT(resource.data, "$.spec.template.spec.nodeName") as nodeName, + +JSON_EXTRACT(resource.data, "$.status.active") as status_active, +JSON_EXTRACT(resource.data, "$.status.completionTime") as status_completionTime, +JSON_EXTRACT(resource.data, "$.status.failed") as status_failed, +JSON_EXTRACT(resource.data, "$.status.startTime") as status_startTime, +JSON_EXTRACT(resource.data, "$.status.succeeded") as status_succeeded, + +JSON_EXTRACT(status_conditions, "$.lastProbeTime") as last_probe_time, +JSON_EXTRACT(status_conditions, "$.lastTransitionTime") as last_transition_time, +JSON_EXTRACT(status_conditions, "$.message") as message, +JSON_EXTRACT(status_conditions, "$.reason") as reason, +JSON_EXTRACT(status_conditions, "$.status") as status, +JSON_EXTRACT(status_conditions, "$.type") as type, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.containers")) as containers, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.status.conditions")) as status_conditions +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "batch.k8s.io/Job" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_namespace.txt b/clarity_view_importer/views/view_k8s_namespace.txt new file mode 100644 index 0000000..6d270a3 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_namespace.txt @@ -0,0 +1,27 @@ +SELECT +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, 
+JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + +JSON_EXTRACT(resource.data, "$.spec.finalizers") as spec_finalizers, +JSON_EXTRACT(resource.data, "$.status") as status, +JSON_EXTRACT(resource.data, "$.status.phase") as status_phase, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields + +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "k8s.io/Namespace" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_network_policy.txt b/clarity_view_importer/views/view_k8s_network_policy.txt new file mode 100644 index 0000000..47f38e9 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_network_policy.txt @@ -0,0 +1,27 @@ +SELECT +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + 
+JSON_EXTRACT(resource.data, "$.spec.ingress") as ingress, +JSON_EXTRACT(resource.data, "$.spec.podSelector") as podSelector, +JSON_EXTRACT(resource.data, "$.spec.policyTypes") as policyTypes, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields + +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "networking.k8s.io/NetworkPolicy" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_pod.txt b/clarity_view_importer/views/view_k8s_pod.txt new file mode 100644 index 0000000..997ea9a --- /dev/null +++ b/clarity_view_importer/views/view_k8s_pod.txt @@ -0,0 +1,42 @@ +SELECT +JSON_EXTRACT_SCALAR(resource.data, "$.metadata.name") as pod_name, +JSON_EXTRACT_SCALAR(resource.data, "$.status.hostIP") as host_ip, +SPLIT(name, '/')[SAFE_OFFSET(4)] as project_name, +SPLIT(resource.parent, '/')[SAFE_OFFSET(8)] as cluster_name, +JSON_EXTRACT_SCALAR(pod_containers, "$.image") as pod_image, +JSON_EXTRACT_SCALAR(init_containers, "$.image") as init_image, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.nodeName") as node_name, +JSON_EXTRACT_SCALAR(resource.data, "$.status.message") as status_message, +name as pod_path, +JSON_EXTRACT_SCALAR(resource.data, "$.status.podIP") as pod_ip, +JSON_EXTRACT_SCALAR(resource.data, "$.status.podIPs") as pod_ips, +JSON_EXTRACT_SCALAR(resource.data, "$.status.reason") as status_reason, +JSON_EXTRACT_SCALAR(resource.data, "$.status.startTime") as start_time, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.volumes") as spec_volumes, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.restartPolicy") as spec_restart_policy, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.schedulerName") as spec_scheduler, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.securityContext") as spec_security_context, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.serviceAccount") as spec_service_account, +JSON_EXTRACT_SCALAR(resource.data, 
"$.spec.serviceAccountName") as spec_service_account_name, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.subdomain") as spec_domain, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.nodeName") as spec_node_name, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.dnsPolicy") as spec_dns_policy, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.hostNetwork") as spec_host_network, +JSON_EXTRACT_SCALAR(resource.data, "$.spec.subdomain") as spec_subdomain, +JSON_EXTRACT_SCALAR(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels") as metadata_labels, +--JSON_EXTRACT_SCALAR(resource.data, "$.metadata.clusterName") as cluster_name, +JSON_EXTRACT_SCALAR(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT_SCALAR(resource.data, "$.metadata.annotations") as metadata_annotations, +cluster.*, +readTime, + +FROM +`$project.$dataset.resource` +LEFT JOIN UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.containers")) as pod_containers +LEFT JOIN UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.initContainers")) as init_containers +LEFT JOIN `$project.$dataset.view_cluster` cluster ON (cluster.clusterName = SPLIT(resource.parent, '/')[SAFE_OFFSET(8)] AND cluster.projectName = SPLIT(name, '/')[SAFE_OFFSET(4)]) +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "k8s.io/Pod" + +-- author: kbroughton@nuro.ai, rheins@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_rbac_authorization_ClusterRoleBinding.txt b/clarity_view_importer/views/view_k8s_rbac_authorization_ClusterRoleBinding.txt new file mode 100644 index 0000000..1f8fb71 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_rbac_authorization_ClusterRoleBinding.txt @@ -0,0 +1,29 @@ +SELECT +JSON_EXTRACT(subjects, "$.name") as subject_name, +JSON_EXTRACT(subjects, "$.namespace") as subject_namespace, +JSON_EXTRACT(subjects, "$.kind") as subject_kind, +JSON_EXTRACT(subjects, "$.apiGroup") as subject_apigroup, 
+JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.roleRef.name") as roleref_name, +JSON_EXTRACT(resource.data, "$.roleRef.kind") as roleref_kind, +JSON_EXTRACT(resource.data, "$.roleRef.apiGroup") as roleref_apiGroup, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") as metadata_labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.subjects")) as subjects, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "rbac.authorization.k8s.io/ClusterRoleBinding" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_rbac_authorization_cluster_role.txt b/clarity_view_importer/views/view_k8s_rbac_authorization_cluster_role.txt new file mode 100644 index 0000000..d578c42 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_rbac_authorization_cluster_role.txt @@ -0,0 +1,28 @@ +SELECT +JSON_EXTRACT(rules, "$.apiGroups") as rules_apiGroups, +JSON_EXTRACT(rules, "$.resources") as rules_resources, +JSON_EXTRACT(rules, "$.verbs") as rules_verbs, +JSON_EXTRACT(rules, "$.resourceNames") as resource_names, +JSON_EXTRACT(resource.data, "$.roleRef.name") as roleref_name, +JSON_EXTRACT(resource.data, "$.roleRef.kind") as roleref_kind, 
+JSON_EXTRACT(resource.data, "$.roleRef.apiGroup") as roleref_apiGroup, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.generationName") as generationName, +JSON_EXTRACT(resource.data, "$.metadata.generation") as generation, +JSON_EXTRACT(resource.data, "$.metadata.labels.app") as metadata_labels_app, +JSON_EXTRACT(resource.data, "$.metadata.labels.chart") as metadata_labels_chart, +JSON_EXTRACT(resource.data, "$.metadata.labels.heritage") as metadata_labels_heritage, +JSON_EXTRACT(resource.data, "$.metadata.labels.release") as metadata_labels_release, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.rules")) as rules +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "rbac.authorization.k8s.io/ClusterRole" +LIMIT 100 + +-- author: jezhang@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_k8s_rbac_authorization_role.txt b/clarity_view_importer/views/view_k8s_rbac_authorization_role.txt new file mode 100644 index 0000000..59f212c --- /dev/null +++ b/clarity_view_importer/views/view_k8s_rbac_authorization_role.txt @@ -0,0 +1,30 @@ +-- selected fields for asset_type=rbac.authorization.k8s.io/Role +-- per-asset-export does not yet support most k8s resources including rbac.authorization.k8s.io/Role, so we extract it from "resource" table. 
+ +SELECT +JSON_EXTRACT(rules, "$.apiGroups") as rules_apiGroups, +JSON_EXTRACT(rules, "$.resources") as rules_resources, +JSON_EXTRACT(rules, "$.verbs") as rules_verbs, +JSON_EXTRACT(resource.data, "$.roleRef.name") as roleref_name, +JSON_EXTRACT(resource.data, "$.roleRef.kind") as roleref_kind, +JSON_EXTRACT(resource.data, "$.roleRef.apiGroup") as roleref_apiGroup, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.generationName") as generationName, +JSON_EXTRACT(resource.data, "$.metadata.generation") as generation, +JSON_EXTRACT(resource.data, "$.metadata.labels.app") as metadata_labels_app, +JSON_EXTRACT(resource.data, "$.metadata.labels.chart") as metadata_labels_chart, +JSON_EXTRACT(resource.data, "$.metadata.labels.heritage") as metadata_labels_heritage, +JSON_EXTRACT(resource.data, "$.metadata.labels.release") as metadata_labels_release, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.rules")) as rules +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "rbac.authorization.k8s.io/Role" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_rbac_authorization_role_binding.txt b/clarity_view_importer/views/view_k8s_rbac_authorization_role_binding.txt new file mode 100644 index 0000000..63060be --- /dev/null +++ b/clarity_view_importer/views/view_k8s_rbac_authorization_role_binding.txt @@ -0,0 +1,32 @@ +-- selected fields for asset_type=rbac.authorization.k8s.io/RoleBinding +-- per-asset-export does not yet support most k8s resources including rbac.authorization.k8s.io/RoleBinding, so we extract it from "resource" table. 
+ +SELECT +JSON_EXTRACT(subjects, "$.name") as subject_name, +JSON_EXTRACT(subjects, "$.namespace") as subject_namespace, +JSON_EXTRACT(subjects, "$.kind") as subject_kind, +JSON_EXTRACT(subjects, "$.apiGroup") as subject_apigroup, +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.roleRef.name") as roleref_name, +JSON_EXTRACT(resource.data, "$.roleRef.kind") as roleref_kind, +JSON_EXTRACT(resource.data, "$.roleRef.apiGroup") as roleref_apiGroup, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") as metadata_labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.subjects")) as subjects, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "rbac.authorization.k8s.io/RoleBinding" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_replica_set.txt b/clarity_view_importer/views/view_k8s_replica_set.txt new file mode 100644 index 0000000..93e999f --- /dev/null +++ b/clarity_view_importer/views/view_k8s_replica_set.txt @@ -0,0 +1,74 @@ +-- selected fields for asset_type=apps.k8s.io/ReplicaSet +-- per-asset-export does not yet support most k8s resources including apps.k8s.io/ReplicaSet, so we extract it from "resource" table. 
+ +SELECT +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + +JSON_EXTRACT(resource.data, "$.spec.template.spec.dnsPolicy") as dns_policy, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostIPC") as hostIPC, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostNetwork") as hostNetwork, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostPID") as hostPID, +JSON_EXTRACT(resource.data, "$.spec.template.spec.hostname") as hostname, +JSON_EXTRACT(resource.data, "$.spec.template.spec.nodeName") as nodename, +JSON_EXTRACT(resource.data, "$.spec.template.spec.serviceAccount") as serviceAccount, +JSON_EXTRACT(resource.data, "$.spec.template.spec.serviceAccountName") as serviceAccountName, + +JSON_EXTRACT(container_ports, "$.containerPort") as containerPort, +JSON_EXTRACT(container_ports, "$.hostIP") as containerPort_hostIP, +JSON_EXTRACT(container_ports, "$.hostPort") as containerPort_hostPort, +JSON_EXTRACT(container_ports, "$.name") as containerPort_name, +JSON_EXTRACT(container_ports, "$.protocol") as containerPort_protocol, + +JSON_EXTRACT(containers, "$.name") as containers_name, +JSON_EXTRACT(containers, "$.image") as containers_image, +JSON_EXTRACT(containers, "$.livenessProbe") as containers_livenessProbe, +JSON_EXTRACT(containers, "$.command") as containers_command, 
+JSON_EXTRACT(containers, "$.args") as containers_args, + +JSON_EXTRACT(ownerReferences, "$.apiVersion") as ownerReferences_apiVersion, +JSON_EXTRACT(ownerReferences, "$.blockOwnerDeletion") as ownerReferences_blockOwnerDeletion, +JSON_EXTRACT(ownerReferences, "$.controller") as ownerReferences_controller, +JSON_EXTRACT(ownerReferences, "$.name") as ownerReferences_name, +JSON_EXTRACT(ownerReferences, "$.kind") as ownerReferences_kind, + +JSON_EXTRACT(volumes, "$.name") as volumes_name, +JSON_EXTRACT(volumes, "$.volumeSource") as volumes_volumeSource, +JSON_EXTRACT(volumes, "$.volumeSource.projected.sources") as volumes_projected_sources, + +JSON_EXTRACT(container_volumeMounts, "$.mountPath") as volumeMounts_mountPath, +JSON_EXTRACT(container_volumeMounts, "$.name") as volumeMounts_name, +JSON_EXTRACT(container_volumeMounts, "$.readOnly") as volumeMounts_readOnly, +JSON_EXTRACT(container_volumeMounts, "$.subPath") as volumeMounts_subPath, +JSON_EXTRACT(container_volumeMounts, "$.subPathExpr") as volumeMounts_subPathExpr, + +JSON_EXTRACT(tolerations, "$.effect") as effect, +JSON_EXTRACT(tolerations, "$.type") as type, +JSON_EXTRACT(tolerations, "$.operator") as operator, +JSON_EXTRACT(tolerations, "$.value") as value, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.managedFields")) as managedFields, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.metadata.ownerReferences")) as ownerReferences, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.containers")) as containers, +UNNEST(JSON_EXTRACT_ARRAY(containers, "$.ports")) as container_ports, +UNNEST(JSON_EXTRACT_ARRAY(containers, "$.volumeMounts")) as container_volumeMounts, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.volumes")) as volumes, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.template.spec.tolerations")) as tolerations + +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = 
"apps.k8s.io/ReplicaSet" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_k8s_service.txt b/clarity_view_importer/views/view_k8s_service.txt new file mode 100644 index 0000000..7e960e3 --- /dev/null +++ b/clarity_view_importer/views/view_k8s_service.txt @@ -0,0 +1,38 @@ +-- selected fields for asset_type=k8s.io/Service +-- per-asset-export does not yet support most k8s resources including k8s.io/Service, so we extract it from "resource" table. + +SELECT +JSON_EXTRACT(managedFields, "$.apiVersion") as managedFields_apiVersion, +JSON_EXTRACT(managedFields, "$.fieldsType") as managedFields_fieldsType, +JSON_EXTRACT(managedFields, "$.manager") as managedFields_manager, +JSON_EXTRACT(managedFields, "$.operation") as managedFields_operation, +JSON_EXTRACT(managedFields, "$.time") as managedFields_time, +JSON_EXTRACT(resource.data, "$.metadata.namespace") as namespace, +JSON_EXTRACT(resource.data, "$.metadata.labels") labels, +JSON_EXTRACT(resource.data, "$.metadata.clusterName") as metadata_cluster_name, +JSON_EXTRACT(resource.data, "$.metadata.creationTimestamp") as metadata_creation_timestamp, +JSON_EXTRACT(resource.data, "$.metadata.annotations") as metadata_annotations, + +JSON_EXTRACT(resource.data, "$.spec.clusterIP") as clusterIP, +JSON_EXTRACT(resource.data, "$.spec.externalName") as externalName, +JSON_EXTRACT(resource.data, "$.spec.externalTrafficPolicy") as externalTrafficPolicy, +JSON_EXTRACT(resource.data, "$.spec.healthCheckNodePort") as healthCheckNodePort, +JSON_EXTRACT(resource.data, "$.spec.loadBalancerIP") as loadBalancerIP, + +JSON_EXTRACT(ports, "$.name") as port_name, +JSON_EXTRACT(ports, "$.nodePort") as nodePort, +JSON_EXTRACT(ports, "$.port") as port, +JSON_EXTRACT(ports, "$.protocol") as protocol, +JSON_EXTRACT(ports, "$.targetPort") as targetPort, + +readTime, + +FROM +`$project.$dataset.resource`, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.ports")) as ports, +UNNEST(JSON_EXTRACT_ARRAY(resource.data, 
"$.metadata.managedFields")) as managedFields +WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +AND asset_type = "k8s.io/Service" +LIMIT 100 + +-- author: jezhang@nuro.ai diff --git a/clarity_view_importer/views/view_loadbalancer.txt b/clarity_view_importer/views/view_loadbalancer.txt new file mode 100644 index 0000000..b1b1105 --- /dev/null +++ b/clarity_view_importer/views/view_loadbalancer.txt @@ -0,0 +1,59 @@ +SELECT + +UrlMapHostRules.hosts as urlMapHost, +UrlMapPathMatchersRules.paths as urlRulePath, +GlobalForwardingRule.resource.data.IPAddress AS fwdruleIP, +GlobalForwardingRule.resource.data.portRange AS fwdrulePortRange, +BackendService.resource.data.protocol AS backendServiceProtocol, +BackendService.resource.data.loadBalancingScheme AS backendServiceScheme, + +UrlMap.name AS urlMapPath, +UrlMap.resource.data.name as urlMapName, +UrlMap.resource.data.selfLink as urlMapSelfLink, +UrlMap.resource.data.description as urlMapDescription, +UrlMapPathMatchersRules.service as urlMapBackendService, + +BackendService.name AS backendServicePath, +BackendService.resource.data.name AS backendServiceName, +BackendService.resource.data.selfLink AS backendServiceSelfLink, +BackendService.resource.data.port AS backendServicePort, +BackendService.resource.data.logConfig.enable as backendLogging, + +Backend.group AS backendGroup, +Backend.description AS backendDescription, + +InstanceGroup.name as instanceGroupPath, +InstanceGroup.resource.data.name AS instanceGroupName, +InstanceGroup.resource.data.description as instanceGroupDescription, +InstanceGroup.resource.data.network AS instanceGroupNetwork, +InstanceGroup.resource.data.selfLink AS instanceGroupSelfLink, +InstanceGroup.resource.data.subnetwork AS instanceGroupSubnet, + +TargetHttpsProxy.name AS targetProxyPath, +TargetHttpsProxy.resource.data.name AS targetProxyName, +TargetHttpsProxy.resource.data.selfLink AS targetProxySelfLink, +TargetHttpsProxy.resource.data.urlMap AS targetProxyURLMap, 
+TargetHttpsProxy.resource.data.sslCertificates AS targetProxySSLCertificates, + +project.projectName, +GlobalForwardingRule.name, +GlobalForwardingRule.resource.data.creationTimestamp AS fwdruleCreation, +GlobalForwardingRule.resource.data.description AS fwdruleDescription, +GlobalForwardingRule.resource.data.IPProtocol AS fwdruleProtocol, +GlobalForwardingRule.resource.data.target AS fwdruleTarget, +GlobalForwardingRule.resource.data.selfLink AS fwdruleSelfLink, +GlobalForwardingRule.resource.data.loadBalancingScheme AS fwdruleScheme, +GlobalForwardingRule.resource.data.backendService AS fwdruleBackendService, + +FROM `$project.$dataset.resource_compute_googleapis_com_GlobalForwardingRule` GlobalForwardingRule +JOIN `$project.$dataset.view_project` project ON project.projectParent = GlobalForwardingRule.resource.parent +JOIN `$project.$dataset.resource_compute_googleapis_com_TargetHttpsProxy` TargetHttpsProxy ON (GlobalForwardingRule.resource.data.target = TargetHttpsProxy.resource.data.selfLink and DATE(TargetHttpsProxy.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)) +JOIN `$project.$dataset.resource_compute_googleapis_com_UrlMap` UrlMap ON (TargetHttpsProxy.resource.data.urlMap = UrlMap.resource.data.selfLink and DATE(UrlMap.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)) +CROSS JOIN UNNEST(UrlMap.resource.data.pathMatchers) as UrlMapPathMatchers +CROSS JOIN UNNEST(UrlMapPathMatchers.pathRules) as UrlMapPathMatchersRules +CROSS JOIN UNNEST(UrlMap.resource.data.hostRules) as UrlMapHostRules +JOIN `$project.$dataset.resource_compute_googleapis_com_BackendService` BackendService ON (UrlMapPathMatchersRules.service = BackendService.resource.data.selfLink and DATE(BackendService.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)) +CROSS JOIN UNNEST(BackendService.resource.data.backends) as Backend +JOIN `$project.$dataset.resource_compute_googleapis_com_InstanceGroup` InstanceGroup ON (Backend.group = InstanceGroup.resource.data.selfLink and 
DATE(InstanceGroup.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)) + +WHERE DATE(GlobalForwardingRule.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) \ No newline at end of file diff --git a/clarity_view_importer/views/view_logbucket.txt b/clarity_view_importer/views/view_logbucket.txt new file mode 100644 index 0000000..566c0a8 --- /dev/null +++ b/clarity_view_importer/views/view_logbucket.txt @@ -0,0 +1,25 @@ +-- This is needed because it's not possible to natively join Log Sinks and Log Buckets +-- because log sinks use the convention: logging.googleapis.com/projects/{$project_name}/ +-- and log buckets use the convention: //logging.googleapis.com/projects/{$project_number}/ +-- so this normalizes it to the log sink syntax + +SELECT +REPLACE(logBucketName, projectNumber, projectName) as logBucketLink, +logBucket.resource.data.description as logBucketDescription, +logBucket.resource.data.retentionDays as logBucketRetentionDays, +projectName as logBucketProject + +FROM ( + SELECT + SPLIT(logBucket.name, '/')[OFFSET(4)] as projectNumber, + project.projectName as projectName, + REPLACE(logBucket.name, "//", "") as logBucketName, + logBucket.* + + FROM `$project.$dataset.resource_logging_googleapis_com_LogBucket` logBucket + LEFT JOIN `$project.$dataset.view_project` project + ON ( + project.projectParent = logBucket.resource.parent) + ) as logBucket + WHERE DATE(logBucket.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_logsink.txt b/clarity_view_importer/views/view_logsink.txt new file mode 100644 index 0000000..b7fc4bc --- /dev/null +++ b/clarity_view_importer/views/view_logsink.txt @@ -0,0 +1,12 @@ +SELECT +logSink.name as logSinkPath, +logSink.resource.data.name as logSinkName, +logSink.resource.data.destination as logSinkDestination, +logSink.resource.data.filter as logSinkFilter, +logSink.resource.data.writerIdentity as logSinkWriterIdentity, 
+IFNULL(project.projectName, "GCP Org") as logSinkProjectName, +FROM `$project.$dataset.resource_logging_googleapis_com_LogSink` logSink +LEFT JOIN `$project.$dataset.view_project` project ON (project.projectParent = logSink.resource.parent) +WHERE DATE(logSink.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +GROUP BY logSink.name, logSink.resource.data.name, logSink.resource.data.destination,logSink.resource.data.filter,logSink.resource.data.writerIdentity,project.projectName +-- authors: rheins@nuro.ai diff --git a/clarity_view_importer/views/view_logsink_logbuckets.txt b/clarity_view_importer/views/view_logsink_logbuckets.txt new file mode 100644 index 0000000..12d0cff --- /dev/null +++ b/clarity_view_importer/views/view_logsink_logbuckets.txt @@ -0,0 +1,17 @@ +SELECT +logSink.name as logSinkPath, +logSink.resource.data.name as logSinkName, +logSink.resource.data.destination as logSinkDestination, +logSink.resource.data.filter as logSinkFilter, +logSink.resource.data.writerIdentity as logSinkWriterIdentity, +logBucket.logBucketLink, +logBucket.logBucketDescription, +logBucket.logBucketRetentionDays, +logBucket.logBucketProject, +IFNULL(project.projectName, "Org Sink") as logSinkProjectName, +FROM `$project.$dataset.resource_logging_googleapis_com_LogSink` logSink +JOIN `$project.$dataset.view_logbucket` logBucket ON(logSink.resource.data.destination = logBucket.logBucketLink) +LEFT JOIN `$project.$dataset.view_project` project ON (project.projectParent = logSink.resource.parent) +WHERE DATE(logSink.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) +GROUP BY logSink.name, logSink.resource.data.name, logSink.resource.data.destination,logSink.resource.data.filter,logSink.resource.data.writerIdentity,project.projectName, logBucket.logBucketLink, logBucket.logBucketDescription, logBucket.logBucketRetentionDays, logBucket.logBucketProject +-- authors: rheins@nuro.ai \ No newline at end of file diff --git a/clarity_view_importer/views/view_logsink_pubsubs.txt 
-- =====================================================================
-- file: clarity_view_importer/views/view_logsink_pubsubs.txt
-- Log sinks whose destination is a Pub/Sub topic, joined to the topic
-- and (when one exists) its subscriptions.
-- =====================================================================
SELECT
    logSink.name as logSinkPath,
    logSink.resource.data.name as logSinkName,
    logSink.resource.data.destination as logSinkDestination,
    logSink.resource.data.filter as logSinkFilter,
    logSink.resource.data.writerIdentity as logSinkWriterIdentity,
    pubsubTopic.resource.parent as pubsubTopicParent,
    pubsubTopic.resource.data.name as pubsubTopicName,
    pubsubSubscription.resource.data.name as pubsubSubscriptionName,
    pubsubSubscription.resource.data.pushConfig.pushEndpoint as pubsubSubscriptionPushEndpoint,
    -- Org-level sinks have no parent project row, hence the fallback label.
    IFNULL(project.projectName, "Org Sink") as logSinkProjectName
FROM `$project.$dataset.resource_logging_googleapis_com_LogSink` logSink
LEFT JOIN `$project.$dataset.view_project` project
    ON project.projectParent = logSink.resource.parent
-- Asset names look like "//pubsub.googleapis.com/projects/.../topics/...";
-- SUBSTR(name, 3) strips the leading "//" so the name matches the sink's
-- destination string format.
JOIN (
    SELECT SUBSTR(name, 3) as topicNameShort, *
    FROM `$project.$dataset.resource_pubsub_googleapis_com_Topic`
    WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
) pubsubTopic
    ON pubsubTopic.topicNameShort = logSink.resource.data.destination
LEFT JOIN `$project.$dataset.resource_pubsub_googleapis_com_Subscription` pubsubSubscription
    ON (pubsubSubscription.resource.data.topic = pubsubTopic.resource.data.name
        AND DATE(pubsubSubscription.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY))
WHERE DATE(logSink.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_network.txt
-- One row per (network, subnet) pair with project context, VPC flow-log
-- state, and per-network/per-subnet instance counts.
-- =====================================================================
SELECT
    project.projectName as projectName,
    project.projectCreateTime as projectCreationTimestamp,
    project.projectUpdateTime as projectUpdateTime,
    project.projectXPNStatus as projectXpnProjectStatus,
    network.name as networkPath,
    network.assetType as networkAssetType,
    network.resource.data.name as networkName,
    network.resource.data.description as networkDescription,
    network.resource.data.selfLink as networkSelfLink,
    subnet.resource.data.name as subnetName,
    subnet.resource.data.id as subnetId,
    subnet.resource.data.ipCidrRange AS subnetIPCidrRange,
    subnet.resource.data.gatewayAddress AS subnetGatewayAddress,
    subnet.resource.location AS subnetLocation,
    subnet.resource.data.logConfig.enable as subnetVPCFlowLogEnable,
    subnet.resource.data.selfLink as subnetSelfLink,
    -- Non-NULL only when the matching default-deny firewall rule (below) exists.
    firewall.resource.data.logConfig.enable as firewallDefaultDenyLogging,
    (SELECT COUNT(*) FROM $project.$dataset.view_instance WHERE view_instance.networkSelfLink = network.resource.data.selfLink) as vpcInstanceCount,
    (SELECT COUNT(*) FROM $project.$dataset.view_instance WHERE view_instance.subnetSelfLink = subnet.resource.data.selfLink) as subnetInstanceCount
FROM `$project.$dataset.resource_compute_googleapis_com_Network` network
JOIN `$project.$dataset.view_project` project
    ON project.projectParent = network.resource.parent
LEFT JOIN `$project.$dataset.resource_compute_googleapis_com_Subnetwork` subnet
    ON (subnet.resource.data.network = network.resource.data.selfLink
        AND DATE(subnet.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY))
-- Only matches an ingress deny-all rule from 0.0.0.0/0 with logging enabled,
-- i.e. a logged "default deny" rule on this network.
LEFT JOIN $project.$dataset.resource_compute_googleapis_com_Firewall firewall
    ON (DATE(firewall.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
        and firewall.resource.data.network = network.resource.data.selfLink
        and firewall.resource.data.direction = "INGRESS"
        and firewall.resource.data.sourceRanges[SAFE_OFFSET(0)] = "0.0.0.0/0"
        and firewall.resource.data.denied[SAFE_OFFSET(0)].IPProtocol = "all"
        and firewall.resource.data.logConfig.enable = true
        and network.resource.parent = project.projectParent)
WHERE DATE(network.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_node.txt
-- GKE nodes extracted from the generic "resource" table (k8s.io/Node has
-- no per-asset export table), enriched with cluster metadata.
-- Asset name layout: //container.googleapis.com/projects/<p>/.../clusters/<c>/...,
-- so SPLIT(name,'/')[OFFSET(4)] is the project and [OFFSET(8)] the cluster.
-- =====================================================================
SELECT
    SPLIT(name, '/')[OFFSET(8)] as clusterName,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels['kubernetes.io/hostname']") AS nodeHostname,
    JSON_EXTRACT_SCALAR(resource.data, '$.metadata.creationTimestamp') AS nodeCreationTimestamp,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.annotations['container.googleapis.com/instance_id']") AS nodeInstanceID,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.annotations['projectcalico.org/IPv4Address']") AS nodeIP,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels['node.kubernetes.io/instance-type']") AS nodeInstanceType,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels['kubernetes.io/os']") AS nodeOS,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels['cloud.google.com/gke-nodepool']") AS nodePool,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.labels['failure-domain.beta.kubernetes.io/zone']") AS nodeZone,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.podCIDR") AS nodePodCIDR,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.providerID") AS nodeProviderID,
    clusterPath,
    clusterLocation,
    name as nodePath,
    SPLIT(name, '/')[OFFSET(4)] as projectName
FROM `$project.$dataset.resource`
LEFT JOIN `$project.$dataset.view_cluster` cluster
    ON (cluster.projectName = SPLIT(name, '/')[OFFSET(4)]
        AND cluster.clusterName = SPLIT(name, '/')[OFFSET(8)])
WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
AND asset_type = "k8s.io/Node"
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_nodepool.txt
-- GKE node pools joined to their parent cluster (cluster.* columns are
-- appended from view_cluster).
-- =====================================================================
SELECT
    nodepool.name as nodepoolPath,
    nodepool.resource.parent as nodepoolParent,
    nodepool.resource.data.name as nodepoolName,
    nodepool.resource.data.config.machineType as nodepoolMachineType,
    nodepool.resource.data.config.diskSizeGb as nodepoolDiskSizeGb,
    nodepool.resource.data.config.serviceAccount as nodepoolSA,
    nodepool.resource.data.config.imageType as nodepoolImageType,
    nodepool.resource.data.networkConfig.podIpv4CidrBlock as nodepoolCIDR,
    nodepool.resource.data.status as nodepoolStatus,
    nodepool.resource.data.autoscaling.enabled as nodepoolAutoscalingEnable,
    nodepool.resource.data.autoscaling.minNodeCount as nodepoolMinNodecount,
    nodepool.resource.data.autoscaling.maxNodeCount as nodepoolMaxNodecount,
    nodepool.resource.data.maxPodsConstraint.maxPodsPerNode as nodepoolMaxNodesPerPod,
    nodepool.resource.location as nodepoolLocation,
    nodepool.updateTime as nodepoolUpdateTime,
    cluster.*
FROM `$project.$dataset.resource_container_googleapis_com_NodePool` nodepool
JOIN `$project.$dataset.view_cluster` cluster
    ON (nodepool.resource.parent = cluster.clusterPath)
-- Qualified as nodepool.readTime: bare "readTime" is ambiguous if
-- view_cluster (pulled in via cluster.*) also exposes a readTime column.
WHERE DATE(nodepool.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_pod.txt
-- selected fields for asset_type=k8s.io/Pods
-- per-asset-export does not yet support most k8s resources including
-- k8s.io/Pods, so we extract it from the "resource" table.
-- One row per (pod, container, init container) combination.
-- =====================================================================
SELECT
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.nodeName") AS nodeName,
    SPLIT(name, '/')[OFFSET(8)] as clusterName,
    SPLIT(name, '/')[OFFSET(4)] as projectName,
    JSON_EXTRACT_SCALAR(pod_containers, "$.image") as podImage,
    name as podPath,
    node.clusterPath,
    node.clusterLocation,
    node.nodePath,
    JSON_EXTRACT_SCALAR(init_containers, "$.image") as initImage,
    ARRAY_LENGTH(JSON_EXTRACT_ARRAY(resource.data, "$.spec.containers")) AS podContainerCount,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.hostIP") as hostIP,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.message") as statusMessage,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.podIP") as podIP,
    -- JSON_EXTRACT (not _SCALAR) for non-scalar paths: JSON_EXTRACT_SCALAR
    -- returns NULL for arrays/objects, so these columns were always NULL.
    JSON_EXTRACT(resource.data, "$.status.podIPs") as podIPs,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.phase") AS podPhase,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.reason") as podStatusReason,
    JSON_EXTRACT_SCALAR(resource.data, "$.status.startTime") as podStartTime,
    JSON_EXTRACT(resource.data, "$.spec.volumes") as podVolumes,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.restartPolicy") as podRestartPolicy,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.schedulerName") as podSchedulerName,
    JSON_EXTRACT(resource.data, "$.spec.securityContext") as podSecurityContext,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.serviceAccount") as podSA,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.serviceAccountName") as podSAName,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.subdomain") as podSubdomain,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.nodeName") as podNodeName,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.dnsPolicy") as podDNSPolicy,
    JSON_EXTRACT_SCALAR(resource.data, "$.spec.hostNetwork") as podHostNetwork,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.namespace") as podNamespace,
    JSON_EXTRACT(resource.data, "$.metadata.labels") as podMetadataLabels,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.clusterName") as podClusterName,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.creationTimestamp") as podCreationTime,
    JSON_EXTRACT(resource.data, "$.metadata.annotations") as podAnnotations,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.annotations['components.gke.io/component-name']") AS podComponentName,
    JSON_EXTRACT_SCALAR(resource.data, "$.metadata.annotations['components.gke.io/component-version']") AS podComponentVersion,
    readTime
FROM `$project.$dataset.resource`,
-- Every pod has at least one container, so a CROSS JOIN (comma) is safe here.
UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.containers")) as pod_containers
-- LEFT JOIN (was a comma/CROSS JOIN): most pods have no initContainers, and a
-- CROSS JOIN against the resulting empty/NULL array silently dropped them.
LEFT JOIN UNNEST(JSON_EXTRACT_ARRAY(resource.data, "$.spec.initContainers")) as init_containers
LEFT JOIN `$project.$dataset.view_node` node
    ON (node.projectName = SPLIT(name, '/')[OFFSET(4)]
        AND node.nodeHostname = JSON_EXTRACT_SCALAR(resource.data, "$.spec.nodeName"))
WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
AND asset_type = "k8s.io/Pod"
-- authors: kbroughton@nuro.ai, rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_project.txt
-- One row per project from the Compute project export; projectNumber is
-- the last path segment of resource.parent.
-- =====================================================================
SELECT
    resource.data.name as projectName,
    SPLIT(resource.parent, '/')[SAFE_OFFSET(ARRAY_LENGTH(SPLIT(resource.parent, '/')) - 1)] as projectNumber,
    name as projectPath,
    resource.parent as projectParent,
    resource.data.defaultServiceAccount as projectDefaultSA,
    resource.data.selfLink as projectSelfLink,
    resource.data.xpnProjectStatus as projectXPNStatus,
    updateTime as projectUpdateTime,
    resource.data.creationTimestamp as projectCreateTime
FROM `$project.$dataset.resource_compute_googleapis_com_Project`
WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
-- authors: rheins@nuro.ai, kbroughton@nuro.ai
-- =====================================================================
-- file: clarity_view_importer/views/view_project_logging_summary.txt
-- Per-project inventory counts with a focus on logging coverage
-- (VPC firewall logging, subnet flow logs).
-- =====================================================================
SELECT
    project.projectName as projectName,

    (SELECT count(*) FROM $project.$dataset.view_instance instance WHERE instance.projectName = project.projectName) as instances,
    (SELECT count(*) FROM $project.$dataset.resource_compute_googleapis_com_Network network WHERE network.resource.parent = project.projectParent) as vpcs,
    -- VPCs that carry a logged ingress deny-all rule from 0.0.0.0/0.
    (SELECT count(*) FROM $project.$dataset.resource_compute_googleapis_com_Network network LEFT JOIN $project.$dataset.resource_compute_googleapis_com_Firewall firewall ON (DATE(firewall.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) and firewall.resource.data.network = network.resource.data.selfLink and firewall.resource.data.direction = "INGRESS" and firewall.resource.data.sourceRanges[SAFE_OFFSET(0)] = "0.0.0.0/0" and firewall.resource.data.denied[SAFE_OFFSET(0)].IPProtocol = "all" and firewall.resource.data.logConfig.enable = true) WHERE network.resource.parent = project.projectParent) as vpcFWLogs,
    (SELECT count(*) FROM $project.$dataset.resource_compute_googleapis_com_Subnetwork subnet LEFT JOIN $project.$dataset.resource_compute_googleapis_com_Network network ON (DATE(network.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) and subnet.resource.data.network = network.resource.data.selfLink) WHERE network.resource.parent = project.projectParent) as subnets,
    -- Distinct subnets with VPC flow logs enabled.
    (SELECT COUNT(DISTINCT IF(subnet.resource.data.logConfig.enable, subnet.name, NULL)) FROM $project.$dataset.resource_compute_googleapis_com_Subnetwork subnet
        LEFT JOIN $project.$dataset.resource_compute_googleapis_com_Network network
            ON (DATE(network.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) and subnet.resource.data.network = network.resource.data.selfLink)
        WHERE network.resource.parent = project.projectParent and DATE(subnet.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)) as subnetFlowLogs,
    (SELECT count(*) FROM $project.$dataset.view_external_ip externalIP WHERE externalIP.projectName = project.projectName) as externalIPs,
    (SELECT count(*) FROM $project.$dataset.view_cluster cluster WHERE cluster.projectName = project.projectName) as clusters,
    -- NOTE(review): compares cloudsql.projectName against project.projectParent
    -- (a name vs. a "projects/<number>" parent) -- verify view_cloudsql's
    -- projectName column actually holds the parent path, else this count is 0.
    (SELECT count(*) FROM $project.$dataset.view_cloudsql cloudsql WHERE cloudsql.projectName = project.projectParent) as cloudSQLs,
    (SELECT IFNULL(SUM(bqTableCount), 0) FROM $project.$dataset.view_bqdataset bqDataset WHERE bqDataset.projectName = project.projectName) as BigQueryTables,
    (SELECT count(*) FROM $project.$dataset.view_bucket bucket WHERE bucket.bucketParent = project.projectParent) as GCSBuckets,
    (SELECT count(*) FROM $project.$dataset.view_redis redis WHERE redis.redisParent = project.projectParent) as Redis,

    project.projectParent as projectParentId,
    project.projectCreateTime as projectCreationTimestamp,
    project.projectUpdateTime as projectUpdateTime

FROM `$project.$dataset.view_project` project
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_project_summary.txt
-- Per-project inventory counts (distinct networks/subnets/IPs, clusters,
-- databases, buckets).
-- =====================================================================
SELECT
    project.projectName as projectName,
    --billing.cost_after_credits as billingCost,
    (SELECT count(*) FROM $project.$dataset.view_instance instance WHERE instance.projectName = project.projectName) as instances,
    (SELECT COUNT(DISTINCT(name)) FROM $project.$dataset.resource_compute_googleapis_com_Network network WHERE network.resource.parent = project.projectParent) as vpcs,
    (SELECT COUNT(DISTINCT(subnet.name)) FROM $project.$dataset.resource_compute_googleapis_com_Subnetwork subnet LEFT JOIN $project.$dataset.resource_compute_googleapis_com_Network network ON (DATE(network.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY) and subnet.resource.data.network = network.resource.data.selfLink) WHERE network.resource.parent = project.projectParent) as subnets,
    (SELECT COUNT(DISTINCT(address)) FROM $project.$dataset.view_external_ip externalIP WHERE externalIP.projectName = project.projectName) as externalIPs,
    (SELECT count(*) FROM $project.$dataset.view_cluster cluster WHERE cluster.projectName = project.projectName) as clusters,
    -- NOTE(review): same projectName-vs-projectParent comparison as in
    -- view_project_logging_summary -- confirm against view_cloudsql.
    (SELECT count(*) FROM $project.$dataset.view_cloudsql cloudsql WHERE cloudsql.projectName = project.projectParent) as cloudSQLs,
    (SELECT IFNULL(SUM(bqTableCount), 0) FROM $project.$dataset.view_bqdataset bqDataset WHERE bqDataset.projectName = project.projectName) as BigQueryTables,
    (SELECT count(*) FROM $project.$dataset.view_bucket bucket WHERE bucket.bucketParent = project.projectParent) as GCSBuckets,
    (SELECT count(*) FROM $project.$dataset.view_redis redis WHERE redis.redisParent = project.projectParent) as redis,

    project.projectParent as projectParentId,
    project.projectCreateTime as projectCreationTimestamp,
    project.projectUpdateTime as projectUpdateTime

FROM `$project.$dataset.view_project` project
-- NOTE(review): billing is only referenced by the commented-out billingCost
-- column, yet the join can still multiply rows if billing has duplicate
-- project entries -- confirm it is one row per project before relying on counts.
LEFT JOIN `$project.$dataset.view_project_billing_last_month` billing
    ON (billing.project = project.projectName)

-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_redis.txt
-- Memorystore (Redis) instances with project context.
-- =====================================================================
SELECT
    redis.name as redisPath,
    -- Instance name is the 9th path segment of the asset name.
    SPLIT(redis.name, '/')[OFFSET(8)] as redisName,
    --redis.resource.data.name as redisName,
    project.projectName,
    redis.resource.data.displayName as redisDisplayName,
    redis.resource.parent as redisParent,
    redis.resource.data.host as redisIP,
    redis.resource.data.port as redisPort,
    redis.resource.data.memorySizeGb as redisMemorySizeGB,
    redis.resource.data.redisVersion as redisVersion,
    redis.resource.data.authorizedNetwork as redisAuthorizedNetwork,
    redis.resource.data.authEnabled as redisAuthEnabled,
    redis.resource.data.transitEncryptionMode as redisEncryptionMode,
    redis.resource.data.connectMode as redisConnectMode,
    redis.resource.data.persistenceIamIdentity as redisPersistanceIAMId,
    redis.resource.location as redisLocation,
    redis.resource.data.reservedIpRange as redisIPRange,
    redis.resource.data.createTime as redisCreateTime
FROM `$project.$dataset.resource_redis_googleapis_com_Instance` redis
LEFT JOIN `$project.$dataset.view_project` project
    ON (project.projectParent = redis.resource.parent)
WHERE DATE(redis.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
-- Authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_resource.txt
-- Asset counts per (project, asset type), largest first.
-- =====================================================================
SELECT
    project.projectName as projectName,
    resource.asset_type as assetType,
    count(*) as resourceCount
FROM `$project.$dataset.resource` resource
JOIN `$project.$dataset.view_project` project
    ON project.projectParent = resource.resource.parent
    AND DATE(resource.readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)
GROUP BY projectName, assetType
ORDER BY resourceCount DESC
-- authors: rheins@nuro.ai

-- =====================================================================
-- file: clarity_view_importer/views/view_ssl_certs.txt
-- SSL certificates flattened to one row per subject-alternative name.
-- =====================================================================
SELECT
    sslAltName,
    certs.name as sslCertPath,
    project.projectName,
    resource.data.name as sslCertName,
    resource.data.managed.domains as sslCertDomains,
    resource.data.managed.status as sslCertStatus,
    resource.data.type as sslCertType,
    resource.data.creationTimestamp,
    resource.data.expireTime as sslCertExpiration,
    resource.data.selfLink as sslCertSelfLink

FROM `$project.$dataset.resource_compute_googleapis_com_SslCertificate` certs
CROSS JOIN UNNEST(certs.resource.data.subjectAlternativeNames) as sslAltName
JOIN `$project.$dataset.view_project` project
    ON certs.resource.parent = project.projectParent
WHERE DATE(readTime) = DATE_SUB(CURRENT_DATE(), INTERVAL 1 DAY)