diff --git a/README.md b/README.md index 60349d27..227d5e62 100644 --- a/README.md +++ b/README.md @@ -19,28 +19,32 @@ - +

-## What is VulcanSQL +## What is VulcanSQL? -**[VulcanSQL](https://vulcansql.com/) is a Data API Framework for data applications** that helps data folks create and share data APIs faster. It turns your SQL templates into data APIs. No backend skills required. +**[VulcanSQL](https://vulcansql.com/) is an Analytical Data API Framework for data apps**. It aims to help data professionals deliver RESTful APIs from databases, data warehouses or data lakes much easier and faster. It turns your SQL queries into REST APIs in no time! ![overview of VulcanSQL](https://i.imgur.com/JvCIZQ1.png) -## Online Playground +## What Problems does VulcanSQL aim to solve? -Use [Online Playground](https://codesandbox.io/p/sandbox/vulcansql-demo-wfd834) to get a taste of VulcanSQL! +Given the vast amount of analytical data in databases, data warehouses, and data lakes, there is currently no easy method for data professionals to share data with relevant stakeholders for operational business use cases. -## Examples +## Online Playground -Need Inspiration?! Discover a [selected compilation of examples](https://github.com/Canner/vulcan-sql-examples) showcasing the use of VulcanSQL! +Use [Online Playground](https://codesandbox.io/p/sandbox/vulcansql-demo-wfd834) to get a taste of VulcanSQL! ## Installation Please visit [the installation guide](https://vulcansql.com/docs/get-started/installation). +## Examples + +Need inspiration? Here are a [selected compilation of examples](https://github.com/Canner/vulcan-sql-examples) showcasing how you can use VulcanSQL! + ## How VulcanSQL works? 💻 **Build** @@ -49,7 +53,7 @@ VulcanSQL offers a development experience similar to dbt. Just insert variables 🚀 **Accelerate** -VulcanSQL uses DuckDB as a caching layer, boosting your query speed and API response time . This means faster, smoother data APIs for you and less strain on your data sources. +VulcanSQL uses DuckDB as a caching layer, boosting your query speed and reducing API response time. 
This means faster, smoother data APIs for you and less strain on your data sources. 🔥 **Deploy** @@ -82,7 +86,7 @@ Below are some common scenarios that you may be interested: 👏 **Data sharing**: Sharing data with partners, vendors, or customers, which requires a secure and scalable way to expose data. -⚙️ **Internal tools**: Integration with internal tools like AppSmith and Retools, etc. +⚙️ **Internal tools**: Integration with internal tools like Zapier, AppSmith and Retools, etc. ## Community diff --git a/codecov.yml b/codecov.yml index d7a476fe..03fa4c18 100644 --- a/codecov.yml +++ b/codecov.yml @@ -3,3 +3,13 @@ flag_management: # Reference of past coverage for tests that are not run on current commit. # https://docs.codecov.com/docs/carryforward-flags carryforward: true +coverage: + status: + patch: false + project: + default: + target: auto + threshold: "80%" + base: auto + flags: + - unit \ No newline at end of file diff --git a/packages/build/package.json b/packages/build/package.json index b3f35441..3ab75cf0 100644 --- a/packages/build/package.json +++ b/packages/build/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/build", "description": "VulcanSQL package for building projects", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -22,6 +22,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/catalog-server/lib/api.ts b/packages/catalog-server/lib/api.ts index a6fc8544..d68fd015 100755 --- a/packages/catalog-server/lib/api.ts +++ b/packages/catalog-server/lib/api.ts @@ -1,5 +1,8 @@ import axios from 'axios'; import { errorCode } from '@vulcan-sql/catalog-server/utils/errorCode'; +import getConfig from 'next/config'; + +const { publicRuntimeConfig } = getConfig(); enum API { Login = '/api/auth/login', @@ -22,7 +25,7 @@ const handleError = ({ statusCode, errorMessage }) => { }; 
export const axiosInstance = axios.create({ - baseURL: process.env.API_URL || 'http://localhost:4200', + baseURL: publicRuntimeConfig.baseUrl, responseType: 'json', timeout: 30000, headers: { diff --git a/packages/catalog-server/lib/apollo.ts b/packages/catalog-server/lib/apollo.ts index 71967b32..9596aa55 100755 --- a/packages/catalog-server/lib/apollo.ts +++ b/packages/catalog-server/lib/apollo.ts @@ -7,7 +7,7 @@ import { import { setContext } from '@apollo/client/link/context'; const httpLink = createHttpLink({ - uri: process.env.GQL_API_URL || 'http://localhost:4200/api/graphql', + uri: '/api/graphql', }); const authLink = setContext((_, { headers }) => { diff --git a/packages/catalog-server/next.config.js b/packages/catalog-server/next.config.js index d698ed53..8c09b1cc 100644 --- a/packages/catalog-server/next.config.js +++ b/packages/catalog-server/next.config.js @@ -12,6 +12,9 @@ const nextConfig = { compiler: { styledComponents: true, }, + publicRuntimeConfig: { + baseUrl: process.env.BASE_URL || 'http://localhost:4200', + }, serverRuntimeConfig: { // Will only be available on the server side vulcanSQLHost: process.env.VULCAN_SQL_HOST || 'http://localhost:3000', diff --git a/packages/catalog-server/package.json b/packages/catalog-server/package.json index 458ad23d..c5be3567 100644 --- a/packages/catalog-server/package.json +++ b/packages/catalog-server/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/catalog-server", "description": "Catalog server for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "publishConfig": { "access": "public" }, diff --git a/packages/cli/package.json b/packages/cli/package.json index 7bc01658..3a7af488 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/cli", "description": "CLI tools for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "bin": { "vulcan": "./src/index.js" diff --git a/packages/core/package.json 
b/packages/core/package.json index 21399167..9691a287 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/core", "description": "Core package of VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" diff --git a/packages/core/src/containers/modules/extension.ts b/packages/core/src/containers/modules/extension.ts index 721c5e1b..35f4b115 100644 --- a/packages/core/src/containers/modules/extension.ts +++ b/packages/core/src/containers/modules/extension.ts @@ -3,6 +3,7 @@ import { ExtensionLoader } from '../../lib/extension-loader'; import { ICoreOptions } from '../../models/coreOptions'; import templateEngineModules from '../../lib/template-engine/built-in-extensions'; import validatorModule from '../../lib/validators/built-in-validators'; +import LoggerModule from '../../lib/loggers'; import { builtInCodeLoader, builtInTemplateProvider, @@ -23,6 +24,7 @@ export const extensionModule = (options: ICoreOptions) => for (const templateEngineModule of templateEngineModules) { loader.loadInternalExtensionModule(templateEngineModule); } + loader.loadInternalExtensionModule(LoggerModule); // Validator (single module) loader.loadInternalExtensionModule(validatorModule); // Template provider (single module) diff --git a/packages/core/src/containers/types.ts b/packages/core/src/containers/types.ts index d0dabe38..66ccd50e 100644 --- a/packages/core/src/containers/types.ts +++ b/packages/core/src/containers/types.ts @@ -52,4 +52,6 @@ export const TYPES = { Extension_CompilerLoader: Symbol.for('Extension_CompilerLoader'), Extension_DataSource: Symbol.for('Extension_DataSource'), Extension_ProfileReader: Symbol.for('ProfileReader'), + // Logger + Extension_ActivityLogger: Symbol.for('Extension_ActivityLogger'), }; diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 10e530a9..e5a65522 100644 --- a/packages/core/src/index.ts +++ 
b/packages/core/src/index.ts @@ -1,5 +1,6 @@ export * from './lib/utils'; export * from './lib/validators'; +export * from './lib/loggers'; export * from './lib/template-engine'; export * from './lib/artifact-builder'; export * from './lib/data-query'; diff --git a/packages/core/src/lib/cache-layer/cacheLayerLoader.ts b/packages/core/src/lib/cache-layer/cacheLayerLoader.ts index c0395da3..d443d009 100644 --- a/packages/core/src/lib/cache-layer/cacheLayerLoader.ts +++ b/packages/core/src/lib/cache-layer/cacheLayerLoader.ts @@ -22,7 +22,6 @@ export class CacheLayerLoader implements ICacheLayerLoader { private options: ICacheLayerOptions; private cacheStorage: DataSource; private logger = getLogger({ scopeName: 'CORE' }); - constructor( @inject(TYPES.CacheLayerOptions) options: CacheLayerOptions, @inject(TYPES.Factory_DataSource) @@ -43,7 +42,14 @@ export class CacheLayerLoader implements ICacheLayerLoader { templateName: string, cache: CacheLayerInfo ): Promise { - const { cacheTableName, sql, profile, indexes, folderSubpath } = cache; + const { + cacheTableName, + sql, + profile, + indexes, + folderSubpath, + options: cacheOptions, + } = cache; const type = this.options.type!; const dataSource = this.dataSourceFactory(profile); @@ -82,6 +88,7 @@ export class CacheLayerLoader implements ICacheLayerLoader { directory, profileName: profile, type, + options: cacheOptions, }); } else { this.logger.debug( diff --git a/packages/core/src/lib/cache-layer/cacheLayerRefresher.ts b/packages/core/src/lib/cache-layer/cacheLayerRefresher.ts index 3bda4ef8..1fc57e8a 100644 --- a/packages/core/src/lib/cache-layer/cacheLayerRefresher.ts +++ b/packages/core/src/lib/cache-layer/cacheLayerRefresher.ts @@ -1,12 +1,24 @@ import ms, { StringValue } from 'ms'; import { uniq } from 'lodash'; import { ToadScheduler, SimpleIntervalJob, AsyncTask } from 'toad-scheduler'; -import { inject, injectable } from 'inversify'; +import { inject, injectable, multiInject } from 'inversify'; import { TYPES 
} from '@vulcan-sql/core/types'; -import { APISchema } from '@vulcan-sql/core/models'; +import { + APISchema, + ActivityLogContentOptions, + ActivityLogType, + CacheLayerInfo, + IActivityLogger, +} from '@vulcan-sql/core/models'; import { ConfigurationError } from '../utils/errors'; import { ICacheLayerLoader } from './cacheLayerLoader'; +import { getLogger } from '../utils'; +import moment = require('moment'); +enum RefreshResult { + SUCCESS = 'SUCCESS', + FAILED = 'FAILED', +} export interface ICacheLayerRefresher { /** * Start the job to load the data source to cache storage and created tables from cache settings in schemas @@ -22,9 +34,16 @@ export interface ICacheLayerRefresher { export class CacheLayerRefresher implements ICacheLayerRefresher { private cacheLoader: ICacheLayerLoader; private scheduler = new ToadScheduler(); + private activityLoggers: IActivityLogger[]; + private logger = getLogger({ scopeName: 'CORE' }); - constructor(@inject(TYPES.CacheLayerLoader) loader: ICacheLayerLoader) { + constructor( + @inject(TYPES.CacheLayerLoader) loader: ICacheLayerLoader, + @multiInject(TYPES.Extension_ActivityLogger) + activityLoggers: IActivityLogger[] + ) { this.cacheLoader = loader; + this.activityLoggers = activityLoggers; } public async start( @@ -53,16 +72,14 @@ export class CacheLayerRefresher implements ICacheLayerRefresher { const refreshJob = new SimpleIntervalJob( { milliseconds, runImmediately }, new AsyncTask(workerId, async () => { - // load data the to cache storage - - await this.cacheLoader.load(templateName, cache); + await this.loadCacheAndSendActivityLog(schema, cache); }), { preventOverrun: true, id: workerId } ); // add the job to schedule cache refresh task this.scheduler.addIntervalJob(refreshJob); } else { - await this.cacheLoader.load(templateName, cache); + await this.loadCacheAndSendActivityLog(schema, cache); } }) ); @@ -77,6 +94,44 @@ export class CacheLayerRefresher implements ICacheLayerRefresher { this.scheduler.stop(); } + 
private async loadCacheAndSendActivityLog( + schema: APISchema, + cache: CacheLayerInfo + ) { + const { urlPath } = schema; + const { sql } = cache; + let refreshResult = RefreshResult.SUCCESS; + const now = moment.utc().format('YYYY-MM-DD HH:mm:ss'); + const templateName = schema.templateSource.replace('/', '_'); + try { + // get the current time in format of UTC + await this.cacheLoader.load(templateName, cache); + } catch (error: any) { + refreshResult = RefreshResult.FAILED; + this.logger.debug(`Failed to refresh cache: ${error}`); + } finally { + // send activity log + const content = { + isSuccess: refreshResult === RefreshResult.SUCCESS ? true : false, + activityLogType: ActivityLogType.CACHE_REFRESH, + logTime: now, + urlPath, + sql, + } as ActivityLogContentOptions; + const activityLoggers = this.getActivityLoggers(); + for (const activityLogger of activityLoggers) + activityLogger.log(content).catch((err: any) => { + this.logger.debug( + `Failed to log activity after refreshing cache: ${err}` + ); + }); + } + } + + private getActivityLoggers(): IActivityLogger[] { + return this.activityLoggers.filter((logger) => logger.isEnabled()); + } + private checkDuplicateCacheTableName(schemas: APISchema[]) { const tableNames = schemas // => [[table1, table2], [table1, table3], [table4]] diff --git a/packages/core/src/lib/data-query/builder/dataQueryBuilder.ts b/packages/core/src/lib/data-query/builder/dataQueryBuilder.ts index 4d25cd2a..a6c8ea55 100644 --- a/packages/core/src/lib/data-query/builder/dataQueryBuilder.ts +++ b/packages/core/src/lib/data-query/builder/dataQueryBuilder.ts @@ -3,6 +3,7 @@ import { Pagination, DataResult, isOffsetPagination, + IncomingHttpHeaders, } from '@vulcan-sql/core/models'; import * as uuid from 'uuid'; import { find, isEmpty, isNull, isUndefined } from 'lodash'; @@ -411,6 +412,7 @@ export interface IDataQueryBuilder { take(size: number, move: number): IDataQueryBuilder; // paginate paginate(pagination: Pagination): void; + 
setHeaders(headers: IncomingHttpHeaders): void; value(): Promise; clone(): IDataQueryBuilder; parameterizeOperations(): Promise>>; @@ -425,6 +427,7 @@ export class DataQueryBuilder implements IDataQueryBuilder { public readonly identifier: string; private profileName: string; private parameterizer: IParameterizer; + private headers: IncomingHttpHeaders; constructor({ statement, @@ -432,12 +435,14 @@ export class DataQueryBuilder implements IDataQueryBuilder { parameterizer, dataSource, profileName, + headers, }: { statement: string; operations?: SQLClauseOperation; parameterizer: IParameterizer; dataSource: DataSource; profileName: string; + headers: IncomingHttpHeaders; }) { this.identifier = uuid.v4(); this.statement = statement; @@ -453,6 +458,7 @@ export class DataQueryBuilder implements IDataQueryBuilder { limit: null, offset: null, }; + this.headers = headers; this.profileName = profileName; } @@ -647,6 +653,7 @@ export class DataQueryBuilder implements IDataQueryBuilder { dataSource: this.dataSource, parameterizer: this.parameterizer, profileName: this.profileName, + headers: this.headers, }); builderCallback(wrappedBuilder); this.recordWhere({ @@ -1096,6 +1103,7 @@ export class DataQueryBuilder implements IDataQueryBuilder { operations: this.operations, parameterizer: this.parameterizer.clone(), profileName: this.profileName, + headers: this.headers, }); } @@ -1107,6 +1115,11 @@ export class DataQueryBuilder implements IDataQueryBuilder { this.take(pagination.limit, pagination.offset); } + public setHeaders(headers: IncomingHttpHeaders) { + if (!headers) return; + this.headers = headers; + } + public async parameterizeOperations(): Promise< Partial> > { @@ -1127,6 +1140,7 @@ export class DataQueryBuilder implements IDataQueryBuilder { operations: await this.parameterizeOperations(), bindParams: this.parameterizer.getBinding(), profileName: this.profileName, + headers: this.headers, }); return result; diff --git a/packages/core/src/lib/data-query/executor.ts 
b/packages/core/src/lib/data-query/executor.ts index dfd9322a..840372ce 100644 --- a/packages/core/src/lib/data-query/executor.ts +++ b/packages/core/src/lib/data-query/executor.ts @@ -1,5 +1,6 @@ import { DataSource, + IncomingHttpHeaders, PrepareParameterFunc, RequestParameter, } from '@vulcan-sql/core/models'; @@ -12,7 +13,8 @@ export interface IExecutor { createBuilder( profileName: string, query: string, - parameterizer: IParameterizer + parameterizer: IParameterizer, + headers?: IncomingHttpHeaders ): Promise; prepare: PrepareParameterFunc; } @@ -39,13 +41,15 @@ export class QueryExecutor implements IExecutor { public async createBuilder( profileName: string, query: string, - parameterizer: IParameterizer + parameterizer: IParameterizer, + headers?: IncomingHttpHeaders ) { return new DataQueryBuilder({ statement: query, parameterizer, dataSource: this.dataSourceFactory(profileName)!, profileName, + headers: headers || {}, }); } } diff --git a/packages/core/src/lib/loggers/httpLogger.ts b/packages/core/src/lib/loggers/httpLogger.ts new file mode 100644 index 00000000..d6167b06 --- /dev/null +++ b/packages/core/src/lib/loggers/httpLogger.ts @@ -0,0 +1,54 @@ +import { + BaseActivityLogger, + ActivityLoggerType, +} from '../../models/extensions/logger'; +import { + VulcanExtensionId, + VulcanInternalExtension, +} from '../../models/extensions'; +import axios, { AxiosRequestHeaders } from 'axios'; +import { ConnectionConfig, getUrl } from '../utils/url'; + +export interface HttpLoggerConfig { + connection?: HttpLoggerConnectionConfig; +} + +export interface HttpLoggerConnectionConfig extends ConnectionConfig { + headers?: Record | undefined; +} + +@VulcanInternalExtension('activity-log') +@VulcanExtensionId(ActivityLoggerType.HTTP_LOGGER) +export class HttpLogger extends BaseActivityLogger { + private logger = this.getLogger(); + + public async log(payload: any): Promise { + if (!this.isEnabled()) return; + const option = this.getOptions(); + if 
(!option?.connection) { + throw new Error('Http logger connection should be provided'); + } + const headers = option.connection.headers; + const url = getUrl(option.connection); + try { + // get connection info from option and use axios to send a post requet to the endpoint + await this.sendActivityLog(url, payload, headers); + this.logger.debug(`Activity log sent`); + } catch (err) { + this.logger.debug( + `Failed to send activity log to http logger, url: ${url}` + ); + throw err; + } + } + + protected async sendActivityLog( + url: string, + payload: any, + headers: AxiosRequestHeaders | undefined + ): Promise { + await axios.post(url, payload, { + headers: headers, + }); + } +} diff --git a/packages/core/src/lib/loggers/index.ts b/packages/core/src/lib/loggers/index.ts new file mode 100644 index 00000000..93451f9a --- /dev/null +++ b/packages/core/src/lib/loggers/index.ts @@ -0,0 +1,4 @@ +import { HttpLogger } from './httpLogger'; +export * from './httpLogger'; + +export default [HttpLogger]; diff --git a/packages/core/src/lib/template-engine/built-in-extensions/cache/cacheTagRunner.ts b/packages/core/src/lib/template-engine/built-in-extensions/cache/cacheTagRunner.ts index 7d2d4561..3fc32988 100644 --- a/packages/core/src/lib/template-engine/built-in-extensions/cache/cacheTagRunner.ts +++ b/packages/core/src/lib/template-engine/built-in-extensions/cache/cacheTagRunner.ts @@ -25,7 +25,7 @@ export class CacheTagRunner extends TagRunner { this.executor = executor; } - public async run({ context, args, contentArgs }: TagRunnerOptions) { + public async run({ context, args, contentArgs, metadata }: TagRunnerOptions) { // Get the variable name, if the cache tag has variable name, then we use the variable and keep the builder in the variable, and make user could use by xxx.value() like the req feature. 
// However if the cache tag not has variable name, means you would like to get the result directly after query, then we will replace the original query main builder to the cache builder. const name = String(args[0]); @@ -50,10 +50,12 @@ export class CacheTagRunner extends TagRunner { // Set the default vulcan created cache table schema, so we could query the cache table directly, not need user to type schema in the SQL. query = `set schema=${vulcanCacheSchemaName};`.concat('\n').concat(query); // Create the builder which access "vulcan.cache" data source for cache layer query + const headers = metadata.getHeaders(); const builder = await this.executor.createBuilder( cacheProfileName, query, - parameterizer + parameterizer, + headers ); context.setVariable(name, builder); diff --git a/packages/core/src/lib/template-engine/built-in-extensions/query-builder/reqTagRunner.ts b/packages/core/src/lib/template-engine/built-in-extensions/query-builder/reqTagRunner.ts index 78e12905..83b9cdab 100644 --- a/packages/core/src/lib/template-engine/built-in-extensions/query-builder/reqTagRunner.ts +++ b/packages/core/src/lib/template-engine/built-in-extensions/query-builder/reqTagRunner.ts @@ -47,16 +47,20 @@ export class ReqTagRunner extends TagRunner { .join('\n') .replace(/--.*(?:\n|$)|\/\*[\s\S]*?\*\//g, ''); // remove single-line comments and multi-line comments + const headers = metadata.getHeaders(); let builder: IDataQueryBuilder | undefined; // Replace to put the directly query cache builder to original query main builder of "__wrapper__builder", // it means we can use the cache builder to execute the query directly and get result to be final result builder = context.lookup(CACHE_MAIN_BUILDER_VAR_NAME); - if (builder) context.setVariable(name, builder); - else { + if (builder) { + if (headers) builder.setHeaders(headers); + context.setVariable(name, builder); + } else { builder = await this.executor.createBuilder( profileName, query, - parameterizer + parameterizer, + 
headers ); context.setVariable(name, builder); } diff --git a/packages/core/src/lib/template-engine/compiler-environment/base.ts b/packages/core/src/lib/template-engine/compiler-environment/base.ts index cbcbc51a..d688ed45 100644 --- a/packages/core/src/lib/template-engine/compiler-environment/base.ts +++ b/packages/core/src/lib/template-engine/compiler-environment/base.ts @@ -7,6 +7,7 @@ import * as nunjucks from 'nunjucks'; export abstract class BaseCompilerEnvironment extends nunjucks.Environment { abstract getExtensions(): ExtensionBase[]; + // initialize template engines extensions public async initializeExtensions() { const extensions = this.getExtensions(); for (const extension of extensions) { diff --git a/packages/core/src/lib/template-engine/compiler.ts b/packages/core/src/lib/template-engine/compiler.ts index 2f29b1f8..7394ed45 100644 --- a/packages/core/src/lib/template-engine/compiler.ts +++ b/packages/core/src/lib/template-engine/compiler.ts @@ -1,4 +1,8 @@ -import { DataResult, KoaRequest } from '@vulcan-sql/core/models'; +import { + DataResult, + IncomingHttpHeaders, + KoaRequest, +} from '@vulcan-sql/core/models'; import { Pagination } from '../../models/pagination'; export interface TemplateLocation { @@ -32,6 +36,7 @@ export interface ExecuteContext { user?: UserInfo; profileName: string; req?: KoaRequest; + headers?: IncomingHttpHeaders; } export interface Compiler { diff --git a/packages/core/src/lib/template-engine/nunjucksExecutionMetadata.ts b/packages/core/src/lib/template-engine/nunjucksExecutionMetadata.ts index f1572411..40519470 100644 --- a/packages/core/src/lib/template-engine/nunjucksExecutionMetadata.ts +++ b/packages/core/src/lib/template-engine/nunjucksExecutionMetadata.ts @@ -1,6 +1,6 @@ import * as nunjucks from 'nunjucks'; import { ExecuteContext, UserInfo } from './compiler'; -import { KoaRequest } from '@vulcan-sql/core/models'; +import { IncomingHttpHeaders, KoaRequest } from '@vulcan-sql/core/models'; export const 
ReservedContextKeys = { CurrentProfileName: 'RESERVED_CURRENT_PROFILE_NAME', @@ -12,12 +12,20 @@ export class NunjucksExecutionMetadata { private parameters: Record; private userInfo?: UserInfo; private req?: KoaRequest; + private headers?: IncomingHttpHeaders; - constructor({ parameters = {}, profileName, user, req }: ExecuteContext) { + constructor({ + parameters = {}, + profileName, + user, + req, + headers, + }: ExecuteContext) { this.parameters = parameters; this.profileName = profileName; this.userInfo = user; this.req = req; + this.headers = headers; } /** Load from nunjucks context */ @@ -26,6 +34,7 @@ export class NunjucksExecutionMetadata { parameters: context.lookup('context')?.params || {}, user: context.lookup('context')?.user || {}, req: context.lookup('context')?.req || {}, + headers: context.lookup('context')?.headers || {}, profileName: context.lookup(ReservedContextKeys.CurrentProfileName)!, }); } @@ -38,6 +47,7 @@ export class NunjucksExecutionMetadata { user: this.userInfo, req: this.req, profile: this.profileName, + headers: this.headers, }, [ReservedContextKeys.CurrentProfileName]: this.profileName, }; @@ -54,4 +64,8 @@ export class NunjucksExecutionMetadata { public getRequest() { return this.req; } + + public getHeaders() { + return this.headers; + } } diff --git a/packages/core/src/lib/utils/url.ts b/packages/core/src/lib/utils/url.ts new file mode 100644 index 00000000..3c956402 --- /dev/null +++ b/packages/core/src/lib/utils/url.ts @@ -0,0 +1,14 @@ +export interface ConnectionConfig { + ssl?: boolean; + host?: string; + port?: number | string; + path?: string; +} + +export const getUrl = (connection: ConnectionConfig): string => { + const { ssl, host, port, path = '' } = connection; + const protocol = ssl ? 'https' : 'http'; + let urlbase = `${protocol}://${host}`; + urlbase = port ? 
`${urlbase}:${port}` : urlbase; + return new URL(path, urlbase).href; +}; diff --git a/packages/core/src/models/artifact.ts b/packages/core/src/models/artifact.ts index 43fa109d..3a4a362d 100644 --- a/packages/core/src/models/artifact.ts +++ b/packages/core/src/models/artifact.ts @@ -30,8 +30,10 @@ import { import { Type } from 'class-transformer'; import 'reflect-metadata'; import { Request as KoaRequest } from 'koa'; +import { IncomingHttpHeaders } from 'http'; export type { KoaRequest }; +export type { IncomingHttpHeaders }; // Pagination mode should always be UPPERCASE because schema parser will transform the user inputs. export enum PaginationMode { @@ -116,6 +118,8 @@ export class CacheLayerInfo { indexes?: Record; // cache folder subpath folderSubpath?: string; + // options pass to the data source + options?: any; } export class APISchema { diff --git a/packages/core/src/models/coreOptions.ts b/packages/core/src/models/coreOptions.ts index 288ace84..c4b6523b 100644 --- a/packages/core/src/models/coreOptions.ts +++ b/packages/core/src/models/coreOptions.ts @@ -1,6 +1,7 @@ import { IArtifactBuilderOptions } from './artifactBuilderOptions'; import { ICacheLayerOptions } from './cacheLayerOptions'; import { IDocumentOptions } from './documentOptions'; +import { IActivityLoggerOptions } from './loggerOptions'; import { IProfilesLookupOptions } from './profilesLookupOptions'; import { ITemplateEngineOptions } from './templateEngineOptions'; @@ -24,6 +25,7 @@ export interface ICoreOptions { extensions?: ExtensionAliases; document?: IDocumentOptions; profiles?: IProfilesLookupOptions; + 'activity-log'?: IActivityLoggerOptions; cache?: ICacheLayerOptions; [moduleAlias: string]: any; } diff --git a/packages/core/src/models/extensions/dataSource.ts b/packages/core/src/models/extensions/dataSource.ts index 494c02ac..e62aaa8e 100644 --- a/packages/core/src/models/extensions/dataSource.ts +++ b/packages/core/src/models/extensions/dataSource.ts @@ -1,6 +1,10 @@ /* 
eslint-disable @typescript-eslint/no-unused-vars */ import { Parameterized, SQLClauseOperation } from '@vulcan-sql/core/data-query'; -import { CacheLayerStoreFormatType, Profile } from '@vulcan-sql/core/models'; +import { + CacheLayerStoreFormatType, + IncomingHttpHeaders, + Profile, +} from '@vulcan-sql/core/models'; import { TYPES } from '@vulcan-sql/core/types'; import { inject, multiInject, optional } from 'inversify'; import { Readable } from 'stream'; @@ -15,6 +19,8 @@ export interface ExportOptions { directory: string; // The profile name to select to export data profileName: string; + // data source options + options?: any; // export file format type type: CacheLayerStoreFormatType | string; } @@ -58,6 +64,7 @@ export interface ExecuteOptions { /** The parameter bindings, we guarantee the order of the keys in the map is the same as the order when they were used in queries. */ bindParams: BindParameters; profileName: string; + headers?: IncomingHttpHeaders; } export type PrepareParameterFunc = { diff --git a/packages/core/src/models/extensions/index.ts b/packages/core/src/models/extensions/index.ts index 2f8b99b0..3f0e58c1 100644 --- a/packages/core/src/models/extensions/index.ts +++ b/packages/core/src/models/extensions/index.ts @@ -12,3 +12,4 @@ export * from './persistentStore'; export * from './codeLoader'; export * from './dataSource'; export * from './profileReader'; +export * from './logger'; diff --git a/packages/core/src/models/extensions/logger.ts b/packages/core/src/models/extensions/logger.ts new file mode 100644 index 00000000..b8b8ca2b --- /dev/null +++ b/packages/core/src/models/extensions/logger.ts @@ -0,0 +1,47 @@ +import { ExtensionBase } from './base'; +import { TYPES } from '@vulcan-sql/core/types'; +import { VulcanExtension } from './decorators'; +import { isEmpty } from 'lodash'; + +export enum ActivityLoggerType { + HTTP_LOGGER = 'http-logger', +} + +export enum ActivityLogType { + CACHE_REFRESH = 'cache-refresh', + API_REQUEST = 
'api-request', +} +export interface ActivityLogContentOptions { + isSuccess: boolean; + activityLogType: ActivityLogType; +} +export interface IActivityLogger { + isEnabled(): boolean; + log(content: any): Promise; +} + +@VulcanExtension(TYPES.Extension_ActivityLogger, { enforcedId: true }) +export abstract class BaseActivityLogger + extends ExtensionBase + implements IActivityLogger +{ + public abstract log(context: any): Promise; + + public isEnabled(): boolean { + const config = this.getConfig(); + if (!config || isEmpty(config)) return false; + if (!config.enabled) return false; + if (!config['options']) return false; + if (config['options'][this.getExtensionId()!]) return true; + else return false; + } + + protected getOptions(): ActivityLoggerTypeOption | undefined { + if (!this.getConfig()) return undefined; + if (!this.getConfig()['options']) return undefined; + const option = this.getConfig()['options'][ + this.getExtensionId()! + ] as ActivityLoggerTypeOption; + return option; + } +} diff --git a/packages/core/src/models/index.ts b/packages/core/src/models/index.ts index 8e501e96..005717a6 100644 --- a/packages/core/src/models/index.ts +++ b/packages/core/src/models/index.ts @@ -8,3 +8,4 @@ export * from './documentOptions'; export * from './profilesLookupOptions'; export * from './cacheLayerOptions'; export * from './profile'; +export * from './loggerOptions'; diff --git a/packages/core/src/models/loggerOptions.ts b/packages/core/src/models/loggerOptions.ts new file mode 100644 index 00000000..60cbda4c --- /dev/null +++ b/packages/core/src/models/loggerOptions.ts @@ -0,0 +1,4 @@ +export interface IActivityLoggerOptions { + // different logger type settings + [loggerType: string]: any; +} diff --git a/packages/core/src/models/profile.ts b/packages/core/src/models/profile.ts index 98494776..95a82173 100644 --- a/packages/core/src/models/profile.ts +++ b/packages/core/src/models/profile.ts @@ -29,4 +29,6 @@ export interface Profile> { cache?: C; /** What 
users have access to this profile */ allow: ProfileAllowConstraints; + /** Properties that can be used when involking the dataSource method */ + properties?: Record; } diff --git a/packages/core/test/cache-layer/cacheLayerRefresher.spec.ts b/packages/core/test/cache-layer/cacheLayerRefresher.spec.ts index b1a74376..adbfe3ee 100644 --- a/packages/core/test/cache-layer/cacheLayerRefresher.spec.ts +++ b/packages/core/test/cache-layer/cacheLayerRefresher.spec.ts @@ -12,12 +12,33 @@ import { vulcanCacheSchemaName, } from '@vulcan-sql/core'; import { MockDataSource, getQueryResults } from './mockDataSource'; +import { HttpLogger } from '../../src/lib/loggers/httpLogger'; // This is a helper function that will flush all pending promises in the event loop when use the setInterval and the callback is promise (jest > 27 version). // reference: https://gist.github.com/apieceofbart/e6dea8d884d29cf88cdb54ef14ddbcc4 const flushPromises = () => new Promise(jest.requireActual('timers').setImmediate); +jest.mock('../../src/lib/loggers/httpLogger', () => { + const originalModule = jest.requireActual('../../src/lib/loggers/httpLogger'); + return { + ...originalModule, + HttpLogger: jest.fn().mockImplementation(() => { + return { + isEnabled: jest.fn().mockReturnValue(true), + log: jest.fn().mockResolvedValue(true), // Spy on the add method + }; + }), + }; +}); +const mockLogger = new HttpLogger( + { + enabled: true, + options: { 'http-logger': { connection: { host: 'localhost' } } }, + }, + 'http-logger' +); + describe('Test cache layer refresher', () => { const folderPath = 'refresher-test-exported-parquets'; const profiles = [ @@ -65,6 +86,10 @@ describe('Test cache layer refresher', () => { fs.rmSync(folderPath, { recursive: true, force: true }); }); + afterEach(() => { + jest.clearAllMocks(); + }); + it('Should fail to start when exist duplicate cache table name over than one API schema', async () => { // Arrange const schemas: Array = [ @@ -98,7 +123,7 @@ describe('Test cache 
layer refresher', () => { ] as Array, }, ]; - const refresher = new CacheLayerRefresher(stubCacheLoader); + const refresher = new CacheLayerRefresher(stubCacheLoader, [mockLogger]); // Act, Assert await expect(() => refresher.start(schemas)).rejects.toThrow( @@ -149,7 +174,7 @@ describe('Test cache layer refresher', () => { ] as Array, }, ]; - const refresher = new CacheLayerRefresher(stubCacheLoader); + const refresher = new CacheLayerRefresher(stubCacheLoader, [mockLogger]); // Act, Assert await expect(() => refresher.start(schemas)).rejects.toThrow( @@ -195,7 +220,7 @@ describe('Test cache layer refresher', () => { ]; // Act const loader = new CacheLayerLoader(options, stubFactory as any); - const refresher = new CacheLayerRefresher(loader); + const refresher = new CacheLayerRefresher(loader, [mockLogger]); await refresher.start(schemas); // Assert @@ -271,7 +296,7 @@ describe('Test cache layer refresher', () => { // Stub the load method to not do any thing. stubCacheLoader.load.resolves(); - const refresher = new CacheLayerRefresher(stubCacheLoader); + const refresher = new CacheLayerRefresher(stubCacheLoader, [mockLogger]); // Act await refresher.start(schemas); @@ -304,4 +329,147 @@ describe('Test cache layer refresher', () => { refresher.stop(); jest.clearAllTimers(); }); + + it( + 'Should send activity log after cacheLoader execute "load" successfully', + async () => { + // Arrange + const schemas: Array = [ + { + ...sinon.stubInterface(), + templateSource: 'template-1', + profiles: [profiles[0].name, profiles[1].name], + cache: [ + { + cacheTableName: 'orders', + sql: sinon.default.stub() as any, + profile: profiles[0].name, + }, + { + cacheTableName: 'products', + sql: sinon.default.stub() as any, + profile: profiles[1].name, + }, + ] as Array, + }, + { + ...sinon.stubInterface(), + templateSource: 'template-2', + profiles: [profiles[2].name], + cache: [ + { + cacheTableName: 'users', + sql: sinon.default.stub() as any, + profile: profiles[2].name, + }, + 
] as Array, + }, + ]; + // Act + const loader = new CacheLayerLoader(options, stubFactory as any); + const refresher = new CacheLayerRefresher(loader, [mockLogger]); + await refresher.start(schemas); + + // Assert + expect(mockLogger.log).toHaveBeenCalledTimes(3); + refresher.stop(); + }, + 100 * 1000 + ); + // Should send activity log when cacheLoader failed on executing "load" + it( + 'Should send activity log after cacheLoader execute "load" failed', + async () => { + const schemas: Array = [ + { + ...sinon.stubInterface(), + templateSource: 'template-1', + profiles: [profiles[0].name, profiles[1].name], + cache: [ + { + cacheTableName: 'orders', + sql: sinon.default.stub() as any, + profile: profiles[0].name, + }, + { + cacheTableName: 'products', + sql: sinon.default.stub() as any, + profile: profiles[1].name, + }, + ] as Array, + }, + { + ...sinon.stubInterface(), + templateSource: 'template-2', + profiles: [profiles[2].name], + cache: [ + { + cacheTableName: 'users', + sql: sinon.default.stub() as any, + profile: profiles[2].name, + }, + ] as Array, + }, + ]; + // Act + const loader = new CacheLayerLoader(options, stubFactory as any); + stubCacheLoader.load.throws(); + const refresher = new CacheLayerRefresher(loader, [mockLogger]); + await refresher.start(schemas); + + // Assert + expect(mockLogger.log).toHaveBeenCalledTimes(3); + refresher.stop(); + }, + 100 * 1000 + ); + // should not send activity log when logger is not enabled + it('should not send activity log when logger is not enabled', async () => { + const schemas: Array = [ + { + ...sinon.stubInterface(), + templateSource: 'template-1', + profiles: [profiles[0].name, profiles[1].name], + cache: [ + { + cacheTableName: 'orders', + sql: sinon.default.stub() as any, + profile: profiles[0].name, + }, + { + cacheTableName: 'products', + sql: sinon.default.stub() as any, + profile: profiles[1].name, + }, + ] as Array, + }, + { + ...sinon.stubInterface(), + templateSource: 'template-2', + profiles: 
[profiles[2].name], + cache: [ + { + cacheTableName: 'users', + sql: sinon.default.stub() as any, + profile: profiles[2].name, + }, + ] as Array, + }, + ]; + const mockLogger = new HttpLogger( + { + enabled: false, + }, + 'http-logger' + ); + mockLogger.isEnabled = jest.fn().mockReturnValue(false); + // Act + const loader = new CacheLayerLoader(options, stubFactory as any); + const refresher = new CacheLayerRefresher(loader, [mockLogger]); + await refresher.start(schemas); + + // Assert + expect(mockLogger.log).toHaveBeenCalledTimes(0); + refresher.stop(); + }); }); diff --git a/packages/core/test/data-query/builder/group-by-clause.spec.ts b/packages/core/test/data-query/builder/group-by-clause.spec.ts index 5d733123..82b0f9c5 100644 --- a/packages/core/test/data-query/builder/group-by-clause.spec.ts +++ b/packages/core/test/data-query/builder/group-by-clause.spec.ts @@ -35,6 +35,7 @@ describe('Test data query builder > group by clause', () => { dataSource: stubDataSource, parameterizer: stubParameterizer, profileName: '', + headers: {}, }); columns.map((column) => { builder = builder.groupBy(column); @@ -62,6 +63,7 @@ describe('Test data query builder > group by clause', () => { dataSource: stubDataSource, parameterizer: stubParameterizer, profileName: '', + headers: {}, }); builder.groupBy(first, second, third); diff --git a/packages/core/test/data-query/builder/having-clause.spec.ts b/packages/core/test/data-query/builder/having-clause.spec.ts index 28da4d62..2aa18a48 100644 --- a/packages/core/test/data-query/builder/having-clause.spec.ts +++ b/packages/core/test/data-query/builder/having-clause.spec.ts @@ -32,6 +32,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); describe('Test data query builder > having clause', () => { diff --git a/packages/core/test/data-query/builder/join-clause.spec.ts 
b/packages/core/test/data-query/builder/join-clause.spec.ts index 95b986f3..f7625759 100644 --- a/packages/core/test/data-query/builder/join-clause.spec.ts +++ b/packages/core/test/data-query/builder/join-clause.spec.ts @@ -29,6 +29,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); describe('Test data query builder > join clause', () => { diff --git a/packages/core/test/data-query/builder/limit-offset-clause.spec.ts b/packages/core/test/data-query/builder/limit-offset-clause.spec.ts index 120531df..711b3458 100644 --- a/packages/core/test/data-query/builder/limit-offset-clause.spec.ts +++ b/packages/core/test/data-query/builder/limit-offset-clause.spec.ts @@ -15,6 +15,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); describe('Test data query builder > limit-offset by clause', () => { diff --git a/packages/core/test/data-query/builder/order-by-clause.spec.ts b/packages/core/test/data-query/builder/order-by-clause.spec.ts index dc99556d..a197dedf 100644 --- a/packages/core/test/data-query/builder/order-by-clause.spec.ts +++ b/packages/core/test/data-query/builder/order-by-clause.spec.ts @@ -21,6 +21,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); describe('Test data query builder > order by clause', () => { diff --git a/packages/core/test/data-query/builder/parameterize.spec.ts b/packages/core/test/data-query/builder/parameterize.spec.ts index 84b36999..8f1ed4f5 100644 --- a/packages/core/test/data-query/builder/parameterize.spec.ts +++ b/packages/core/test/data-query/builder/parameterize.spec.ts @@ -13,6 +13,7 @@ const createStubs = ({ statement }: { 
statement: string }) => { dataSource, parameterizer, profileName: '', + headers: {}, }), dataSource, parameterizer, diff --git a/packages/core/test/data-query/builder/select-clause.spec.ts b/packages/core/test/data-query/builder/select-clause.spec.ts index df7e0753..f64edaab 100644 --- a/packages/core/test/data-query/builder/select-clause.spec.ts +++ b/packages/core/test/data-query/builder/select-clause.spec.ts @@ -51,6 +51,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); describe('Test data query builder > select clause', () => { diff --git a/packages/core/test/data-query/builder/where-clause.spec.ts b/packages/core/test/data-query/builder/where-clause.spec.ts index 504d3852..56946a8f 100644 --- a/packages/core/test/data-query/builder/where-clause.spec.ts +++ b/packages/core/test/data-query/builder/where-clause.spec.ts @@ -25,6 +25,7 @@ const createStubBuilder = ({ statement }: { statement: string }) => dataSource: createStub().dataSource, parameterizer: createStub().parameterizer, profileName: '', + headers: {}, }); jest.mock('uuid'); diff --git a/packages/core/test/httplogger.spec.ts b/packages/core/test/httplogger.spec.ts new file mode 100644 index 00000000..7f333696 --- /dev/null +++ b/packages/core/test/httplogger.spec.ts @@ -0,0 +1,103 @@ +import sinon from 'ts-sinon'; +import { HttpLogger } from '../src/lib/loggers/httpLogger'; +class MockHttpLogger extends HttpLogger { + public override sendActivityLog = jest.fn(); +} +const createMockHttpLogger = (config: any) => { + return new MockHttpLogger(config, 'httpLogger'); +}; +describe('Activity logs', () => { + it('should throw error when logger is enabled but connection is not provided', async () => { + const config = { + enabled: true, + options: { + 'http-logger': { + connection: undefined, + }, + }, + }; + + const httpLogger = createMockHttpLogger(config); + + await 
expect(httpLogger.log({})).rejects.toThrow( + 'Http logger connection should be provided' + ); + }); + + it('should not throw error when logger is disabled', async () => { + const config = { + enabled: false, + }; + + const httpLogger = createMockHttpLogger(config); + + await expect(httpLogger.log({})).resolves.not.toThrow(); + }); + + // should not throw error when logger is enabled and connection is provided + it('should not throw error when logger is enabled and connection is provided', async () => { + const config = { + enabled: true, + options: { + 'http-logger': { + connection: { + ssl: true, + host: 'localhost', + port: 8080, + path: '/test', + }, + }, + }, + }; + const httpLogger = createMockHttpLogger(config); + sinon.stub(httpLogger, 'sendActivityLog').resolves(); + await expect(httpLogger.log({})).resolves.not.toThrow(); + }); + + // should throw error when logger is enabled and connection is provided but request fails + it('should throw error when logger is enabled and connection is provided but request fails', async () => { + const config = { + enabled: true, + options: { + 'http-logger': { + connection: { + ssl: true, + host: 'localhost', + port: 8080, + path: '/test', + }, + }, + }, + }; + // stub sendActivityLog to throw error + const httpLogger = createMockHttpLogger(config); + sinon.stub(httpLogger, 'sendActivityLog').throws(); + await expect(httpLogger.log({})).rejects.toThrow(); + }); + + // isEnabled should return false when logger is disabled + it.each([ + {}, // empty config + { + enabled: false, // not enabled + }, + { + enabled: false, // not enabled but has logger + options: { + 'http-logger': { connection: { host: 'localhost', port: 80 } }, + }, + }, + { + enabled: true, // enabled but do not have http-logger config + options: { + 'non-http-logger': {}, + }, + }, + ])( + 'isEnabled should return false when logger is disabled', + async (config) => { + const httpLogger = createMockHttpLogger(config); + 
expect(httpLogger.isEnabled()).toBe(false); + } + ); +}); diff --git a/packages/core/test/template-engine/built-in-extensions/query-builder/operations.spec.ts b/packages/core/test/template-engine/built-in-extensions/query-builder/operations.spec.ts index dfb11236..76cf7f0c 100644 --- a/packages/core/test/template-engine/built-in-extensions/query-builder/operations.spec.ts +++ b/packages/core/test/template-engine/built-in-extensions/query-builder/operations.spec.ts @@ -23,6 +23,7 @@ const createTestCompilerWithBuilder = async () => { statement: query, parameterizer, dataSource, + headers: {}, }); } ); diff --git a/packages/core/test/utils/url.spec.ts b/packages/core/test/utils/url.spec.ts new file mode 100644 index 00000000..da97f07a --- /dev/null +++ b/packages/core/test/utils/url.spec.ts @@ -0,0 +1,66 @@ +import { getUrl, ConnectionConfig } from '../../src/lib/utils/url'; + +describe('url util functions', () => { + it('should return url if all connection properties were set', () => { + const connection = { + ssl: true, + host: 'localhost', + port: 8080, + path: '/test', + } as ConnectionConfig; + + const url = getUrl(connection); + expect(url).toBe('https://localhost:8080/test'); + }); + + it('should return url if ssl or path is not set', () => { + const connection = { + host: 'localhost', + } as ConnectionConfig; + + const url = getUrl(connection); + expect(url).toBe('http://localhost/'); + }); + + it('should return url if host was an IP address', () => { + const connection = { + ssl: false, + host: '127.0.0.1', + port: 8080, + path: '/test', + } as ConnectionConfig; + const url = getUrl(connection); + expect(url).toBe('http://127.0.0.1:8080/test'); + }); + + it.each([ + { + ssl: false, + host: 'localhost', + port: 8080, + path: '/test', + }, + { + host: 'localhost', + port: 8080, + path: '/test', + }, + ])( + 'should use protocal http if ssl was not set or set to false', + (connection) => { + const url = getUrl(connection); + 
expect(url).toBe('http://localhost:8080/test'); + } + ); + + it('should return url if host was a DNS name and port was not set', () => { + const connection = { + ssl: true, + host: 'DNSName', + path: '/test', + } as ConnectionConfig; + + const url = getUrl(connection); + expect(url).toBe('https://dnsname/test'); + }); +}); diff --git a/packages/doc/docs/connectors/redshift.mdx b/packages/doc/docs/connectors/redshift.mdx new file mode 100644 index 00000000..ef70b57a --- /dev/null +++ b/packages/doc/docs/connectors/redshift.mdx @@ -0,0 +1,44 @@ +# Redshift + +## Installation + +1. Install the package: + + **If you are developing with VulcanSQL's binary version, the package is already bundled in the binary. You can skip this step.** + + ```bash + npm i @vulcan-sql/extension-driver-redshift + ``` + +2. Update your `vulcan.yaml` file to enable the extension: + + ```yaml + extensions: + ... + redshift: '@vulcan-sql/extension-driver-redshift' # Add this line + ``` + +3. Create a new profile in your `profiles.yaml` file or in the designated profile paths. 
For example, if you are using Redshift Serverless:
+### Extensions + +VulcanSQL allows you to extend its core functionalities through extensions. + +1. [dbt](./extensions/dbt): VulcanSQL supports queries from dbt's SQL models directly. +If you use dbt to create some models, you can make APIs for them instantly. +2. [Hugging Face](./extensions/huggingface/overview): With this plugin, you can leverage +the power of language models to generate SQL quries using natural language. +3. [API](./extensions/api): You can access data from 3rd parties through calling REST APIs with this extension. + ### API Catalog & Documentation 1. [API Catalog](catalog/intro): Learn how to create an API catalog with VulcanSQL, providing a centralized repository for all your Data APIs. This catalog enables easy discovery, management, and sharing of APIs within your organization. diff --git a/packages/doc/docs/references/faq.mdx b/packages/doc/docs/references/faq.mdx new file mode 100644 index 00000000..460e030c --- /dev/null +++ b/packages/doc/docs/references/faq.mdx @@ -0,0 +1,27 @@ +# FAQs + +## How do you deal with SQL injection attacks? 
+Because VulcanSQL supports multiple connectors (e.g., Snowflake, BigQuery, etc.), we delegate SQL injection handling to the connectors,
+which use prepared statements with parameterized queries via each connector's client:
+
+BigQuery: https://cloud.google.com/bigquery/docs/parameterized-queries
+```sql
+SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare`
+WHERE corpus = @corpus AND word_count >= @min_word_count
+ORDER BY word_count DESC
+```
+
+Snowflake: https://docs.snowflake.com/en/developer-guide/node-js/nodejs-driver-execute#binding-statement-parameters
+```sql
+SELECT word, word_count FROM shakespeare
+WHERE corpus = :1 AND word_count >= :2
+ORDER BY word_count DESC
+```
+
+Then, we replace the input parameters with parameterized values like $1, $2, etc., and record the input values in the
+`Parameterizer` when sending a query from an API request with query arguments. Finally, we assemble the SQL statement with
+the parameterized query in the `DataQueryBuilder` and send it to the connector, delegating the connector client to handle SQL
+injection and execute the SQL query.
+
+See https://github.com/Canner/vulcan-sql/pull/40 for more details.

', diff --git a/packages/extension-api-caller/package.json b/packages/extension-api-caller/package.json index 33027917..0b128902 100644 --- a/packages/extension-api-caller/package.json +++ b/packages/extension-api-caller/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-api-caller", "description": "Calling APIs to get data from other sources", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "^0.9.1" + "@vulcan-sql/core": "^0.10.0" } } \ No newline at end of file diff --git a/packages/extension-authenticator-canner/package.json b/packages/extension-authenticator-canner/package.json index 0f57f948..0d754ea9 100644 --- a/packages/extension-authenticator-canner/package.json +++ b/packages/extension-authenticator-canner/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-authenticator-canner", "description": "Canner Enterprise authenticator for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -24,7 +24,7 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0", - "@vulcan-sql/serve": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0", + "@vulcan-sql/serve": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-authenticator-canner/src/lib/authenticator/pat.ts b/packages/extension-authenticator-canner/src/lib/authenticator/pat.ts index daef702e..3c294bef 100644 --- a/packages/extension-authenticator-canner/src/lib/authenticator/pat.ts +++ b/packages/extension-authenticator-canner/src/lib/authenticator/pat.ts @@ -76,7 +76,7 @@ export class CannerPATAuthenticator extends BaseAuthenticator operationName: 'UserMe', variables: {}, query: - 'query UserMe{userMe {accountRole attributes createdAt email groups {id name} lastName firstName username}}', + 'query UserMe{userMe {id accountRole attributes 
createdAt email groups {id name} lastName firstName username}}', }, { headers: { diff --git a/packages/extension-dbt/package.json b/packages/extension-dbt/package.json index cd11e594..408294c0 100644 --- a/packages/extension-dbt/package.json +++ b/packages/extension-dbt/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-dbt", "description": "Using dbt models form VulcanSQL projects", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-debug-tools/package.json b/packages/extension-debug-tools/package.json index 696f2966..3cbe3fa1 100644 --- a/packages/extension-debug-tools/package.json +++ b/packages/extension-debug-tools/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-debug-tools", "description": "A collection of Vulcan extension debug tools", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -22,9 +22,9 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" }, "devDependencies": { - "@vulcan-sql/test-utility": "~0.9.1-0" + "@vulcan-sql/test-utility": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-driver-bq/package.json b/packages/extension-driver-bq/package.json index cbba0236..07b9825a 100644 --- a/packages/extension-driver-bq/package.json +++ b/packages/extension-driver-bq/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-bq", "description": "BigQuery driver for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -24,6 +24,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline 
at end of file diff --git a/packages/extension-driver-canner/package.json b/packages/extension-driver-canner/package.json index 1e3d2d07..b77a1c99 100644 --- a/packages/extension-driver-canner/package.json +++ b/packages/extension-driver-canner/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-canner", "description": "Canner Enterprise driver for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -24,6 +24,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-driver-canner/src/lib/cannerAdapter.ts b/packages/extension-driver-canner/src/lib/cannerAdapter.ts index 52278e58..4094a9c2 100644 --- a/packages/extension-driver-canner/src/lib/cannerAdapter.ts +++ b/packages/extension-driver-canner/src/lib/cannerAdapter.ts @@ -31,15 +31,23 @@ export class CannerAdapter { // When querying Canner enterprise, the Canner enterprise will save the query result as parquet files, // and store them in S3. This method will return the S3 urls of the query result. 
// For more Canner API ref: https://docs.cannerdata.com/reference/restful - public async createAsyncQueryResultUrls(sql: string): Promise { + public async createAsyncQueryResultUrls( + sql: string, + headers?: Record + ): Promise { this.logger.debug(`Create async request to Canner.`); - let data = await this.getWorkspaceRequestData('post', '/v2/async-queries', { - data: { - sql, - timeout: 600, - noLimit: true, + let data = await this.getWorkspaceRequestData( + 'post', + '/v2/async-queries', + { + data: { + sql, + timeout: 600, + noLimit: true, + }, }, - }); + headers + ); const { id: requestId } = data; this.logger.debug(`Wait Async request to finished.`); @@ -60,14 +68,13 @@ export class CannerAdapter { private async getWorkspaceRequestData( method: string, urlPath: string, - options?: Record + options?: Record, + headers?: Record ) { await this.prepare(); try { const response = await axios({ - headers: { - Authorization: `Token ${this.PAT}`, - }, + headers: { ...headers, Authorization: `Token ${this.PAT}` }, params: { workspaceSqlName: this.workspaceSqlName, }, @@ -78,7 +85,9 @@ export class CannerAdapter { return response.data; } catch (error: any) { const message = error.response - ? `response status: ${error.response.status}, response data: ${error.response.data}` + ? `response status: ${ + error.response.status + }, response data: ${JSON.stringify(error.response.data)}` : `remote server does not response. 
request ${error.toJSON()}}`; throw new InternalError( `Failed to get workspace request "${urlPath}" data, ${message}` diff --git a/packages/extension-driver-canner/src/lib/cannerDataSource.ts b/packages/extension-driver-canner/src/lib/cannerDataSource.ts index 82317e4c..73f0e0e5 100644 --- a/packages/extension-driver-canner/src/lib/cannerDataSource.ts +++ b/packages/extension-driver-canner/src/lib/cannerDataSource.ts @@ -8,7 +8,7 @@ import { RequestParameter, VulcanExtensionId, } from '@vulcan-sql/core'; -import { Pool, PoolConfig, QueryResult } from 'pg'; +import { Pool, PoolClient, PoolConfig, QueryResult } from 'pg'; import * as Cursor from 'pg-cursor'; import { Readable } from 'stream'; import { buildSQL } from './sqlBuilder'; @@ -24,7 +24,11 @@ export interface PGOptions extends PoolConfig { @VulcanExtensionId('canner') export class CannerDataSource extends DataSource { private logger = this.getLogger(); - private poolMapping = new Map(); + protected poolMapping = new Map< + string, + { pool: Pool; options?: PGOptions; properties?: Record } + >(); + protected UserPool = new Map(); public override async onActivate() { const profiles = this.getProfiles().values(); @@ -48,6 +52,7 @@ export class CannerDataSource extends DataSource { this.poolMapping.set(profile.name, { pool, options: profile.connection, + properties: profile.properties, }); this.logger.debug(`Profile ${profile.name} initialized`); } @@ -57,6 +62,7 @@ export class CannerDataSource extends DataSource { sql, directory, profileName, + options: cannerOptions, }: ExportOptions): Promise { if (!this.poolMapping.has(profileName)) { throw new InternalError(`Profile instance ${profileName} not found`); @@ -65,12 +71,16 @@ export class CannerDataSource extends DataSource { if (!fs.existsSync(directory)) { throw new InternalError(`Directory ${directory} not found`); } - const { options: connection } = this.poolMapping.get(profileName)!; - + const { options: connection, properties } = + 
this.poolMapping.get(profileName)!; const cannerAdapter = new CannerAdapter(connection); try { this.logger.debug('Send the async query to the Canner Enterprise'); - const presignedUrls = await cannerAdapter.createAsyncQueryResultUrls(sql); + const header = this.getCannerRequestHeader(properties, cannerOptions); + const presignedUrls = await cannerAdapter.createAsyncQueryResultUrls( + sql, + header + ); this.logger.debug( 'Start fetching the query result parquet files from URLs' ); @@ -81,6 +91,21 @@ export class CannerDataSource extends DataSource { throw error; } } + private getCannerRequestHeader( + properties?: Record, + cannerOptions?: any + ) { + const header: Record = {}; + const userId = cannerOptions?.userId; + const rootUserId = properties?.['rootUserId']; + if (userId && rootUserId) { + header[ + 'x-trino-session' + ] = `root_user_id=${rootUserId}, canner_user_id=${userId}`; + this.logger.debug(`Impersonate used: ${userId}`); + } + return header; + } private async downloadFiles(urls: string[], directory: string) { await Promise.all( @@ -108,15 +133,16 @@ export class CannerDataSource extends DataSource { bindParams, profileName, operations, + headers, }: ExecuteOptions): Promise { - if (!this.poolMapping.has(profileName)) { - throw new InternalError(`Profile instance ${profileName} not found`); - } - const { pool, options } = this.poolMapping.get(profileName)!; - this.logger.debug(`Acquiring connection from ${profileName}`); - const client = await pool.connect(); this.logger.debug(`Acquired connection from ${profileName}`); + const { options } = this.poolMapping.get(profileName)!; + const auth = headers?.['authorization']; + const password = auth?.trim().split(' ')[1]; + const pool = this.getPool(profileName, password); + let client: PoolClient | undefined; try { + client = await pool.connect(); const builtSQL = buildSQL(sql, operations); const cursor = client.query( new Cursor(builtSQL, Array.from(bindParams.values())) @@ -127,7 +153,7 @@ export class 
CannerDataSource extends DataSource { ); // It is important to close the cursor before releasing connection, or the connection might not able to handle next request. await cursor.close(); - client.release(); + if (client) client.release(); }); // All promises MUST fulfilled in this function or we are not able to release the connection when error occurred return await this.getResultFromCursor(cursor, options); @@ -135,7 +161,7 @@ export class CannerDataSource extends DataSource { this.logger.debug( `Errors occurred, release connection from ${profileName}` ); - client.release(); + if (client) client.release(); throw e; } } @@ -150,6 +176,33 @@ export class CannerDataSource extends DataSource { } } + // use protected to make it testable + protected getPool(profileName: string, password?: string): Pool { + if (!this.poolMapping.has(profileName)) { + throw new InternalError(`Profile instance ${profileName} not found`); + } + const { pool: defaultPool, options: poolOptions } = + this.poolMapping.get(profileName)!; + this.logger.debug(`Acquiring connection from ${profileName}`); + if (!password) { + return defaultPool; + } + const database = poolOptions?.database || ''; + const userPoolKey = this.getUserPoolKey(password, database); + if (this.UserPool.has(userPoolKey)) { + const userPool = this.UserPool.get(userPoolKey); + return userPool!; + } + const pool = new Pool({ ...poolOptions, password: password }); + this.UserPool.set(userPoolKey, pool); + return pool; + } + + // use protected to make it testable + protected getUserPoolKey(pat: string, database?: string) { + return `${pat}-${database}`; + } + private async getResultFromCursor( cursor: Cursor, options: PGOptions = {} diff --git a/packages/extension-driver-canner/test/cannerDataSource.spec.ts b/packages/extension-driver-canner/test/cannerDataSource.spec.ts index 9daf0af7..7f8f7896 100644 --- a/packages/extension-driver-canner/test/cannerDataSource.spec.ts +++ 
b/packages/extension-driver-canner/test/cannerDataSource.spec.ts @@ -1,5 +1,6 @@ import { CannerServer } from './cannerServer'; import { CannerDataSource, PGOptions } from '../src'; +import { MockCannerDataSource } from './mock'; import { ExportOptions, InternalError, streamToArray } from '@vulcan-sql/core'; import { Writable } from 'stream'; import * as sinon from 'ts-sinon'; @@ -8,7 +9,9 @@ import { CannerAdapter } from '../src/lib/cannerAdapter'; const pg = new CannerServer(); let dataSource: CannerDataSource; +let mockDataSource: MockCannerDataSource; +const directory = 'tmp_test_canner'; // restore all sinon mock/stub before each test beforeEach(() => { sinon.default.restore(); @@ -42,7 +45,7 @@ it('Data source should throw error when activating if any profile is invalid', a // export method should be executed successfully it('Data source should export successfully', async () => { - fs.mkdirSync('tmp', { recursive: true }); + fs.mkdirSync(directory, { recursive: true }); dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); await dataSource.activate(); @@ -50,14 +53,15 @@ it('Data source should export successfully', async () => { await expect( dataSource.export({ sql: 'select 1', - directory: 'tmp', + directory, profileName: 'profile1', + options: {}, } as ExportOptions) ).resolves.not.toThrow(); - expect(fs.readdirSync('tmp').length).toBe(1); + expect(fs.readdirSync(directory).length).toBe(1); // clean up - fs.rmSync('tmp', { recursive: true, force: true }); + fs.rmSync(directory, { recursive: true, force: true }); }, 100000); it('Data source should throw error when fail to export data', async () => { @@ -73,7 +77,7 @@ it('Data source should throw error when fail to export data', async () => { ); }); - fs.mkdirSync('tmp', { recursive: true }); + fs.mkdirSync(directory, { recursive: true }); dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); await dataSource.activate(); @@ -81,14 +85,15 @@ it('Data source should throw 
error when fail to export data', async () => { await expect( dataSource.export({ sql: 'select 1', - directory: 'tmp', + directory, profileName: 'profile1', + options: {}, } as ExportOptions) ).rejects.toThrow(); - expect(fs.readdirSync('tmp').length).toBe(0); + expect(fs.readdirSync(directory).length).toBe(0); // clean up - fs.rmSync('tmp', { recursive: true, force: true }); + fs.rmSync(directory, { recursive: true, force: true }); }, 100000); it('Data source should throw error when given directory is not exist', async () => { @@ -100,8 +105,9 @@ it('Data source should throw error when given directory is not exist', async () await expect( dataSource.export({ sql: 'select 1', - directory: 'tmp', + directory: directory, profileName: 'profile1', + options: {}, } as ExportOptions) ).rejects.toThrow(); }, 100000); @@ -110,14 +116,15 @@ it('Data source should throw error when given profile name is not exist', async // Arrange dataSource = new CannerDataSource({}, '', [pg.getProfile('profile1')]); await dataSource.activate(); - fs.mkdirSync('tmp', { recursive: true }); + fs.mkdirSync(directory, { recursive: true }); // Act, Assert await expect( dataSource.export({ sql: 'select 1', - directory: 'tmp', + directory, profileName: 'profile not exist', + options: {}, } as ExportOptions) ).rejects.toThrow(); }, 100000); @@ -318,3 +325,119 @@ it('Data source should release connection when readable stream is destroyed', as expect(rows.length).toBe(1); // afterEach hook will timeout if any leak occurred. 
}, 300000); + +it('Should return the same pool when the profile is the same', async () => { + // Arrange + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act + const pool1 = mockDataSource.getPool('profile1'); + const pool2 = mockDataSource.getPool('profile1'); + // Assert + expect(pool1 === pool2).toBeTruthy(); +}, 30000); + +it('Should return the same pool when the profile and authentication is the same', async () => { + // Arrange + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act + const pool1 = mockDataSource.getPool('profile1', 'the-same-authentication'); + const pool2 = mockDataSource.getPool('profile1', 'the-same-authentication'); + // Assert + expect(pool1 === pool2).toBeTruthy(); +}, 30000); + +it('Should return new user pool if user pool not exist', async () => { + // Arrange + const profile1 = pg.getProfile('profile1'); + const database = profile1.connection.database; + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act + const pool1 = mockDataSource.getPool('profile1'); + const pool2 = mockDataSource.getPool('profile1', 'my-authentication'); + const userPool = mockDataSource.getUserPool('my-authentication', database); + // Assert + expect(pool1 == pool2).toBeFalsy(); + expect(userPool === pool2).toBeTruthy(); +}, 30000); + +it('Should return existing user pool if user pool exist', async () => { + // Arrange + const profile1 = pg.getProfile('profile1'); + const database = profile1.connection.database; + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + + // Act + const pool = mockDataSource.getPool('profile1', 'my-authentication'); + const userPool = mockDataSource.getUserPool('my-authentication', database); + // Assert + expect(userPool === 
pool).toBeTruthy(); +}, 30000); + +it('Should return new user pool if user pool exist but not match', async () => { + // Arrange + const profile1 = pg.getProfile('profile1'); + const database = profile1.connection.database; + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + + // Act + expect(mockDataSource.getUserPool('my-authentication', database)).toBe( + undefined + ); + mockDataSource.getPool('profile1', 'my-authentication'); + // Assert + expect( + mockDataSource.getUserPool('my-authentication', database) + ).toBeDefined(); +}, 30000); + +it('Should return different pool with different authentication even the profile is the same', async () => { + // Arrange + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act + const pool1 = mockDataSource.getPool('profile1', 'authentication'); + const pool2 = mockDataSource.getPool('profile1', 'differ-authentication'); + // Assert + expect(pool1 === pool2).toBeFalsy(); +}, 30000); + +it('Should throw error when the profile is not exist', async () => { + // Arrange + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act, Assert + expect(() => mockDataSource.getPool('profile2')).toThrow( + 'Profile instance profile2 not found' + ); +}, 30000); + +it('Should return default pool when password was not given', async () => { + // Arrange + mockDataSource = new MockCannerDataSource({}, '', [ + pg.getProfile('profile1'), + ]); + await mockDataSource.activate(); + // Act + const pool = mockDataSource.getPool('profile1'); + // Assert + expect(pool).toBeDefined(); +}, 30000); diff --git a/packages/extension-driver-canner/test/cannerServer.ts b/packages/extension-driver-canner/test/cannerServer.ts index a24c2c8e..ce3611bc 100644 --- a/packages/extension-driver-canner/test/cannerServer.ts +++ 
b/packages/extension-driver-canner/test/cannerServer.ts @@ -20,6 +20,7 @@ export class CannerServer { database: process.env['CANNER_WORKSPACE_SQL_NAME'], } as PGOptions, allow: '*', + properties: {}, }; } } diff --git a/packages/extension-driver-canner/test/mock/index.ts b/packages/extension-driver-canner/test/mock/index.ts new file mode 100644 index 00000000..d1838473 --- /dev/null +++ b/packages/extension-driver-canner/test/mock/index.ts @@ -0,0 +1 @@ +export * from './mockCannerDataSource'; diff --git a/packages/extension-driver-canner/test/mock/mockCannerDataSource.ts b/packages/extension-driver-canner/test/mock/mockCannerDataSource.ts new file mode 100644 index 00000000..8ae10562 --- /dev/null +++ b/packages/extension-driver-canner/test/mock/mockCannerDataSource.ts @@ -0,0 +1,35 @@ +import { CannerDataSource } from '../../src'; +import { InternalError } from '@vulcan-sql/core'; +import { Pool } from 'pg'; + +export class MockCannerDataSource extends CannerDataSource { + public override getPool(profileName: string, password?: string): Pool { + if (!this.poolMapping.has(profileName)) { + throw new InternalError(`Profile instance ${profileName} not found`); + } + const { pool: defaultPool, options: poolOptions } = + this.poolMapping.get(profileName)!; + if (!password) { + return defaultPool; + } + const database = poolOptions?.database || ''; + const userPoolKey = this.getUserPoolKey(password, database); + if (this.UserPool.has(userPoolKey)) { + const userPool = this.UserPool.get(userPoolKey); + return userPool!; + } + const pool = new Pool({ ...poolOptions, password: password }); + this.UserPool.set(userPoolKey, pool); + return pool; + } + + public setUserPool = (userPool: Pool, password: string, database: string) => { + const userPoolKey = this.getUserPoolKey(password, database); + this.UserPool.set(userPoolKey, userPool); + }; + + public getUserPool = (password: string, database: string) => { + const userPoolKey = this.getUserPoolKey(password, database); + 
return this.UserPool.get(userPoolKey); + }; +} diff --git a/packages/extension-driver-clickhouse/package.json b/packages/extension-driver-clickhouse/package.json index 18d58df6..db9f823f 100644 --- a/packages/extension-driver-clickhouse/package.json +++ b/packages/extension-driver-clickhouse/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-clickhouse", "description": "Clickhouse driver for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -27,6 +27,6 @@ "@clickhouse/client": "^0.1.1" }, "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-driver-duckdb/package.json b/packages/extension-driver-duckdb/package.json index 659450bd..0b6e54b9 100644 --- a/packages/extension-driver-duckdb/package.json +++ b/packages/extension-driver-duckdb/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-duckdb", "description": "duckdb driver for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-driver-ksqldb/package.json b/packages/extension-driver-ksqldb/package.json index 76c94881..4689242d 100644 --- a/packages/extension-driver-ksqldb/package.json +++ b/packages/extension-driver-ksqldb/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-ksqldb", "description": "ksqlDB driver for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git 
a/packages/extension-driver-pg/package.json b/packages/extension-driver-pg/package.json index 8f2c4901..522ca6da 100644 --- a/packages/extension-driver-pg/package.json +++ b/packages/extension-driver-pg/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-pg", "description": "PG driver for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -24,6 +24,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-driver-redshift/.eslintrc.json b/packages/extension-driver-redshift/.eslintrc.json new file mode 100644 index 00000000..9d9c0db5 --- /dev/null +++ b/packages/extension-driver-redshift/.eslintrc.json @@ -0,0 +1,18 @@ +{ + "extends": ["../../.eslintrc.json"], + "ignorePatterns": ["!**/*"], + "overrides": [ + { + "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], + "rules": {} + }, + { + "files": ["*.ts", "*.tsx"], + "rules": {} + }, + { + "files": ["*.js", "*.jsx"], + "rules": {} + } + ] +} diff --git a/packages/extension-driver-redshift/README.md b/packages/extension-driver-redshift/README.md new file mode 100644 index 00000000..df27c637 --- /dev/null +++ b/packages/extension-driver-redshift/README.md @@ -0,0 +1,76 @@ +# extension-driver-redshift + +[@aws-sdk/client-redshift-data](https://www.npmjs.com/package/@aws-sdk/client-redshift-data) driver for VulcanSQL. + +reference: https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-redshift-data + +## Install + +1. Install package + + ```bash + npm i @vulcan-sql/extension-driver-redshift + ``` + +2. Update `vulcan.yaml`, enable the extension. + + ```yaml + extensions: + redshift: '@vulcan-sql/extension-driver-redshift' + ``` + +3. Create a new profile in `profiles.yaml` or in your profiles' paths. 
For example if you are using Redshift Serverless: + +```yaml +- name: redshift # profile name + type: redshift + allow: "*" + connection: + # please see the type definition of RedshiftDataClientConfig + # https://github.com/aws/aws-sdk-js-v3/blob/29056f4ca545f7e5cf951b915bb52178305fc305/clients/client-redshift-data/src/RedshiftDataClient.ts#L253C18-L253C42 + credentials: + accessKeyId: + secretAccessKey: + # please see the type definition of ExecuteStatementCommandInput(omit Sql and Parameters) + # https://github.com/aws/aws-sdk-js-v3/blob/29056f4ca545f7e5cf951b915bb52178305fc305/clients/client-redshift-data/src/models/models_0.ts#L805C18-L805C39 + Database: + WorkgroupName: +``` + +## Testing + +```bash +nx test extension-driver-redshift +``` + +This library was generated with [Nx](https://nx.dev). + +To run test, the following environment variables are required: + +- AWS_ACCESS_KEY_ID +- AWS_SECRET_ACCESS_KEY +- AWS_REDSHIFT_DATABASE +- AWS_REDSHIFT_WORKGROUP_NAME + +To enable the test for `test/redshiftDataSource.spec.ts`: +- remove `.skip` inside `test/redshiftDataSource.spec.ts` to enable the test. +- remove `/* istanbul ignore file */` in the `src/lib/redshiftDataSource.ts` + +Local Testing Success Message:(Since the tests run in CI are disabled, so I paste the local testing result here!) + +```bash + PASS extension-driver-redshift packages/extension-driver-redshift/test/redshiftDataSource.spec.ts (41.595 s) + +Test Suites: 2 passed, 2 total +Tests: 15 passed, 15 total +Snapshots: 0 total +Time: 42.048 s +Ran all test suites. + + ————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————— + + > NX Successfully ran target test for project extension-driver-redshift + + +✨ Done in 44.39s. 
+``` \ No newline at end of file diff --git a/packages/extension-driver-redshift/jest.config.ts b/packages/extension-driver-redshift/jest.config.ts new file mode 100644 index 00000000..8f44ea62 --- /dev/null +++ b/packages/extension-driver-redshift/jest.config.ts @@ -0,0 +1,14 @@ +module.exports = { + displayName: 'extension-driver-redshift', + preset: '../../jest.preset.ts', + globals: { + 'ts-jest': { + tsconfig: '/tsconfig.spec.json', + }, + }, + transform: { + '^.+\\.[tj]s$': 'ts-jest', + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: '../../coverage/packages/extension-driver-redshift', +}; diff --git a/packages/extension-driver-redshift/package.json b/packages/extension-driver-redshift/package.json new file mode 100644 index 00000000..6ed78e9d --- /dev/null +++ b/packages/extension-driver-redshift/package.json @@ -0,0 +1,32 @@ +{ + "name": "@vulcan-sql/extension-driver-redshift", + "description": "Redshift driver for VulcanSQL", + "version": "0.10.0", + "type": "commonjs", + "publishConfig": { + "access": "public" + }, + "keywords": [ + "vulcan", + "vulcan-sql", + "data", + "sql", + "database", + "data-warehouse", + "data-lake", + "api-builder", + "redshift" + ], + "repository": { + "type": "git", + "url": "https://github.com/Canner/vulcan.git" + }, + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/client-redshift-data": "^3.405.0", + "exponential-backoff": "^3.1.1" + }, + "peerDependencies": { + "@vulcan-sql/core": "~0.10.0-0" + } +} \ No newline at end of file diff --git a/packages/extension-driver-redshift/project.json b/packages/extension-driver-redshift/project.json new file mode 100644 index 00000000..008ba18e --- /dev/null +++ b/packages/extension-driver-redshift/project.json @@ -0,0 +1,85 @@ +{ + "root": "packages/extension-driver-redshift", + "sourceRoot": "packages/extension-driver-redshift/src", + "targets": { + "build": { + "executor": "@nrwl/workspace:run-commands", + "options": { + "command": "yarn ts-node 
./tools/scripts/replaceAlias.ts extension-driver-redshift" + }, + "dependsOn": [ + { + "projects": "self", + "target": "tsc" + }, + { + "projects": "self", + "target": "install-dependencies" + } + ] + }, + "tsc": { + "executor": "@nrwl/js:tsc", + "outputs": ["{options.outputPath}"], + "options": { + "outputPath": "dist/packages/extension-driver-redshift", + "main": "packages/extension-driver-redshift/src/index.ts", + "tsConfig": "packages/extension-driver-redshift/tsconfig.lib.json", + "assets": ["packages/extension-driver-redshift/*.md"], + "buildableProjectDepsInPackageJsonType": "dependencies" + }, + "dependsOn": [ + { + "projects": "dependencies", + "target": "build" + }, + { + "projects": "self", + "target": "install-dependencies" + } + ] + }, + "lint": { + "executor": "@nrwl/linter:eslint", + "outputs": ["{options.outputFile}"], + "options": { + "lintFilePatterns": ["packages/extension-driver-redshift/**/*.ts"] + } + }, + "test": { + "executor": "@nrwl/jest:jest", + "outputs": ["coverage/packages/extension-driver-redshift"], + "options": { + "jestConfig": "packages/extension-driver-redshift/jest.config.ts", + "passWithNoTests": true + }, + "dependsOn": [ + { + "projects": "self", + "target": "install-dependencies" + } + ] + }, + "publish": { + "executor": "@nrwl/workspace:run-commands", + "options": { + "command": "node ../../../tools/scripts/publish.mjs {args.tag} {args.version}", + "cwd": "dist/packages/extension-driver-redshift" + }, + "dependsOn": [ + { + "projects": "self", + "target": "build" + } + ] + }, + "install-dependencies": { + "executor": "@nrwl/workspace:run-commands", + "options": { + "command": "yarn", + "cwd": "packages/extension-driver-redshift" + } + } + }, + "tags": [] +} diff --git a/packages/extension-driver-redshift/src/index.ts b/packages/extension-driver-redshift/src/index.ts new file mode 100644 index 00000000..a6013622 --- /dev/null +++ b/packages/extension-driver-redshift/src/index.ts @@ -0,0 +1,3 @@ +export * from 
'./lib/redshiftDataSource'; +import { RedShiftDataSource } from './lib/redshiftDataSource'; +export default [RedShiftDataSource]; diff --git a/packages/extension-driver-redshift/src/lib/redshiftDataSource.ts b/packages/extension-driver-redshift/src/lib/redshiftDataSource.ts new file mode 100644 index 00000000..c31e6965 --- /dev/null +++ b/packages/extension-driver-redshift/src/lib/redshiftDataSource.ts @@ -0,0 +1,197 @@ +/* istanbul ignore file */ + +import { + DataSource, + DataResult, + ExecuteOptions, + InternalError, + RequestParameter, + VulcanExtensionId, +} from '@vulcan-sql/core'; +import { Readable } from 'stream'; +import { buildSQL } from './sqlBuilder'; +import { mapFromRedShiftTypeId } from './typeMapper'; +import { + RedshiftDataClient, + RedshiftDataClientConfig, + ExecuteStatementCommand, + ExecuteStatementCommandInput, + ExecuteStatementCommandOutput, + DescribeStatementCommandInput, + DescribeStatementResponse, + DescribeStatementCommand, + GetStatementResultCommandInput, + GetStatementResultCommand, + SqlParameter, +} from '@aws-sdk/client-redshift-data'; +import { backOff } from 'exponential-backoff'; + +export type RedshiftOptions = RedshiftDataClientConfig & Omit; + +type RedShiftDataRow = { + [column: string]: any; +} + +@VulcanExtensionId('redshift') +export class RedShiftDataSource extends DataSource { + private logger = this.getLogger(); + private redshiftClientMapping = new Map< + string, + { + redshiftClient: RedshiftDataClient; + options?: RedshiftOptions; + } + >(); + public override async onActivate() { + const profiles = this.getProfiles().values(); + for (const profile of profiles) { + this.logger.debug( + `Initializing profile: ${profile.name} using redshift driver` + ); + + const redshiftClient = new RedshiftDataClient(profile.connection!); + this.redshiftClientMapping.set(profile.name, { + redshiftClient: redshiftClient, + options: profile.connection, + }); + + await this.testConnection(profile.name); + this.logger.debug(`Profile 
${profile.name} initialized`); + } + } + + public async execute({ + statement: sql, + bindParams, + profileName, + operations, + }: ExecuteOptions): Promise { + this.checkProfileExist(profileName); + const { redshiftClient, options } = this.redshiftClientMapping.get(profileName)!; + + try { + const sqlParams: SqlParameter[] = []; + bindParams.forEach((value, key) => { + sqlParams.push({ name: key.replace(':', ''), value: String(value) }); + }); + + const builtSQL = buildSQL(sql, operations); + let executeStatementCommandParams: ExecuteStatementCommandInput = { + Sql: builtSQL, + Database: options!.Database, + WorkgroupName: options!.WorkgroupName, + }; + if (sqlParams.length) { + executeStatementCommandParams = {...executeStatementCommandParams, Parameters: sqlParams} + } + + const executeStatementCommand = new ExecuteStatementCommand(executeStatementCommandParams); + const statementCommandResult = await redshiftClient.send(executeStatementCommand); + return await this.getResultFromExecuteStatement(statementCommandResult, redshiftClient); + } catch (e: any) { + this.logger.debug( + `Errors occurred, release connection from ${profileName}` + ); + throw e; + } + } + + public async prepare({ parameterIndex }: RequestParameter) { + // see the section of Running SQL statements with parameters when calling the Amazon Redshift Data API + // https://docs.aws.amazon.com/redshift/latest/mgmt/data-api.html + return `:${parameterIndex}`; + } + + private async testConnection(profileName: string): Promise { + const { redshiftClient, options } = this.redshiftClientMapping.get(profileName)!; + const executeStatementCommandParams: ExecuteStatementCommandInput = { + Sql: 'select 1', + Database: options!.Database, + WorkgroupName: options!.WorkgroupName, + }; + + const executeStatementCommand = new ExecuteStatementCommand(executeStatementCommandParams); + + try { + const statementCommandResult = await redshiftClient.send(executeStatementCommand); + return await 
this.getResultFromExecuteStatement(statementCommandResult, redshiftClient); + } catch (e) { + this.logger.debug( + `Errors occurred, release connection from ${profileName}` + ); + throw e; + } + } + + private async getResultFromExecuteStatement( + statementCommandResult: ExecuteStatementCommandOutput, + redshiftClient: RedshiftDataClient + ): Promise { + let describeStatementResponse: DescribeStatementResponse | undefined; + const describeStatementRequestInput: DescribeStatementCommandInput = { + Id: statementCommandResult.Id, + }; + + // definition of describeStatementResponse.Status + // https://github.com/aws/aws-sdk-js-v3/blob/29056f4ca545f7e5cf951b915bb52178305fc305/clients/client-redshift-data/src/models/models_0.ts#L604 + while (!describeStatementResponse || describeStatementResponse.Status !== 'FINISHED') { + const describeStatementCommand = new DescribeStatementCommand(describeStatementRequestInput); + describeStatementResponse = await backOff(() =>redshiftClient.send(describeStatementCommand)); + + if ( + describeStatementResponse.Status === 'ABORTED' || + describeStatementResponse.Status === 'FAILED' + ) { + throw describeStatementResponse.Error + } + } + + let getStatementResultCommandParams: GetStatementResultCommandInput = { + "Id": describeStatementResponse.Id + }; + let getStatementResultCommand = new GetStatementResultCommand(getStatementResultCommandParams); + let getStatementResultResponse = await redshiftClient.send(getStatementResultCommand); + const records = getStatementResultResponse.Records! 
|| []; + const columns = getStatementResultResponse.ColumnMetadata || []; + + while (getStatementResultResponse.NextToken) { + getStatementResultCommandParams = { + "Id": describeStatementResponse.Id, + "NextToken": getStatementResultResponse.NextToken, + }; + getStatementResultCommand = new GetStatementResultCommand(getStatementResultCommandParams); + getStatementResultResponse = await redshiftClient.send(getStatementResultCommand); + records.push(...(getStatementResultResponse.Records! || [])); + } + + return { + getColumns: () => { + return columns.map((column) => ({ + name: column.name || '', + type: mapFromRedShiftTypeId(column.typeName?.toLowerCase() || ''), + })); + }, + getData: () => new Readable({ + objectMode: true, + read() { + for (const record of records) { + const row: RedShiftDataRow = {}; + for (const [i, recordField] of record.entries()) { + row[columns[i].name!] = Object.values(recordField)[0]; + } + this.push(row); + } + this.push(null); + }, + // automatically destroy() the stream when it emits 'finish' or errors. 
Node > 10.16 + autoDestroy: true, + }), + }; + } + + private checkProfileExist(profileName: string): void { + if (!this.redshiftClientMapping.has(profileName)) { + throw new InternalError(`Profile instance ${profileName} not found`); + } + } +} diff --git a/packages/extension-driver-redshift/src/lib/sqlBuilder.ts b/packages/extension-driver-redshift/src/lib/sqlBuilder.ts new file mode 100644 index 00000000..b5b94e95 --- /dev/null +++ b/packages/extension-driver-redshift/src/lib/sqlBuilder.ts @@ -0,0 +1,40 @@ +import { Parameterized, SQLClauseOperation } from '@vulcan-sql/core'; +import { isNull, isUndefined } from 'lodash'; + +const isNullOrUndefine = (value: any) => isUndefined(value) || isNull(value); + +export const removeEndingSemiColon = (sql: string) => { + return sql.replace(/;([ \n]+)?$/, ''); +}; + +export const addLimit = (sql: string, limit?: string | null) => { + if (isNullOrUndefine(limit)) return sql; + return [sql, `LIMIT`, limit].join(' '); +}; + +export const addOffset = (sql: string, offset?: string | null) => { + if (isNullOrUndefine(offset)) return sql; + return [sql, `OFFSET`, offset].join(' '); +}; + +// Check if there is no operations +export const isNoOP = ( + operations: Partial> +): boolean => { + if (!isNullOrUndefine(operations.limit)) return false; + if (!isNullOrUndefine(operations.offset)) return false; + return true; +}; + +export const buildSQL = ( + sql: string, + operations: Partial> +): string => { + if (isNoOP(operations)) return sql; + let builtSQL = ''; + builtSQL += `SELECT * FROM (${removeEndingSemiColon(sql)})`; + builtSQL = addLimit(builtSQL, operations.limit); + builtSQL = addOffset(builtSQL, operations.offset); + builtSQL += ';'; + return builtSQL; +}; diff --git a/packages/extension-driver-redshift/src/lib/typeMapper.ts b/packages/extension-driver-redshift/src/lib/typeMapper.ts new file mode 100644 index 00000000..0fcf5559 --- /dev/null +++ b/packages/extension-driver-redshift/src/lib/typeMapper.ts @@ -0,0 +1,40 @@ 
+const typeMapping = new Map(); + +const register = (redshiftType: string, type: string) => { + typeMapping.set(redshiftType, type); +}; + +// Reference +// https://docs.aws.amazon.com/redshift/latest/dg/c_Supported_data_types.html +register('smallint', 'number'); +register('int2', 'number'); +register('integer', 'number'); +register('int', 'number'); +register('int4', 'number'); +register('bigint', 'number'); +register('int8', 'number'); +register('decimal', 'number'); +register('numeric', 'number'); +register('real', 'number'); +register('float4', 'number'); +register('doubleprecision', 'number'); +register('float8', 'number'); +register('float', 'number'); +register('boolean', 'boolean'); +register('bool', 'boolean'); +register('char', 'string'); +register('character', 'string'); +register('nchar', 'string'); +register('bpchar', 'string'); +register('varchar', 'string'); +register('charactervarying', 'string'); +register('nvarchar', 'string'); +register('text', 'string'); +register('date', 'string'); +register('timestamp', 'string'); +register('super', 'string'); + +export const mapFromRedShiftTypeId = (redshiftType: string) => { + if (typeMapping.has(redshiftType)) return typeMapping.get(redshiftType)!; + return 'string'; +}; diff --git a/packages/extension-driver-redshift/test/redshiftDataSource.spec.ts b/packages/extension-driver-redshift/test/redshiftDataSource.spec.ts new file mode 100644 index 00000000..e2cb455f --- /dev/null +++ b/packages/extension-driver-redshift/test/redshiftDataSource.spec.ts @@ -0,0 +1,180 @@ +import { RedShiftDataSource } from '../src'; +import { RedShiftFakeServer } from './redshiftServer'; +import { streamToArray } from '@vulcan-sql/core'; + +let redShift: RedShiftFakeServer; +let dataSource: RedShiftDataSource; + +// All tests in this file are skipped, since it costs money in AWS. As of now, we only run tests in the local environment. 
+it.skip('Preparing the data source', async () => { + redShift = new RedShiftFakeServer(); +}); + +it.skip('Data source should be activate without any error when all profiles are valid', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); + + // Act, Assert + await expect(dataSource.activate()).resolves.not.toThrow(); +}); + +it.skip('Data source should throw error when activating any profile which is invalid', async () => { + // Arrange + const invalidProfile = redShift.getProfile('profile1'); + invalidProfile.connection.credentials.accessKeyId = ''; + invalidProfile.connection.credentials.secretAccessKey = ''; + dataSource = new RedShiftDataSource({}, '', [ + invalidProfile, + ]); + + // Act, Assert + await expect(dataSource.activate()).rejects.toThrow(); +}); + +it.skip('Data source should return correct rows with 2 chunks', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); + await dataSource.activate(); + // Act + const sqlStatement = ` + WITH + input_data as ( + SELECT array(1,2,3,10) as id + union all + SELECT array(1) as id + union all + SELECT array(2,3,4,9) as id + ) + SELECT + id2 + FROM + input_data AS ids, + ids.id AS id2 + ` + const { getData } = await dataSource.execute({ + statement: sqlStatement, + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(9); +}, 30000); + +it.skip('Data source should return correct rows with 1 chunk', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); + await dataSource.activate(); + // Act + const sqlStatement = ` + WITH + input_data as ( + SELECT array(1,2,3,10) as id + union all + SELECT array(1) as id + union all + SELECT array(2,3,4,9) as id + ) + SELECT + id2 + FROM + input_data AS ids, + ids.id AS id2 + LIMIT 5 + ` + const { 
getData } = await dataSource.execute({ + statement: sqlStatement, + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(5); +}, 30000); + +it.skip('Data source should return empty data with no row', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); + await dataSource.activate(); + // Act + const sqlStatement = ` + WITH + input_data as ( + SELECT array(1,2,3,10) as id + union all + SELECT array(1) as id + union all + SELECT array(2,3,4,9) as id + ) + SELECT + id2 + FROM + input_data AS ids, + ids.id AS id2 + LIMIT 0 + ` + const { getData } = await dataSource.execute({ + statement: sqlStatement, + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows.length).toBe(0); +}, 30000); + +it.skip('Data source should work with prepare statements', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); + await dataSource.activate(); + // Act + const bindParams = new Map(); + const var1Name = await dataSource.prepare({ + parameterIndex: 1, + value: '123', + profileName: 'profile1', + }); + bindParams.set(var1Name, '123'); + + const var2Name = await dataSource.prepare({ + parameterIndex: 2, + value: '456', + profileName: 'profile1', + }); + bindParams.set(var2Name, '456'); + + const { getData } = await dataSource.execute({ + statement: `select ${var1Name} as v1, ${var2Name} as v2;`, + bindParams, + profileName: 'profile1', + operations: {} as any, + }); + const rows = await streamToArray(getData()); + // Assert + expect(rows[0].v1).toBe('123'); + expect(rows[0].v2).toBe('456'); +}, 30000); + +it.skip('Data source should return correct column types', async () => { + // Arrange + dataSource = new RedShiftDataSource({}, '', [redShift.getProfile('profile1')]); 
+ await dataSource.activate(); + // Act + const { getColumns, getData } = await dataSource.execute({ + statement: `SELECT CAST(1 as bigint) as a, true as b`, + bindParams: new Map(), + profileName: 'profile1', + operations: {} as any, + }); + const column = getColumns(); + // We need to destroy the data stream or the driver waits for us + const data = getData(); + data.destroy(); + + // Assert + expect(column[0]).toEqual({ name: 'a', type: 'number' }); + expect(column[1]).toEqual({ name: 'b', type: 'boolean' }); +}, 30000); diff --git a/packages/extension-driver-redshift/test/redshiftServer.ts b/packages/extension-driver-redshift/test/redshiftServer.ts new file mode 100644 index 00000000..6dc035dd --- /dev/null +++ b/packages/extension-driver-redshift/test/redshiftServer.ts @@ -0,0 +1,28 @@ +export class RedShiftFakeServer { + constructor() { + [ + 'AWS_ACCESS_KEY_ID', + 'AWS_SECRET_ACCESS_KEY', + 'AWS_REDSHIFT_DATABASE', + 'AWS_REDSHIFT_WORKGROUP_NAME', + ].forEach((envName) => { + if (!process.env[envName]) throw new Error(`${envName} not defined`); + }); + } + + public getProfile(name: string) { + return { + name, + type: 'redshift', + connection: { + credentials: { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + }, + Database: process.env['AWS_REDSHIFT_DATABASE'], + WorkgroupName: process.env['AWS_REDSHIFT_WORKGROUP_NAME'], + }, + allow: '*', + }; + } +} diff --git a/packages/extension-driver-redshift/test/sqlBuilder.spec.ts b/packages/extension-driver-redshift/test/sqlBuilder.spec.ts new file mode 100644 index 00000000..6a334ad8 --- /dev/null +++ b/packages/extension-driver-redshift/test/sqlBuilder.spec.ts @@ -0,0 +1,73 @@ +import * as builder from '../src/lib/sqlBuilder'; + +describe('SQL builders components test', () => { + it('removeEndingSemiColon', async () => { + // Arrange + const statement = `SELECT * FROM users; \n `; + // Act + const result = builder.removeEndingSemiColon(statement); + // 
Arrange + expect(result).toBe('SELECT * FROM users'); + }); + + it('addLimit - string value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addLimit(statement, ':1'); + // Arrange + expect(result).toBe('SELECT * FROM users LIMIT :1'); + }); + + it('addLimit - null value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addLimit(statement, null); + // Arrange + expect(result).toBe('SELECT * FROM users'); + }); + + it('addOffset - string value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addOffset(statement, ':1'); + // Arrange + expect(result).toBe('SELECT * FROM users OFFSET :1'); + }); + + it('addOffset - null value', async () => { + // Arrange + const statement = `SELECT * FROM users`; + // Act + const result = builder.addOffset(statement, null); + // Arrange + expect(result).toBe('SELECT * FROM users'); + }); + + it('isNoOP - empty operation', async () => { + // Act + const result = builder.isNoOP({}); + // Arrange + expect(result).toBe(true); + }); + + it('isNoOP - some operations', async () => { + // Act + const results = [{ limit: ':1' }, { offset: ':1' }].map(builder.isNoOP); + // Arrange + expect(results.every((result) => result === false)).toBeTruthy(); + }); +}); + +it('BuildSQL function should build sql with operations', async () => { + // Arrange + const statement = `SELECT * FROM users;`; + // Act + const result = builder.buildSQL(statement, { limit: ':1', offset: ':2' }); + // Arrange + expect(result).toBe( + 'SELECT * FROM (SELECT * FROM users) LIMIT :1 OFFSET :2;' + ); +}); diff --git a/packages/extension-driver-redshift/tsconfig.json b/packages/extension-driver-redshift/tsconfig.json new file mode 100644 index 00000000..f5b85657 --- /dev/null +++ b/packages/extension-driver-redshift/tsconfig.json @@ -0,0 +1,22 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": 
{ + "module": "commonjs", + "forceConsistentCasingInFileNames": true, + "strict": true, + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true + }, + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.lib.json" + }, + { + "path": "./tsconfig.spec.json" + } + ] +} diff --git a/packages/extension-driver-redshift/tsconfig.lib.json b/packages/extension-driver-redshift/tsconfig.lib.json new file mode 100644 index 00000000..436d0794 --- /dev/null +++ b/packages/extension-driver-redshift/tsconfig.lib.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "declaration": true, + "types": [] + }, + "include": ["**/*.ts"], + "exclude": ["jest.config.ts", "**/*.spec.ts", "**/*.test.ts", "../../types/*.d.ts"] +} diff --git a/packages/extension-driver-redshift/tsconfig.spec.json b/packages/extension-driver-redshift/tsconfig.spec.json new file mode 100644 index 00000000..2c94a339 --- /dev/null +++ b/packages/extension-driver-redshift/tsconfig.spec.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "module": "commonjs", + "types": ["jest", "node"] + }, + "include": ["jest.config.ts", "**/*.test.ts", "**/*.spec.ts", "**/*.d.ts", "../../types/*.d.ts"] +} diff --git a/packages/extension-driver-redshift/yarn.lock b/packages/extension-driver-redshift/yarn.lock new file mode 100644 index 00000000..4ea658ed --- /dev/null +++ b/packages/extension-driver-redshift/yarn.lock @@ -0,0 +1,801 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@aws-crypto/crc32@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-3.0.0.tgz#07300eca214409c33e3ff769cd5697b57fdd38fa" + integrity sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA== + dependencies: + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + tslib "^1.11.1" + +"@aws-crypto/ie11-detection@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688" + integrity sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q== + dependencies: + tslib "^1.11.1" + +"@aws-crypto/sha256-browser@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766" + integrity sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ== + dependencies: + "@aws-crypto/ie11-detection" "^3.0.0" + "@aws-crypto/sha256-js" "^3.0.0" + "@aws-crypto/supports-web-crypto" "^3.0.0" + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + +"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2" + integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ== + dependencies: + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + tslib "^1.11.1" + +"@aws-crypto/supports-web-crypto@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2" + integrity 
sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg== + dependencies: + tslib "^1.11.1" + +"@aws-crypto/util@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" + integrity sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w== + dependencies: + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + +"@aws-sdk/client-redshift-data@^3.405.0": + version "3.409.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-redshift-data/-/client-redshift-data-3.409.0.tgz#f9e17401f151c9fd71c4d5c43b656b05d5c7ceef" + integrity sha512-CQMq+2fc87i66B3xKqM2GYkjxtrqz219cSoOWxr/nBdmW/OjHdRCilYXoY/ZRmqXJiSg7lILX6FM8dsqoFLpzg== + dependencies: + "@aws-crypto/sha256-browser" "3.0.0" + "@aws-crypto/sha256-js" "3.0.0" + "@aws-sdk/client-sts" "3.409.0" + "@aws-sdk/credential-provider-node" "3.409.0" + "@aws-sdk/middleware-host-header" "3.408.0" + "@aws-sdk/middleware-logger" "3.408.0" + "@aws-sdk/middleware-recursion-detection" "3.408.0" + "@aws-sdk/middleware-signing" "3.408.0" + "@aws-sdk/middleware-user-agent" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@aws-sdk/util-endpoints" "3.408.0" + "@aws-sdk/util-user-agent-browser" "3.408.0" + "@aws-sdk/util-user-agent-node" "3.408.0" + "@smithy/config-resolver" "^2.0.5" + "@smithy/fetch-http-handler" "^2.0.5" + "@smithy/hash-node" "^2.0.5" + "@smithy/invalid-dependency" "^2.0.5" + "@smithy/middleware-content-length" "^2.0.5" + "@smithy/middleware-endpoint" "^2.0.5" + "@smithy/middleware-retry" "^2.0.5" + "@smithy/middleware-serde" "^2.0.5" + "@smithy/middleware-stack" "^2.0.0" + "@smithy/node-config-provider" "^2.0.6" + "@smithy/node-http-handler" "^2.0.5" + "@smithy/protocol-http" "^2.0.5" + "@smithy/smithy-client" "^2.0.5" + "@smithy/types" "^2.2.2" + "@smithy/url-parser" "^2.0.5" + "@smithy/util-base64" "^2.0.0" + 
"@smithy/util-body-length-browser" "^2.0.0" + "@smithy/util-body-length-node" "^2.1.0" + "@smithy/util-defaults-mode-browser" "^2.0.6" + "@smithy/util-defaults-mode-node" "^2.0.6" + "@smithy/util-retry" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + uuid "^8.3.2" + +"@aws-sdk/client-sso@3.409.0": + version "3.409.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.409.0.tgz#7f6085ca23f465968eff9ff3bf57ba09bc5e883e" + integrity sha512-vlXcIzcmUhObuEJ6q3lsp1ZHeDeD9bUrG3dmdSTeII4U6A9imgvaXONWI9GFEUsgzCrrCxtCqBX2RqMfZDhylw== + dependencies: + "@aws-crypto/sha256-browser" "3.0.0" + "@aws-crypto/sha256-js" "3.0.0" + "@aws-sdk/middleware-host-header" "3.408.0" + "@aws-sdk/middleware-logger" "3.408.0" + "@aws-sdk/middleware-recursion-detection" "3.408.0" + "@aws-sdk/middleware-user-agent" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@aws-sdk/util-endpoints" "3.408.0" + "@aws-sdk/util-user-agent-browser" "3.408.0" + "@aws-sdk/util-user-agent-node" "3.408.0" + "@smithy/config-resolver" "^2.0.5" + "@smithy/fetch-http-handler" "^2.0.5" + "@smithy/hash-node" "^2.0.5" + "@smithy/invalid-dependency" "^2.0.5" + "@smithy/middleware-content-length" "^2.0.5" + "@smithy/middleware-endpoint" "^2.0.5" + "@smithy/middleware-retry" "^2.0.5" + "@smithy/middleware-serde" "^2.0.5" + "@smithy/middleware-stack" "^2.0.0" + "@smithy/node-config-provider" "^2.0.6" + "@smithy/node-http-handler" "^2.0.5" + "@smithy/protocol-http" "^2.0.5" + "@smithy/smithy-client" "^2.0.5" + "@smithy/types" "^2.2.2" + "@smithy/url-parser" "^2.0.5" + "@smithy/util-base64" "^2.0.0" + "@smithy/util-body-length-browser" "^2.0.0" + "@smithy/util-body-length-node" "^2.1.0" + "@smithy/util-defaults-mode-browser" "^2.0.6" + "@smithy/util-defaults-mode-node" "^2.0.6" + "@smithy/util-retry" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + +"@aws-sdk/client-sts@3.409.0": + version "3.409.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.409.0.tgz#f4be41dd8ae06ca98e6ab6c94e18bb7fb6a2f8e4" + integrity sha512-yNL9zYWDVIOWZhIlsy2tiHetSYvio5ZVJ3nvR4xWPTwqOQveZx/K0PTK+nh6T6w5R3w5IOSKvd+vPCpY4bGx8Q== + dependencies: + "@aws-crypto/sha256-browser" "3.0.0" + "@aws-crypto/sha256-js" "3.0.0" + "@aws-sdk/credential-provider-node" "3.409.0" + "@aws-sdk/middleware-host-header" "3.408.0" + "@aws-sdk/middleware-logger" "3.408.0" + "@aws-sdk/middleware-recursion-detection" "3.408.0" + "@aws-sdk/middleware-sdk-sts" "3.408.0" + "@aws-sdk/middleware-signing" "3.408.0" + "@aws-sdk/middleware-user-agent" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@aws-sdk/util-endpoints" "3.408.0" + "@aws-sdk/util-user-agent-browser" "3.408.0" + "@aws-sdk/util-user-agent-node" "3.408.0" + "@smithy/config-resolver" "^2.0.5" + "@smithy/fetch-http-handler" "^2.0.5" + "@smithy/hash-node" "^2.0.5" + "@smithy/invalid-dependency" "^2.0.5" + "@smithy/middleware-content-length" "^2.0.5" + "@smithy/middleware-endpoint" "^2.0.5" + "@smithy/middleware-retry" "^2.0.5" + "@smithy/middleware-serde" "^2.0.5" + "@smithy/middleware-stack" "^2.0.0" + "@smithy/node-config-provider" "^2.0.6" + "@smithy/node-http-handler" "^2.0.5" + "@smithy/protocol-http" "^2.0.5" + "@smithy/smithy-client" "^2.0.5" + "@smithy/types" "^2.2.2" + "@smithy/url-parser" "^2.0.5" + "@smithy/util-base64" "^2.0.0" + "@smithy/util-body-length-browser" "^2.0.0" + "@smithy/util-body-length-node" "^2.1.0" + "@smithy/util-defaults-mode-browser" "^2.0.6" + "@smithy/util-defaults-mode-node" "^2.0.6" + "@smithy/util-retry" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + fast-xml-parser "4.2.5" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-env@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.408.0.tgz#199a793e5477e30417f6be9f82aa0262ba96328e" + integrity sha512-GCpgHEHxRTzKaMkwDC2gLb3xlD+ZxhKPUJ1DVcO7I9E3eCGJsYVedIi0/2XE+NP+HVoy8LyW2qH8QQWh64JKow== 
+ dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-ini@3.409.0": + version "3.409.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.409.0.tgz#5d7596e5a3669767fbe52fd756989cb6f0f435dd" + integrity sha512-Z7hb0Kj0FuqD5HimDrtt0LRjKBHA5pvLcTYYdVorJovaBxEvfDpISSDVRIUmvhMGAlv7XezbvqESOU5cn0Gpzw== + dependencies: + "@aws-sdk/credential-provider-env" "3.408.0" + "@aws-sdk/credential-provider-process" "3.408.0" + "@aws-sdk/credential-provider-sso" "3.409.0" + "@aws-sdk/credential-provider-web-identity" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@smithy/credential-provider-imds" "^2.0.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/shared-ini-file-loader" "^2.0.6" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-node@3.409.0": + version "3.409.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.409.0.tgz#84ba57a60067c450daabda41ed909d1017cef657" + integrity sha512-kXmfBVYnHoEAACo6zskEryDSgMSo1QYiv6P8n6Go/RsJHe4Ec+YtrOMLg3hTOptiIGHOTWZ1ANaU/IfIxmqumA== + dependencies: + "@aws-sdk/credential-provider-env" "3.408.0" + "@aws-sdk/credential-provider-ini" "3.409.0" + "@aws-sdk/credential-provider-process" "3.408.0" + "@aws-sdk/credential-provider-sso" "3.409.0" + "@aws-sdk/credential-provider-web-identity" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@smithy/credential-provider-imds" "^2.0.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/shared-ini-file-loader" "^2.0.6" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-process@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.408.0.tgz#fbcf6571bc87e536b847e14c4c9ee1fdd6b81deb" + integrity 
sha512-qCTf9tr6+I2s3+v5zP4YRQQrGlYw/jyZ7u/k6bGshhlvgwGPfjNuHrM8uK/W1kv4ng1myxaL1/tAY6RVVdXz4Q== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/shared-ini-file-loader" "^2.0.6" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-sso@3.409.0": + version "3.409.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.409.0.tgz#1c9115c6ca82d3810fda54b23e46aae49897bbbe" + integrity sha512-Bh0ykbDpnUK4W8sQMEpRA/TlZxwpPLl4aU8eBLlbEcTL2M8or2nr0dQzOOvabZo8hbaPM6yfOl+vLTvWGs75zg== + dependencies: + "@aws-sdk/client-sso" "3.409.0" + "@aws-sdk/token-providers" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/shared-ini-file-loader" "^2.0.6" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/credential-provider-web-identity@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.408.0.tgz#2e38730a309b81527d23c3d435ea5ab1a3f73688" + integrity sha512-5FbDPF/zY/1t6k1zRI/HnrxcH2v7SwsEYu2SThI2qbzaP/K7MTnTanV5vNFcdQOpuQ7x3PrzTlH3AWZueCr3Vw== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/middleware-host-header@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.408.0.tgz#7b84ce0336c7acd5bc1e82076ef95bde597d6edf" + integrity sha512-eofCXuSZ+ntbLzeCRdHzraXzgWqAplXU7W2qFFVC4O9lZBhADwNPI8n8x98TH0mftnmvZxh5Bo5U8WvEolIDkw== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/protocol-http" "^2.0.5" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/middleware-logger@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.408.0.tgz#6c745f352ba95284ee78a397368c7dc79378da43" + integrity 
sha512-otwXPCubsGRFv8Hb6nKw6Vvnu4dC8CcPk05buStj42nF8QdjWrKGb2rDCvLph5lr576LF5HN+Y2moyOi7z/I7g== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/middleware-recursion-detection@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.408.0.tgz#036fa1ee8b76d5a0947591590a7a3a867aea8cae" + integrity sha512-QfZwmX5z0IRC2c8pBi9VozSqbJw19V5oxyykSTqdjGe3CG3yNujXObV6xQesK67CWSnPb9wDgVGKUoYuIXwOxw== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/protocol-http" "^2.0.5" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/middleware-sdk-sts@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.408.0.tgz#812deff5fa8388cda6d6908452d6223b059232f9" + integrity sha512-dIO9BTX049P2PwaeAK2lxJeA2rZi9/bWzMP1GIE60VrMDHmN5Ljvh1lLActECLAqNQIqN5Ub0bKV2tC/jMn+CA== + dependencies: + "@aws-sdk/middleware-signing" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/middleware-signing@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.408.0.tgz#89bb56abf5cbddaa9b04026c74362765918b6ff2" + integrity sha512-flLiLKATJ4NLcLb7lPojyQ6NvLSyQ3axqIClqwMRnhSRxvREB7OgBKwmPecSl0I5JxsNEqo+mjARdMjUHadgWQ== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/property-provider" "^2.0.0" + "@smithy/protocol-http" "^2.0.5" + "@smithy/signature-v4" "^2.0.0" + "@smithy/types" "^2.2.2" + "@smithy/util-middleware" "^2.0.0" + tslib "^2.5.0" + +"@aws-sdk/middleware-user-agent@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.408.0.tgz#c1909be2ce2c350273747923c4791a2d37bb0af8" + integrity sha512-UvlKri8/Mgf5W+tFU6ZJ65fC6HljcysIqfRFts/8Wurl322IS1I4j+pyjV2P6eK1054bzynfi3Trv+tRYHtVcA== + dependencies: + 
"@aws-sdk/types" "3.408.0" + "@aws-sdk/util-endpoints" "3.408.0" + "@smithy/protocol-http" "^2.0.5" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/token-providers@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.408.0.tgz#1de7fbbe25b8526ee7f3eebac26f581e3488a5d3" + integrity sha512-D//BjUrVtDzDdCz1mRdZZSAc822fh75Ssq46smeS6S6NKq3vJeHhfrQJMyVU1GclXu1tn9AwykaQW5Jwb5im+g== + dependencies: + "@aws-crypto/sha256-browser" "3.0.0" + "@aws-crypto/sha256-js" "3.0.0" + "@aws-sdk/middleware-host-header" "3.408.0" + "@aws-sdk/middleware-logger" "3.408.0" + "@aws-sdk/middleware-recursion-detection" "3.408.0" + "@aws-sdk/middleware-user-agent" "3.408.0" + "@aws-sdk/types" "3.408.0" + "@aws-sdk/util-endpoints" "3.408.0" + "@aws-sdk/util-user-agent-browser" "3.408.0" + "@aws-sdk/util-user-agent-node" "3.408.0" + "@smithy/config-resolver" "^2.0.5" + "@smithy/fetch-http-handler" "^2.0.5" + "@smithy/hash-node" "^2.0.5" + "@smithy/invalid-dependency" "^2.0.5" + "@smithy/middleware-content-length" "^2.0.5" + "@smithy/middleware-endpoint" "^2.0.5" + "@smithy/middleware-retry" "^2.0.5" + "@smithy/middleware-serde" "^2.0.5" + "@smithy/middleware-stack" "^2.0.0" + "@smithy/node-config-provider" "^2.0.6" + "@smithy/node-http-handler" "^2.0.5" + "@smithy/property-provider" "^2.0.0" + "@smithy/protocol-http" "^2.0.5" + "@smithy/shared-ini-file-loader" "^2.0.6" + "@smithy/smithy-client" "^2.0.5" + "@smithy/types" "^2.2.2" + "@smithy/url-parser" "^2.0.5" + "@smithy/util-base64" "^2.0.0" + "@smithy/util-body-length-browser" "^2.0.0" + "@smithy/util-body-length-node" "^2.1.0" + "@smithy/util-defaults-mode-browser" "^2.0.6" + "@smithy/util-defaults-mode-node" "^2.0.6" + "@smithy/util-retry" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + +"@aws-sdk/types@3.408.0", "@aws-sdk/types@^3.222.0": + version "3.408.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.408.0.tgz#eb10377130f23aef6594eb0e0a14e82dfa2e4d5a" + integrity sha512-sIsR5224xWQTW7O6h4V0S7DMWs4bK4DCunwOo7Avpq7ZVmH2YyLTs0n4NGL186j8xTosycF1ACQgpM48SLIvaA== + dependencies: + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/util-endpoints@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.408.0.tgz#397c6d9236434063127301f9c4d2117bdb978621" + integrity sha512-N1D5cKEkCqf5Q7IF/pI9kfcNrT+/5ctZ6cQo4Ex6xaOcnUzdOZcXdPqaMRZVZRn8enjK2SpoLlRpXGISOugPaw== + dependencies: + "@aws-sdk/types" "3.408.0" + tslib "^2.5.0" + +"@aws-sdk/util-locate-window@^3.0.0": + version "3.310.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz#b071baf050301adee89051032bd4139bba32cc40" + integrity sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w== + dependencies: + tslib "^2.5.0" + +"@aws-sdk/util-user-agent-browser@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.408.0.tgz#60b9660d4eb8c7ee9b3dc941436f1a025cc62567" + integrity sha512-wOVjDprG5h6kM8aJZk/tRX/RgxNxr73d6kIsUePlAgil13q62M9lcFMcIXduqtDsa1B6FfVB2wx/pyUuOZri5g== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/types" "^2.2.2" + bowser "^2.11.0" + tslib "^2.5.0" + +"@aws-sdk/util-user-agent-node@3.408.0": + version "3.408.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.408.0.tgz#2976414ed440d0a338b1ec6373a220ae71c08cab" + integrity sha512-BzMFV+cIXrtfcfJk3GpXnkANFkzZisvAtD306TMgIscn5FF26K1jD5DU+h5Q5WMq7gx+oXh9kJ3Lu3hi7hahKQ== + dependencies: + "@aws-sdk/types" "3.408.0" + "@smithy/node-config-provider" "^2.0.6" + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@aws-sdk/util-utf8-browser@^3.0.0": + version "3.259.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" + integrity sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw== + dependencies: + tslib "^2.3.1" + +"@smithy/abort-controller@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-2.0.6.tgz#8d17bb447aa33a43e4d57f98f9dc23560158b6b8" + integrity sha512-4I7g0lyGUlW2onf8mD76IzU37oRWSHsQ5zlW5MjDzgg4I4J9bOK4500Gx6qOuoN7+GulAnGLe1YwyrIluzhakg== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/config-resolver@^2.0.5", "@smithy/config-resolver@^2.0.7": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-2.0.7.tgz#bfa7de9b19922a071a2b26766bcb116e4becbc77" + integrity sha512-J4J1AWiqaApC+3I9U++SuxAQ3BOoM5VoYnpFzCZcb63aLF80Zpc/nq2pFR1OsEIYyg2UYNdcBKKfHABmwo4WgQ== + dependencies: + "@smithy/node-config-provider" "^2.0.9" + "@smithy/types" "^2.3.0" + "@smithy/util-config-provider" "^2.0.0" + "@smithy/util-middleware" "^2.0.0" + tslib "^2.5.0" + +"@smithy/credential-provider-imds@^2.0.0", "@smithy/credential-provider-imds@^2.0.9": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-2.0.9.tgz#f98a941c0b7211e9320a20d5c064d6489c61f6d8" + integrity sha512-K7WZRkHS5HZofRgK+O8W4YXXyaVexU1K6hp9vlUL/8CsnrFbZS9quyH/6hTROrYh2PuJr24yii1kc83NJdxMGQ== + dependencies: + "@smithy/node-config-provider" "^2.0.9" + "@smithy/property-provider" "^2.0.7" + "@smithy/types" "^2.3.0" + "@smithy/url-parser" "^2.0.6" + tslib "^2.5.0" + +"@smithy/eventstream-codec@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.0.6.tgz#1ea033e977b58a59ff4b00cf7c899d1ca0c7f81a" + integrity sha512-J9xL82mlYRUMXFnB9VaThXkD7z2JLr52FIVZMoQQ1dxZG5ub+NOGmzaTTZC/cMmKXI/nwCoFuwDWCTjwQhYhQA== + 
dependencies: + "@aws-crypto/crc32" "3.0.0" + "@smithy/types" "^2.3.0" + "@smithy/util-hex-encoding" "^2.0.0" + tslib "^2.5.0" + +"@smithy/fetch-http-handler@^2.0.5", "@smithy/fetch-http-handler@^2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-2.1.2.tgz#626a4202cc82f4d04fc80424917dd34e204ab8c7" + integrity sha512-3Gm3pQm4viUPU+e7KkRScS9t5phBxSNRS8rQSZ+HeCwK/busrX0/2HJZiwLvGblqPqi1laJB0lD18AdiOioJww== + dependencies: + "@smithy/protocol-http" "^3.0.2" + "@smithy/querystring-builder" "^2.0.6" + "@smithy/types" "^2.3.0" + "@smithy/util-base64" "^2.0.0" + tslib "^2.5.0" + +"@smithy/hash-node@^2.0.5": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-2.0.6.tgz#d13af02d3adb010e0c321035b610d53af2e652ef" + integrity sha512-xz7fzFxSzxohKGGyKPbLReRrY01JOZgRDHIXSks3PxQxG9c8PJMa5nUw0stH8UOySUgkofmMy0n7vTUsF5Mdqg== + dependencies: + "@smithy/types" "^2.3.0" + "@smithy/util-buffer-from" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + +"@smithy/invalid-dependency@^2.0.5": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-2.0.6.tgz#9230517c5a9f5bafee3bf89e9c548801a2681a99" + integrity sha512-L5MUyl9mzawIvBxr0Hg3J/Q5qZFXKcBgMk0PacfK3Mthp4WAR6h7iMxdSQ23Q7X/kxOrpZuoYEdh1BWLKbDc8Q== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/is-array-buffer@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" + integrity sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug== + dependencies: + tslib "^2.5.0" + +"@smithy/middleware-content-length@^2.0.5": + version "2.0.8" + resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-2.0.8.tgz#ee2c6614580fea918bae6411cfbcd48ee4af342b" + integrity 
sha512-fHJFsscHXrYhUSWMFJNXfsZW8KsyhWQfBgU3b0nvDfpm+NAeQLqKYNhywGrDwZQc1k+lt7Fw9faAquhNPxTZRA== + dependencies: + "@smithy/protocol-http" "^3.0.2" + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/middleware-endpoint@^2.0.5": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-2.0.6.tgz#b2350fcf63cd69a595b0f42e9718e1ac5144220e" + integrity sha512-MuSPPtEHFal/M77tR3ffLsdOfX29IZpA990nGuoPj5zQnAYrA4PYBGoqqrASQKm8Xb3C0NwuYzOATT7WX4f5Pg== + dependencies: + "@smithy/middleware-serde" "^2.0.6" + "@smithy/types" "^2.3.0" + "@smithy/url-parser" "^2.0.6" + "@smithy/util-middleware" "^2.0.0" + tslib "^2.5.0" + +"@smithy/middleware-retry@^2.0.5": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-2.0.9.tgz#4a8dc376b516fb10558da5b5be5e759aa3106140" + integrity sha512-gneEqWj4l/ZjHdZPk0BFMXoTalRArdQ8i579/KqJgBAc6Ux5vnR/SSppkMCkj2kOQYwdypvzSPeqEW3ZrvIg6g== + dependencies: + "@smithy/node-config-provider" "^2.0.9" + "@smithy/protocol-http" "^3.0.2" + "@smithy/service-error-classification" "^2.0.0" + "@smithy/types" "^2.3.0" + "@smithy/util-middleware" "^2.0.0" + "@smithy/util-retry" "^2.0.0" + tslib "^2.5.0" + uuid "^8.3.2" + +"@smithy/middleware-serde@^2.0.5", "@smithy/middleware-serde@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-2.0.6.tgz#cd2ed49fc22b998f3bbbd28b53a72a26d3dd08fb" + integrity sha512-8/GODBngYbrS28CMZtaHIL4R9rLNSQ/zgb+N1OAZ02NwBUawlnLDcatve9YRzhJC/IWz0/pt+WimJZaO1sGcig== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/middleware-stack@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-2.0.0.tgz#cd9f442c2788b1ef0ea6b32236d80c76b3c342e9" + integrity sha512-31XC1xNF65nlbc16yuh3wwTudmqs6qy4EseQUGF8A/p2m/5wdd/cnXJqpniy/XvXVwkHPz/GwV36HqzHtIKATQ== + dependencies: + tslib "^2.5.0" + 
+"@smithy/node-config-provider@^2.0.6", "@smithy/node-config-provider@^2.0.9": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-2.0.9.tgz#f2c3f8354e1260cde8c7ebda898f4531e06a4369" + integrity sha512-TlSPbCwtT/jgNnmPQqKuCR5CFN8UIrCCHRrgUfs3NqRMuaLLeP8TPe1fSKq2J8h1M/jd4BF853gneles0gWevg== + dependencies: + "@smithy/property-provider" "^2.0.7" + "@smithy/shared-ini-file-loader" "^2.0.8" + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/node-http-handler@^2.0.5", "@smithy/node-http-handler@^2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-2.1.2.tgz#704100dded1cb94db3f72fbdf841fc59614c4614" + integrity sha512-PdEEDCShuM8zxGoaRxmGB/1ikB8oeqz+ZAF9VIA8FCP3E59j8zDTF+wCELoWd1Y6gtxr+RcTAg5sA8nvn5qH/w== + dependencies: + "@smithy/abort-controller" "^2.0.6" + "@smithy/protocol-http" "^3.0.2" + "@smithy/querystring-builder" "^2.0.6" + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/property-provider@^2.0.0", "@smithy/property-provider@^2.0.7": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-2.0.7.tgz#4b7b780477909026d2fdaef29f0ce5c258f89681" + integrity sha512-XT8Tl7YNxM8tCtGqy7v7DSf6PxyXaPE9cdA/Yj4dEw2b05V3RrPqsP+t5XJiZu0yIsQ7pdeYZWv2sSEWVjNeAg== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/protocol-http@^2.0.5": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-2.0.5.tgz#ff7779fc8fcd3fe52e71fd07565b518f0937e8ba" + integrity sha512-d2hhHj34mA2V86doiDfrsy2fNTnUOowGaf9hKb0hIPHqvcnShU4/OSc4Uf1FwHkAdYF3cFXTrj5VGUYbEuvMdw== + dependencies: + "@smithy/types" "^2.2.2" + tslib "^2.5.0" + +"@smithy/protocol-http@^3.0.2": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-3.0.2.tgz#06e76dbac488e95f0b0fc2bc2820aa732aafef14" + integrity 
sha512-LUOWCPRihvJBkdSs+ivK9m1f/rMfF3n9Zpzg8qdry2eIG4HQqqLBMWQyF9bgk7JhsrrOa3//jJKhXzvL7wL5Xw== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/querystring-builder@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-2.0.6.tgz#6fd9f86dbfe27e0e71e5569768a2b5d599f44119" + integrity sha512-HnU00shCGoV8vKJZTiNBkNvR9NogU3NIUaVMAGJPSqNGJj3psWo+TUrC0BVCDcwiCljXwXCFGJqIcsWtClrktQ== + dependencies: + "@smithy/types" "^2.3.0" + "@smithy/util-uri-escape" "^2.0.0" + tslib "^2.5.0" + +"@smithy/querystring-parser@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-2.0.6.tgz#0b4fc7ec5fe5371113fcb1116216daf2d7e2c3ff" + integrity sha512-i4LKoXHP7pTFAPjLIJyQXYOhWokbcFha3WWsX74sAKmuluv0XM2cxONZoFxwEzmWhsNyM6buSwJSZXyPiec0AQ== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/service-error-classification@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-2.0.0.tgz#bbce07c9c529d9333d40db881fd4a1795dd84892" + integrity sha512-2z5Nafy1O0cTf69wKyNjGW/sNVMiqDnb4jgwfMG8ye8KnFJ5qmJpDccwIbJNhXIfbsxTg9SEec2oe1cexhMJvw== + +"@smithy/shared-ini-file-loader@^2.0.6", "@smithy/shared-ini-file-loader@^2.0.8": + version "2.0.8" + resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.0.8.tgz#1346eea02ad574a2520ce72ad0a6629a08691e97" + integrity sha512-4u+V+Dv7JGpJ0tppB5rxCem7WhdFux950z4cGPhV0kHTPkKe8DDgINzOlVa2RBu5dI33D02OBJcxFjhW4FPORg== + dependencies: + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/signature-v4@^2.0.0": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-2.0.6.tgz#bd0ec98149dfc97e91e227411091e371248309ae" + integrity sha512-4zNTi8w4sky07YKq7oYucZt4ogY00IEaS1NFDXxmCN5V/ywE0WiK+WMim+8wtYQmB0qy3oExZR4LoCAml6j/rA== + dependencies: + 
"@smithy/eventstream-codec" "^2.0.6" + "@smithy/is-array-buffer" "^2.0.0" + "@smithy/types" "^2.3.0" + "@smithy/util-hex-encoding" "^2.0.0" + "@smithy/util-middleware" "^2.0.0" + "@smithy/util-uri-escape" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + +"@smithy/smithy-client@^2.0.5": + version "2.1.3" + resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-2.1.3.tgz#8e1d37a5d7c9c6e463bc46be02194750a1dc7522" + integrity sha512-nSMMp2AKqcG/ruzCY01ogrMdbq/WS1cvGStTsw7yd6bTpp/bGtlOgXvy3h7e0zP7w2DH1AtvIwzYBD6ejZePsQ== + dependencies: + "@smithy/middleware-stack" "^2.0.0" + "@smithy/types" "^2.3.0" + "@smithy/util-stream" "^2.0.9" + tslib "^2.5.0" + +"@smithy/types@^2.2.2", "@smithy/types@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.3.0.tgz#a5c3869465f384fd4d811b2f1f37779e069ef06e" + integrity sha512-pJce3rd39MElkV57UTPAoSYAApjQLELUxjU5adHNLYk9gnPvyIGbJNJTZVVFu00BrgZH3W/cQe8QuFcknDyodQ== + dependencies: + tslib "^2.5.0" + +"@smithy/url-parser@^2.0.5", "@smithy/url-parser@^2.0.6": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-2.0.6.tgz#e926d1bcbe4bb0e244ed25ea58bc48ac5ae41436" + integrity sha512-9i6j5QW6bapHZ4rtkXOAm0hOUG1+5IVdVJXNSUTcNskwJchZH5IQuDNPCbgUi/u2P8EZazKt4wXT51QxOXCz1A== + dependencies: + "@smithy/querystring-parser" "^2.0.6" + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/util-base64@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-2.0.0.tgz#1beeabfb155471d1d41c8d0603be1351f883c444" + integrity sha512-Zb1E4xx+m5Lud8bbeYi5FkcMJMnn+1WUnJF3qD7rAdXpaL7UjkFQLdmW5fHadoKbdHpwH9vSR8EyTJFHJs++tA== + dependencies: + "@smithy/util-buffer-from" "^2.0.0" + tslib "^2.5.0" + +"@smithy/util-body-length-browser@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.0.tgz#5447853003b4c73da3bc5f3c5e82c21d592d1650" + 
integrity sha512-JdDuS4ircJt+FDnaQj88TzZY3+njZ6O+D3uakS32f2VNnDo3vyEuNdBOh/oFd8Df1zSZOuH1HEChk2AOYDezZg== + dependencies: + tslib "^2.5.0" + +"@smithy/util-body-length-node@^2.1.0": + version "2.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz#313a5f7c5017947baf5fa018bfc22628904bbcfa" + integrity sha512-/li0/kj/y3fQ3vyzn36NTLGmUwAICb7Jbe/CsWCktW363gh1MOcpEcSO3mJ344Gv2dqz8YJCLQpb6hju/0qOWw== + dependencies: + tslib "^2.5.0" + +"@smithy/util-buffer-from@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" + integrity sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw== + dependencies: + "@smithy/is-array-buffer" "^2.0.0" + tslib "^2.5.0" + +"@smithy/util-config-provider@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz#4dd6a793605559d94267312fd06d0f58784b4c38" + integrity sha512-xCQ6UapcIWKxXHEU4Mcs2s7LcFQRiU3XEluM2WcCjjBtQkUN71Tb+ydGmJFPxMUrW/GWMgQEEGipLym4XG0jZg== + dependencies: + tslib "^2.5.0" + +"@smithy/util-defaults-mode-browser@^2.0.6": + version "2.0.7" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.7.tgz#322822e064450ec59e3ae288f3f2eed0a5acbfb1" + integrity sha512-s1caKxC7Y87Q72Goll//clZs2WNBfG9WtFDWVRS+Qgk147YPCOUYtkpuD0XZAh/vbayObFz5tQ1fiX4G19HSCA== + dependencies: + "@smithy/property-provider" "^2.0.7" + "@smithy/types" "^2.3.0" + bowser "^2.11.0" + tslib "^2.5.0" + +"@smithy/util-defaults-mode-node@^2.0.6": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.9.tgz#0d3acadbbb54c0c401089fc22576aafd52d130e9" + integrity sha512-HlV4iNL3/PgPpmDGs0+XrAKtwFQ8rOs5P2y5Dye8dUYaJauadlzHRrNKk7wH2aBYswvT2HM+PIgXamvrE7xbcw== + dependencies: + 
"@smithy/config-resolver" "^2.0.7" + "@smithy/credential-provider-imds" "^2.0.9" + "@smithy/node-config-provider" "^2.0.9" + "@smithy/property-provider" "^2.0.7" + "@smithy/types" "^2.3.0" + tslib "^2.5.0" + +"@smithy/util-hex-encoding@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz#0aa3515acd2b005c6d55675e377080a7c513b59e" + integrity sha512-c5xY+NUnFqG6d7HFh1IFfrm3mGl29lC+vF+geHv4ToiuJCBmIfzx6IeHLg+OgRdPFKDXIw6pvi+p3CsscaMcMA== + dependencies: + tslib "^2.5.0" + +"@smithy/util-middleware@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-2.0.0.tgz#706681d4a1686544a2275f68266304233f372c99" + integrity sha512-eCWX4ECuDHn1wuyyDdGdUWnT4OGyIzV0LN1xRttBFMPI9Ff/4heSHVxneyiMtOB//zpXWCha1/SWHJOZstG7kA== + dependencies: + tslib "^2.5.0" + +"@smithy/util-retry@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-2.0.0.tgz#7ac5d5f12383a9d9b2a43f9ff25f3866c8727c24" + integrity sha512-/dvJ8afrElasuiiIttRJeoS2sy8YXpksQwiM/TcepqdRVp7u4ejd9C4IQURHNjlfPUT7Y6lCDSa2zQJbdHhVTg== + dependencies: + "@smithy/service-error-classification" "^2.0.0" + tslib "^2.5.0" + +"@smithy/util-stream@^2.0.9": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-2.0.9.tgz#50ff280b754a1d11e2b16ffe9fc87f6736a9c0b7" + integrity sha512-Fn2/3IMwqu0l2hOC7K3bbtSqFEJ6nOzMLoPVIhuH84yw/95itNkFBwVbIIiAfDaout0ZfZ26+5ch86E2q3avww== + dependencies: + "@smithy/fetch-http-handler" "^2.1.2" + "@smithy/node-http-handler" "^2.1.2" + "@smithy/types" "^2.3.0" + "@smithy/util-base64" "^2.0.0" + "@smithy/util-buffer-from" "^2.0.0" + "@smithy/util-hex-encoding" "^2.0.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.5.0" + +"@smithy/util-uri-escape@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-2.0.0.tgz#19955b1a0f517a87ae77ac729e0e411963dfda95" + 
integrity sha512-ebkxsqinSdEooQduuk9CbKcI+wheijxEb3utGXkCoYQkJnwTnLbH1JXGimJtUkQwNQbsbuYwG2+aFVyZf5TLaw== + dependencies: + tslib "^2.5.0" + +"@smithy/util-utf8@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.0.tgz#b4da87566ea7757435e153799df9da717262ad42" + integrity sha512-rctU1VkziY84n5OXe3bPNpKR001ZCME2JCaBBFgtiM2hfKbHFudc/BkMuPab8hRbLd0j3vbnBTTZ1igBf0wgiQ== + dependencies: + "@smithy/util-buffer-from" "^2.0.0" + tslib "^2.5.0" + +bowser@^2.11.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + +exponential-backoff@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/exponential-backoff/-/exponential-backoff-3.1.1.tgz#64ac7526fe341ab18a39016cd22c787d01e00bf6" + integrity sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw== + +fast-xml-parser@4.2.5: + version "4.2.5" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f" + integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== + dependencies: + strnum "^1.0.5" + +strnum@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" + integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== + +tslib@^1.11.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.3.1, tslib@^2.5.0: + version "2.6.2" + resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + +uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== diff --git a/packages/extension-driver-snowflake/package.json b/packages/extension-driver-snowflake/package.json index 33340ecf..979f1d9c 100644 --- a/packages/extension-driver-snowflake/package.json +++ b/packages/extension-driver-snowflake/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-driver-snowflake", "description": "Snowflake driver for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -30,6 +30,6 @@ "@types/snowflake-sdk": "^1.6.8" }, "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-huggingface/package.json b/packages/extension-huggingface/package.json index 45e6cf9d..e639bca5 100644 --- a/packages/extension-huggingface/package.json +++ b/packages/extension-huggingface/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-huggingface", "description": "Hugging Face feature for VulcanSQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/packages/extension-huggingface/test/tableQuestionAnswering.spec.ts b/packages/extension-huggingface/test/tableQuestionAnswering.spec.ts index cdbad3dd..220463d9 100644 --- a/packages/extension-huggingface/test/tableQuestionAnswering.spec.ts +++ 
b/packages/extension-huggingface/test/tableQuestionAnswering.spec.ts @@ -149,7 +149,7 @@ describe('Test "huggingface_table_question_answering" filter', () => { 50 * 1000 ); - it( + it.skip( 'Should get correct expected value when provided "neulab/omnitab-large-1024shot-finetuned-wtq-1024shot" model and wait it for model', async () => { const expected = JSON.stringify({ diff --git a/packages/extension-store-canner/README.md b/packages/extension-store-canner/README.md index 267455ce..74e4106d 100644 --- a/packages/extension-store-canner/README.md +++ b/packages/extension-store-canner/README.md @@ -63,6 +63,8 @@ export PROFILE_CANNER_DRIVER_PASSWORD= export PROFILE_CANNER_DRIVER_HOST= # Canner enterprise driver port, the default is 7432 export PROFILE_CANNER_DRIVER_PORT= +# Canner enterprise root user id +export PROFILE_CANNER_DRIVER_ROOT_USER_ID= ``` ### Connect Canner Enterprise used storage. diff --git a/packages/extension-store-canner/package.json b/packages/extension-store-canner/package.json index 8d3692a2..0d397f8f 100644 --- a/packages/extension-store-canner/package.json +++ b/packages/extension-store-canner/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/extension-store-canner", "description": "Canner persistence store for Vulcan SQL", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -24,7 +24,7 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" }, "dependencies": { "@canner/canner-storage": "^0.0.7" diff --git a/packages/extension-store-canner/src/lib/canner/profileReader.ts b/packages/extension-store-canner/src/lib/canner/profileReader.ts index 29ca4367..f6dfc6b4 100644 --- a/packages/extension-store-canner/src/lib/canner/profileReader.ts +++ b/packages/extension-store-canner/src/lib/canner/profileReader.ts @@ -44,6 +44,7 @@ export class CannerProfileReader extends ProfileReader { // generate profiles from the indicator files of 
each workspaces const { user, password, host, port, max } = this.envConfig.profile; + const { rootUserId } = this.envConfig.properties; if (!user || !password || !host) throw new ConfigurationError( 'Canner profile reader needs username, password, host properties.' @@ -67,6 +68,9 @@ export class CannerProfileReader extends ProfileReader { max, }, allow: '*', + properties: { + rootUserId, + }, } as Profile>; this.logger.debug(`created "${profile.name}".`); return profile; diff --git a/packages/extension-store-canner/src/lib/config.ts b/packages/extension-store-canner/src/lib/config.ts index 3a3646cd..1276a46a 100644 --- a/packages/extension-store-canner/src/lib/config.ts +++ b/packages/extension-store-canner/src/lib/config.ts @@ -1,8 +1,13 @@ export interface CannerStoreConfig { storage: StorageServiceOptions; + properties: CannnerDriverProfileProperties; profile: CannerDriverProfileOptions; } +export interface CannnerDriverProfileProperties { + rootUserId?: string; +} + export interface CannerDriverProfileOptions { // user to connect to canner enterprise. 
Default is canner user?: string; @@ -64,6 +69,9 @@ export const getEnvConfig = (): CannerStoreConfig => { max: Number(process.env['PROFILE_CANNER_DRIVER_CONNECTION_POOL_MAX']) || 10, }, + properties: { + rootUserId: process.env['PROFILE_CANNER_DRIVER_ROOT_USER_ID'], + }, storage: { provider: process.env['STORAGE_PROVIDER'], // MINIO Provider options diff --git a/packages/extension-store-canner/src/test/cannerProfileReader.spec.ts b/packages/extension-store-canner/src/test/cannerProfileReader.spec.ts index 68b7c1f3..223cf94e 100644 --- a/packages/extension-store-canner/src/test/cannerProfileReader.spec.ts +++ b/packages/extension-store-canner/src/test/cannerProfileReader.spec.ts @@ -98,6 +98,7 @@ describe('Test CannerProfileReader', () => { sinon.default.stub(configModule, 'getEnvConfig').returns({ storage: sinon.stubInterface(), + properties: {}, profile: { host, password, @@ -119,6 +120,7 @@ describe('Test CannerProfileReader', () => { user: 'canner', password: 'secret-password', port: 7432, + max: 10, }; const expected = [ { @@ -128,6 +130,9 @@ describe('Test CannerProfileReader', () => { ...connectionInfo, database: fakeWorkspaces.ws1.sqlName, }, + properties: { + rootUserId: 'fakeRootUserId', + }, allow: '*', }, { @@ -137,6 +142,9 @@ describe('Test CannerProfileReader', () => { ...connectionInfo, database: fakeWorkspaces.ws2.sqlName, }, + properties: { + rootUserId: 'fakeRootUserId', + }, allow: '*', }, ] as Profile>[]; @@ -164,6 +172,9 @@ describe('Test CannerProfileReader', () => { sinon.default.stub(configModule, 'getEnvConfig').returns({ storage: sinon.stubInterface(), + properties: { + rootUserId: 'fakeRootUserId', + }, profile: { ...connectionInfo, }, diff --git a/packages/serve/package.json b/packages/serve/package.json index 2d214163..6a07abd8 100644 --- a/packages/serve/package.json +++ b/packages/serve/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/serve", "description": "VulcanSQL package for serving projects", - "version": "0.9.1", + "version": 
"0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -22,7 +22,7 @@ }, "license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" }, "dependencies": { "redoc": "2.0.0-rc.76" diff --git a/packages/serve/src/lib/middleware/activityLogMiddleware.ts b/packages/serve/src/lib/middleware/activityLogMiddleware.ts new file mode 100644 index 00000000..fc7e97f6 --- /dev/null +++ b/packages/serve/src/lib/middleware/activityLogMiddleware.ts @@ -0,0 +1,70 @@ +import { + TYPES as CORE_TYPES, + BaseActivityLogger, + VulcanInternalExtension, + IActivityLoggerOptions, + getLogger, + ActivityLogType, + ActivityLogContentOptions, +} from '@vulcan-sql/core'; +import { Next, KoaContext, BuiltInMiddleware } from '@vulcan-sql/serve/models'; +import { inject, multiInject } from 'inversify'; +import moment = require('moment'); + +const logger = getLogger({ scopeName: 'SERVE' }); + +@VulcanInternalExtension('activity-log') +export class ActivityLogMiddleware extends BuiltInMiddleware { + private activityLoggers: BaseActivityLogger[]; + private activityLoggerMap: Record> = {}; + constructor( + @inject(CORE_TYPES.ExtensionConfig) config: any, + @inject(CORE_TYPES.ExtensionName) name: string, + @multiInject(CORE_TYPES.Extension_ActivityLogger) + activityLoggers: BaseActivityLogger[] + ) { + super(config, name); + this.activityLoggers = activityLoggers; + } + public override async onActivate(): Promise { + for (const logger of this.activityLoggers) { + if (logger.isEnabled()) { + const id = logger.getExtensionId(); + this.activityLoggerMap[id!] 
= logger; + } + } + } + public async handle(context: KoaContext, next: Next) { + if (!this.enabled) return next(); + const logTime = moment.utc().format('YYYY-MM-DD HH:mm:ss'); + const startTime = Date.now(); + await next(); + const endTime = Date.now(); + const duration = endTime - startTime; + const body = context.response.body as any; + const error = body?.message; + const user = context.state.user; + const status = context.response.status || context.status; + const activityLog = { + activityLogType: ActivityLogType.API_REQUEST, + isSuccess: status.toString().startsWith('2') ? true : false, + logTime, + duration, + method: context.request.method, + url: context.request.originalUrl, + href: context.request.href, + ip: context.request.ip, + header: context.request.header, + params: context.params, + query: context.request.query, + status, + error, + user, + } as ActivityLogContentOptions; + for (const activityLogger of Object.values(this.activityLoggerMap)) { + activityLogger.log(activityLog).catch((e) => { + logger.debug(`Error when logging activity: ${e}`); + }); + } + } +} diff --git a/packages/serve/src/lib/middleware/index.ts b/packages/serve/src/lib/middleware/index.ts index 97671fb5..bb8f5b45 100644 --- a/packages/serve/src/lib/middleware/index.ts +++ b/packages/serve/src/lib/middleware/index.ts @@ -7,6 +7,7 @@ export * from './response-format'; export * from './enforceHttpsMiddleware'; export * from './docRouterMiddleware'; export * from './errorHandlerMIddleware'; +export * from './activityLogMiddleware'; import { CorsMiddleware } from './corsMiddleware'; import { @@ -23,10 +24,12 @@ import { ClassType, ExtensionBase } from '@vulcan-sql/core'; import { DocRouterMiddleware } from './docRouterMiddleware'; import { ErrorHandlerMiddleware } from './errorHandlerMIddleware'; import { CatalogRouterMiddleware } from './catalogRouterMiddleware'; +import { ActivityLogMiddleware } from './activityLogMiddleware'; // The array is the middleware running order export 
const BuiltInRouteMiddlewares: ClassType[] = [ RequestIdMiddleware, + ActivityLogMiddleware, ErrorHandlerMiddleware, AccessLogMiddleware, CorsMiddleware, diff --git a/packages/serve/src/lib/route/route-component/baseRoute.ts b/packages/serve/src/lib/route/route-component/baseRoute.ts index 92c03c96..c7e4fa2c 100644 --- a/packages/serve/src/lib/route/route-component/baseRoute.ts +++ b/packages/serve/src/lib/route/route-component/baseRoute.ts @@ -9,7 +9,7 @@ import { IRequestValidator } from './requestValidator'; import { IRequestTransformer, RequestParameters } from './requestTransformer'; import { IPaginationTransformer } from './paginationTransformer'; import { Evaluator } from '@vulcan-sql/serve/evaluator'; -import { KoaRequest } from '@vulcan-sql/core'; +import { KoaRequest, IncomingHttpHeaders } from '@vulcan-sql/core'; export interface TransformedRequest { reqParams: RequestParameters; @@ -61,7 +61,8 @@ export abstract class BaseRoute implements IRoute { protected async handle( user: AuthUserInfo, transformed: TransformedRequest, - req: KoaRequest + req: KoaRequest, + headers: IncomingHttpHeaders ) { const { reqParams, pagination } = transformed; // could template name or template path, use for template engine @@ -81,6 +82,7 @@ export abstract class BaseRoute implements IRoute { user, req, profileName: profile, + headers: headers, }, pagination ); diff --git a/packages/serve/src/lib/route/route-component/graphQLRoute.ts b/packages/serve/src/lib/route/route-component/graphQLRoute.ts index 9084780c..9f8725a8 100644 --- a/packages/serve/src/lib/route/route-component/graphQLRoute.ts +++ b/packages/serve/src/lib/route/route-component/graphQLRoute.ts @@ -20,7 +20,8 @@ export class GraphQLRoute extends BaseRoute { const transformed = await this.prepare(ctx); const authUser = ctx.state.user; const req = ctx.request as KoaRequest; - await this.handle(authUser, transformed, req); + const headers = ctx.headers; + await this.handle(authUser, transformed, req, headers); // 
TODO: get template engine handled result and return response by checking API schema return transformed; } diff --git a/packages/serve/src/lib/route/route-component/restfulRoute.ts b/packages/serve/src/lib/route/route-component/restfulRoute.ts index 931d4528..32876671 100644 --- a/packages/serve/src/lib/route/route-component/restfulRoute.ts +++ b/packages/serve/src/lib/route/route-component/restfulRoute.ts @@ -1,6 +1,6 @@ import { BaseRoute, RouteOptions } from './baseRoute'; import { KoaContext } from '@vulcan-sql/serve/models'; -import { KoaRequest } from '@vulcan-sql/core'; +import { KoaRequest, IncomingHttpHeaders } from '@vulcan-sql/core'; export class RestfulRoute extends BaseRoute { public readonly urlPath: string; @@ -15,7 +15,8 @@ export class RestfulRoute extends BaseRoute { const transformed = await this.prepare(ctx); const authUser = ctx.state.user; const req = ctx.request as KoaRequest; - const result = await this.handle(authUser, transformed, req); + const headers = ctx.headers as IncomingHttpHeaders; + const result = await this.handle(authUser, transformed, req, headers); ctx.response.body = { data: result.getData(), columns: result.getColumns(), diff --git a/packages/serve/test/middlewares/built-in-middlewares/activityLogMiddleware.spec.ts b/packages/serve/test/middlewares/built-in-middlewares/activityLogMiddleware.spec.ts new file mode 100644 index 00000000..81749c07 --- /dev/null +++ b/packages/serve/test/middlewares/built-in-middlewares/activityLogMiddleware.spec.ts @@ -0,0 +1,225 @@ +import faker from '@faker-js/faker'; +import * as sinon from 'ts-sinon'; +import { Request, Response } from 'koa'; +import { IncomingHttpHeaders } from 'http'; +import { ParsedUrlQuery } from 'querystring'; +import { KoaContext } from '@vulcan-sql/serve/models'; + +import { ActivityLogType, HttpLogger } from '@vulcan-sql/core'; +import { ActivityLogMiddleware } from '@vulcan-sql/serve/middleware/activityLogMiddleware'; + 
+jest.mock('../../../../core/src/lib/loggers/httpLogger', () => { + const originalModule = jest.requireActual( + '../../../../core/src/lib/loggers/httpLogger' + ); + return { + ...originalModule, + HttpLogger: jest.fn().mockImplementation(() => { + return { + getExtensionId: jest.fn().mockReturnValue('http-logger'), + isEnabled: jest.fn().mockReturnValue(true), + log: jest.fn().mockResolvedValue(true), // Spy on the add method + }; + }), + }; +}); +const extensionConfig = { + enabled: true, + options: { 'http-logger': { connection: { host: 'localhost' } } }, +}; +const mockLogger = new HttpLogger(extensionConfig, 'http-logger'); + +describe('Test activity log middlewares', () => { + afterEach(() => { + sinon.default.restore(); + jest.clearAllMocks(); + }); + it('Should log with correct info when response is status 200', async () => { + // Arrange + const ctx: KoaContext = { + ...sinon.stubInterface(), + + params: { + uuid: faker.datatype.uuid(), + }, + state: { + user: { + name: faker.name.firstName(), + attr: { + email: faker.internet.email(), + id: faker.datatype.uuid(), + }, + }, + }, + request: { + ...sinon.stubInterface(), + ip: faker.internet.ip(), + method: faker.internet.httpMethod(), + originalUrl: faker.internet.url(), + header: { + ...sinon.stubInterface(), + 'X-Agent': 'test-normal-client', + }, + query: { + ...sinon.stubInterface(), + sortby: 'name', + }, + }, + response: { + ...sinon.stubInterface(), + status: 200, + length: faker.datatype.number({ min: 100, max: 100000 }), + body: { + result: 'OK', + }, + }, + }; + + const expected = { + isSuccess: true, + activityLogType: ActivityLogType.API_REQUEST, + method: ctx.request.method, + url: ctx.request.originalUrl, + href: ctx.request.href, + status: ctx.response.status, + headers: ctx.request.headers, + error: undefined, + ip: ctx.request.ip, + params: ctx.params, + user: ctx.state.user, + }; + // Act + const middleware = new ActivityLogMiddleware(extensionConfig, '', [ + mockLogger, + ]); + await 
middleware.activate(); + await middleware.handle(ctx, async () => Promise.resolve()); + + // Assert + const logMock = mockLogger.log as jest.Mock; + const actual = logMock.mock.calls[0]; + expect(actual[0].isSuccess).toEqual(expected.isSuccess); + expect(actual[0].activityLogType).toEqual(expected.activityLogType); + expect(actual[0].method).toEqual(expected.method); + expect(actual[0].url).toEqual(expected.url); + expect(actual[0].href).toEqual(expected.href); + expect(actual[0].status).toEqual(expected.status); + expect(actual[0].headers).toEqual(expected.headers); + expect(actual[0].ip).toEqual(expected.ip); + expect(actual[0].params).toEqual(expected.params); + expect(actual[0].error).toEqual(expected.error); + expect(actual[0].user).toEqual(expected.user); + }); + it('Should log with correct info when response is not status 200', async () => { + // Arrange + const ctx: KoaContext = { + ...sinon.stubInterface(), + + params: { + uuid: faker.datatype.uuid(), + }, + state: { + user: { + name: faker.name.firstName(), + attr: { + email: faker.internet.email(), + id: faker.datatype.uuid(), + }, + }, + }, + request: { + ...sinon.stubInterface(), + ip: faker.internet.ip(), + method: faker.internet.httpMethod(), + originalUrl: faker.internet.url(), + header: { + ...sinon.stubInterface(), + 'X-Agent': 'test-normal-client', + }, + query: { + ...sinon.stubInterface(), + sortby: 'name', + }, + }, + response: { + ...sinon.stubInterface(), + status: 401, + body: { + message: 'Unauthorized', + result: 'OK', + }, + }, + }; + const body = ctx.response.body as any; + const expected = { + isSucess: false, + activityLogType: ActivityLogType.API_REQUEST, + method: ctx.request.method, + url: ctx.request.originalUrl, + href: ctx.request.href, + status: ctx.response.status, + headers: ctx.request.headers, + error: body.message, + ip: ctx.request.ip, + params: ctx.params, + user: ctx.state.user, + }; + // Act + const middleware = new ActivityLogMiddleware(extensionConfig, '', [ + 
mockLogger, + ]); + await middleware.activate(); + await middleware.handle(ctx, async () => Promise.resolve()); + + // Assert + const logMock = mockLogger.log as jest.Mock; + const actual = logMock.mock.calls[0]; + expect(actual[0].isSuccess).toEqual(expected.isSucess); + expect(actual[0].activityLogType).toEqual(expected.activityLogType); + expect(actual[0].method).toEqual(expected.method); + expect(actual[0].url).toEqual(expected.url); + expect(actual[0].href).toEqual(expected.href); + expect(actual[0].status).toEqual(expected.status); + expect(actual[0].headers).toEqual(expected.headers); + expect(actual[0].ip).toEqual(expected.ip); + expect(actual[0].params).toEqual(expected.params); + expect(actual[0].error).toEqual(expected.error); + expect(actual[0].user).toEqual(expected.user); + }); + // should not log when logger is disabled + it('should not log when logger is disabled', async () => { + // Arrange + const ctx: KoaContext = { + ...sinon.stubInterface(), + params: { + uuid: faker.datatype.uuid(), + }, + request: { + ...sinon.stubInterface(), + query: { + ...sinon.stubInterface(), + sortby: 'name', + }, + }, + response: { + ...sinon.stubInterface(), + status: 200, + body: { + result: 'OK', + }, + }, + }; + // Act + const middleware = new ActivityLogMiddleware( + { ...extensionConfig, enabled: false }, + '', + [mockLogger] + ); + await middleware.activate(); + await middleware.handle(ctx, async () => Promise.resolve()); + + // Assert + const logMock = mockLogger.log as jest.Mock; + expect(logMock).not.toHaveBeenCalled(); + }); +}); diff --git a/packages/test-utility/package.json b/packages/test-utility/package.json index 3c99272b..e5d860d4 100644 --- a/packages/test-utility/package.json +++ b/packages/test-utility/package.json @@ -1,7 +1,7 @@ { "name": "@vulcan-sql/test-utility", "description": "Vulcan package for extension testing", - "version": "0.9.1", + "version": "0.10.0", "type": "commonjs", "publishConfig": { "access": "public" @@ -23,6 +23,6 @@ }, 
"license": "Apache-2.0", "peerDependencies": { - "@vulcan-sql/core": "~0.9.1-0" + "@vulcan-sql/core": "~0.10.0-0" } } \ No newline at end of file diff --git a/tsconfig.base.json b/tsconfig.base.json index fc48bd1a..749ae77b 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -70,6 +70,9 @@ "@vulcan-sql/core/validators/built-in-validators/*": [ "packages/core/src/lib/validators/built-in-validators/*" ], + "@vulcan-sql/extension-api-caller": [ + "packages/extension-api-caller/src/index.ts" + ], "@vulcan-sql/extension-authenticator-canner": [ "packages/extension-authenticator-canner/src/index.ts" ], @@ -95,6 +98,9 @@ "@vulcan-sql/extension-driver-pg": [ "packages/extension-driver-pg/src/index.ts" ], + "@vulcan-sql/extension-driver-redshift": [ + "packages/extension-driver-redshift/src/index.ts" + ], "@vulcan-sql/extension-driver-snowflake": [ "packages/extension-driver-snowflake/src/index.ts" ], @@ -104,9 +110,6 @@ "@vulcan-sql/extension-store-canner": [ "packages/extension-store-canner/src/index.ts" ], - "@vulcan-sql/extension-api-caller": [ - "packages/extension-api-caller/src/index.ts" - ], "@vulcan-sql/integration-testing": [ "packages/integration-testing/src/index" ], diff --git a/workspace.json b/workspace.json index 0b19fa81..973517f1 100644 --- a/workspace.json +++ b/workspace.json @@ -15,6 +15,7 @@ "extension-driver-duckdb": "packages/extension-driver-duckdb", "extension-driver-ksqldb": "packages/extension-driver-ksqldb", "extension-driver-pg": "packages/extension-driver-pg", + "extension-driver-redshift": "packages/extension-driver-redshift", "extension-driver-snowflake": "packages/extension-driver-snowflake", "extension-huggingface": "packages/extension-huggingface", "extension-store-canner": "packages/extension-store-canner",