From 029395166753641ebd6fb61a68f84006c8c15d9d Mon Sep 17 00:00:00 2001 From: achettyiitr Date: Sun, 22 Dec 2024 22:38:57 +0530 Subject: [PATCH] exp: rs append directly to the main table --- warehouse/integrations/redshift/redshift.go | 131 +++- .../integrations/redshift/redshift_test.go | 652 +++++++++--------- warehouse/integrations/testhelper/eventmap.go | 6 +- 3 files changed, 427 insertions(+), 362 deletions(-) diff --git a/warehouse/integrations/redshift/redshift.go b/warehouse/integrations/redshift/redshift.go index 1ea054046d..28298f1505 100644 --- a/warehouse/integrations/redshift/redshift.go +++ b/warehouse/integrations/redshift/redshift.go @@ -427,13 +427,13 @@ func (rs *Redshift) generateManifest(ctx context.Context, tableName string) (str } func (rs *Redshift) dropStagingTables(ctx context.Context, stagingTableNames []string) { - for _, stagingTableName := range stagingTableNames { - rs.logger.Infof("WH: dropping table %+v\n", stagingTableName) - _, err := rs.DB.ExecContext(ctx, fmt.Sprintf(`DROP TABLE "%[1]s"."%[2]s"`, rs.Namespace, stagingTableName)) - if err != nil { - rs.logger.Errorf("WH: RS: Error dropping staging tables in redshift: %v", err) - } - } + //for _, stagingTableName := range stagingTableNames { + // rs.logger.Infof("WH: dropping table %+v\n", stagingTableName) + // _, err := rs.DB.ExecContext(ctx, fmt.Sprintf(`DROP TABLE "%[1]s"."%[2]s"`, rs.Namespace, stagingTableName)) + // if err != nil { + // rs.logger.Errorf("WH: RS: Error dropping staging tables in redshift: %v", err) + // } + //} } func (rs *Redshift) loadTable( @@ -461,6 +461,30 @@ func (rs *Redshift) loadTable( } log.Debugw("generated manifest", "manifestLocation", manifestLocation) + strKeys := warehouseutils.GetColumnsFromTableSchema(tableSchemaInUpload) + sort.Strings(strKeys) + + if !rs.ShouldMerge(tableName) { + log.Infow("copying data into main table") + + result, err := rs.copyInto( + ctx, rs.DB, tableName, + manifestLocation, strKeys, + ) + if err != nil { + return nil, "", fmt.Errorf("loading data into staging table: %w", err) + } + + rowsInserted, err := result.RowsAffected() + if err != nil { + return nil, "", fmt.Errorf("getting rows affected: %w", err) + } + + return &types.LoadTableStats{ + RowsInserted: rowsInserted, + }, "", nil + } + stagingTableName := warehouseutils.StagingTableName( provider, tableName, @@ -493,9 +517,6 @@ func (rs *Redshift) loadTable( } }() - strKeys := warehouseutils.GetColumnsFromTableSchema(tableSchemaInUpload) - sort.Strings(strKeys) - log.Infow("loading data into staging table") err = rs.copyIntoLoadTable( ctx, txn, stagingTableName, @@ -509,15 +530,14 @@ func (rs *Redshift) loadTable( rowsDeletedResult, rowsInsertedResult sql.Result rowsDeleted, rowsInserted int64 ) - if rs.ShouldMerge(tableName) { - log.Infow("deleting from load table") - rowsDeletedResult, err = rs.deleteFromLoadTable( - ctx, txn, tableName, - stagingTableName, tableSchemaAfterUpload, - ) - if err != nil { - return nil, "", fmt.Errorf("delete from load table: %w", err) - } + + log.Infow("deleting from load table") + rowsDeletedResult, err = rs.deleteFromLoadTable( + ctx, txn, tableName, + stagingTableName, tableSchemaAfterUpload, + ) + if err != nil { + return nil, "", fmt.Errorf("delete from load table: %w", err) } log.Infow("inserting into load table") @@ -555,6 +575,67 @@ func (rs *Redshift) loadTable( }, stagingTableName, nil } +func (rs *Redshift) copyInto(ctx context.Context, db *sqlmiddleware.DB, stagingTableName, manifestLocation string, strKeys []string) (sql.Result, error) { + tempAccessKeyId, tempSecretAccessKey, token, err := warehouseutils.GetTemporaryS3Cred(&rs.Warehouse.Destination) + if err != nil { + return nil, fmt.Errorf("getting temporary s3 credentials: %w", err) + } + + manifestS3Location, region := warehouseutils.GetS3Location(manifestLocation) + if region == "" { + region = "us-east-1" + } + + sortedColumnNames := warehouseutils.JoinWithFormatting(strKeys, func(_ int, name string) string { + return fmt.Sprintf(`%q`, name) + }, ",") + + var copyStmt string + if rs.Uploader.GetLoadFileType() == warehouseutils.LoadFileTypeParquet { + copyStmt = fmt.Sprintf( + `COPY %s + FROM '%s' + ACCESS_KEY_ID '%s' + SECRET_ACCESS_KEY '%s' + SESSION_TOKEN '%s' + MANIFEST FORMAT PARQUET;`, + fmt.Sprintf(`%q.%q`, rs.Namespace, stagingTableName), + manifestS3Location, + tempAccessKeyId, + tempSecretAccessKey, + token, + ) + } else { + copyStmt = fmt.Sprintf( + `COPY %s(%s) + FROM '%s' + CSV GZIP + ACCESS_KEY_ID '%s' + SECRET_ACCESS_KEY '%s' + SESSION_TOKEN '%s' + REGION '%s' + DATEFORMAT 'auto' + TIMEFORMAT 'auto' + MANIFEST TRUNCATECOLUMNS EMPTYASNULL BLANKSASNULL FILLRECORD ACCEPTANYDATE TRIMBLANKS ACCEPTINVCHARS + COMPUPDATE OFF + STATUPDATE OFF;`, + fmt.Sprintf(`%q.%q`, rs.Namespace, stagingTableName), + sortedColumnNames, + manifestS3Location, + tempAccessKeyId, + tempSecretAccessKey, + token, + region, + ) + } + + result, err := db.ExecContext(ctx, copyStmt) + if err != nil { + return nil, fmt.Errorf("running copy command: %w", normalizeError(err)) + } + return result, nil +} + func (rs *Redshift) copyIntoLoadTable( ctx context.Context, txn *sqlmiddleware.Tx, @@ -1288,12 +1369,12 @@ func (rs *Redshift) TestConnection(ctx context.Context, _ model.Warehouse) error func (rs *Redshift) Cleanup(ctx context.Context) { if rs.DB != nil { - err := rs.dropDanglingStagingTables(ctx) - if err != nil { - rs.logger.Errorw("Error dropping dangling staging tables", - logfield.Error, err.Error(), - ) - } + //err := rs.dropDanglingStagingTables(ctx) + //if err != nil { + // rs.logger.Errorw("Error dropping dangling staging tables", + // logfield.Error, err.Error(), + // ) + //} _ = rs.DB.Close() } } diff --git a/warehouse/integrations/redshift/redshift_test.go b/warehouse/integrations/redshift/redshift_test.go index 63c3ab90a4..9b95a87bd2 100644 --- a/warehouse/integrations/redshift/redshift_test.go +++ b/warehouse/integrations/redshift/redshift_test.go @@ -120,10 +120,10 @@ func TestIntegration(t *testing.T) { } } - serverlessCredentials, err := getRedshiftTestCredentials(testServerlessKey) - require.NoError(t, err) - serverlessIAMCredentials, err := getRedshiftTestCredentials(testServerlessIAMKey) - require.NoError(t, err) + // serverlessCredentials, err := getRedshiftTestCredentials(testServerlessKey) + // require.NoError(t, err) + // serverlessIAMCredentials, err := getRedshiftTestCredentials(testServerlessIAMKey) + // require.NoError(t, err) httpPort, err := kithelper.GetFreePort() require.NoError(t, err) @@ -140,17 +140,16 @@ func TestIntegration(t *testing.T) { expectedUploadJobSchema := model.Schema{ "screens": {"context_source_id": "character varying", "user_id": "character varying", "sent_at": "timestamp without time zone", "context_request_ip": "character varying", "original_timestamp": "timestamp without time zone", "url": "character varying", "context_source_type": "character varying", "_between": "character varying", "timestamp": "timestamp without time zone", "context_ip": "character varying", "context_destination_type": "character varying", "received_at": "timestamp without time zone", "title": "character varying", "uuid_ts": "timestamp without time zone", "context_destination_id": "character varying", "name": "character varying", "id": "character varying", "_as": "character varying"}, "identifies": {"context_ip": "character varying", "context_destination_id": "character varying", "email": "character varying", "context_request_ip": "character varying", "sent_at": "timestamp without time zone", "uuid_ts": "timestamp without time zone", "_as": "character varying", "logins": "bigint", "context_source_type": "character varying", "context_traits_logins": "bigint", "name": "character varying", "context_destination_type": "character varying", "_between": "character varying", "id": "character varying", "timestamp": "timestamp without time zone", "received_at": "timestamp without time zone", "user_id": "character varying", "context_traits_email": "character varying", "context_traits_as": "character varying", "context_traits_name": "character varying", "original_timestamp": "timestamp without time zone", "context_traits_between": "character varying", "context_source_id": "character varying"}, - "users": {"context_traits_name": "character varying", "context_traits_between": "character varying", "context_request_ip": "character varying", "context_traits_logins": "bigint", "context_destination_id": "character varying", "email": "character varying", "logins": "bigint", "_as": "character varying", "context_source_id": "character varying", "uuid_ts": "timestamp without time zone", "context_source_type": "character varying", "context_traits_email": "character varying", "name": "character varying", "id": "character varying", "_between": "character varying", "context_ip": "character varying", "received_at": "timestamp without time zone", "sent_at": "timestamp without time zone", "context_traits_as": "character varying", "context_destination_type": "character varying", "timestamp": "timestamp without time zone", "original_timestamp": "timestamp without time zone"}, "product_track": {"review_id": "character varying", "context_source_id": "character varying", "user_id": "character varying", "timestamp": "timestamp without time zone", "uuid_ts": "timestamp without time zone", "review_body": "character varying", "context_source_type": "character varying", "_as": "character varying", "_between": "character varying", "id": "character varying", "rating": "bigint", "event": "character varying", "original_timestamp": "timestamp without time zone", "context_destination_type": "character varying", "context_ip": "character varying", "context_destination_id": "character varying", "sent_at": "timestamp without time zone", "received_at": "timestamp without time zone", "event_text": "character varying", "product_id": "character varying", "context_request_ip": "character varying"}, "tracks": {"original_timestamp": "timestamp without time zone", "context_destination_id": "character varying", "event": "character varying", "context_request_ip": "character varying", "uuid_ts": "timestamp without time zone", "context_destination_type": "character varying", "user_id": "character varying", "sent_at": "timestamp without time zone", "context_source_type": "character varying", "context_ip": "character varying", "timestamp": "timestamp without time zone", "received_at": "timestamp without time zone", "context_source_id": "character varying", "event_text": "character varying", "id": "character varying"}, "aliases": {"context_request_ip": "character varying", "context_destination_type": "character varying", "context_destination_id": "character varying", "previous_id": "character varying", "context_ip": "character varying", "sent_at": "timestamp without time zone", "id": "character varying", "uuid_ts": "timestamp without time zone", "timestamp": "timestamp without time zone", "original_timestamp": "timestamp without time zone", "context_source_id": "character varying", "user_id": "character varying", "context_source_type": "character varying", "received_at": "timestamp without time zone"}, "pages": {"name": "character varying", "url": "character varying", "id": "character varying", "timestamp": "timestamp without time zone", "title": "character varying", "user_id": "character varying", "context_source_id": "character varying", "context_source_type": "character varying", "original_timestamp": "timestamp without time zone", "context_request_ip": "character varying", "received_at": "timestamp without time zone", "_between": "character varying", "context_destination_type": "character varying", "uuid_ts": "timestamp without time zone", "context_destination_id": "character varying", "sent_at": "timestamp without time zone", "context_ip": "character varying", "_as": "character varying"}, "groups": {"_as": "character varying", "user_id": "character varying", "context_destination_type": "character varying", "sent_at": "timestamp without time zone", "context_source_type": "character varying", "received_at": "timestamp without time zone", "context_ip": "character varying", "industry": "character varying", "timestamp": "timestamp without time zone", "group_id": "character varying", "uuid_ts": "timestamp without time zone", "context_source_id": "character varying", "context_request_ip": "character varying", "_between": "character varying", "original_timestamp": "timestamp without time zone", "name": "character varying", "plan": "character varying", "context_destination_id": "character varying", "employees": "bigint", "id": "character varying"}, } - expectedSourceJobSchema := model.Schema{ - "tracks": {"original_timestamp": "timestamp without time zone", "sent_at": "timestamp without time zone", "timestamp": "timestamp without time zone", "context_source_id": "character varying", "context_ip": "character varying", "context_destination_type": "character varying", "uuid_ts": "timestamp without time zone", "event_text": "character varying", "context_request_ip": "character varying", "context_sources_job_id": "character varying", "context_sources_version": "character varying", "context_sources_task_run_id": "character varying", "id": "character varying", "channel": "character varying", "received_at": "timestamp without time zone", "context_destination_id": "character varying", "context_source_type": "character varying", "user_id": "character varying", "context_sources_job_run_id": "character varying", "event": "character varying"}, - "google_sheet": {"_as": "character varying", "review_body": "character varying", "rating": "bigint", "context_source_type": "character varying", "_between": "character varying", "context_destination_id": "character varying", "review_id": "character varying", "context_sources_version": "character varying", "context_destination_type": "character varying", "id": "character varying", "user_id": "character varying", "context_request_ip": "character varying", "original_timestamp": "timestamp without time zone", "received_at": "timestamp without time zone", "product_id": "character varying", "context_sources_task_run_id": "character varying", "event": "character varying", "context_source_id": "character varying", "sent_at": "timestamp without time zone", "uuid_ts": "timestamp without time zone", "timestamp": "timestamp without time zone", "context_sources_job_run_id": "character varying", "context_ip": "character varying", "context_sources_job_id": "character varying", "channel": "character varying", "event_text": "character varying"}, - } + //expectedSourceJobSchema := model.Schema{ + // "tracks": {"original_timestamp": "timestamp without time zone", "sent_at": "timestamp without time zone", "timestamp": "timestamp without time zone", "context_source_id": "character varying", "context_ip": "character varying", "context_destination_type": "character varying", "uuid_ts": "timestamp without time zone", "event_text": "character varying", "context_request_ip": "character varying", "context_sources_job_id": "character varying", "context_sources_version": "character varying", "context_sources_task_run_id": "character varying", "id": "character varying", "channel": "character varying", "received_at": "timestamp without time zone", "context_destination_id": "character varying", "context_source_type": "character varying", "user_id": "character varying", "context_sources_job_run_id": "character varying", "event": "character varying"}, + // "google_sheet": {"_as": "character varying", "review_body": "character varying", "rating": "bigint", "context_source_type": "character varying", "_between": "character varying", "context_destination_id": "character varying", "review_id": "character varying", "context_sources_version": "character varying", "context_destination_type": "character varying", "id": "character varying", "user_id": "character varying", "context_request_ip": "character varying", "original_timestamp": "timestamp without time zone", "received_at": "timestamp without time zone", "product_id": "character varying", "context_sources_task_run_id": "character varying", "event": "character varying", "context_source_id": "character varying", "sent_at": "timestamp without time zone", "uuid_ts": "timestamp without time zone", "timestamp": "timestamp without time zone", "context_sources_job_run_id": "character varying", "context_ip": "character varying", "context_sources_job_id": "character varying", "channel": "character varying", "event_text": "character varying"}, + //} userIDFormat := "userId_rs" userIDSQL := "SUBSTRING(user_id from 1 for 9)" uuidTSSQL := "TO_CHAR(uuid_ts, 'YYYY-MM-DD')" @@ -170,167 +169,159 @@ func TestIntegration(t *testing.T) { verifySchema func(t *testing.T, db *sql.DB, namespace string) verifyRecords func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) }{ - { - name: "Upload Job", - credentials: credentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-2.json", - configOverride: map[string]any{ - "host": credentials.Host, - "port": credentials.Port, - "user": credentials.UserName, - "password": credentials.Password, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersRecords(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "IAM Upload Job", - credentials: iamCredentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-2.json", - configOverride: map[string]any{ - "useIAMForAuth": true, - "user": iamCredentials.UserName, - "iamRoleARNForAuth": iamCredentials.IAMRoleARN, - "clusterId": iamCredentials.ClusterID, - "clusterRegion": iamCredentials.ClusterRegion, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersRecords(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "Serverless Upload Job", - credentials: serverlessCredentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-2.json", - configOverride: map[string]any{ - "host": serverlessCredentials.Host, - "port": serverlessCredentials.Port, - "user": serverlessCredentials.UserName, - "password": serverlessCredentials.Password, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersRecords(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "Serverless IAM Upload Job", - credentials: serverlessIAMCredentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-2.json", - configOverride: map[string]any{ - "useIAMForAuth": true, - "useServerless": true, - "iamRoleARNForAuth": serverlessIAMCredentials.IAMRoleARN, - "workgroupName": serverlessIAMCredentials.WorkgroupName, - "clusterRegion": serverlessIAMCredentials.ClusterRegion, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersRecords(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, + //{ + // name: "Upload Job", + // credentials: credentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-2.json", + // configOverride: map[string]any{ + // "host": credentials.Host, + // "port": credentials.Port, + // "user": credentials.UserName, + // "password": credentials.Password, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "IAM Upload Job", + // credentials: iamCredentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-2.json", + // configOverride: map[string]any{ + // "useIAMForAuth": true, + // "user": iamCredentials.UserName, + // "iamRoleARNForAuth": iamCredentials.IAMRoleARN, + // "clusterId": iamCredentials.ClusterID, + // "clusterRegion": iamCredentials.ClusterRegion, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "Serverless Upload Job", + // credentials: serverlessCredentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-2.json", + // configOverride: map[string]any{ + // "host": serverlessCredentials.Host, + // "port": serverlessCredentials.Port, + // "user": serverlessCredentials.UserName, + // "password": serverlessCredentials.Password, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "Serverless IAM Upload Job", + // credentials: serverlessIAMCredentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-2.json", + // configOverride: map[string]any{ + // "useIAMForAuth": true, + // "useServerless": true, + // "iamRoleARNForAuth": serverlessIAMCredentials.IAMRoleARN, + // "workgroupName": serverlessIAMCredentials.WorkgroupName, + // "clusterRegion": serverlessIAMCredentials.ClusterRegion, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, { name: "Append Mode", credentials: credentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, warehouseEventsMap2: whth.EventsCountMap{ // For all tables except users we will be appending because of: // * preferAppend // For users table we will not be appending since the following config are not set // * Warehouse.rs.skipDedupDestinationIDs // * Warehouse.rs.skipComputingUserLatestTraits - "identifies": 8, "users": 1, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, + "identifies": 8, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, }, eventFilePath1: "../testdata/upload-job.events-1.json", eventFilePath2: "../testdata/upload-job.events-1.json", @@ -351,8 +342,6 @@ func TestIntegration(t *testing.T) { t.Helper() identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersMergeRecord(userIDFormat, sourceID, destinationID, destType)) tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksAppendRecords(userIDFormat, sourceID, destinationID, destType)) productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) @@ -367,176 +356,170 @@ func TestIntegration(t *testing.T) { require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsAppendRecords(userIDFormat, sourceID, destinationID, destType)) }, }, - { - name: "IAM Append Mode", - credentials: iamCredentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - warehouseEventsMap2: whth.EventsCountMap{ - // For all tables except users we will be appending because of: - // * preferAppend - // For users table we will not be appending since the following config are not set - // * Warehouse.rs.skipDedupDestinationIDs - // * Warehouse.rs.skipComputingUserLatestTraits - "identifies": 8, "users": 1, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, - }, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-1.json", - useSameUserID: true, - configOverride: map[string]any{ - "preferAppend": true, - "useIAMForAuth": true, - "user": iamCredentials.UserName, - "iamRoleARNForAuth": iamCredentials.IAMRoleARN, - "clusterId": iamCredentials.ClusterID, - "clusterRegion": iamCredentials.ClusterRegion, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersMergeRecord(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksAppendRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackAppendRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensAppendRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsAppendRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "Undefined preferAppend", - credentials: credentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-1.json", - useSameUserID: true, - configOverride: map[string]any{ - "host": credentials.Host, - "port": credentials.Port, - "user": credentials.UserName, - "password": credentials.Password, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesMergeRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersMergeRecord(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksMergeRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackMergeRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesMergeRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensMergeRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesMergeRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsMergeRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "Append Users", - credentials: credentials, - tables: []string{"identifies", "users", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, - warehouseEventsMap2: whth.EventsCountMap{ - // For all tables except users we will be appending because of: - // * preferAppend - // * Warehouse.postgres.skipComputingUserLatestTraits - // For users table we will be appending because of: - // * Warehouse.postgres.skipDedupDestinationIDs - // * Warehouse.postgres.skipComputingUserLatestTraits - "identifies": 8, "users": 2, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, - }, - eventFilePath1: "../testdata/upload-job.events-1.json", - eventFilePath2: "../testdata/upload-job.events-1.json", - useSameUserID: true, - additionalEnvs: func(destinationID string) map[string]string { - return map[string]string{ - "RSERVER_WAREHOUSE_REDSHIFT_SKIP_DEDUP_DESTINATION_IDS": destinationID, - "RSERVER_WAREHOUSE_REDSHIFT_SKIP_COMPUTING_USER_LATEST_TRAITS": "true", - } - }, - configOverride: map[string]any{ - "preferAppend": true, - "host": credentials.Host, - "port": credentials.Port, - "user": credentials.UserName, - "password": credentials.Password, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) - require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - usersRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_type, context_request_ip, context_traits_name, context_traits_between, _as, logins, sent_at, context_traits_logins, context_ip, _between, context_traits_email, "timestamp", context_destination_id, email, context_traits_as, context_source_type, substring(id from 1 for 9), %s, received_at, name, original_timestamp FROM %q.%q ORDER BY id;`, uuidTSSQL, namespace, "users")) - require.ElementsMatch(t, usersRecords, whth.UploadJobUsersAppendRecordsUsingUsersLoadFiles(userIDFormat, sourceID, destinationID, destType)) - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksAppendRecords(userIDFormat, sourceID, destinationID, destType)) - productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) - require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackAppendRecords(userIDFormat, sourceID, destinationID, destType)) - pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) - require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) - require.ElementsMatch(t, screensRecords, whth.UploadJobScreensAppendRecords(userIDFormat, sourceID, destinationID, destType)) - aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) - require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesAppendRecords(userIDFormat, sourceID, destinationID, destType)) - groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) - require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsAppendRecords(userIDFormat, sourceID, destinationID, destType)) - }, - }, - { - name: "Source Job", - credentials: credentials, - tables: []string{"tracks", "google_sheet"}, - sourceJob: true, - eventFilePath1: "../testdata/source-job.events-1.json", - eventFilePath2: "../testdata/source-job.events-2.json", - jobRunID1: misc.FastUUID().String(), - taskRunID1: misc.FastUUID().String(), - jobRunID2: misc.FastUUID().String(), - taskRunID2: misc.FastUUID().String(), - configOverride: map[string]any{ - "host": credentials.Host, - "port": credentials.Port, - "user": credentials.UserName, - "password": credentials.Password, - }, - verifySchema: func(t *testing.T, db *sql.DB, namespace string) { - t.Helper() - schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) - require.Equal(t, expectedSourceJobSchema, whth.ConvertRecordsToSchema(schema)) - }, - verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { - t.Helper() - tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT channel, context_sources_job_id, received_at, context_sources_version, %s, sent_at, context_ip, event, event_text, %s, context_destination_id, id, context_request_ip, context_source_type, original_timestamp, context_sources_job_run_id, context_sources_task_run_id, context_source_id, context_destination_type, "timestamp" FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) - require.ElementsMatch(t, tracksRecords, whth.SourceJobTracksRecords(userIDFormat, sourceID, destinationID, destType, jobRunID, taskRunID)) - googleSheetRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT product_id, sent_at, _between, context_request_ip, context_sources_job_run_id, channel, review_body, context_source_id, original_timestamp, context_destination_id, context_sources_job_id, event, context_sources_task_run_id, context_source_type, %s, context_ip, "timestamp", id, received_at, review_id, %s, context_sources_version, context_destination_type, event_text, _as, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "google_sheet")) - require.ElementsMatch(t, googleSheetRecords, whth.SourceJobGoogleSheetRecords(userIDFormat, sourceID, destinationID, destType, jobRunID, taskRunID)) - }, - }, + //{ + // name: "IAM Append Mode", + // credentials: iamCredentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // warehouseEventsMap2: whth.EventsCountMap{ + // // For all tables except users we will be appending because of: + // // * preferAppend + // // For users table we will not be appending since the following config are not set + // // * Warehouse.rs.skipDedupDestinationIDs + // // * Warehouse.rs.skipComputingUserLatestTraits + // "identifies": 8, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, + // }, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-1.json", + // useSameUserID: true, + // configOverride: map[string]any{ + // "preferAppend": true, + // "useIAMForAuth": true, + // "user": iamCredentials.UserName, + // "iamRoleARNForAuth": iamCredentials.IAMRoleARN, + // "clusterId": iamCredentials.ClusterID, + // "clusterRegion": iamCredentials.ClusterRegion, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "Undefined preferAppend", + // credentials: credentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-1.json", + // useSameUserID: true, + // configOverride: map[string]any{ + // "host": credentials.Host, + // "port": credentials.Port, + // "user": credentials.UserName, + // "password": credentials.Password, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsMergeRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "Append Users", + // credentials: credentials, + // tables: []string{"identifies", "tracks", "product_track", "pages", "screens", "aliases", "groups"}, + // warehouseEventsMap2: whth.EventsCountMap{ + // // For all tables except users we will be appending because of: + // // * preferAppend + // // * Warehouse.postgres.skipComputingUserLatestTraits + // // For users table we will be appending because of: + // // * Warehouse.postgres.skipDedupDestinationIDs + // // * Warehouse.postgres.skipComputingUserLatestTraits + // "identifies": 8, "tracks": 8, "product_track": 8, "pages": 8, "screens": 8, "aliases": 8, "groups": 8, + // }, + // eventFilePath1: "../testdata/upload-job.events-1.json", + // eventFilePath2: "../testdata/upload-job.events-1.json", + // useSameUserID: true, + // additionalEnvs: func(destinationID string) map[string]string { + // return map[string]string{ + // "RSERVER_WAREHOUSE_REDSHIFT_SKIP_DEDUP_DESTINATION_IDS": destinationID, + // "RSERVER_WAREHOUSE_REDSHIFT_SKIP_COMPUTING_USER_LATEST_TRAITS": "true", + // } + // }, + // configOverride: map[string]any{ + // "preferAppend": true, + // "host": credentials.Host, + // "port": credentials.Port, + // "user": credentials.UserName, + // "password": credentials.Password, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedUploadJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // identifiesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, %s, context_traits_logins, _as, name, logins, email, original_timestamp, context_ip, context_traits_as, "timestamp", received_at, context_destination_type, sent_at, context_source_type, context_traits_between, context_source_id, context_traits_name, context_request_ip, _between, context_traits_email, context_destination_id, id FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "identifies")) + // require.ElementsMatch(t, identifiesRecords, whth.UploadJobIdentifiesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT original_timestamp, context_destination_id, context_destination_type, %s, context_source_type, "timestamp", id, event, sent_at, context_ip, event_text, context_source_id, context_request_ip, received_at, %s FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.UploadJobTracksAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // productTrackRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT "timestamp", %s, product_id, received_at, context_source_id, sent_at, context_source_type, context_ip, context_destination_type, original_timestamp, context_request_ip, context_destination_id, %s, _as, review_body, _between, review_id, event_text, id, event, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "product_track")) + // require.ElementsMatch(t, productTrackRecords, whth.UploadJobProductTrackAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // pagesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT %s, context_source_id, id, title, "timestamp", context_source_type, _as, received_at, context_destination_id, context_ip, context_destination_type, name, original_timestamp, _between, context_request_ip, sent_at, url, %s FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "pages")) + // require.ElementsMatch(t, pagesRecords, whth.UploadJobPagesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // screensRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, url, context_source_type, title, original_timestamp, %s, _between, context_ip, name, context_request_ip, %s, context_source_id, id, received_at, context_destination_id, "timestamp", sent_at, _as FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "screens")) + // require.ElementsMatch(t, screensRecords, whth.UploadJobScreensAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // aliasesRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_source_id, context_destination_id, context_ip, sent_at, id, %s, %s, previous_id, original_timestamp, context_source_type, received_at, context_destination_type, context_request_ip, "timestamp" FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "aliases")) + // require.ElementsMatch(t, aliasesRecords, whth.UploadJobAliasesAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // groupsRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT context_destination_type, id, _between, plan, original_timestamp, %s, context_source_id, sent_at, %s, group_id, industry, context_request_ip, context_source_type, "timestamp", employees, _as, context_destination_id, received_at, name, context_ip FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "groups")) + // require.ElementsMatch(t, groupsRecords, whth.UploadJobGroupsAppendRecords(userIDFormat, sourceID, destinationID, destType)) + // }, + //}, + //{ + // name: "Source Job", + // credentials: credentials, + // tables: []string{"tracks", "google_sheet"}, + // sourceJob: true, + // eventFilePath1: "../testdata/source-job.events-1.json", + // eventFilePath2: "../testdata/source-job.events-2.json", + // jobRunID1: misc.FastUUID().String(), + // taskRunID1: misc.FastUUID().String(), + // jobRunID2: misc.FastUUID().String(), + // taskRunID2: misc.FastUUID().String(), + // configOverride: map[string]any{ + // "host": credentials.Host, + // "port": credentials.Port, + // "user": credentials.UserName, + // "password": credentials.Password, + // }, + // verifySchema: func(t *testing.T, db *sql.DB, namespace string) { + // t.Helper() + // schema := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT table_name, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS WHERE table_schema = '%s';`, namespace)) + // require.Equal(t, expectedSourceJobSchema, whth.ConvertRecordsToSchema(schema)) + // }, + // verifyRecords: func(t *testing.T, db *sql.DB, sourceID, destinationID, namespace, jobRunID, taskRunID string) { + // t.Helper() + // tracksRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT channel, context_sources_job_id, received_at, context_sources_version, %s, sent_at, context_ip, event, event_text, %s, context_destination_id, id, context_request_ip, context_source_type, original_timestamp, context_sources_job_run_id, context_sources_task_run_id, context_source_id, context_destination_type, "timestamp" FROM %q.%q ORDER BY id;`, uuidTSSQL, userIDSQL, namespace, "tracks")) + // require.ElementsMatch(t, tracksRecords, whth.SourceJobTracksRecords(userIDFormat, sourceID, destinationID, destType, jobRunID, taskRunID)) + // googleSheetRecords := whth.RetrieveRecordsFromWarehouse(t, db, fmt.Sprintf(`SELECT product_id, sent_at, _between, context_request_ip, context_sources_job_run_id, channel, review_body, context_source_id, original_timestamp, context_destination_id, context_sources_job_id, event, context_sources_task_run_id, context_source_type, %s, context_ip, "timestamp", id, received_at, review_id, %s, context_sources_version, context_destination_type, event_text, _as, rating FROM %q.%q ORDER BY id;`, userIDSQL, uuidTSSQL, namespace, "google_sheet")) + // require.ElementsMatch(t, googleSheetRecords, whth.SourceJobGoogleSheetRecords(userIDFormat, sourceID, destinationID, destType, jobRunID, taskRunID)) + // }, + //}, } for _, tc := range testcase { @@ -560,7 +543,8 @@ func TestIntegration(t *testing.T) { WithConfigOption("useRudderStorage", false). WithConfigOption("syncFrequency", "30"). WithConfigOption("allowUsersContextTraits", true). - WithConfigOption("underscoreDivideNumbers", true) + WithConfigOption("underscoreDivideNumbers", true). + WithConfigOption("skipUsersTable", true) for k, v := range tc.configOverride { destinationBuilder = destinationBuilder.WithConfigOption(k, v) } diff --git a/warehouse/integrations/testhelper/eventmap.go b/warehouse/integrations/testhelper/eventmap.go index 33c12d1bbe..8d331dbd21 100644 --- a/warehouse/integrations/testhelper/eventmap.go +++ b/warehouse/integrations/testhelper/eventmap.go @@ -8,7 +8,7 @@ type EventsCountMap map[string]int func defaultStagingFilesEventsMap() EventsCountMap { return EventsCountMap{ - "wh_staging_files": 32, + "wh_staging_files": 28, } } @@ -21,11 +21,11 @@ func defaultStagingFilesWithIDResolutionEventsMap() EventsCountMap { func defaultTableUploadsEventsMap(destType string) EventsCountMap { if destType == whutils.BQ { return EventsCountMap{ - "identifies": 4, "users": 4, "tracks": 4, "product_track": 4, "pages": 4, "screens": 4, "aliases": 4, "_groups": 4, + "identifies": 4, "tracks": 4, "product_track": 4, "pages": 4, "screens": 4, "aliases": 4, "_groups": 4, } } else { return EventsCountMap{ - "identifies": 4, "users": 4, "tracks": 4, "product_track": 4, "pages": 4, "screens": 4, "aliases": 4, "groups": 4, + "identifies": 4, "tracks": 4, "product_track": 4, "pages": 4, "screens": 4, "aliases": 4, "groups": 4, } } }