Skip to content

Commit

Permalink
fixup! fixup! feat: redshift sdk driver
Browse files Browse the repository at this point in the history
  • Loading branch information
atzoum committed Mar 22, 2024
1 parent 1eadb3e commit c649f10
Show file tree
Hide file tree
Showing 5 changed files with 31 additions and 30 deletions.
15 changes: 8 additions & 7 deletions sqlconnect/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,12 @@ import (
)

type (
BigQuery = bigquery.Config
Databricks = databricks.Config
Mysql = mysql.Config
Postgres = postgres.Config
Redshift = redshift.Config
Snowflake = snowflake.Config
Trino = trino.Config
BigQuery = bigquery.Config
Databricks = databricks.Config
Mysql = mysql.Config
Postgres = postgres.Config
Redshift = redshift.PostgresConfig
RedshiftData = redshift.Config
Snowflake = snowflake.Config
Trino = trino.Config
)
16 changes: 8 additions & 8 deletions sqlconnect/internal/redshift/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import (
"github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/postgres"
)

// RedshiftDataConfigType is the value of the "type" discriminator field in
// credentials JSON that selects the redshift data api (AWS SDK) driver
// instead of the postgres driver.
const RedshiftDataConfigType = "sdk"

// PostgresConfig is the configuration for a redshift database when using the
// postgres driver.
type PostgresConfig = postgres.Config

// SDKConfig is the configuration for a redshift database when using the AWS SDK
type SDKConfig struct {
// Config is the configuration for a redshift database when using the redshift data api driver
type Config struct {
ClusterIdentifier string `json:"clusterIdentifier"`
Database string `json:"database"`
User string `json:"user"`
Expand All @@ -37,15 +37,15 @@ type SDKConfig struct {
UseLegacyMappings bool `json:"useLegacyMappings"`
}

func (c *SDKConfig) MarshalJSON() ([]byte, error) {
func (c *Config) MarshalJSON() ([]byte, error) {
bytes, err := json.Marshal(*c)
if err != nil {
return nil, err
}
return sjson.SetBytes(bytes, "type", SDKConfigType)
return sjson.SetBytes(bytes, "type", RedshiftDataConfigType)
}

func (c *SDKConfig) Parse(input json.RawMessage) error {
func (c *Config) Parse(input json.RawMessage) error {
err := json.Unmarshal(input, c)
if err != nil {
return err
Expand Down
10 changes: 5 additions & 5 deletions sqlconnect/internal/redshift/config_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@ import (
"github.com/rudderlabs/sqlconnect-go/sqlconnect/internal/redshift"
)

func TestRedshiftSDKConfig(t *testing.T) {
// Create a new SDKConfig
config := redshift.SDKConfig{
func TestRedshiftConfig(t *testing.T) {
// Create a new redshift config
config := redshift.Config{
ClusterIdentifier: "cluster-identifier",
Database: "database",
User: "user",
Expand All @@ -23,10 +23,10 @@ func TestRedshiftSDKConfig(t *testing.T) {
}
configJSON, err := json.Marshal(&config)
require.NoError(t, err)
require.Equal(t, "sdk", gjson.GetBytes(configJSON, "type").String())
require.Equal(t, redshift.RedshiftDataConfigType, gjson.GetBytes(configJSON, "type").String())

// Unmarshal the JSON back into a new SDKConfig
var newConfig redshift.SDKConfig
var newConfig redshift.Config
err = newConfig.Parse(configJSON)
require.NoError(t, err)
require.Equal(t, config, newConfig)
Expand Down
14 changes: 7 additions & 7 deletions sqlconnect/internal/redshift/db.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,10 @@ func NewDB(credentialsJSON json.RawMessage) (*DB, error) {
)
useLegacyMappings := gjson.GetBytes(credentialsJSON, "useLegacyMappings").Bool()
// Use the SDK if the credentials are for the SDK
if configType := gjson.GetBytes(credentialsJSON, "type").Str; configType == SDKConfigType {
db, err = newSdkDB(credentialsJSON)
if configType := gjson.GetBytes(credentialsJSON, "type").Str; configType == RedshiftDataConfigType {
db, err = newRedshiftDataDB(credentialsJSON)
} else {
db, err = newPgDB(credentialsJSON)
db, err = newPostgresDB(credentialsJSON)
}
if err != nil {
return nil, err
Expand All @@ -57,8 +57,8 @@ func NewDB(credentialsJSON json.RawMessage) (*DB, error) {
}, nil
}

func newPgDB(credentialsJSON json.RawMessage) (*sql.DB, error) {
var config Config
func newPostgresDB(credentialsJSON json.RawMessage) (*sql.DB, error) {
var config PostgresConfig
err := config.Parse(credentialsJSON)
if err != nil {
return nil, err
Expand All @@ -67,8 +67,8 @@ func newPgDB(credentialsJSON json.RawMessage) (*sql.DB, error) {
return sql.Open(postgres.DatabaseType, config.ConnectionString())
}

func newSdkDB(credentialsJSON json.RawMessage) (*sql.DB, error) {
var config SDKConfig
func newRedshiftDataDB(credentialsJSON json.RawMessage) (*sql.DB, error) {
var config Config
err := config.Parse(credentialsJSON)
if err != nil {
return nil, err
Expand Down
6 changes: 3 additions & 3 deletions sqlconnect/internal/redshift/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,16 +13,16 @@ func TestRedshiftDB(t *testing.T) {
t.Run("postgres driver", func(t *testing.T) {
configJSON, ok := os.LookupEnv("REDSHIFT_TEST_ENVIRONMENT_CREDENTIALS")
if !ok {
t.Skip("skipping redshift pg integration test due to lack of a test environment")
t.Skip("skipping redshift postgres driver integration test due to lack of a test environment")
}

integrationtest.TestDatabaseScenarios(t, redshift.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{LegacySupport: true})
})

t.Run("sdk driver", func(t *testing.T) {
t.Run("redshift data driver", func(t *testing.T) {
configJSON, ok := os.LookupEnv("REDSHIFT_SDK_TEST_ENVIRONMENT_CREDENTIALS")
if !ok {
t.Skip("skipping redshift sdk integration test due to lack of a test environment")
t.Skip("skipping redshift data driver integration test due to lack of a test environment")
}
integrationtest.TestDatabaseScenarios(t, redshift.DatabaseType, []byte(configJSON), strings.ToLower, integrationtest.Options{LegacySupport: true})
})
Expand Down

0 comments on commit c649f10

Please sign in to comment.