diff --git a/CHANGELOG.md b/CHANGELOG.md index 156c37ea..25945cc1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # EDGEGRID GOLANG RELEASE NOTES +## 2.7.0 (Oct 19, 2021) + +#### FEATURES/ENHANCEMENTS: +* [IMPORTANT] Added DataStream API support + * Stream operations + * Stream activation operations + * Read access to various DataStream properties +* Added HAPI v1 support + * Delete edge hostname + ## 2.6.0 (Aug 16, 2021) #### BUG FIXES: diff --git a/pkg/datastream/connectors.go b/pkg/datastream/connectors.go new file mode 100644 index 00000000..10a23f4e --- /dev/null +++ b/pkg/datastream/connectors.go @@ -0,0 +1,259 @@ +package datastream + +import validation "github.com/go-ozzo/ozzo-validation/v4" + +type ( + // S3Connector provides details about the Amazon S3 connector in a stream + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#amazons3 + S3Connector struct { + ConnectorType ConnectorType `json:"connectorType"` + AccessKey string `json:"accessKey"` + Bucket string `json:"bucket"` + ConnectorName string `json:"connectorName"` + Path string `json:"path"` + Region string `json:"region"` + SecretAccessKey string `json:"secretAccessKey"` + } + + // AzureConnector provides details about the Azure Storage connector configuration in a data stream + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#azurestorage + AzureConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + AccessKey string `json:"accessKey"` + AccountName string `json:"accountName"` + ConnectorName string `json:"connectorName"` + ContainerName string `json:"containerName"` + Path string `json:"path"` + } + + // DatadogConnector provides detailed information about Datadog connector + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#datadog + DatadogConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + AuthToken string `json:"authToken"` + CompressLogs bool `json:"compressLogs"` + ConnectorName string `json:"connectorName"` + Service string `json:"service,omitempty"` + Source string `json:"source,omitempty"` + Tags string `json:"tags,omitempty"` + URL string `json:"url"` + } + + // SplunkConnector provides detailed information about the Splunk connector + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#splunk + SplunkConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + CompressLogs bool `json:"compressLogs"` + ConnectorName string `json:"connectorName"` + EventCollectorToken string `json:"eventCollectorToken"` + URL string `json:"url"` + } + + // GCSConnector provides detailed information about the Google Cloud Storage connector + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#googlecloudstorage + GCSConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + Bucket string `json:"bucket"` + ConnectorName string `json:"connectorName"` + Path string `json:"path,omitempty"` + PrivateKey string `json:"privateKey"` + ProjectID string `json:"projectId"` + ServiceAccountName string `json:"serviceAccountName"` + } + + // CustomHTTPSConnector provides detailed information about the custom HTTPS endpoint + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#customhttps + CustomHTTPSConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + AuthenticationType AuthenticationType `json:"authenticationType"` + CompressLogs bool 
`json:"compressLogs"` + ConnectorName string `json:"connectorName"` + Password string `json:"password,omitempty"` + URL string `json:"url"` + UserName string `json:"userName,omitempty"` + } + + // SumoLogicConnector provides detailed information about the Sumo Logic connector + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#sumologic + SumoLogicConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + CollectorCode string `json:"collectorCode"` + CompressLogs bool `json:"compressLogs"` + ConnectorName string `json:"connectorName"` + Endpoint string `json:"endpoint"` + } + + // OracleCloudStorageConnector provides details about the Oracle Cloud Storage connector + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#oraclecloudstorage + OracleCloudStorageConnector struct { + ConnectorType ConnectorType `json:"connectorType"` + AccessKey string `json:"accessKey"` + Bucket string `json:"bucket"` + ConnectorName string `json:"connectorName"` + Namespace string `json:"namespace"` + Path string `json:"path"` + Region string `json:"region"` + SecretAccessKey string `json:"secretAccessKey"` + } + + // ConnectorType is used to create an "enum" of possible ConnectorTypes + ConnectorType string + + // AuthenticationType is used to create an "enum" of possible AuthenticationTypes of the CustomHTTPSConnector + AuthenticationType string +) + +const ( + // ConnectorTypeAzure const + ConnectorTypeAzure ConnectorType = "AZURE" + // ConnectorTypeS3 const + ConnectorTypeS3 ConnectorType = "S3" + // ConnectorTypeDataDog const + ConnectorTypeDataDog ConnectorType = "DATADOG" + // ConnectorTypeSplunk const + ConnectorTypeSplunk ConnectorType = "SPLUNK" + // ConnectorTypeGcs const + ConnectorTypeGcs ConnectorType = "GCS" + // ConnectorTypeHTTPS const + ConnectorTypeHTTPS ConnectorType = "HTTPS" + // ConnectorTypeSumoLogic const + ConnectorTypeSumoLogic ConnectorType = "SUMO_LOGIC" + // ConnectorTypeOracle const + ConnectorTypeOracle ConnectorType = "Oracle_Cloud_Storage" + + // AuthenticationTypeNone const + AuthenticationTypeNone AuthenticationType = "NONE" + // AuthenticationTypeBasic const + AuthenticationTypeBasic AuthenticationType = "BASIC" +) + +// SetConnectorType for S3Connector +func (c *S3Connector) SetConnectorType() { + c.ConnectorType = ConnectorTypeS3 +} + +// Validate validates S3Connector +func (c *S3Connector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeS3)), + "AccessKey": validation.Validate(c.AccessKey, validation.Required), + "Bucket": validation.Validate(c.Bucket, validation.Required), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "Path": validation.Validate(c.Path, validation.Required), + "Region": validation.Validate(c.Region, validation.Required), + "SecretAccessKey": validation.Validate(c.SecretAccessKey, validation.Required), + }.Filter() +} + +// SetConnectorType for AzureConnector +func (c *AzureConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeAzure +} + +// Validate validates AzureConnector +func (c *AzureConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeAzure)), + "AccessKey": validation.Validate(c.AccessKey, validation.Required), + "AccountName": validation.Validate(c.AccountName, validation.Required), + "ConnectorName": 
validation.Validate(c.ConnectorName, validation.Required), + "ContainerName": validation.Validate(c.ContainerName, validation.Required), + "Path": validation.Validate(c.Path, validation.Required), + }.Filter() +} + +// SetConnectorType for DatadogConnector +func (c *DatadogConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeDataDog +} + +// Validate validates DatadogConnector +func (c *DatadogConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeDataDog)), + "AuthToken": validation.Validate(c.AuthToken, validation.Required), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "URL": validation.Validate(c.URL, validation.Required), + }.Filter() +} + +// SetConnectorType for SplunkConnector +func (c *SplunkConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeSplunk +} + +// Validate validates SplunkConnector +func (c *SplunkConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeSplunk)), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "EventCollectorToken": validation.Validate(c.EventCollectorToken, validation.Required), + "URL": validation.Validate(c.URL, validation.Required), + }.Filter() +} + +// SetConnectorType for GCSConnector +func (c *GCSConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeGcs +} + +// Validate validates GCSConnector +func (c *GCSConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeGcs)), + "Bucket": validation.Validate(c.Bucket, validation.Required), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "PrivateKey": validation.Validate(c.PrivateKey, validation.Required), + "ProjectID": validation.Validate(c.ProjectID, validation.Required), + "ServiceAccountName": validation.Validate(c.ServiceAccountName, validation.Required), + }.Filter() +} + +// SetConnectorType for CustomHTTPSConnector +func (c *CustomHTTPSConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeHTTPS +} + +// Validate validates CustomHTTPSConnector +func (c *CustomHTTPSConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeHTTPS)), + "AuthenticationType": validation.Validate(c.AuthenticationType, validation.Required, validation.In(AuthenticationTypeBasic, AuthenticationTypeNone)), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "URL": validation.Validate(c.URL, validation.Required), + }.Filter() +} + +// SetConnectorType for SumoLogicConnector +func (c *SumoLogicConnector) SetConnectorType() { + c.ConnectorType = ConnectorTypeSumoLogic +} + +// Validate validates SumoLogicConnector +func (c *SumoLogicConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeSumoLogic)), + "CollectorCode": validation.Validate(c.CollectorCode, validation.Required), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "Endpoint": validation.Validate(c.Endpoint, validation.Required), + }.Filter() +} + +// SetConnectorType for OracleCloudStorageConnector +func (c *OracleCloudStorageConnector) 
SetConnectorType() { + c.ConnectorType = ConnectorTypeOracle +} + +// Validate validates OracleCloudStorageConnector +func (c *OracleCloudStorageConnector) Validate() error { + return validation.Errors{ + "ConnectorType": validation.Validate(c.ConnectorType, validation.Required, validation.In(ConnectorTypeOracle)), + "AccessKey": validation.Validate(c.AccessKey, validation.Required), + "Bucket": validation.Validate(c.Bucket, validation.Required), + "ConnectorName": validation.Validate(c.ConnectorName, validation.Required), + "Namespace": validation.Validate(c.Namespace, validation.Required), + "Path": validation.Validate(c.Path, validation.Required), + "Region": validation.Validate(c.Region, validation.Required), + "SecretAccessKey": validation.Validate(c.SecretAccessKey, validation.Required), + }.Filter() +} diff --git a/pkg/datastream/ds.go b/pkg/datastream/ds.go new file mode 100644 index 00000000..28784991 --- /dev/null +++ b/pkg/datastream/ds.go @@ -0,0 +1,47 @@ +package datastream + +import ( + "errors" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" +) + +var ( + // ErrStructValidation is returned when given struct validation failed + ErrStructValidation = errors.New("struct validation") +) + +type ( + // DS is the ds api interface + DS interface { + Activation + Properties + Stream + } + + ds struct { + session.Session + } + + // Option defines a DS option + Option func(*ds) + + // ClientFunc is a ds client new method, this can be used for mocking + ClientFunc func(sess session.Session, ops ...Option) DS +) + +// Client returns a new ds Client instance with the specified controller +func Client(sess session.Session, opts ...Option) DS { + c := &ds{ + Session: sess, + } + + for _, opt := range opts { + opt(c) + } + return c +} + +// DelimiterTypePtr returns the address of the DelimiterType +func DelimiterTypePtr(d DelimiterType) *DelimiterType { + return &d +} diff --git a/pkg/datastream/ds_test.go b/pkg/datastream/ds_test.go new file mode 100644 index 00000000..c68f16d5 --- /dev/null +++ b/pkg/datastream/ds_test.go @@ -0,0 +1,65 @@ +package datastream + +import ( + "crypto/tls" + "crypto/x509" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/edgegrid" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" + "github.com/stretchr/testify/require" + "github.com/tj/assert" + "net/http" + "net/http/httptest" + "net/url" + "testing" +) + +func mockAPIClient(t *testing.T, mockServer *httptest.Server) DS { + serverURL, err := url.Parse(mockServer.URL) + require.NoError(t, err) + certPool := x509.NewCertPool() + certPool.AddCert(mockServer.Certificate()) + httpClient := &http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + RootCAs: certPool, + }, + }, + } + + s, err := session.New( + session.WithClient(httpClient), + session.WithSigner(&edgegrid.Config{Host: serverURL.Host}), + ) + assert.NoError(t, err) + return Client(s) +} + +func TestClient(t *testing.T) { + sess, err := session.New() + require.NoError(t, err) + tests := map[string]struct { + options []Option + expected *ds + }{ + "no options provided, return default": { + options: nil, + expected: &ds{ + Session: sess, + }, + }, + "option provided, overwrite session": { + options: []Option{func(c *ds) { + c.Session = nil + }}, + expected: &ds{ + Session: nil, + }, + }, + } + for name, test := range tests { + t.Run(name, func(t *testing.T) { + res := Client(sess, test.options...) 
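+ // Options are applied in order inside Client, so the "overwrite session" case expects a nil Session here.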
+ assert.Equal(t, res, test.expected) + }) + } +} diff --git a/pkg/datastream/errors.go b/pkg/datastream/errors.go new file mode 100644 index 00000000..8c4eb94c --- /dev/null +++ b/pkg/datastream/errors.go @@ -0,0 +1,81 @@ +package datastream + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" +) + +type ( + // Error is a ds error interface + Error struct { + Type string `json:"type"` + Title string `json:"title"` + Detail string `json:"detail"` + Instance string `json:"instance"` + StatusCode int `json:"statusCode"` + Errors []RequestErrors `json:"errors"` + } + + // RequestErrors is an optional errors array that lists potentially more than one problem detected in the request + RequestErrors struct { + Type string `json:"type"` + Title string `json:"title"` + Instance string `json:"instance,omitempty"` + Detail string `json:"detail"` + } +) + +// Error parses an error from the response +func (d *ds) Error(r *http.Response) error { + var e Error + + var body []byte + + body, err := ioutil.ReadAll(r.Body) + if err != nil { + d.Log(r.Request.Context()).Errorf("reading error response body: %s", err) + e.StatusCode = r.StatusCode + e.Title = fmt.Sprintf("Failed to read error body") + e.Detail = err.Error() + return &e + } + + if err := json.Unmarshal(body, &e); err != nil { + d.Log(r.Request.Context()).Errorf("could not unmarshal API error: %s", err) + e.Title = fmt.Sprintf("Failed to unmarshal error body") + e.Detail = err.Error() + } + + e.StatusCode = r.StatusCode + + return &e +} + +func (e *Error) Error() string { + msg, err := json.MarshalIndent(e, "", "\t") + if err != nil { + return fmt.Sprintf("error marshaling API error: %s", err) + } + return fmt.Sprintf("API error: \n%s", msg) +} + +// Is handles error comparisons +func (e *Error) Is(target error) bool { + var t *Error + if !errors.As(target, &t) { + return false + } + + if e == t { + return true + } + + if e.StatusCode != t.StatusCode { + return false + } + + return e.Error() == t.Error() +} diff --git a/pkg/datastream/errors_test.go b/pkg/datastream/errors_test.go new file mode 100644 index 00000000..b191096c --- /dev/null +++ b/pkg/datastream/errors_test.go @@ -0,0 +1,67 @@ +package datastream + +import ( + "context" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "io/ioutil" + "net/http" + "strings" + "testing" +) + +func TestNewError(t *testing.T) { + sess, err := session.New() + require.NoError(t, err) + + req, err := http.NewRequestWithContext( + context.TODO(), + http.MethodHead, + "/", + nil) + require.NoError(t, err) + + tests := map[string]struct { + response *http.Response + expected *Error + }{ + "valid response, status code 500": { + response: &http.Response{ + Status: "Internal Server Error", + StatusCode: http.StatusInternalServerError, + Body: ioutil.NopCloser(strings.NewReader( + `{"type":"a","title":"b","detail":"c"}`), + ), + Request: req, + }, + expected: &Error{ + Type: "a", + Title: "b", + Detail: "c", + StatusCode: http.StatusInternalServerError, + }, + }, + "invalid response body, assign status code": { + response: &http.Response{ + Status: "Internal Server Error", + StatusCode: http.StatusInternalServerError, + Body: ioutil.NopCloser(strings.NewReader( + `test`), + ), + Request: req, + }, + expected: &Error{ + Title: "Failed to unmarshal error body", + Detail: "invalid character 'e' in literal true (expecting 'r')", + StatusCode: http.StatusInternalServerError, + }, + }, + } + for name, 
test := range tests { + t.Run(name, func(t *testing.T) { + res := Client(sess).(*ds).Error(test.response) + assert.Equal(t, test.expected, res) + }) + } +} diff --git a/pkg/datastream/properties.go b/pkg/datastream/properties.go new file mode 100644 index 00000000..e8de8518 --- /dev/null +++ b/pkg/datastream/properties.go @@ -0,0 +1,176 @@ +package datastream + +import ( + "context" + "errors" + "fmt" + validation "github.com/go-ozzo/ozzo-validation/v4" + "net/http" + "net/url" +) + +type ( + // Properties is an interface for listing various DS API properties + Properties interface { + // GetProperties returns properties that are active on the production and staging network for a specific product type that are available within a group + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#getproperties + GetProperties(context.Context, GetPropertiesRequest) ([]Property, error) + + // GetPropertiesByGroup returns properties that are active on the production and staging network and available within a specific group + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#getpropertiesbygroup + GetPropertiesByGroup(context.Context, GetPropertiesByGroupRequest) ([]Property, error) + + // GetDatasetFields returns groups of data set fields available in the template. + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#gettemplatename + GetDatasetFields(context.Context, GetDatasetFieldsRequest) ([]DataSets, error) + } + + // GetPropertiesRequest contains parameters necessary to send a GetProperties request + GetPropertiesRequest struct { + GroupId int + ProductId string + } + + // GetPropertiesByGroupRequest contains parameters necessary to send a GetPropertiesByGroup request + GetPropertiesByGroupRequest struct { + GroupId int + } + + // GetDatasetFieldsRequest contains parameters necessary to send a GetDatasetFields request + GetDatasetFieldsRequest struct { + TemplateName TemplateName + } +) + +// Validate performs validation on GetPropertiesRequest +func (r GetPropertiesRequest) Validate() error { + return validation.Errors{ + "GroupId": validation.Validate(r.GroupId, validation.Required), + "ProductId": validation.Validate(r.ProductId, validation.Required), + }.Filter() +} + +// Validate performs validation on GetPropertiesRequest +func (r GetPropertiesByGroupRequest) Validate() error { + return validation.Errors{ + "GroupId": validation.Validate(r.GroupId, validation.Required), + }.Filter() +} + +// Validate performs validation on GetDatasetFieldsRequest +func (r GetDatasetFieldsRequest) Validate() error { + return validation.Errors{ + "TemplateName": validation.Validate(r.TemplateName, validation.Required, validation.In(TemplateNameEdgeLogs)), + }.Filter() +} + +var ( + // ErrGetProperties is returned when GetProperties fails + ErrGetProperties = errors.New("list properties") + // ErrGetPropertiesByGroup is returned when GetPropertiesByGroup fails + ErrGetPropertiesByGroup = errors.New("list properties by group") + // ErrGetDatasetFields is returned when GetDatasetFields fails + ErrGetDatasetFields = errors.New("list data set fields") +) + +func (d *ds) GetProperties(ctx context.Context, params GetPropertiesRequest) ([]Property, error) { + logger := d.Log(ctx) + logger.Debug("GetProperties") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrGetProperties, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + 
"/datastream-config-api/v1/log/properties/product/%s/group/%d", + params.ProductId, params.GroupId)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrGetProperties, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrGetProperties, err) + } + + var rval []Property + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrGetProperties, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrGetProperties, d.Error(resp)) + } + + return rval, nil +} + +func (d *ds) GetPropertiesByGroup(ctx context.Context, params GetPropertiesByGroupRequest) ([]Property, error) { + logger := d.Log(ctx) + logger.Debug("GetPropertiesByGroup") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrGetPropertiesByGroup, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/properties/group/%d", + params.GroupId)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrGetPropertiesByGroup, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrGetPropertiesByGroup, err) + } + + var rval []Property + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrGetPropertiesByGroup, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrGetPropertiesByGroup, d.Error(resp)) + } + + return rval, nil +} + +func (d *ds) GetDatasetFields(ctx context.Context, params GetDatasetFieldsRequest) ([]DataSets, error) { + logger := d.Log(ctx) + logger.Debug("GetDatasetFields") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrGetDatasetFields, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/datasets/template/%s", + params.TemplateName)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrGetDatasetFields, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrGetDatasetFields, err) + } + + var rval []DataSets + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrGetDatasetFields, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrGetDatasetFields, d.Error(resp)) + } + + return rval, nil +} diff --git a/pkg/datastream/properties_test.go b/pkg/datastream/properties_test.go new file mode 100644 index 00000000..b2af4564 --- /dev/null +++ b/pkg/datastream/properties_test.go @@ -0,0 +1,393 @@ +package datastream + +import ( + "context" + "errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "net/http" + "net/http/httptest" + "testing" +) + +func TestDs_GetProperties(t *testing.T) { + tests := map[string]struct { + request GetPropertiesRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse []Property + withError error + }{ + "200 OK": { + request: GetPropertiesRequest{ + GroupId: 12345, + ProductId: "Download_Delivery", + }, + responseStatus: http.StatusOK, + responseBody: ` +[ + { + "propertyId": 382631, + "propertyName": "customp.akamai.com", + 
"productId": "Ion_Standard", + "productName": "Ion Standard", + "hostnames": [ + "customp.akamaize.net", + "customp.akamaized-staging.net" + ] + }, + { + "propertyId": 347459, + "propertyName": "example.com", + "productId": "Dynamic_Site_Accelerator", + "productName": "Dynamic Site Accelerator", + "hostnames": [ + "example.edgekey.net" + ] + } +] +`, + expectedPath: "/datastream-config-api/v1/log/properties/product/Download_Delivery/group/12345", + expectedResponse: []Property{ + { + PropertyID: 382631, + PropertyName: "customp.akamai.com", + ProductID: "Ion_Standard", + ProductName: "Ion Standard", + Hostnames: []string{ + "customp.akamaize.net", + "customp.akamaized-staging.net", + }, + }, + { + PropertyID: 347459, + PropertyName: "example.com", + ProductID: "Dynamic_Site_Accelerator", + ProductName: "Dynamic Site Accelerator", + Hostnames: []string{ + "example.edgekey.net", + }, + }, + }, + }, + "validation error": { + request: GetPropertiesRequest{}, + withError: ErrStructValidation, + }, + "400 bad request": { + request: GetPropertiesRequest{GroupId: 12345, ProductId: "testProductName"}, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "", + "instance": "baf2671f-7b3a-406d-9dd8-63ef20a01296", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Invalid Product Name" + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/properties/product/testProductName/group/12345", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Instance: "baf2671f-7b3a-406d-9dd8-63ef20a01296", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Invalid Product Name", + }, + }, + }, + }, + "403 forbidden": { + request: GetPropertiesRequest{GroupId: 12345, ProductId: "testProductName"}, + responseStatus: http.StatusForbidden, + responseBody: ` +{ + "type": "forbidden", + "title": "Forbidden", + "detail": "", + "instance": "28eb43a8-97ae-4c57-98aa-258081582b92", + "statusCode": 403, + "errors": [ + { + "type": "forbidden", + "title": "Forbidden", + "detail": "User is not having access for the group. Access denied, please contact support." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/properties/product/testProductName/group/12345", + withError: &Error{ + Type: "forbidden", + Title: "Forbidden", + Instance: "28eb43a8-97ae-4c57-98aa-258081582b92", + StatusCode: http.StatusForbidden, + Errors: []RequestErrors{ + { + Type: "forbidden", + Title: "Forbidden", + Detail: "User is not having access for the group. 
Access denied, please contact support.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodGet, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.GetProperties(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_GetPropertiesByGroup(t *testing.T) { + tests := map[string]struct { + request GetPropertiesByGroupRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse []Property + withError error + }{ + "200 OK": { + request: GetPropertiesByGroupRequest{ + GroupId: 12345, + }, + responseStatus: http.StatusOK, + responseBody: ` +[ + { + "propertyId": 382631, + "propertyName": "customp.akamai.com", + "productId": "Ion_Standard", + "productName": "Ion Standard", + "hostnames": [ + "customp.akamaize.net", + "customp.akamaized-staging.net" + ] + }, + { + "propertyId": 347459, + "propertyName": "example.com", + "productId": "Dynamic_Site_Accelerator", + "productName": "Dynamic Site Accelerator", + "hostnames": [ + "example.edgekey.net" + ] + } +] +`, + expectedPath: "/datastream-config-api/v1/log/properties/group/12345", + expectedResponse: []Property{ + { + PropertyID: 382631, + PropertyName: "customp.akamai.com", + ProductID: "Ion_Standard", + ProductName: "Ion Standard", + Hostnames: []string{ + "customp.akamaize.net", + "customp.akamaized-staging.net", + }, + }, + { + PropertyID: 347459, + PropertyName: "example.com", + ProductID: "Dynamic_Site_Accelerator", + ProductName: "Dynamic Site Accelerator", + Hostnames: []string{ + "example.edgekey.net", + }, + }, + }, + }, + "validation error": { + request: GetPropertiesByGroupRequest{}, + withError: ErrStructValidation, + }, + "403 access forbidden": { + request: GetPropertiesByGroupRequest{GroupId: 12345}, + responseStatus: http.StatusForbidden, + responseBody: ` +{ + "type": "forbidden", + "title": "Forbidden", + "detail": "", + "instance": "04fde003-428b-4c2c-94fe-6109af9d231c", + "statusCode": 403, + "errors": [ + { + "type": "forbidden", + "title": "Forbidden", + "detail": "User is not having access for the group. Access denied, please contact support." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/properties/group/12345", + withError: &Error{ + Type: "forbidden", + Title: "Forbidden", + Instance: "04fde003-428b-4c2c-94fe-6109af9d231c", + StatusCode: http.StatusForbidden, + Errors: []RequestErrors{ + { + Type: "forbidden", + Title: "Forbidden", + Detail: "User is not having access for the group. 
Access denied, please contact support.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodGet, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.GetPropertiesByGroup(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_GetDatasetFields(t *testing.T) { + tests := map[string]struct { + request GetDatasetFieldsRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse []DataSets + withError error + }{ + "200 OK": { + request: GetDatasetFieldsRequest{ + TemplateName: TemplateNameEdgeLogs, + }, + responseStatus: http.StatusOK, + responseBody: ` +[ + { + "datasetGroupName":"group_name_1", + "datasetGroupDescription":"group_desc_1", + "datasetFields":[ + { + "datasetFieldId":1000, + "datasetFieldName":"dataset_field_name_1", + "datasetFieldDescription":"dataset_field_desc_1" + }, + { + "datasetFieldId":1002, + "datasetFieldName":"dataset_field_name_2", + "datasetFieldDescription":"dataset_field_desc_2" + } + ] + }, + { + "datasetGroupName":"group_name_2", + "datasetFields":[ + { + "datasetFieldId":1082, + "datasetFieldName":"dataset_field_name_3", + "datasetFieldDescription":"dataset_field_desc_3" + } + ] + } +] +`, + expectedPath: "/datastream-config-api/v1/log/datasets/template/EDGE_LOGS", + expectedResponse: []DataSets{ + { + DatasetGroupName: "group_name_1", + DatasetGroupDescription: "group_desc_1", + DatasetFields: []DatasetFields{ + { + DatasetFieldID: 1000, + DatasetFieldName: "dataset_field_name_1", + DatasetFieldDescription: "dataset_field_desc_1", + }, + { + DatasetFieldID: 1002, + DatasetFieldName: "dataset_field_name_2", + DatasetFieldDescription: "dataset_field_desc_2", + }, + }, + }, + { + DatasetGroupName: "group_name_2", + DatasetFields: []DatasetFields{ + { + DatasetFieldID: 1082, + DatasetFieldName: "dataset_field_name_3", + DatasetFieldDescription: "dataset_field_desc_3", + }, + }, + }, + }, + }, + "validation error - empty request": { + request: GetDatasetFieldsRequest{}, + withError: ErrStructValidation, + }, + "validation error - invalid enum value": { + request: GetDatasetFieldsRequest{TemplateName: "invalidTemplateName"}, + withError: ErrStructValidation, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodGet, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.GetDatasetFields(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} diff --git a/pkg/datastream/stream.go b/pkg/datastream/stream.go new file mode 100644 index 
00000000..f3c9ed4f --- /dev/null +++ b/pkg/datastream/stream.go @@ -0,0 +1,447 @@ +package datastream + +import ( + "context" + "errors" + "fmt" + validation "github.com/go-ozzo/ozzo-validation/v4" + "net/http" + "net/url" + "strconv" +) + +type ( + // Stream is a ds stream operations API interface + Stream interface { + // CreateStream creates a stream + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#poststreams + CreateStream(context.Context, CreateStreamRequest) (*StreamUpdate, error) + + // GetStream gets stream details + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#getstream + GetStream(context.Context, GetStreamRequest) (*DetailedStreamVersion, error) + + // UpdateStream updates a stream + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#putstream + UpdateStream(context.Context, UpdateStreamRequest) (*StreamUpdate, error) + + // DeleteStream deletes a stream + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#deletestream + DeleteStream(context.Context, DeleteStreamRequest) (*DeleteStreamResponse, error) + } + + // DetailedStreamVersion is returned from GetStream + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#detailedstreamversion + DetailedStreamVersion struct { + ActivationStatus ActivationStatus `json:"activationStatus"` + Config Config `json:"config"` + Connectors []ConnectorDetails `json:"connectors"` + ContractID string `json:"contractId"` + CreatedBy string `json:"createdBy"` + CreatedDate string `json:"createdDate"` + Datasets []DataSets `json:"datasets"` + EmailIDs string `json:"emailIds"` + Errors []Errors `json:"errors"` + GroupID int `json:"groupId"` + GroupName string `json:"groupName"` + ModifiedBy string `json:"modifiedBy"` + ModifiedDate string `json:"modifiedDate"` + ProductID string `json:"productId"` + ProductName string `json:"productName"` + Properties []Property `json:"properties"` + StreamID int64 `json:"streamId"` + StreamName string `json:"streamName"` + StreamType StreamType `json:"streamType"` + StreamVersionID int64 `json:"streamVersionId"` + TemplateName TemplateName `json:"templateName"` + } + + // ConnectorDetails provides detailed information about the connector’s configuration in the stream + ConnectorDetails struct { + AuthenticationType AuthenticationType `json:"authenticationType"` + ConnectorID int `json:"connectorId"` + CompressLogs bool `json:"compressLogs"` + ConnectorName string `json:"connectorName"` + ConnectorType ConnectorType `json:"connectorType"` + Path string `json:"path"` + URL string `json:"url"` + Endpoint string `json:"endpoint"` + ServiceAccountName string `json:"serviceAccountName"` + ProjectID string `json:"projectId"` + Service string `json:"service"` + Bucket string `json:"bucket"` + Tags string `json:"tags"` + Region string `json:"region"` + AccountName string `json:"accountName"` + Namespace string `json:"namespace"` + ContainerName string `json:"containerName"` + Source string `json:"source"` + } + + // StreamConfiguration is used in CreateStream as a request body + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#streamconfigurationcf8299f3 + StreamConfiguration struct { + ActivateNow bool `json:"activateNow"` + Config Config `json:"config"` + Connectors []AbstractConnector `json:"connectors,omitempty"` + ContractID string `json:"contractId"` + DatasetFieldIDs []int `json:"datasetFieldIds"` + 
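+ // EmailIDs carries notification email addresses in a single string; the API appears to expect them comma-separated (assumption based on the field being a plain string rather than a slice).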
EmailIDs string `json:"emailIds,omitempty"` + GroupID *int `json:"groupId"` + PropertyIDs []int `json:"propertyIds"` + StreamName string `json:"streamName"` + StreamType StreamType `json:"streamType"` + TemplateName TemplateName `json:"templateName"` + } + + // Config of the configuration of log lines, names of the files sent to a destination, and delivery frequency for these files + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#e6140e78 + Config struct { + Delimiter *DelimiterType `json:"delimiter,omitempty"` + Format FormatType `json:"format,omitempty"` + Frequency Frequency `json:"frequency"` + UploadFilePrefix string `json:"uploadFilePrefix,omitempty"` + UploadFileSuffix string `json:"uploadFileSuffix,omitempty"` + } + + // The Frequency of collecting logs from each uploader and sending these logs to a destination. + Frequency struct { + TimeInSec TimeInSec `json:"timeInSec"` + } + + // DataSets is a list of fields selected from the associated template that the stream monitors in logs + DataSets struct { + DatasetFields []DatasetFields `json:"datasetFields"` + DatasetGroupDescription string `json:"datasetGroupDescription"` + DatasetGroupName string `json:"datasetGroupName"` + } + + // DatasetFields is list of data set fields selected from the associated template that the stream monitors in logs + DatasetFields struct { + DatasetFieldID int `json:"datasetFieldId"` + DatasetFieldDescription string `json:"datasetFieldDescription"` + DatasetFieldJsonKey string `json:"datasetFieldJsonKey"` + DatasetFieldName string `json:"datasetFieldName"` + Order int `json:"order"` + } + + // Errors associated to the stream + Errors struct { + Detail string `json:"detail"` + Title string `json:"title"` + Type string `json:"type"` + } + + // Property identifies the properties monitored in the stream. 
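+ // These entries are returned by GetProperties and GetPropertiesByGroup, and also appear in DetailedStreamVersion.Properties.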
+ Property struct { + Hostnames []string `json:"hostnames"` + ProductID string `json:"productId"` + ProductName string `json:"productName"` + PropertyID int `json:"propertyId"` + PropertyName string `json:"propertyName"` + } + + // ActivationStatus is used to create an enum of possible ActivationStatus values + ActivationStatus string + + // AbstractConnector is an interface for all Connector types + AbstractConnector interface { + SetConnectorType() + Validate() error + } + + // DelimiterType enum + DelimiterType string + + // FormatType enum + FormatType string + + // TemplateName enum + TemplateName string + + // StreamType enum + StreamType string + + // TimeInSec enum + TimeInSec int + + // CreateStreamRequest is passed to CreateStream + CreateStreamRequest struct { + StreamConfiguration StreamConfiguration + } + + // GetStreamRequest is passed to GetStream + GetStreamRequest struct { + StreamID int64 + Version *int64 + } + + // UpdateStreamRequest is passed to UpdateStream + UpdateStreamRequest struct { + StreamID int64 + StreamConfiguration StreamConfiguration + } + + // StreamUpdate contains information about stream ID and version + StreamUpdate struct { + StreamVersionKey StreamVersionKey `json:"streamVersionKey"` + } + + // StreamVersionKey contains information about stream ID and version + StreamVersionKey struct { + StreamID int64 `json:"streamId"` + StreamVersionID int64 `json:"streamVersionId"` + } + + // DeleteStreamRequest is passed to DeleteStream + DeleteStreamRequest struct { + StreamID int64 + } + + // DeleteStreamResponse is returned from DeleteStream + DeleteStreamResponse struct { + Message string `json:"message"` + } +) + +const ( + // ActivationStatusActivated const + ActivationStatusActivated ActivationStatus = "ACTIVATED" + // ActivationStatusDeactivated const + ActivationStatusDeactivated ActivationStatus = "DEACTIVATED" + // ActivationStatusActivating const + ActivationStatusActivating ActivationStatus = "ACTIVATING" + // ActivationStatusDeactivating const state + ActivationStatusDeactivating ActivationStatus = "DEACTIVATING" + // ActivationStatusInactive const + ActivationStatusInactive ActivationStatus = "INACTIVE" + + // StreamTypeRawLogs const + StreamTypeRawLogs StreamType = "RAW_LOGS" + + // TemplateNameEdgeLogs const + TemplateNameEdgeLogs TemplateName = "EDGE_LOGS" + + // DelimiterTypeSpace const + DelimiterTypeSpace DelimiterType = "SPACE" + + // FormatTypeStructured const + FormatTypeStructured FormatType = "STRUCTURED" + // FormatTypeJson const + FormatTypeJson FormatType = "JSON" + + // TimeInSec30 const + TimeInSec30 TimeInSec = 30 + // TimeInSec60 const + TimeInSec60 TimeInSec = 60 +) + +// Validate validates CreateStreamRequest +func (r CreateStreamRequest) Validate() error { + return validation.Errors{ + "StreamConfiguration.Config": validation.Validate(r.StreamConfiguration.Config, validation.Required), + "StreamConfiguration.Config.Delimiter": validation.Validate(r.StreamConfiguration.Config.Delimiter, validation.When(r.StreamConfiguration.Config.Format == FormatTypeStructured, validation.Required, validation.In(DelimiterTypeSpace)), validation.When(r.StreamConfiguration.Config.Format == FormatTypeJson, validation.Nil)), + "StreamConfiguration.Config.Format": validation.Validate(r.StreamConfiguration.Config.Format, validation.Required, validation.In(FormatTypeStructured, FormatTypeJson), validation.When(r.StreamConfiguration.Config.Delimiter != nil, validation.Required, validation.In(FormatTypeStructured))), + 
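+ // The two rules above tie Format and Delimiter together: STRUCTURED requires the SPACE delimiter, while JSON must leave Delimiter nil.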
"StreamConfiguration.Config.Frequency": validation.Validate(r.StreamConfiguration.Config.Frequency, validation.Required), + "StreamConfiguration.Config.Frequency.TimeInSec": validation.Validate(r.StreamConfiguration.Config.Frequency.TimeInSec, validation.Required, validation.In(TimeInSec30, TimeInSec60)), + "StreamConfiguration.Connectors": validation.Validate(r.StreamConfiguration.Connectors, validation.Required, validation.Length(1, 1)), + "StreamConfiguration.ContractId": validation.Validate(r.StreamConfiguration.ContractID, validation.Required), + "StreamConfiguration.DatasetFields": validation.Validate(r.StreamConfiguration.DatasetFieldIDs, validation.Required), + "StreamConfiguration.GroupID": validation.Validate(r.StreamConfiguration.GroupID, validation.Required), + "StreamConfiguration.PropertyIDs": validation.Validate(r.StreamConfiguration.PropertyIDs, validation.Required), + "StreamConfiguration.StreamName": validation.Validate(r.StreamConfiguration.StreamName, validation.Required), + "StreamConfiguration.StreamType": validation.Validate(r.StreamConfiguration.StreamType, validation.Required, validation.In(StreamTypeRawLogs)), + "StreamConfiguration.TemplateName": validation.Validate(r.StreamConfiguration.TemplateName, validation.Required, validation.In(TemplateNameEdgeLogs)), + }.Filter() +} + +// Validate validates GetStreamRequest +func (r GetStreamRequest) Validate() error { + return validation.Errors{ + "streamId": validation.Validate(r.StreamID, validation.Required), + }.Filter() +} + +// Validate validates UpdateStreamRequest +func (r UpdateStreamRequest) Validate() error { + return validation.Errors{ + "StreamConfiguration.Config": validation.Validate(r.StreamConfiguration.Config, validation.Required), + "StreamConfiguration.Config.Delimiter": validation.Validate(r.StreamConfiguration.Config.Delimiter, validation.When(r.StreamConfiguration.Config.Format == FormatTypeStructured, validation.Required, validation.In(DelimiterTypeSpace)), validation.When(r.StreamConfiguration.Config.Format == FormatTypeJson, validation.Nil)), + "StreamConfiguration.Config.Format": validation.Validate(r.StreamConfiguration.Config.Format, validation.In(FormatTypeStructured, FormatTypeJson)), + "StreamConfiguration.Config.Frequency": validation.Validate(r.StreamConfiguration.Config.Frequency, validation.Required), + "StreamConfiguration.Config.Frequency.TimeInSec": validation.Validate(r.StreamConfiguration.Config.Frequency.TimeInSec, validation.Required, validation.In(TimeInSec30, TimeInSec60)), + "StreamConfiguration.Connectors": validation.Validate(r.StreamConfiguration.Connectors, validation.When(r.StreamConfiguration.Connectors != nil, validation.Length(1, 1))), + "StreamConfiguration.ContractId": validation.Validate(r.StreamConfiguration.ContractID, validation.Required), + "StreamConfiguration.DatasetFields": validation.Validate(r.StreamConfiguration.DatasetFieldIDs, validation.Required), + "StreamConfiguration.GroupID": validation.Validate(r.StreamConfiguration.GroupID, validation.Nil), + "StreamConfiguration.PropertyIDs": validation.Validate(r.StreamConfiguration.PropertyIDs, validation.Required), + "StreamConfiguration.StreamName": validation.Validate(r.StreamConfiguration.StreamName, validation.Required), + "StreamConfiguration.StreamType": validation.Validate(r.StreamConfiguration.StreamType, validation.Required, validation.In(StreamTypeRawLogs)), + "StreamConfiguration.TemplateName": validation.Validate(r.StreamConfiguration.TemplateName, validation.Required, 
validation.In(TemplateNameEdgeLogs)), + }.Filter() +} + +// Validate validates DeleteStreamRequest +func (r DeleteStreamRequest) Validate() error { + return validation.Errors{ + "streamId": validation.Validate(r.StreamID, validation.Required), + }.Filter() +} + +var ( + // ErrCreateStream represents error when creating stream fails + ErrCreateStream = errors.New("creating stream") + // ErrGetStream represents error when fetching stream fails + ErrGetStream = errors.New("fetching stream information") + // ErrUpdateStream represents error when updating stream fails + ErrUpdateStream = errors.New("updating stream") + // ErrDeleteStream represents error when deleting stream fails + ErrDeleteStream = errors.New("deleting stream") +) + +func (d *ds) CreateStream(ctx context.Context, params CreateStreamRequest) (*StreamUpdate, error) { + logger := d.Log(ctx) + logger.Debug("CreateStream") + + setConnectorTypes(¶ms.StreamConfiguration) + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrCreateStream, ErrStructValidation, err) + } + + uri := "/datastream-config-api/v1/log/streams" + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, uri, nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrCreateStream, err) + } + + var rval StreamUpdate + resp, err := d.Exec(req, &rval, params.StreamConfiguration) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrCreateStream, err) + } + + if resp.StatusCode != http.StatusAccepted { + return nil, fmt.Errorf("%s: %w", ErrCreateStream, d.Error(resp)) + } + + return &rval, nil +} + +func (d *ds) GetStream(ctx context.Context, params GetStreamRequest) (*DetailedStreamVersion, error) { + logger := d.Log(ctx) + logger.Debug("GetStream") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrGetStream, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d", + params.StreamID)) + if err != nil { + return nil, fmt.Errorf("%w: failed to parse url: %s", ErrGetStream, err) + } + + if params.Version != nil { + query := uri.Query() + query.Add("version", strconv.FormatInt(*params.Version, 10)) + uri.RawQuery = query.Encode() + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrGetStream, err) + } + + var rval DetailedStreamVersion + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrGetStream, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrGetStream, d.Error(resp)) + } + + return &rval, nil +} + +func (d *ds) UpdateStream(ctx context.Context, params UpdateStreamRequest) (*StreamUpdate, error) { + logger := d.Log(ctx) + logger.Debug("UpdateStream") + + setConnectorTypes(¶ms.StreamConfiguration) + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrUpdateStream, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d", + params.StreamID), + ) + if err != nil { + return nil, fmt.Errorf("%w: failed to parse url: %s", ErrUpdateStream, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPut, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrUpdateStream, err) + } + + var rval StreamUpdate + resp, err := d.Exec(req, &rval, 
params.StreamConfiguration) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrUpdateStream, err) + } + + if resp.StatusCode != http.StatusAccepted { + return nil, fmt.Errorf("%s: %w", ErrUpdateStream, d.Error(resp)) + } + + return &rval, nil +} + +func (d *ds) DeleteStream(ctx context.Context, params DeleteStreamRequest) (*DeleteStreamResponse, error) { + logger := d.Log(ctx) + logger.Debug("DeleteStream") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrDeleteStream, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d", + params.StreamID), + ) + if err != nil { + return nil, fmt.Errorf("%w: failed to parse url: %s", ErrDeleteStream, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrDeleteStream, err) + } + + var rval DeleteStreamResponse + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrDeleteStream, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrDeleteStream, d.Error(resp)) + } + + return &rval, nil +} + +func setConnectorTypes(configuration *StreamConfiguration) { + for _, connector := range configuration.Connectors { + connector.SetConnectorType() + } +} diff --git a/pkg/datastream/stream_activation.go b/pkg/datastream/stream_activation.go new file mode 100644 index 00000000..df912e13 --- /dev/null +++ b/pkg/datastream/stream_activation.go @@ -0,0 +1,187 @@ +package datastream + +import ( + "context" + "errors" + "fmt" + validation "github.com/go-ozzo/ozzo-validation/v4" + "net/http" + "net/url" +) + +type ( + // Activation is a ds stream activations API interface + Activation interface { + // ActivateStream activates stream with given ID + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#putactivate + ActivateStream(context.Context, ActivateStreamRequest) (*ActivateStreamResponse, error) + + // DeactivateStream deactivates stream with given ID + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#putdeactivate + DeactivateStream(context.Context, DeactivateStreamRequest) (*DeactivateStreamResponse, error) + + // GetActivationHistory returns a history of activation status changes for all versions of a stream + // + // See: https://developer.akamai.com/api/core_features/datastream2_config/v1.html#getactivationhistory + GetActivationHistory(context.Context, GetActivationHistoryRequest) ([]ActivationHistoryEntry, error) + } + + // ActivateStreamResponse contains response body returned after successful stream activation + ActivateStreamResponse struct { + StreamVersionKey StreamVersionKey `json:"streamVersionKey"` + } + + // ActivationHistoryEntry contains single ActivationHistory item + ActivationHistoryEntry struct { + CreatedBy string `json:"createdBy"` + CreatedDate string `json:"createdDate"` + IsActive bool `json:"isActive"` + StreamID int64 `json:"streamId"` + StreamVersionID int64 `json:"streamVersionId"` + } + + // DeactivateStreamResponse contains response body returned after successful stream activation + DeactivateStreamResponse ActivateStreamResponse + + // ActivateStreamRequest contains parameters necessary to send a ActivateStream request + ActivateStreamRequest struct { + StreamID int64 + } + + // DeactivateStreamRequest contains parameters necessary to send a 
DeactivateStream request + DeactivateStreamRequest ActivateStreamRequest + + // GetActivationHistoryRequest contains parameters necessary to send a GetActivationHistory request + GetActivationHistoryRequest ActivateStreamRequest +) + +// Validate performs validation on ActivateStreamRequest +func (r ActivateStreamRequest) Validate() error { + return validation.Errors{ + "streamId": validation.Validate(r.StreamID, validation.Required), + }.Filter() +} + +// Validate performs validation on DeactivateStreamRequest +func (r DeactivateStreamRequest) Validate() error { + return validation.Errors{ + "streamId": validation.Validate(r.StreamID, validation.Required), + }.Filter() +} + +// Validate performs validation on DeactivateStreamRequest +func (r GetActivationHistoryRequest) Validate() error { + return validation.Errors{ + "streamId": validation.Validate(r.StreamID, validation.Required), + }.Filter() +} + +var ( + // ErrActivateStream is returned when ActivateStream fails + ErrActivateStream = errors.New("activate stream") + // ErrDeactivateStream is returned when DeactivateStream fails + ErrDeactivateStream = errors.New("deactivate stream") + // ErrGetActivationHistory is returned when DeactivateStream fails + ErrGetActivationHistory = errors.New("view activation history") +) + +func (d *ds) ActivateStream(ctx context.Context, params ActivateStreamRequest) (*ActivateStreamResponse, error) { + logger := d.Log(ctx) + logger.Debug("ActivateStream") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrActivateStream, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d/activate", + params.StreamID)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrActivateStream, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPut, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrActivateStream, err) + } + + var rval ActivateStreamResponse + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrActivateStream, err) + } + + if resp.StatusCode != http.StatusAccepted { + return nil, fmt.Errorf("%s: %w", ErrActivateStream, d.Error(resp)) + } + + return &rval, nil +} + +func (d *ds) DeactivateStream(ctx context.Context, params DeactivateStreamRequest) (*DeactivateStreamResponse, error) { + logger := d.Log(ctx) + logger.Debug("DeactivateStream") + + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrDeactivateStream, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d/deactivate", + params.StreamID)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrDeactivateStream, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPut, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrDeactivateStream, err) + } + + var rval DeactivateStreamResponse + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrDeactivateStream, err) + } + + if resp.StatusCode != http.StatusAccepted { + return nil, fmt.Errorf("%s: %w", ErrDeactivateStream, d.Error(resp)) + } + + return &rval, nil +} + +func (d *ds) GetActivationHistory(ctx context.Context, params GetActivationHistoryRequest) ([]ActivationHistoryEntry, error) { + logger := d.Log(ctx) + logger.Debug("GetActivationHistory") + + if err 
:= params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrGetActivationHistory, ErrStructValidation, err) + } + + uri, err := url.Parse(fmt.Sprintf( + "/datastream-config-api/v1/log/streams/%d/activationHistory", + params.StreamID)) + if err != nil { + return nil, fmt.Errorf("%w: parsing URL: %s", ErrGetActivationHistory, err) + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, uri.String(), nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrGetActivationHistory, err) + } + + var rval []ActivationHistoryEntry + resp, err := d.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrGetActivationHistory, err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s: %w", ErrGetActivationHistory, d.Error(resp)) + } + + return rval, nil +} diff --git a/pkg/datastream/stream_activation_test.go b/pkg/datastream/stream_activation_test.go new file mode 100644 index 00000000..60f02d8f --- /dev/null +++ b/pkg/datastream/stream_activation_test.go @@ -0,0 +1,298 @@ +package datastream + +import ( + "context" + "errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "net/http" + "net/http/httptest" + "testing" +) + +func TestDs_ActivateStream(t *testing.T) { + tests := map[string]struct { + request ActivateStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *ActivateStreamResponse + withError error + }{ + "202 accepted": { + request: ActivateStreamRequest{StreamID: 3}, + responseStatus: http.StatusAccepted, + responseBody: ` +{ + "streamVersionKey": { + "streamId": 1, + "streamVersionId": 3 + } +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/3/activate", + expectedResponse: &ActivateStreamResponse{ + StreamVersionKey: StreamVersionKey{ + StreamID: 1, + StreamVersionID: 3, + }, + }, + }, + "validation error": { + request: ActivateStreamRequest{}, + withError: ErrStructValidation, + }, + "400 bad request": { + request: ActivateStreamRequest{StreamID: 123}, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "", + "instance": "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/123/activate", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Instance: "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. 
Please provide valid stream.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodPut, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.ActivateStream(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_DeactivateStream(t *testing.T) { + tests := map[string]struct { + request DeactivateStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *DeactivateStreamResponse + withError error + }{ + "202 accepted": { + request: DeactivateStreamRequest{StreamID: 3}, + responseStatus: http.StatusAccepted, + responseBody: ` +{ + "streamVersionKey": { + "streamId": 1, + "streamVersionId": 3 + } +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/3/deactivate", + expectedResponse: &DeactivateStreamResponse{ + StreamVersionKey: StreamVersionKey{ + StreamID: 1, + StreamVersionID: 3, + }, + }, + }, + "validation error": { + request: DeactivateStreamRequest{}, + withError: ErrStructValidation, + }, + "400 bad request": { + request: DeactivateStreamRequest{StreamID: 123}, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "", + "instance": "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/123/deactivate", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Instance: "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. 
Please provide valid stream.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodPut, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.DeactivateStream(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_GetActivationHistory(t *testing.T) { + tests := map[string]struct { + request GetActivationHistoryRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse []ActivationHistoryEntry + withError error + }{ + "200 OK": { + request: GetActivationHistoryRequest{StreamID: 3}, + responseStatus: http.StatusOK, + responseBody: ` +[ + { + "streamId": 7050, + "streamVersionId": 2, + "createdBy": "user1", + "createdDate": "16-01-2020 11:07:12 GMT", + "isActive": false + }, + { + "streamId": 7050, + "streamVersionId": 2, + "createdBy": "user2", + "createdDate": "16-01-2020 09:31:02 GMT", + "isActive": true + } +] +`, + expectedPath: "/datastream-config-api/v1/log/streams/3/activationHistory", + expectedResponse: []ActivationHistoryEntry{ + { + CreatedBy: "user1", + CreatedDate: "16-01-2020 11:07:12 GMT", + IsActive: false, + StreamID: 7050, + StreamVersionID: 2, + }, + { + CreatedBy: "user2", + CreatedDate: "16-01-2020 09:31:02 GMT", + IsActive: true, + StreamID: 7050, + StreamVersionID: 2, + }, + }, + }, + "validation error": { + request: GetActivationHistoryRequest{}, + withError: ErrStructValidation, + }, + "400 bad request": { + request: GetActivationHistoryRequest{StreamID: 123}, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "", + "instance": "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/123/activationHistory", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Instance: "df22bc0f-ca8d-4bdb-afea-ffdeef819e22", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. 
Please provide valid stream.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodGet, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.GetActivationHistory(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} diff --git a/pkg/datastream/stream_test.go b/pkg/datastream/stream_test.go new file mode 100644 index 00000000..8cf8a346 --- /dev/null +++ b/pkg/datastream/stream_test.go @@ -0,0 +1,1028 @@ +package datastream + +import ( + "context" + "encoding/json" + "errors" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/datastream/tools" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "net/http" + "net/http/httptest" + "reflect" + "testing" +) + +func TestDs_GetStream(t *testing.T) { + tests := map[string]struct { + request GetStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *DetailedStreamVersion + withError func(*testing.T, error) + }{ + "200 OK": { + request: GetStreamRequest{ + StreamID: 1, + }, + responseStatus: http.StatusOK, + responseBody: ` +{ + "streamId":1, + "streamVersionId":2, + "streamName":"ds2-sample-name", + "datasets":[ + { + "datasetGroupName":"group_name_1", + "datasetGroupDescription":"group_desc_1", + "datasetFields":[ + { + "datasetFieldId":1000, + "datasetFieldName":"dataset_field_name_1", + "datasetFieldDescription":"dataset_field_desc_1", + "order":0 + }, + { + "datasetFieldId":1002, + "datasetFieldName":"dataset_field_name_2", + "datasetFieldDescription":"dataset_field_desc_2", + "order":1 + } + ] + }, + { + "datasetGroupName":"group_name_2", + "datasetFields":[ + { + "datasetFieldId":1082, + "datasetFieldName":"dataset_field_name_3", + "datasetFieldDescription":"dataset_field_desc_3", + "order":32 + } + ] + } + ], + "connectors":[ + { + "connectorType":"S3", + "connectorId":13174, + "bucket":"amzdemods2", + "path":"/sample_path", + "compressLogs":true, + "connectorName":"aws_ds2_amz_demo", + "region":"us-east-1" + } + ], + "productName":"Adaptive Media Delivery", + "productId":"Adaptive_Media_Delivery", + "templateName":"EDGE_LOGS", + "config":{ + "delimiter":"SPACE", + "uploadFilePrefix":"ak", + "uploadFileSuffix":"ds", + "frequency":{ + "timeInSec":30 + }, + "useStaticPublicIP":false, + "format":"STRUCTURED" + }, + "groupId":171647, + "groupName":"Akamai Data Delivery-P-132NZF456", + "contractId":"P-132NZF456", + "properties":[ + { + "propertyId":678154, + "propertyName":"amz.demo.com" + } + ], + "streamType":"RAW_LOGS", + "activationStatus":"ACTIVATED", + "createdBy":"sample_username", + "createdDate":"08-07-2021 06:00:27 GMT", + "modifiedBy":"sample_username2", + "modifiedDate":"08-07-2021 16:00:27 GMT", + "emailIds":"sample_username@akamai.com" +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/1", + expectedResponse: &DetailedStreamVersion{ + ActivationStatus: ActivationStatusActivated, + Config: Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeStructured, + Frequency: 
Frequency{ + TimeInSec: TimeInSec30, + }, + UploadFilePrefix: "ak", + UploadFileSuffix: "ds", + }, + Connectors: []ConnectorDetails{ + { + ConnectorID: 13174, + CompressLogs: true, + ConnectorName: "aws_ds2_amz_demo", + ConnectorType: ConnectorTypeS3, + Path: "/sample_path", + Bucket: "amzdemods2", + Region: "us-east-1", + }, + }, + ContractID: "P-132NZF456", + CreatedBy: "sample_username", + CreatedDate: "08-07-2021 06:00:27 GMT", + Datasets: []DataSets{ + { + DatasetGroupName: "group_name_1", + DatasetGroupDescription: "group_desc_1", + DatasetFields: []DatasetFields{ + { + DatasetFieldID: 1000, + DatasetFieldName: "dataset_field_name_1", + DatasetFieldDescription: "dataset_field_desc_1", + Order: 0, + }, + { + DatasetFieldID: 1002, + DatasetFieldName: "dataset_field_name_2", + DatasetFieldDescription: "dataset_field_desc_2", + Order: 1, + }, + }, + }, + { + DatasetGroupName: "group_name_2", + DatasetFields: []DatasetFields{ + { + DatasetFieldID: 1082, + DatasetFieldName: "dataset_field_name_3", + DatasetFieldDescription: "dataset_field_desc_3", + Order: 32, + }, + }, + }, + }, + EmailIDs: "sample_username@akamai.com", + GroupID: 171647, + GroupName: "Akamai Data Delivery-P-132NZF456", + ModifiedBy: "sample_username2", + ModifiedDate: "08-07-2021 16:00:27 GMT", + ProductID: "Adaptive_Media_Delivery", + ProductName: "Adaptive Media Delivery", + Properties: []Property{ + { + PropertyID: 678154, + PropertyName: "amz.demo.com", + }, + }, + StreamID: 1, + StreamName: "ds2-sample-name", + StreamType: StreamTypeRawLogs, + StreamVersionID: 2, + TemplateName: TemplateNameEdgeLogs, + }, + }, + "validation error": { + request: GetStreamRequest{}, + withError: func(t *testing.T, err error) { + want := ErrStructValidation + assert.True(t, errors.Is(err, want), "want: %s; got: %s", want, err) + }, + }, + "400 bad request": { + request: GetStreamRequest{StreamID: 12}, + responseStatus: http.StatusBadRequest, + expectedPath: "/datastream-config-api/v1/log/streams/12", + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "bad request", + "instance": "82b67b97-d98d-4bee-ac1e-ef6eaf7cac82", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + withError: func(t *testing.T, err error) { + want := &Error{ + Type: "bad-request", + Title: "Bad Request", + Detail: "bad request", + Instance: "82b67b97-d98d-4bee-ac1e-ef6eaf7cac82", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. 
Please provide valid stream.", + }, + }, + } + assert.True(t, errors.Is(err, want), "want: %s; got: %s", want, err) + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodGet, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.GetStream(context.Background(), test.request) + if test.withError != nil { + test.withError(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_CreateStream(t *testing.T) { + createStreamRequest := CreateStreamRequest{ + StreamConfiguration: StreamConfiguration{ + ActivateNow: true, + Config: Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeStructured, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + }, + Connectors: []AbstractConnector{ + &S3Connector{ + Path: "log/edgelogs/{ %Y/%m/%d }", + ConnectorName: "S3Destination", + Bucket: "datastream.akamai.com", + Region: "ap-south-1", + AccessKey: "AKIA6DK7TDQLVGZ3TYP1", + SecretAccessKey: "1T2ll1H4dXWx5itGhpc7FlSbvvOvky1098nTtEMg", + }, + }, + ContractID: "2-FGHIJ", + DatasetFieldIDs: []int{ + 1002, 1005, 1006, 1008, 1009, 1011, 1012, + 1013, 1014, 1015, 1016, 1017, 1101, + }, + EmailIDs: "useremail@akamai.com", + GroupID: tools.IntPtr(21484), + PropertyIDs: []int{123123, 123123}, + StreamName: "TestStream", + StreamType: StreamTypeRawLogs, + TemplateName: TemplateNameEdgeLogs, + }, + } + + modifyRequest := func(r CreateStreamRequest, opt func(r *CreateStreamRequest)) CreateStreamRequest { + opt(&r) + return r + } + + tests := map[string]struct { + request CreateStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedBody string + expectedResponse *StreamUpdate + withError error + }{ + "202 Accepted": { + request: createStreamRequest, + responseStatus: http.StatusAccepted, + responseBody: ` +{ + "streamVersionKey": { + "streamId": 7050, + "streamVersionId": 1 + } +}`, + expectedPath: "/datastream-config-api/v1/log/streams", + expectedResponse: &StreamUpdate{ + StreamVersionKey: StreamVersionKey{ + StreamID: 7050, + StreamVersionID: 1, + }, + }, + expectedBody: ` +{ + "streamName": "TestStream", + "activateNow": true, + "streamType": "RAW_LOGS", + "templateName": "EDGE_LOGS", + "groupId": 21484, + "contractId": "2-FGHIJ", + "emailIds": "useremail@akamai.com", + "propertyIds": [ + 123123, + 123123 + ], + "datasetFieldIds": [ + 1002, + 1005, + 1006, + 1008, + 1009, + 1011, + 1012, + 1013, + 1014, + 1015, + 1016, + 1017, + 1101 + ], + "config": { + "uploadFilePrefix": "logs", + "uploadFileSuffix": "ak", + "delimiter": "SPACE", + "format": "STRUCTURED", + "frequency": { + "timeInSec": 30 + } + }, + "connectors": [ + { + "path": "log/edgelogs/{ %Y/%m/%d }", + "connectorName": "S3Destination", + "bucket": "datastream.akamai.com", + "region": "ap-south-1", + "accessKey": "AKIA6DK7TDQLVGZ3TYP1", + "secretAccessKey": "1T2ll1H4dXWx5itGhpc7FlSbvvOvky1098nTtEMg", + "connectorType": "S3" + } + ] +}`, + }, + "validation error - empty request": { + request: CreateStreamRequest{}, + withError: ErrStructValidation, + }, + "validation error - empty connectors list": { + request: 
modifyRequest(createStreamRequest, func(r *CreateStreamRequest) { + r.StreamConfiguration.Connectors = []AbstractConnector{} + }), + withError: ErrStructValidation, + }, + "validation error - delimiter with JSON format": { + request: modifyRequest(createStreamRequest, func(r *CreateStreamRequest) { + r.StreamConfiguration.Config = Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeJson, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + } + }), + withError: ErrStructValidation, + }, + "validation error - no delimiter with STRUCTURED format": { + request: modifyRequest(createStreamRequest, func(r *CreateStreamRequest) { + r.StreamConfiguration.Config = Config{ + Format: FormatTypeStructured, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + } + }), + withError: ErrStructValidation, + }, + "validation error - missing connector configuration fields": { + request: modifyRequest(createStreamRequest, func(r *CreateStreamRequest) { + r.StreamConfiguration.Connectors = []AbstractConnector{ + &S3Connector{ + Path: "log/edgelogs/{ %Y/%m/%d }", + ConnectorName: "S3Destination", + Bucket: "datastream.akamai.com", + Region: "ap-south-1", + }, + } + }), + withError: ErrStructValidation, + }, + "403 forbidden": { + request: createStreamRequest, + responseStatus: http.StatusForbidden, + responseBody: ` +{ + "type": "forbidden", + "title": "Forbidden", + "detail": "forbidden", + "instance": "72a7654e-3f95-454f-a174-104bc946be52", + "statusCode": 403, + "errors": [ + { + "type": "forbidden", + "title": "Forbidden", + "detail": "User is not having access for the group. Access denied, please contact support." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams", + withError: &Error{ + Type: "forbidden", + Title: "Forbidden", + Detail: "forbidden", + Instance: "72a7654e-3f95-454f-a174-104bc946be52", + StatusCode: http.StatusForbidden, + Errors: []RequestErrors{ + { + Type: "forbidden", + Title: "Forbidden", + Detail: "User is not having access for the group. Access denied, please contact support.", + }, + }, + }, + }, + "400 bad request": { + request: createStreamRequest, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "bad-request", + "instance": "d0d2497e-ed93-4685-b44c-93a8eb8f3dea", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "The credentials provided don’t give you write access to the bucket. Check your AWS credentials or bucket permissions in the S3 account and try again." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Detail: "bad-request", + Instance: "d0d2497e-ed93-4685-b44c-93a8eb8f3dea", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "The credentials provided don’t give you write access to the bucket. 
Check your AWS credentials or bucket permissions in the S3 account and try again.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodPost, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + + //check request body only if we aren't testing errors + if test.withError == nil { + var reqBody interface{} + err = json.NewDecoder(r.Body).Decode(&reqBody) + require.NoError(t, err, "Error while decoding request body") + + var expectedBody interface{} + err = json.Unmarshal([]byte(test.expectedBody), &expectedBody) + require.NoError(t, err, "Error while parsing expected body to JSON") + + assert.Equal(t, expectedBody, reqBody) + } + })) + client := mockAPIClient(t, mockServer) + result, err := client.CreateStream(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_UpdateStream(t *testing.T) { + updateRequest := UpdateStreamRequest{ + StreamID: 7050, + StreamConfiguration: StreamConfiguration{ + ActivateNow: true, + Config: Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: "STRUCTURED", + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + }, + Connectors: []AbstractConnector{}, + ContractID: "P-132NZF456", + DatasetFieldIDs: []int{1, 2, 3}, + EmailIDs: "test@aka.mai", + PropertyIDs: []int{123123, 123123}, + StreamName: "TestStream", + StreamType: "RAW_LOGS", + TemplateName: "EDGE_LOGS", + }, + } + + modifyRequest := func(r UpdateStreamRequest, opt func(r *UpdateStreamRequest)) UpdateStreamRequest { + opt(&r) + return r + } + + tests := map[string]struct { + request UpdateStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *StreamUpdate + withError error + }{ + "202 Accepted": { + request: updateRequest, + responseStatus: http.StatusAccepted, + responseBody: ` +{ + "streamVersionKey": { + "streamId": 7050, + "streamVersionId": 2 + } +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/7050", + expectedResponse: &StreamUpdate{ + StreamVersionKey: StreamVersionKey{ + StreamID: 7050, + StreamVersionID: 2, + }, + }, + }, + "validation error - empty request": { + request: UpdateStreamRequest{}, + withError: ErrStructValidation, + }, + "validation error - delimiter with JSON format": { + request: modifyRequest(updateRequest, func(r *UpdateStreamRequest) { + r.StreamConfiguration.Config = Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeJson, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + } + }), + withError: ErrStructValidation, + }, + "validation error - no delimiter with STRUCTURED format": { + request: modifyRequest(updateRequest, func(r *UpdateStreamRequest) { + r.StreamConfiguration.Config = Config{ + Format: FormatTypeStructured, + Frequency: Frequency{TimeInSec: TimeInSec60}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + } + }), + withError: ErrStructValidation, + }, + "validation error - groupId modification": { + request: modifyRequest(updateRequest, func(r 
*UpdateStreamRequest) { + r.StreamConfiguration.GroupID = tools.IntPtr(1337) + }), + withError: ErrStructValidation, + }, + "validation error - missing contractId": { + request: modifyRequest(updateRequest, func(r *UpdateStreamRequest) { + r.StreamConfiguration.ContractID = "" + }), + withError: ErrStructValidation, + }, + "400 bad request": { + request: updateRequest, + responseStatus: http.StatusBadRequest, + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "bad request", + "instance": "a42cc1e6-fea4-4e3a-91ce-9da9819e089a", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/7050", + withError: &Error{ + Type: "bad-request", + Title: "Bad Request", + Detail: "bad request", + Instance: "a42cc1e6-fea4-4e3a-91ce-9da9819e089a", + StatusCode: http.StatusBadRequest, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. Please provide valid stream.", + }, + }, + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodPut, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.UpdateStream(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_DeleteStream(t *testing.T) { + tests := map[string]struct { + request DeleteStreamRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *DeleteStreamResponse + withError func(*testing.T, error) + }{ + "200 OK": { + request: DeleteStreamRequest{ + StreamID: 1, + }, + responseStatus: http.StatusOK, + responseBody: ` +{ + "message": "Success" +} +`, + expectedPath: "/datastream-config-api/v1/log/streams/1", + expectedResponse: &DeleteStreamResponse{ + Message: "Success", + }, + }, + "validation error": { + request: DeleteStreamRequest{}, + withError: func(t *testing.T, err error) { + want := ErrStructValidation + assert.True(t, errors.Is(err, want), "want: %s; got: %s", want, err) + }, + }, + "400 bad request": { + request: DeleteStreamRequest{StreamID: 12}, + responseStatus: http.StatusBadRequest, + expectedPath: "/datastream-config-api/v1/log/streams/12", + responseBody: ` +{ + "type": "bad-request", + "title": "Bad Request", + "detail": "bad request", + "instance": "82b67b97-d98d-4bee-ac1e-ef6eaf7cac82", + "statusCode": 400, + "errors": [ + { + "type": "bad-request", + "title": "Bad Request", + "detail": "Stream does not exist. Please provide valid stream." + } + ] +} +`, + withError: func(t *testing.T, err error) { + want := &Error{ + Type: "bad-request", + Title: "Bad Request", + Detail: "bad request", + Instance: "82b67b97-d98d-4bee-ac1e-ef6eaf7cac82", + StatusCode: 400, + Errors: []RequestErrors{ + { + Type: "bad-request", + Title: "Bad Request", + Detail: "Stream does not exist. 
Please provide valid stream.", + }, + }, + } + assert.True(t, errors.Is(err, want), "want: %s; got: %s", want, err) + }, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodDelete, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.DeleteStream(context.Background(), test.request) + if test.withError != nil { + test.withError(t, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} + +func TestDs_Connectors(t *testing.T) { + tests := map[string]struct { + connector AbstractConnector + expectedJSON string + }{ + "S3Connector": { + connector: &S3Connector{ + Path: "testPath", + ConnectorName: "testConnectorName", + Bucket: "testBucket", + Region: "testRegion", + AccessKey: "testAccessKey", + SecretAccessKey: "testSecretKey", + }, + expectedJSON: ` +[{ + "path": "testPath", + "connectorName": "testConnectorName", + "bucket": "testBucket", + "region": "testRegion", + "accessKey": "testAccessKey", + "secretAccessKey": "testSecretKey", + "connectorType": "S3" +}] +`, + }, + "AzureConnector": { + connector: &AzureConnector{ + AccountName: "testAccountName", + AccessKey: "testAccessKey", + ConnectorName: "testConnectorName", + ContainerName: "testContainerName", + Path: "testPath", + }, + expectedJSON: ` +[{ + "accountName": "testAccountName", + "accessKey": "testAccessKey", + "connectorName": "testConnectorName", + "containerName": "testContainerName", + "path": "testPath", + "connectorType": "AZURE" +}] +`, + }, + "DatadogConnector": { + connector: &DatadogConnector{ + Service: "testService", + AuthToken: "testAuthToken", + ConnectorName: "testConnectorName", + URL: "testURL", + Source: "testSource", + Tags: "testTags", + CompressLogs: false, + }, + expectedJSON: ` +[{ + "service": "testService", + "authToken": "testAuthToken", + "connectorName": "testConnectorName", + "url": "testURL", + "source": "testSource", + "tags": "testTags", + "connectorType": "DATADOG", + "compressLogs": false +}] +`, + }, + "SplunkConnector": { + connector: &SplunkConnector{ + ConnectorName: "testConnectorName", + URL: "testURL", + EventCollectorToken: "testEventCollector", + CompressLogs: true, + }, + expectedJSON: ` +[{ + "connectorName": "testConnectorName", + "url": "testURL", + "eventCollectorToken": "testEventCollector", + "connectorType": "SPLUNK", + "compressLogs": true +}] +`, + }, + "GCSConnector": { + connector: &GCSConnector{ + ConnectorName: "testConnectorName", + Bucket: "testBucket", + Path: "testPath", + ProjectID: "testProjectID", + ServiceAccountName: "testServiceAccountName", + PrivateKey: "testPrivateKey", + }, + expectedJSON: ` +[{ + "connectorType": "GCS", + "connectorName": "testConnectorName", + "bucket": "testBucket", + "path": "testPath", + "projectId": "testProjectID", + "serviceAccountName": "testServiceAccountName", + "privateKey": "testPrivateKey" +}] +`, + }, + "CustomHTTPSConnector": { + connector: &CustomHTTPSConnector{ + AuthenticationType: AuthenticationTypeNone, + ConnectorName: "testConnectorName", + URL: "testURL", + UserName: "testUserName", + Password: "testPassword", + CompressLogs: true, + }, + expectedJSON: ` +[{ + "authenticationType": "NONE", + "connectorName": 
"testConnectorName", + "url": "testURL", + "userName": "testUserName", + "password": "testPassword", + "connectorType": "HTTPS", + "compressLogs": true +}] +`, + }, + "SumoLogicConnector": { + connector: &SumoLogicConnector{ + ConnectorName: "testConnectorName", + Endpoint: "testEndpoint", + CollectorCode: "testCollectorCode", + CompressLogs: true, + }, + expectedJSON: ` +[{ + "connectorType": "SUMO_LOGIC", + "connectorName": "testConnectorName", + "endpoint": "testEndpoint", + "collectorCode": "testCollectorCode", + "compressLogs": true +}] +`, + }, + "OracleCloudStorageConnector": { + connector: &OracleCloudStorageConnector{ + AccessKey: "testAccessKey", + ConnectorName: "testConnectorName", + Path: "testPath", + Bucket: "testBucket", + Region: "testRegion", + SecretAccessKey: "testSecretAccessKey", + Namespace: "testNamespace", + }, + expectedJSON: ` +[{ + "accessKey": "testAccessKey", + "connectorName": "testConnectorName", + "path": "testPath", + "bucket": "testBucket", + "region": "testRegion", + "secretAccessKey": "testSecretAccessKey", + "connectorType": "Oracle_Cloud_Storage", + "namespace": "testNamespace" +}] +`, + }, + } + + request := CreateStreamRequest{ + StreamConfiguration: StreamConfiguration{ + ActivateNow: true, + Config: Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeStructured, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + }, + Connectors: nil, + ContractID: "P-132NZF456", + DatasetFieldIDs: []int{1, 2, 3}, + EmailIDs: "test@aka.mai", + GroupID: tools.IntPtr(123231), + PropertyIDs: []int{123123, 123123}, + StreamName: "TestStream", + StreamType: StreamTypeRawLogs, + TemplateName: TemplateNameEdgeLogs, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + request.StreamConfiguration.Connectors = []AbstractConnector{test.connector} + + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + var connectorMap map[string]interface{} + err := json.NewDecoder(r.Body).Decode(&connectorMap) + require.NoError(t, err) + + var expectedMap interface{} + err = json.Unmarshal([]byte(test.expectedJSON), &expectedMap) + require.NoError(t, err) + + res := reflect.DeepEqual(expectedMap, connectorMap["connectors"]) + assert.True(t, res) + })) + + client := mockAPIClient(t, mockServer) + _, _ = client.CreateStream(context.Background(), request) + }) + } +} + +type mockConnector struct { + Called bool +} + +func (c *mockConnector) SetConnectorType() { + c.Called = true +} + +func (c *mockConnector) Validate() error { + return nil +} + +func TestDs_setConnectorTypes(t *testing.T) { + mockConnector := mockConnector{Called: false} + + request := CreateStreamRequest{ + StreamConfiguration: StreamConfiguration{ + ActivateNow: true, + Config: Config{ + Delimiter: DelimiterTypePtr(DelimiterTypeSpace), + Format: FormatTypeStructured, + Frequency: Frequency{TimeInSec: TimeInSec30}, + UploadFilePrefix: "logs", + UploadFileSuffix: "ak", + }, + Connectors: []AbstractConnector{ + &mockConnector, + }, + ContractID: "P-132NZF456", + DatasetFieldIDs: []int{1, 2, 3}, + EmailIDs: "test@aka.mai", + GroupID: tools.IntPtr(123231), + PropertyIDs: []int{123123, 123123}, + StreamName: "TestStream", + StreamType: StreamTypeRawLogs, + TemplateName: TemplateNameEdgeLogs, + }, + } + + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusAccepted) + _, err := w.Write([]byte("{}")) + 
require.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + _, err := client.CreateStream(context.Background(), request) + require.NoError(t, err) + + assert.True(t, mockConnector.Called) +} diff --git a/pkg/datastream/tools/ptr.go b/pkg/datastream/tools/ptr.go new file mode 100644 index 00000000..8f540446 --- /dev/null +++ b/pkg/datastream/tools/ptr.go @@ -0,0 +1,6 @@ +package tools + +// IntPtr returns the address of the int +func IntPtr(i int) *int { + return &i +} diff --git a/pkg/hapi/edgehostname.go b/pkg/hapi/edgehostname.go new file mode 100644 index 00000000..0f2779a6 --- /dev/null +++ b/pkg/hapi/edgehostname.go @@ -0,0 +1,124 @@ +package hapi + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + + validation "github.com/go-ozzo/ozzo-validation/v4" +) + +type ( + // EdgeHostnames contains operations available on Edge Hostname resource + // See: https://developer.akamai.com/api/core_features/edge_hostnames/v1.html#edgehostname + EdgeHostnames interface { + // DeleteEdgeHostname allows deleting a specific edge hostname. + // You must have an Admin or Technical role in order to delete an edge hostname. + // You can delete any hostname that’s not currently part of an active Property Manager configuration. + // + // See: https://developer.akamai.com/api/core_features/edge_hostnames/v1.html#deleteedgehostnamebyname + DeleteEdgeHostname(context.Context, DeleteEdgeHostnameRequest) (*DeleteEdgeHostnameResponse, error) + } + + // DeleteEdgeHostnameRequest is used to delete edge hostname + DeleteEdgeHostnameRequest struct { + DNSZone string + RecordName string + StatusUpdateEmail []string + Comments string + } + + // DeleteEdgeHostnameResponse is a response from deleting edge hostname + DeleteEdgeHostnameResponse struct { + Action string `json:"action"` + ChangeID int `json:"changeId"` + Comments string `json:"comments"` + Status string `json:"status"` + StatusMessage string `json:"statusMessage"` + StatusUpdateDate string `json:"statusUpdateDate"` + StatusUpdateEmail string `json:"statusUpdateEmail"` + SubmitDate string `json:"submitDate"` + Submitter string `json:"submitter"` + SubmitterEmail string `json:"submitterEmail"` + EdgeHostnames []EdgeHostname `json:"edgeHostnames"` + } + + // EdgeHostname represents edge hostname part of DeleteEdgeHostnameResponse + EdgeHostname struct { + EdgeHostnameID int `json:"edgeHostnameId"` + RecordName string `json:"recordName"` + DNSZone string `json:"dnsZone"` + SecurityType string `json:"securityType"` + UseDefaultTTL bool `json:"useDefaultTtl"` + UseDefaultMap bool `json:"useDefaultMap"` + TTL int `json:"ttl"` + Map string `json:"map"` + SlotNumber int `json:"slotNumber"` + IPVersionBehavior string `json:"ipVersionBehavior"` + Comments string `json:"comments"` + ChinaCDN ChinaCDN `json:"chinaCdn"` + } + + // ChinaCDN represents China CDN settings of EdgeHostname + ChinaCDN struct { + IsChinaCDN bool `json:"isChinaCdn"` + CustomChinaCDNMap string `json:"customChinaCdnMap,omitempty"` + } +) + +// Validate validates DeleteEdgeHostnameRequest +func (r DeleteEdgeHostnameRequest) Validate() error { + return validation.Errors{ + "DNSZone": validation.Validate(r.DNSZone, validation.Required), + "RecordName": validation.Validate(r.RecordName, validation.Required), + }.Filter() +} + +var ( + // ErrDeleteEdgeHostname represents error when deleting edge hostname fails + ErrDeleteEdgeHostname = errors.New("delete edge hostname") +) + +func (h *hapi) DeleteEdgeHostname(ctx context.Context, params DeleteEdgeHostnameRequest) 
(*DeleteEdgeHostnameResponse, error) { + if err := params.Validate(); err != nil { + return nil, fmt.Errorf("%s: %w: %s", ErrDeleteEdgeHostname, ErrStructValidation, err) + } + + logger := h.Log(ctx) + logger.Debug("DeleteEdgeHostname") + + uri := fmt.Sprintf( + "/hapi/v1/dns-zones/%s/edge-hostnames/%s", + params.DNSZone, + params.RecordName, + ) + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, uri, nil) + if err != nil { + return nil, fmt.Errorf("%w: failed to create request: %s", ErrDeleteEdgeHostname, err) + } + + q := req.URL.Query() + if len(params.StatusUpdateEmail) > 0 { + emails := strings.Join(params.StatusUpdateEmail, ",") + q.Add("statusUpdateEmail", emails) + } + if params.Comments != "" { + q.Add("comments", params.Comments) + } + req.URL.RawQuery = q.Encode() + + var rval DeleteEdgeHostnameResponse + + resp, err := h.Exec(req, &rval) + if err != nil { + return nil, fmt.Errorf("%w: request failed: %s", ErrDeleteEdgeHostname, err) + } + + if resp.StatusCode != http.StatusAccepted { + return nil, fmt.Errorf("%s: %w", ErrDeleteEdgeHostname, h.Error(resp)) + } + + return &rval, nil +} diff --git a/pkg/hapi/edgehostname_test.go b/pkg/hapi/edgehostname_test.go new file mode 100644 index 00000000..fc593263 --- /dev/null +++ b/pkg/hapi/edgehostname_test.go @@ -0,0 +1,167 @@ +package hapi + +import ( + "context" + "errors" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDeleteEdgeHostname(t *testing.T) { + tests := map[string]struct { + request DeleteEdgeHostnameRequest + responseStatus int + responseBody string + expectedPath string + expectedResponse *DeleteEdgeHostnameResponse + withError error + }{ + "202 Accepted": { + request: DeleteEdgeHostnameRequest{ + DNSZone: "edgesuite.net", + RecordName: "mgw-test-001", + StatusUpdateEmail: []string{"some@example.com"}, + Comments: "some comment", + }, + responseStatus: http.StatusAccepted, + responseBody: ` +{ + "action": "DELETE", + "changeId": 66025603, + "edgeHostnames": [ + { + "chinaCdn": { + "isChinaCdn": false + }, + "dnsZone": "edgesuite.net", + "edgeHostnameId": 4558392, + "recordName": "mgw-test-001", + "securityType": "STANDARD-TLS", + "useDefaultMap": false, + "useDefaultTtl": false + } + ], + "status": "PENDING", + "statusMessage": "File uploaded and awaiting validation", + "statusUpdateDate": "2021-09-23T15:07:10.000+00:00", + "submitDate": "2021-09-23T15:07:10.000+00:00", + "submitter": "ftzgvvigljhoq5ib", + "submitterEmail": "ftzgvvigljhoq5ib@nomail-akamai.com" +}`, + expectedPath: "/hapi/v1/dns-zones/edgesuite.net/edge-hostnames/mgw-test-001?comments=some+comment&statusUpdateEmail=some%40example.com", + expectedResponse: &DeleteEdgeHostnameResponse{ + Action: "DELETE", + ChangeID: 66025603, + EdgeHostnames: []EdgeHostname{{ + ChinaCDN: ChinaCDN{ + IsChinaCDN: false, + }, + DNSZone: "edgesuite.net", + EdgeHostnameID: 4558392, + RecordName: "mgw-test-001", + SecurityType: "STANDARD-TLS", + UseDefaultMap: false, + UseDefaultTTL: false, + }, + }, + Status: "PENDING", + StatusMessage: "File uploaded and awaiting validation", + StatusUpdateDate: "2021-09-23T15:07:10.000+00:00", + SubmitDate: "2021-09-23T15:07:10.000+00:00", + Submitter: "ftzgvvigljhoq5ib", + SubmitterEmail: "ftzgvvigljhoq5ib@nomail-akamai.com", + }, + }, + "404 could not find edge hostname": { + request: DeleteEdgeHostnameRequest{ + DNSZone: "edgesuite.net", + RecordName: "mgw-test-003", + StatusUpdateEmail: []string{"some@example.com"}, + Comments: 
"some comment", + }, + responseStatus: http.StatusNotFound, + responseBody: ` +{ + "type": "/hapi/problems/record-name-dns-zone-not-found", + "title": "Invalid Record Name/DNS Zone", + "status": 404, + "detail": "Could not find edge hostname with record name mgw-test-003 and DNS Zone edgesuite.net", + "instance": "/hapi/error-instances/47f08d26-00b4-4c05-a8c0-bcbc542b9bce", + "requestInstance": "http://cloud-qa-resource-impl.luna-dev.akamaiapis.net/hapi/open/v1/dns-zones/edgesuite.net/edge-hostnames/mgw-test-003#9ea9060c", + "method": "DELETE", + "requestTime": "2021-09-23T15:37:28.383173Z", + "errors": [], + "domainPrefix": "mgw-test-003", + "domainSuffix": "edgesuite.net" +}`, + expectedPath: "/hapi/v1/dns-zones/edgesuite.net/edge-hostnames/mgw-test-003?comments=some+comment&statusUpdateEmail=some%40example.com", + withError: &Error{ + Type: "/hapi/problems/record-name-dns-zone-not-found", + Title: "Invalid Record Name/DNS Zone", + Status: 404, + Detail: "Could not find edge hostname with record name mgw-test-003 and DNS Zone edgesuite.net", + Instance: "/hapi/error-instances/47f08d26-00b4-4c05-a8c0-bcbc542b9bce", + RequestInstance: "http://cloud-qa-resource-impl.luna-dev.akamaiapis.net/hapi/open/v1/dns-zones/edgesuite.net/edge-hostnames/mgw-test-003#9ea9060c", + Method: "DELETE", + RequestTime: "2021-09-23T15:37:28.383173Z", + DomainPrefix: "mgw-test-003", + DomainSuffix: "edgesuite.net", + }, + }, + "500 internal server error": { + request: DeleteEdgeHostnameRequest{ + DNSZone: "edgesuite.net", + RecordName: "mgw-test-002", + StatusUpdateEmail: []string{"some@example.com"}, + Comments: "some comment", + }, + responseStatus: http.StatusInternalServerError, + responseBody: ` +{ + "type": "internal_error", + "title": "Internal Server Error", + "detail": "Error deleting activation", + "status": 500 +}`, + expectedPath: "/hapi/v1/dns-zones/edgesuite.net/edge-hostnames/mgw-test-002?comments=some+comment&statusUpdateEmail=some%40example.com", + withError: &Error{ + Type: "internal_error", + Title: "Internal Server Error", + Detail: "Error deleting activation", + Status: http.StatusInternalServerError, + }, + }, + "validation error": { + request: DeleteEdgeHostnameRequest{ + RecordName: "atv_1696855", + StatusUpdateEmail: []string{"some@example.com"}, + Comments: "some comment", + }, + withError: ErrStructValidation, + }, + } + + for name, test := range tests { + t.Run(name, func(t *testing.T) { + mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, test.expectedPath, r.URL.String()) + assert.Equal(t, http.MethodDelete, r.Method) + w.WriteHeader(test.responseStatus) + _, err := w.Write([]byte(test.responseBody)) + assert.NoError(t, err) + })) + client := mockAPIClient(t, mockServer) + result, err := client.DeleteEdgeHostname(context.Background(), test.request) + if test.withError != nil { + assert.True(t, errors.Is(err, test.withError), "want: %s; got: %s", test.withError, err) + return + } + require.NoError(t, err) + assert.Equal(t, test.expectedResponse, result) + }) + } +} diff --git a/pkg/hapi/errors.go b/pkg/hapi/errors.go new file mode 100644 index 00000000..4c21e996 --- /dev/null +++ b/pkg/hapi/errors.go @@ -0,0 +1,86 @@ +package hapi + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "net/http" +) + +type ( + // Error is a hapi error interface + Error struct { + Type string `json:"type"` + Title string `json:"title"` + Detail string `json:"detail"` + Instance string `json:"instance,omitempty"` + 
RequestInstance string `json:"requestInstance,omitempty"` + Method string `json:"method,omitempty"` + RequestTime string `json:"requestTime,omitempty"` + BehaviorName string `json:"behaviorName,omitempty"` + ErrorLocation string `json:"errorLocation,omitempty"` + Status int `json:"status,omitempty"` + DomainPrefix string `json:"domainPrefix,omitempty"` + DomainSuffix string `json:"domainSuffix,omitempty"` + Errors []ErrorItem `json:"errors,omitempty"` + } + + // ErrorItem represents single error item + ErrorItem struct { + Key string `json:"key,omitempty"` + Value string `json:"value,omitempty"` + } +) + +// Error parses an error from the response +func (h *hapi) Error(r *http.Response) error { + var e Error + + var body []byte + + body, err := ioutil.ReadAll(r.Body) + if err != nil { + h.Log(r.Request.Context()).Errorf("reading error response body: %s", err) + e.Status = r.StatusCode + e.Title = fmt.Sprintf("Failed to read error body") + e.Detail = err.Error() + return &e + } + + if err := json.Unmarshal(body, &e); err != nil { + h.Log(r.Request.Context()).Errorf("could not unmarshal API error: %s", err) + e.Title = fmt.Sprintf("Failed to unmarshal error body") + e.Detail = err.Error() + } + + e.Status = r.StatusCode + + return &e +} + +func (e *Error) Error() string { + msg, err := json.MarshalIndent(e, "", "\t") + if err != nil { + return fmt.Sprintf("error marshaling API error: %s", err) + } + return fmt.Sprintf("API error: \n%s", msg) +} + +// Is handles error comparisons +func (e *Error) Is(target error) bool { + var t *Error + if !errors.As(target, &t) { + return false + } + + if e == t { + return true + } + + if e.Status != t.Status { + return false + } + + return e.Error() == t.Error() +} diff --git a/pkg/hapi/errors_test.go b/pkg/hapi/errors_test.go new file mode 100644 index 00000000..f62731d8 --- /dev/null +++ b/pkg/hapi/errors_test.go @@ -0,0 +1,68 @@ +package hapi + +import ( + "context" + "io/ioutil" + "net/http" + "strings" + "testing" + + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" + "github.com/stretchr/testify/require" + "github.com/tj/assert" +) + +func TestNewError(t *testing.T) { + sess, err := session.New() + require.NoError(t, err) + + req, err := http.NewRequestWithContext( + context.TODO(), + http.MethodHead, + "/", + nil) + require.NoError(t, err) + + tests := map[string]struct { + response *http.Response + expected *Error + }{ + "valid response, status code 500": { + response: &http.Response{ + Status: "Internal Server Error", + StatusCode: http.StatusInternalServerError, + Body: ioutil.NopCloser(strings.NewReader( + `{"type":"a","title":"b","detail":"c"}`), + ), + Request: req, + }, + expected: &Error{ + Type: "a", + Title: "b", + Detail: "c", + Status: http.StatusInternalServerError, + }, + }, + "invalid response body, assign status code": { + response: &http.Response{ + Status: "Internal Server Error", + StatusCode: http.StatusInternalServerError, + Body: ioutil.NopCloser(strings.NewReader( + `test`), + ), + Request: req, + }, + expected: &Error{ + Title: "Failed to unmarshal error body", + Detail: "invalid character 'e' in literal true (expecting 'r')", + Status: http.StatusInternalServerError, + }, + }, + } + for name, test := range tests { + t.Run(name, func(t *testing.T) { + res := Client(sess).(*hapi).Error(test.response) + assert.Equal(t, test.expected, res) + }) + } +} diff --git a/pkg/hapi/hapi.go b/pkg/hapi/hapi.go new file mode 100644 index 00000000..2c5bae13 --- /dev/null +++ b/pkg/hapi/hapi.go @@ -0,0 +1,42 @@ +// Package hapi 
provides access to the Akamai Edge Hostnames APIs +package hapi + +import ( + "errors" + + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" +) + +var ( + // ErrStructValidation is returned when given struct validation failed + ErrStructValidation = errors.New("struct validation") +) + +type ( + // HAPI is the hapi api interface + HAPI interface { + EdgeHostnames + } + + hapi struct { + session.Session + } + + // Option defines a HAPI option + Option func(*hapi) + + // ClientFunc is a hapi client new method, this can be used for mocking + ClientFunc func(sess session.Session, opts ...Option) HAPI +) + +// Client returns a new hapi Client instance with the specified controller +func Client(sess session.Session, opts ...Option) HAPI { + h := &hapi{ + Session: sess, + } + + for _, opt := range opts { + opt(h) + } + return h +} diff --git a/pkg/hapi/hapi_test.go b/pkg/hapi/hapi_test.go new file mode 100644 index 00000000..ababd09e --- /dev/null +++ b/pkg/hapi/hapi_test.go @@ -0,0 +1,54 @@ +package hapi + +import ( + "crypto/tls" + "crypto/x509" + "net/http" + "net/http/httptest" + "net/url" + "testing" + + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/edgegrid" + "github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func mockAPIClient(t *testing.T, mockServer *httptest.Server) HAPI { + serverURL, err := url.Parse(mockServer.URL) + require.NoError(t, err) + certPool := x509.NewCertPool() + certPool.AddCert(mockServer.Certificate()) + httpClient := &http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{ + RootCAs: certPool, + }, + }, + } + s, err := session.New(session.WithClient(httpClient), session.WithSigner(&edgegrid.Config{Host: serverURL.Host})) + assert.NoError(t, err) + return Client(s) +} + +func TestClient(t *testing.T) { + sess, err := session.New() + require.NoError(t, err) + tests := map[string]struct { + options []Option + expected *hapi + }{ + "no options provided, return default": { + options: nil, + expected: &hapi{ + Session: sess, + }, + }, + } + for name, test := range tests { + t.Run(name, func(t *testing.T) { + res := Client(sess, test.options...) 
+ assert.Equal(t, res, test.expected) + }) + } +} diff --git a/pkg/networklists/network_list.go b/pkg/networklists/network_list.go index cef3d39a..066259a7 100644 --- a/pkg/networklists/network_list.go +++ b/pkg/networklists/network_list.go @@ -230,11 +230,13 @@ type ( Method string `json:"method,omitempty"` } + // nolint:revive // NetworkListsResponseLinks contains LinkInfo NetworkListsResponseLinks struct { Create *LinkInfo `json:"create,omitempty"` } + // nolint:revive // NetworkListsLinks encapsulates the set of API hypermedia NetworkListsLinks struct { ActivateInProduction *LinkInfo `json:"activateInProduction,omitempty"` diff --git a/pkg/papi/activation.go b/pkg/papi/activation.go index 7e90a05a..4a1561d9 100644 --- a/pkg/papi/activation.go +++ b/pkg/papi/activation.go @@ -236,9 +236,9 @@ var ( // ErrCreateActivation represents error when creating activation fails ErrCreateActivation = errors.New("creating activation") // ErrGetActivations represents error when fetching activations fails - ErrGetActivations = errors.New("fetching activations") + ErrGetActivations = errors.New("fetching activations") // ErrGetActivation represents error when fetching activation fails - ErrGetActivation = errors.New("fetching activation") + ErrGetActivation = errors.New("fetching activation") // ErrCancelActivation represents error when canceling activation fails ErrCancelActivation = errors.New("canceling activation") ) diff --git a/pkg/papi/clientsettings.go b/pkg/papi/clientsettings.go index 78bf19ef..b0fc0333 100644 --- a/pkg/papi/clientsettings.go +++ b/pkg/papi/clientsettings.go @@ -28,7 +28,7 @@ type ( var ( // ErrGetClientSettings represents error when fetching client setting fails - ErrGetClientSettings = errors.New("fetching client settings") + ErrGetClientSettings = errors.New("fetching client settings") // ErrUpdateClientSettings represents error when updating client setting fails ErrUpdateClientSettings = errors.New("updating client settings") ) diff --git a/pkg/papi/cpcode.go b/pkg/papi/cpcode.go index 5980d465..79d515d4 100644 --- a/pkg/papi/cpcode.go +++ b/pkg/papi/cpcode.go @@ -118,9 +118,9 @@ func (cp CreateCPCode) Validate() error { var ( // ErrGetCPCodes represents error when fetching CP Codes fails - ErrGetCPCodes = errors.New("fetching CP Codes") + ErrGetCPCodes = errors.New("fetching CP Codes") // ErrGetCPCode represents error when fetching CP Code fails - ErrGetCPCode = errors.New("fetching CP Code") + ErrGetCPCode = errors.New("fetching CP Code") // ErrCreateCPCode represents error when creating CP Code fails ErrCreateCPCode = errors.New("creating CP Code") ) diff --git a/pkg/papi/edgehostname.go b/pkg/papi/edgehostname.go index 38860f5d..90d76e42 100644 --- a/pkg/papi/edgehostname.go +++ b/pkg/papi/edgehostname.go @@ -177,9 +177,9 @@ func (eh GetEdgeHostnameRequest) Validate() error { var ( // ErrGetEdgeHostnames represents error when fetching edge hostnames fails - ErrGetEdgeHostnames = errors.New("fetching edge hostnames") + ErrGetEdgeHostnames = errors.New("fetching edge hostnames") // ErrGetEdgeHostname represents error when fetching edge hostname fails - ErrGetEdgeHostname = errors.New("fetching edge hostname") + ErrGetEdgeHostname = errors.New("fetching edge hostname") // ErrCreateEdgeHostname represents error when creating edge hostname fails ErrCreateEdgeHostname = errors.New("creating edge hostname") ) diff --git a/pkg/papi/property.go b/pkg/papi/property.go index 5b9102e8..8f979c5d 100644 --- a/pkg/papi/property.go +++ b/pkg/papi/property.go @@ -159,9 +159,9 @@ func 
(v RemovePropertyRequest) Validate() error { var ( // ErrGetProperties represents error when fetching properties fails - ErrGetProperties = errors.New("fetching properties") + ErrGetProperties = errors.New("fetching properties") // ErrGetProperty represents error when fetching property fails - ErrGetProperty = errors.New("fetching property") + ErrGetProperty = errors.New("fetching property") // ErrCreateProperty represents error when creating property fails ErrCreateProperty = errors.New("creating property") // ErrRemoveProperty represents error when removing property fails diff --git a/pkg/papi/propertyhostname.go b/pkg/papi/propertyhostname.go index f61d1c76..4c50ef9a 100644 --- a/pkg/papi/propertyhostname.go +++ b/pkg/papi/propertyhostname.go @@ -125,7 +125,7 @@ func (ch UpdatePropertyVersionHostnamesRequest) Validate() error { var ( // ErrGetPropertyVersionHostnames represents error when fetching hostnames fails - ErrGetPropertyVersionHostnames = errors.New("fetching hostnames") + ErrGetPropertyVersionHostnames = errors.New("fetching hostnames") // ErrUpdatePropertyVersionHostnames represents error when updating hostnames fails ErrUpdatePropertyVersionHostnames = errors.New("updating hostnames") ) diff --git a/pkg/papi/propertyversion.go b/pkg/papi/propertyversion.go index db7e1151..d3dc5d17 100644 --- a/pkg/papi/propertyversion.go +++ b/pkg/papi/propertyversion.go @@ -209,17 +209,17 @@ func (v GetFeaturesRequest) Validate() error { var ( // ErrGetPropertyVersions represents error when fetching property versions fails - ErrGetPropertyVersions = errors.New("fetching property versions") + ErrGetPropertyVersions = errors.New("fetching property versions") // ErrGetPropertyVersion represents error when fetching property version fails - ErrGetPropertyVersion = errors.New("fetching property version") + ErrGetPropertyVersion = errors.New("fetching property version") // ErrGetLatestVersion represents error when fetching latest property version fails - ErrGetLatestVersion = errors.New("fetching latest property version") + ErrGetLatestVersion = errors.New("fetching latest property version") // ErrCreatePropertyVersion represents error when creating property version fails ErrCreatePropertyVersion = errors.New("creating property version") // ErrGetAvailableBehaviors represents error when fetching available behaviors fails ErrGetAvailableBehaviors = errors.New("fetching available behaviors") // ErrGetAvailableCriteria represents error when fetching available criteria fails - ErrGetAvailableCriteria = errors.New("fetching available criteria") + ErrGetAvailableCriteria = errors.New("fetching available criteria") ) // GetPropertyVersions returns list of property versions for give propertyID, contractID and groupID diff --git a/pkg/papi/rule.go b/pkg/papi/rule.go index 3a7e0434..e3e6fce4 100644 --- a/pkg/papi/rule.go +++ b/pkg/papi/rule.go @@ -215,7 +215,7 @@ func (v RuleVariable) Validate() error { var ( // ErrGetRuleTree represents error when fetching rule tree fails - ErrGetRuleTree = errors.New("fetching rule tree") + ErrGetRuleTree = errors.New("fetching rule tree") // ErrUpdateRuleTree represents error when updating rule tree fails ErrUpdateRuleTree = errors.New("updating rule tree") ) diff --git a/pkg/papi/ruleformats.go b/pkg/papi/ruleformats.go index 9fd0e15d..7532d018 100644 --- a/pkg/papi/ruleformats.go +++ b/pkg/papi/ruleformats.go @@ -28,7 +28,7 @@ type ( ) var ( - // ErrGetRuleFormats represents error when fetching rule formats fails + // ErrGetRuleFormats represents error when fetching 
rule formats fails ErrGetRuleFormats = errors.New("fetching rule formats") )
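
As a quick orientation to the new DataStream support, below is a minimal sketch of creating a stream that delivers logs to S3. It assumes the datastream sub-package exposes a Client(session.Session) constructor analogous to hapi.Client; the host, credentials, contract, group, property and dataset-field IDs are placeholder values. Connector structs carry only destination data, since CreateStream sets each connector's connectorType before validation and marshaling.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/datastream"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/datastream/tools"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/edgegrid"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session"
)

func main() {
	// Placeholder signer: a real call needs a fully populated EdgeGrid credential set.
	sess, err := session.New(session.WithSigner(&edgegrid.Config{Host: "akab-host.luna.akamaiapis.net"}))
	if err != nil {
		log.Fatal(err)
	}

	// Assumed constructor, mirroring hapi.Client; verify against the datastream package.
	ds := datastream.Client(sess)

	// Connector structs carry only destination data; CreateStream fills in connectorType itself.
	resp, err := ds.CreateStream(context.Background(), datastream.CreateStreamRequest{
		StreamConfiguration: datastream.StreamConfiguration{
			ActivateNow: true,
			Config: datastream.Config{
				Delimiter:        datastream.DelimiterTypePtr(datastream.DelimiterTypeSpace),
				Format:           datastream.FormatTypeStructured,
				Frequency:        datastream.Frequency{TimeInSec: datastream.TimeInSec30},
				UploadFilePrefix: "logs",
				UploadFileSuffix: "ak",
			},
			Connectors: []datastream.AbstractConnector{
				&datastream.S3Connector{
					Bucket:          "example-bucket",
					ConnectorName:   "S3Destination",
					Path:            "log/edgelogs",
					Region:          "us-east-1",
					AccessKey:       "EXAMPLE_ACCESS_KEY",
					SecretAccessKey: "EXAMPLE_SECRET_KEY",
				},
			},
			ContractID:      "1-EXAMPLE",
			DatasetFieldIDs: []int{1002, 1005, 1006},
			EmailIDs:        "user@example.com",
			GroupID:         tools.IntPtr(12345),
			PropertyIDs:     []int{123123},
			StreamName:      "ExampleStream",
			StreamType:      datastream.StreamTypeRawLogs,
			TemplateName:    datastream.TemplateNameEdgeLogs,
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("created stream:", resp.StreamVersionKey.StreamID)
}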
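
Activation is a separate operation on an existing stream. The sketch below activates a stream and then reads back its activation history; it relies on the same assumed datastream Client constructor, and the host and stream ID are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/datastream"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/edgegrid"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session"
)

func main() {
	// Placeholder signer: a real call needs a fully populated EdgeGrid credential set.
	sess, err := session.New(session.WithSigner(&edgegrid.Config{Host: "akab-host.luna.akamaiapis.net"}))
	if err != nil {
		log.Fatal(err)
	}
	ds := datastream.Client(sess) // assumed constructor, mirroring hapi.Client

	// Activation is asynchronous; a 202 response carries the stream version being activated.
	res, err := ds.ActivateStream(context.Background(), datastream.ActivateStreamRequest{StreamID: 7050})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("activating version:", res.StreamVersionKey.StreamVersionID)

	// The activation history lists who toggled the stream and when.
	history, err := ds.GetActivationHistory(context.Background(), datastream.GetActivationHistoryRequest{StreamID: 7050})
	if err != nil {
		log.Fatal(err)
	}
	for _, entry := range history {
		fmt.Println(entry.CreatedDate, entry.CreatedBy, entry.IsActive)
	}
}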
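
For the new HAPI support, deleting an edge hostname takes the DNS zone and record name, with the notification email list and comment passed through as query parameters. A minimal sketch follows; the host, zone and record name are placeholder values.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/edgegrid"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/hapi"
	"github.com/akamai/AkamaiOPEN-edgegrid-golang/v2/pkg/session"
)

func main() {
	// Placeholder signer: a real call needs a fully populated EdgeGrid credential set.
	sess, err := session.New(session.WithSigner(&edgegrid.Config{Host: "akab-host.luna.akamaiapis.net"}))
	if err != nil {
		log.Fatal(err)
	}
	client := hapi.Client(sess)

	// DNSZone and RecordName are required; email and comments are sent as query parameters.
	res, err := client.DeleteEdgeHostname(context.Background(), hapi.DeleteEdgeHostnameRequest{
		DNSZone:           "edgesuite.net",
		RecordName:        "example-hostname",
		StatusUpdateEmail: []string{"user@example.com"},
		Comments:          "cleanup",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.Status, res.ChangeID)
}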