diff --git a/bench/benchmark.py b/bench/benchmark.py
new file mode 100644
index 00000000..d365e878
--- /dev/null
+++ b/bench/benchmark.py
@@ -0,0 +1,222 @@
+import timeit
+import io
+import logging
+import zlib
+from datetime import datetime
+
+import urllib3
+
+COUNT = 1000
+
+benchmark_results = []
+benchmark_registry = {}
+
+
+def register_benchmark(testname):
+ def _wrap(func):
+ benchmark_registry[testname] = func
+ return func
+ return _wrap
+
+
+def results_new_benchmark(name: str) -> None:
+ benchmark_results.append((name, {}))
+ print(name)
+
+
+def results_record_result(callback, count):
+ callback_name = callback.__name__
+ bench_name = callback_name.split('_', 1)[-1]
+ try:
+ results = timeit.repeat(
+ f"{callback_name}()",
+ setup=f"from __main__ import patch_urllib3, {callback_name}; patch_urllib3()",
+ repeat=10,
+ number=count,
+ )
+ except Exception:
+ logging.exception(f"error running {bench_name}")
+ return
+ result = count / min(results)
+ benchmark_results.append((bench_name, str(result)))
+
+ print(f"{bench_name}: {result:,.02f} calls/sec")
+
+
+# =============================================================================
+# Monkeypatching
+# =============================================================================
+
+def mock_urlopen(self, method, url, body, headers, **kwargs):
+ target = headers.get('X-Amz-Target')
+ if target.endswith(b'DescribeTable'):
+ body = """{
+ "Table": {
+ "TableName": "users",
+ "TableArn": "arn",
+ "CreationDateTime": "1421866952.062",
+ "ItemCount": 0,
+ "TableSizeBytes": 0,
+ "TableStatus": "ACTIVE",
+ "ProvisionedThroughput": {
+ "NumberOfDecreasesToday": 0,
+ "ReadCapacityUnits": 1,
+ "WriteCapacityUnits": 25
+ },
+ "AttributeDefinitions": [{"AttributeName": "user_name", "AttributeType": "S"}],
+ "KeySchema": [{"AttributeName": "user_name", "KeyType": "HASH"}],
+ "LocalSecondaryIndexes": [],
+ "GlobalSecondaryIndexes": []
+ }
+ }
+ """
+ elif target.endswith(b'GetItem'):
+ # TODO: sometimes raise exc
+ body = """{
+ "Item": {
+ "user_name": {"S": "some_user"},
+ "email": {"S": "some_user@gmail.com"},
+ "first_name": {"S": "John"},
+ "last_name": {"S": "Doe"},
+ "phone_number": {"S": "4155551111"},
+ "country": {"S": "USA"},
+ "preferences": {
+ "M": {
+ "timezone": {"S": "America/New_York"},
+ "allows_notifications": {"BOOL": 1},
+ "date_of_birth": {"S": "2022-10-26T20:00:00.000000+0000"}
+ }
+ },
+ "last_login": {"S": "2022-10-27T20:00:00.000000+0000"}
+ }
+ }
+ """
+ elif target.endswith(b'PutItem'):
+ body = """{
+ "Attributes": {
+ "user_name": {"S": "some_user"},
+ "email": {"S": "some_user@gmail.com"},
+ "first_name": {"S": "John"},
+ "last_name": {"S": "Doe"},
+ "phone_number": {"S": "4155551111"},
+ "country": {"S": "USA"},
+ "preferences": {
+ "M": {
+ "timezone": {"S": "America/New_York"},
+ "allows_notifications": {"BOOL": 1},
+ "date_of_birth": {"S": "2022-10-26T20:44:49.207740+0000"}
+ }
+ },
+ "last_login": {"S": "2022-10-27T20:00:00.000000+0000"}
+ }
+ }
+ """
+ else:
+ body = ""
+
+ body_bytes = body.encode('utf-8')
+ headers = {
+ "content-type": "application/x-amz-json-1.0",
+ "content-length": str(len(body_bytes)),
+ "x-amz-crc32": str(zlib.crc32(body_bytes)),
+ "x-amz-requestid": "YB5DURFL1EQ6ULM39GSEEHFTYTPBBUXDJSYPFZPR4EL7M3AYV0RS",
+ }
+
+ # TODO: consumed capacity?
+
+ body = io.BytesIO(body_bytes)
+ resp = urllib3.HTTPResponse(
+ body,
+ preload_content=False,
+ headers=headers,
+ status=200,
+ )
+ resp.chunked = False
+ return resp
+
+
+def patch_urllib3():
+ urllib3.connectionpool.HTTPConnectionPool.urlopen = mock_urlopen
+
+
+# =============================================================================
+# Setup
+# =============================================================================
+
+import os
+from pynamodb.models import Model
+from pynamodb.attributes import UnicodeAttribute, BooleanAttribute, MapAttribute, UTCDateTimeAttribute
+
+
+os.environ["AWS_ACCESS_KEY_ID"] = "1"
+os.environ["AWS_SECRET_ACCESS_KEY"] = "1"
+os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
+
+
+class UserPreferences(MapAttribute):
+ timezone = UnicodeAttribute()
+ allows_notifications = BooleanAttribute()
+ date_of_birth = UTCDateTimeAttribute()
+
+
+class UserModel(Model):
+ class Meta:
+ table_name = 'User'
+ max_retry_attempts = 0 # TODO: do this conditionally. need to replace the connection object
+ user_name = UnicodeAttribute(hash_key=True)
+ first_name = UnicodeAttribute()
+ last_name = UnicodeAttribute()
+ phone_number = UnicodeAttribute()
+ country = UnicodeAttribute()
+ email = UnicodeAttribute()
+ preferences = UserPreferences(null=True)
+ last_login = UTCDateTimeAttribute()
+
+
+# =============================================================================
+# GetItem
+# =============================================================================
+
+@register_benchmark("get_item")
+def bench_get_item():
+ UserModel.get("username")
+
+
+# =============================================================================
+# PutItem
+# =============================================================================
+
+@register_benchmark("put_item")
+def bench_put_item():
+ UserModel(
+ "username",
+ email="some_user@gmail.com",
+ first_name="John",
+ last_name="Doe",
+ phone_number="4155551111",
+ country="USA",
+ preferences=UserPreferences(
+ timezone="America/New_York",
+ allows_notifications=True,
+ date_of_birth=datetime.utcnow(),
+ ),
+ last_login=datetime.utcnow(),
+ ).save()
+
+
+# =============================================================================
+# Benchmarks.
+# =============================================================================
+
+def main():
+ results_new_benchmark("Basic operations")
+
+ results_record_result(benchmark_registry["get_item"], COUNT)
+ results_record_result(benchmark_registry["put_item"], COUNT)
+
+ print()
+ print("Above metrics are in call/sec, larger is better.")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/settings.rst b/docs/settings.rst
index bfa4c3c0..4d3a7861 100644
--- a/docs/settings.rst
+++ b/docs/settings.rst
@@ -34,15 +34,6 @@ The number of times to retry certain failed DynamoDB API calls. The most common
retries include ``ProvisionedThroughputExceededException`` and ``5xx`` errors.
-base_backoff_ms
----------------
-
-Default: ``25``
-
-The base number of milliseconds used for `exponential backoff and jitter
-<https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/>`_ on retries.
-
-
region
------
diff --git a/mypy.ini b/mypy.ini
index 39bd93b5..530dc976 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -17,3 +17,6 @@ ignore_errors = True
# TODO: burn these down
[mypy-tests.*]
ignore_errors = True
+
+[mypy-benchmark]
+ignore_errors = True
diff --git a/pynamodb/connection/_botocore_private.py b/pynamodb/connection/_botocore_private.py
index b007a469..5f3b7a8d 100644
--- a/pynamodb/connection/_botocore_private.py
+++ b/pynamodb/connection/_botocore_private.py
@@ -1,7 +1,7 @@
"""
Type-annotates the private botocore APIs that we're currently relying on.
"""
-from typing import Any, Dict, Optional
+from typing import Dict
import botocore.client
import botocore.credentials
@@ -22,25 +22,10 @@ class BotocoreRequestSignerPrivate(botocore.signers.RequestSigner):
class BotocoreBaseClientPrivate(botocore.client.BaseClient):
_endpoint: BotocoreEndpointPrivate
_request_signer: BotocoreRequestSignerPrivate
- _service_model: botocore.model.ServiceModel
- def _resolve_endpoint_ruleset(
+ def _make_api_call(
self,
- operation_model: botocore.model.OperationModel,
- params: Dict[str, Any],
- request_context: Dict[str, Any],
- ignore_signing_region: bool = ...,
- ):
- raise NotImplementedError
-
- def _convert_to_request_dict(
- self,
- api_params: Dict[str, Any],
- operation_model: botocore.model.OperationModel,
- *,
- endpoint_url: str = ..., # added in botocore 1.28
- context: Optional[Dict[str, Any]] = ...,
- headers: Optional[Dict[str, Any]] = ...,
- set_user_agent_header: bool = ...,
- ) -> Dict[str, Any]:
+ operation_name: str,
+ operation_kwargs: Dict,
+ ) -> Dict:
raise NotImplementedError
diff --git a/pynamodb/connection/base.py b/pynamodb/connection/base.py
index 975b02e8..3e20255d 100644
--- a/pynamodb/connection/base.py
+++ b/pynamodb/connection/base.py
@@ -1,21 +1,14 @@
"""
Lowest level connection
"""
-import inspect
-import json
import logging
-import random
-import sys
-import time
import uuid
from threading import local
from typing import Any, Dict, List, Mapping, Optional, Sequence, cast
import botocore.client
import botocore.exceptions
-from botocore.awsrequest import AWSPreparedRequest, create_request_object
from botocore.client import ClientError
-from botocore.hooks import first_non_none_response
from botocore.exceptions import BotoCoreError
from botocore.session import get_session
@@ -42,8 +35,7 @@
TRANSACT_GET, TRANSACT_PUT, TRANSACT_DELETE, TRANSACT_UPDATE, UPDATE_EXPRESSION,
RETURN_VALUES_ON_CONDITION_FAILURE_VALUES, RETURN_VALUES_ON_CONDITION_FAILURE,
AVAILABLE_BILLING_MODES, DEFAULT_BILLING_MODE, BILLING_MODE, PAY_PER_REQUEST_BILLING_MODE,
- PROVISIONED_BILLING_MODE,
- TIME_TO_LIVE_SPECIFICATION, ENABLED, UPDATE_TIME_TO_LIVE, TAGS, VALUE
+ PROVISIONED_BILLING_MODE, TIME_TO_LIVE_SPECIFICATION, ENABLED, UPDATE_TIME_TO_LIVE, TAGS, VALUE
)
from pynamodb.exceptions import (
TableError, QueryError, PutError, DeleteError, UpdateError, GetError, ScanError, TableDoesNotExist,
@@ -54,7 +46,7 @@
from pynamodb.expressions.operand import Path
from pynamodb.expressions.projection import create_projection_expression
from pynamodb.expressions.update import Action, Update
-from pynamodb.settings import get_settings_value, OperationSettings
+from pynamodb.settings import get_settings_value
from pynamodb.signals import pre_dynamodb_send, post_dynamodb_send
from pynamodb.types import HASH, RANGE
@@ -255,7 +247,6 @@ def __init__(self,
read_timeout_seconds: Optional[float] = None,
connect_timeout_seconds: Optional[float] = None,
max_retry_attempts: Optional[int] = None,
- base_backoff_ms: Optional[int] = None,
max_pool_connections: Optional[int] = None,
extra_headers: Optional[Mapping[str, str]] = None,
aws_access_key_id: Optional[str] = None,
@@ -286,11 +277,6 @@ def __init__(self,
else:
self._max_retry_attempts_exception = get_settings_value('max_retry_attempts')
- if base_backoff_ms is not None:
- self._base_backoff_ms = base_backoff_ms
- else:
- self._base_backoff_ms = get_settings_value('base_backoff_ms')
-
if max_pool_connections is not None:
self._max_pool_connections = max_pool_connections
else:
@@ -308,28 +294,7 @@ def __init__(self,
def __repr__(self) -> str:
return "Connection<{}>".format(self.client.meta.endpoint_url)
- def _sign_request(self, request):
- auth = self.client._request_signer.get_auth_instance(
- self.client._request_signer.signing_name,
- self.client._request_signer.region_name,
- self.client._request_signer.signature_version)
- auth.add_auth(request)
-
- def _create_prepared_request(
- self,
- params: Dict,
- settings: OperationSettings,
- ) -> AWSPreparedRequest:
- request = create_request_object(params)
- self._sign_request(request)
- prepared_request = self.client._endpoint.prepare_request(request)
- if self._extra_headers is not None:
- prepared_request.headers.update(self._extra_headers)
- if settings.extra_headers is not None:
- prepared_request.headers.update(settings.extra_headers)
- return prepared_request
-
- def dispatch(self, operation_name: str, operation_kwargs: Dict, settings: OperationSettings = OperationSettings.default) -> Dict:
+ def dispatch(self, operation_name: str, operation_kwargs: Dict) -> Dict:
"""
Dispatches `operation_name` with arguments `operation_kwargs`
@@ -344,7 +309,7 @@ def dispatch(self, operation_name: str, operation_kwargs: Dict, settings: Operat
req_uuid = uuid.uuid4()
self.send_pre_boto_callback(operation_name, req_uuid, table_name)
- data = self._make_api_call(operation_name, operation_kwargs, settings)
+ data = self._make_api_call(operation_name, operation_kwargs)
self.send_post_boto_callback(operation_name, req_uuid, table_name)
if data and CONSUMED_CAPACITY in data:
@@ -357,197 +322,58 @@ def dispatch(self, operation_name: str, operation_kwargs: Dict, settings: Operat
def send_post_boto_callback(self, operation_name, req_uuid, table_name):
try:
post_dynamodb_send.send(self, operation_name=operation_name, table_name=table_name, req_uuid=req_uuid)
- except Exception as e:
+ except Exception:
log.exception("post_boto callback threw an exception.")
def send_pre_boto_callback(self, operation_name, req_uuid, table_name):
try:
pre_dynamodb_send.send(self, operation_name=operation_name, table_name=table_name, req_uuid=req_uuid)
- except Exception as e:
+ except Exception:
log.exception("pre_boto callback threw an exception.")
- def _make_api_call(self, operation_name: str, operation_kwargs: Dict, settings: OperationSettings = OperationSettings.default) -> Dict:
- """
- This private method is here for two reasons:
- 1. It's faster to avoid using botocore's response parsing
- 2. It provides a place to monkey patch HTTP requests for unit testing
- """
- operation_model = self.client._service_model.operation_model(operation_name)
- if self._convert_to_request_dict__endpoint_url:
- request_context = {
- 'client_region': self.region,
- 'client_config': self.client.meta.config,
- 'has_streaming_input': operation_model.has_streaming_input,
- 'auth_type': operation_model.auth_type,
- }
- endpoint_url, additional_headers = self.client._resolve_endpoint_ruleset(
- operation_model, operation_kwargs, request_context
- )
- request_dict = self.client._convert_to_request_dict(
- api_params=operation_kwargs,
- operation_model=operation_model,
- endpoint_url=endpoint_url,
- context=request_context,
- headers=additional_headers,
- )
- else:
- request_dict = self.client._convert_to_request_dict(
- operation_kwargs,
- operation_model,
- )
+ def _before_sign(self, request, **_) -> None:
+ if self._extra_headers is not None:
+ for k, v in self._extra_headers.items():
+ request.headers.add_header(k, v)
- for i in range(0, self._max_retry_attempts_exception + 1):
- attempt_number = i + 1
- is_last_attempt_for_exceptions = i == self._max_retry_attempts_exception
-
- http_response = None
- try:
- # Create a new request for each retry (including a new signature).
- prepared_request = self._create_prepared_request(request_dict, settings)
-
- # Implement the before-send event from botocore
- event_name = 'before-send.dynamodb.{}'.format(operation_model.name)
- event_responses = self.client._endpoint._event_emitter.emit(event_name, request=prepared_request)
- event_response: Optional[botocore.awsrequest.AWSResponse] = first_non_none_response(event_responses)
-
- if event_response is None:
- http_response = self.client._endpoint.http_session.send(prepared_request)
- else:
- http_response = event_response
- is_last_attempt_for_exceptions = True # don't retry if we have an event response
-
- # json.loads accepts bytes in >= 3.6.0
- if sys.version_info < (3, 6, 0):
- data = json.loads(http_response.text)
- else:
- data = json.loads(http_response.content)
- except (ValueError, botocore.exceptions.HTTPClientError, botocore.exceptions.ConnectionError) as e:
- if is_last_attempt_for_exceptions:
- log.debug('Reached the maximum number of retry attempts: %s', attempt_number)
- if http_response:
- e.args += (http_response.text,)
- raise
- else:
- # No backoff for fast-fail exceptions that likely failed at the frontend
- log.debug(
- 'Retry needed for (%s) after attempt %s, retryable %s caught: %s',
- operation_name,
- attempt_number,
- e.__class__.__name__,
- e
- )
- continue
-
- status_code = http_response.status_code
- headers = http_response.headers
- if status_code >= 300:
- # Extract error code from __type
- code = data.get('__type', '')
- if '#' in code:
- code = code.rsplit('#', 1)[1]
- botocore_expected_format = {'Error': {'Message': data.get('message', '') or data.get('Message', ''), 'Code': code}}
- verbose_properties = {
- 'request_id': headers.get('x-amzn-RequestId')
- }
+ def _make_api_call(self, operation_name: str, operation_kwargs: Dict) -> Dict:
+ try:
+ return self.client._make_api_call(operation_name, operation_kwargs)
+ except ClientError as e:
+ resp_metadata = e.response.get('ResponseMetadata', {}).get('HTTPHeaders', {})
+ cancellation_reasons = e.response.get('CancellationReasons', [])
- if REQUEST_ITEMS in operation_kwargs:
- # Batch operations can hit multiple tables, report them comma separated
- verbose_properties['table_name'] = ','.join(operation_kwargs[REQUEST_ITEMS])
- elif TRANSACT_ITEMS in operation_kwargs:
- # Transactional operations can also hit multiple tables, or have multiple updates within
- # the same table
- table_names = []
- for item in operation_kwargs[TRANSACT_ITEMS]:
- for op in item.values():
- table_names.append(op[TABLE_NAME])
- verbose_properties['table_name'] = ','.join(table_names)
- else:
- verbose_properties['table_name'] = operation_kwargs.get(TABLE_NAME)
-
- try:
- raise VerboseClientError(
- botocore_expected_format,
- operation_name,
- verbose_properties,
- cancellation_reasons=(
- (
- CancellationReason(
- code=d['Code'],
- message=d.get('Message'),
- ) if d['Code'] != 'None' else None
- )
- for d in data.get('CancellationReasons', [])
- ),
+ botocore_props = {'Error': e.response.get('Error', {})}
+ verbose_props = {
+ 'request_id': resp_metadata.get('x-amzn-requestid', ''),
+ 'table_name': self._get_table_name_for_error_context(operation_kwargs),
+ }
+ raise VerboseClientError(
+ botocore_props,
+ operation_name,
+ verbose_props,
+ cancellation_reasons=(
+ (
+ CancellationReason(
+ code=d['Code'],
+ message=d.get('Message'),
+ ) if d['Code'] != 'None' else None
)
- except VerboseClientError as e:
- if is_last_attempt_for_exceptions:
- log.debug('Reached the maximum number of retry attempts: %s', attempt_number)
- raise
- elif status_code < 500 and code not in RATE_LIMITING_ERROR_CODES:
- # We don't retry on a ConditionalCheckFailedException or other 4xx (except for
- # throughput related errors) because we assume they will fail in perpetuity.
- # Retrying when there is already contention could cause other problems
- # in part due to unnecessary consumption of throughput.
- raise
- else:
- # We use fully-jittered exponentially-backed-off retries:
- # https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
- sleep_time_ms = random.randint(0, self._base_backoff_ms * (2 ** i))
- log.debug(
- 'Retry with backoff needed for (%s) after attempt %s,'
- 'sleeping for %s milliseconds, retryable %s caught: %s',
- operation_name,
- attempt_number,
- sleep_time_ms,
- e.__class__.__name__,
- e
- )
- time.sleep(sleep_time_ms / 1000.0)
- continue
-
- return self._handle_binary_attributes(data)
-
- assert False # unreachable code
-
- @staticmethod
- def _handle_binary_attributes(data):
- """ Simulate botocore's binary attribute handling """
- if ITEM in data:
- for attr in data[ITEM].values():
- bin_decode_attr(attr)
- if ITEMS in data:
- for item in data[ITEMS]:
- for attr in item.values():
- bin_decode_attr(attr)
- if RESPONSES in data:
- if isinstance(data[RESPONSES], list): # ExecuteTransaction response
- for item in data[RESPONSES]:
- for attr in item.values():
- bin_decode_attr(attr)
- else: # BatchGetItem response
- for table_items in data[RESPONSES].values():
- for item in table_items:
- for attr in item.values():
- bin_decode_attr(attr)
- if LAST_EVALUATED_KEY in data:
- for attr in data[LAST_EVALUATED_KEY].values():
- bin_decode_attr(attr)
- if UNPROCESSED_KEYS in data:
- for table_data in data[UNPROCESSED_KEYS].values():
- for item in table_data[KEYS]:
- for attr in item.values():
- bin_decode_attr(attr)
- if UNPROCESSED_ITEMS in data:
- for table_unprocessed_requests in data[UNPROCESSED_ITEMS].values():
- for request in table_unprocessed_requests:
- for item_mapping in request.values():
- for item in item_mapping.values():
- for attr in item.values():
- bin_decode_attr(attr)
- if ATTRIBUTES in data:
- for attr in data[ATTRIBUTES].values():
- bin_decode_attr(attr)
- return data
+ for d in cancellation_reasons
+ ),
+ ) from e
+
+ def _get_table_name_for_error_context(self, operation_kwargs) -> str:
+ # First handle the two multi-table cases: batch and transaction operations
+ if REQUEST_ITEMS in operation_kwargs:
+ return ','.join(operation_kwargs[REQUEST_ITEMS])
+ elif TRANSACT_ITEMS in operation_kwargs:
+ table_names = []
+ for item in operation_kwargs[TRANSACT_ITEMS]:
+ for op in item.values():
+ table_names.append(op[TABLE_NAME])
+ return ",".join(table_names)
+ return operation_kwargs.get(TABLE_NAME)
@property
def session(self) -> botocore.session.Session:
@@ -578,9 +404,14 @@ def client(self) -> BotocoreBaseClientPrivate:
connect_timeout=self._connect_timeout_seconds,
read_timeout=self._read_timeout_seconds,
max_pool_connections=self._max_pool_connections,
+ retries={
+ 'total_max_attempts': 1 + self._max_retry_attempts_exception,
+ 'mode': 'standard',
+ }
)
self._client = cast(BotocoreBaseClientPrivate, self.session.create_client(SERVICE_NAME, self.region, endpoint_url=self.host, config=config))
- self._convert_to_request_dict__endpoint_url = 'endpoint_url' in inspect.signature(self._client._convert_to_request_dict).parameters
+
+ self._client.meta.events.register_first('before-sign.*.*', self._before_sign)
return self._client
def add_meta_table(self, meta_table: MetaTable) -> None:
@@ -998,7 +829,6 @@ def delete_item(
return_values: Optional[str] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the DeleteItem operation and returns the result
@@ -1013,7 +843,7 @@ def delete_item(
return_item_collection_metrics=return_item_collection_metrics
)
try:
- return self.dispatch(DELETE_ITEM, operation_kwargs, settings)
+ return self.dispatch(DELETE_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise DeleteError("Failed to delete item: {}".format(e), e)
@@ -1027,7 +857,6 @@ def update_item(
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
return_values: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the UpdateItem operation
@@ -1046,7 +875,7 @@ def update_item(
return_item_collection_metrics=return_item_collection_metrics,
)
try:
- return self.dispatch(UPDATE_ITEM, operation_kwargs, settings)
+ return self.dispatch(UPDATE_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise UpdateError("Failed to update item: {}".format(e), e)
@@ -1060,7 +889,6 @@ def put_item(
return_values: Optional[str] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the PutItem operation and returns the result
@@ -1077,7 +905,7 @@ def put_item(
return_item_collection_metrics=return_item_collection_metrics
)
try:
- return self.dispatch(PUT_ITEM, operation_kwargs, settings)
+ return self.dispatch(PUT_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise PutError("Failed to put item: {}".format(e), e)
@@ -1106,7 +934,6 @@ def transact_write_items(
client_request_token: Optional[str] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the TransactWrite operation and returns the result
@@ -1133,7 +960,7 @@ def transact_write_items(
operation_kwargs[TRANSACT_ITEMS] = transact_items
try:
- return self.dispatch(TRANSACT_WRITE_ITEMS, operation_kwargs, settings)
+ return self.dispatch(TRANSACT_WRITE_ITEMS, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise TransactWriteError("Failed to write transaction items", e)
@@ -1141,7 +968,6 @@ def transact_get_items(
self,
get_items: Sequence[Dict],
return_consumed_capacity: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the TransactGet operation and returns the result
@@ -1152,7 +978,7 @@ def transact_get_items(
]
try:
- return self.dispatch(TRANSACT_GET_ITEMS, operation_kwargs, settings)
+ return self.dispatch(TRANSACT_GET_ITEMS, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise TransactGetError("Failed to get transaction items", e)
@@ -1163,7 +989,6 @@ def batch_write_item(
delete_items: Optional[Any] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the batch_write_item operation
@@ -1193,7 +1018,7 @@ def batch_write_item(
})
operation_kwargs[REQUEST_ITEMS][table_name] = delete_items_list + put_items_list
try:
- return self.dispatch(BATCH_WRITE_ITEM, operation_kwargs, settings)
+ return self.dispatch(BATCH_WRITE_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise PutError("Failed to batch write items: {}".format(e), e)
@@ -1204,7 +1029,6 @@ def batch_get_item(
consistent_read: Optional[bool] = None,
return_consumed_capacity: Optional[str] = None,
attributes_to_get: Optional[Any] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the batch get item operation
@@ -1235,7 +1059,7 @@ def batch_get_item(
)
operation_kwargs[REQUEST_ITEMS][table_name].update(keys_map)
try:
- return self.dispatch(BATCH_GET_ITEM, operation_kwargs, settings)
+ return self.dispatch(BATCH_GET_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise GetError("Failed to batch get items: {}".format(e), e)
@@ -1246,7 +1070,6 @@ def get_item(
range_key: Optional[str] = None,
consistent_read: bool = False,
attributes_to_get: Optional[Any] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the GetItem operation and returns the result
@@ -1259,7 +1082,7 @@ def get_item(
attributes_to_get=attributes_to_get
)
try:
- return self.dispatch(GET_ITEM, operation_kwargs, settings)
+ return self.dispatch(GET_ITEM, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise GetError("Failed to get item: {}".format(e), e)
@@ -1275,7 +1098,6 @@ def scan(
total_segments: Optional[int] = None,
consistent_read: Optional[bool] = None,
index_name: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the scan operation
@@ -1312,7 +1134,7 @@ def scan(
operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values
try:
- return self.dispatch(SCAN, operation_kwargs, settings)
+ return self.dispatch(SCAN, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise ScanError("Failed to scan table: {}".format(e), e)
@@ -1330,7 +1152,6 @@ def query(
return_consumed_capacity: Optional[str] = None,
scan_index_forward: Optional[bool] = None,
select: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the Query operation and returns the result
@@ -1387,7 +1208,7 @@ def query(
operation_kwargs[EXPRESSION_ATTRIBUTE_VALUES] = expression_attribute_values
try:
- return self.dispatch(QUERY, operation_kwargs, settings)
+ return self.dispatch(QUERY, operation_kwargs)
except BOTOCORE_EXCEPTIONS as e:
raise QueryError("Failed to query items: {}".format(e), e)
diff --git a/pynamodb/connection/table.py b/pynamodb/connection/table.py
index 22d16cad..5e70ba5c 100644
--- a/pynamodb/connection/table.py
+++ b/pynamodb/connection/table.py
@@ -5,7 +5,7 @@
from typing import Any, Dict, Mapping, Optional, Sequence
-from pynamodb.connection.base import Connection, MetaTable, OperationSettings
+from pynamodb.connection.base import Connection, MetaTable
from pynamodb.constants import DEFAULT_BILLING_MODE, KEY
from pynamodb.expressions.condition import Condition
from pynamodb.expressions.update import Action
@@ -24,7 +24,6 @@ def __init__(
connect_timeout_seconds: Optional[float] = None,
read_timeout_seconds: Optional[float] = None,
max_retry_attempts: Optional[int] = None,
- base_backoff_ms: Optional[int] = None,
max_pool_connections: Optional[int] = None,
extra_headers: Optional[Mapping[str, str]] = None,
aws_access_key_id: Optional[str] = None,
@@ -39,7 +38,6 @@ def __init__(
connect_timeout_seconds=connect_timeout_seconds,
read_timeout_seconds=read_timeout_seconds,
max_retry_attempts=max_retry_attempts,
- base_backoff_ms=base_backoff_ms,
max_pool_connections=max_pool_connections,
extra_headers=extra_headers,
aws_access_key_id=aws_access_key_id,
@@ -94,7 +92,6 @@ def delete_item(
return_values: Optional[str] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the DeleteItem operation and returns the result
@@ -107,7 +104,6 @@ def delete_item(
return_values=return_values,
return_consumed_capacity=return_consumed_capacity,
return_item_collection_metrics=return_item_collection_metrics,
- settings=settings,
)
def update_item(
@@ -119,7 +115,6 @@ def update_item(
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
return_values: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the UpdateItem operation
@@ -133,7 +128,6 @@ def update_item(
return_consumed_capacity=return_consumed_capacity,
return_item_collection_metrics=return_item_collection_metrics,
return_values=return_values,
- settings=settings,
)
def put_item(
@@ -145,7 +139,6 @@ def put_item(
return_values: Optional[str] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the PutItem operation and returns the result
@@ -159,7 +152,6 @@ def put_item(
return_values=return_values,
return_consumed_capacity=return_consumed_capacity,
return_item_collection_metrics=return_item_collection_metrics,
- settings=settings,
)
def batch_write_item(
@@ -168,7 +160,6 @@ def batch_write_item(
delete_items: Optional[Any] = None,
return_consumed_capacity: Optional[str] = None,
return_item_collection_metrics: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the batch_write_item operation
@@ -179,7 +170,6 @@ def batch_write_item(
delete_items=delete_items,
return_consumed_capacity=return_consumed_capacity,
return_item_collection_metrics=return_item_collection_metrics,
- settings=settings,
)
def batch_get_item(
@@ -188,7 +178,6 @@ def batch_get_item(
consistent_read: Optional[bool] = None,
return_consumed_capacity: Optional[str] = None,
attributes_to_get: Optional[Any] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the batch get item operation
@@ -199,7 +188,6 @@ def batch_get_item(
consistent_read=consistent_read,
return_consumed_capacity=return_consumed_capacity,
attributes_to_get=attributes_to_get,
- settings=settings,
)
def get_item(
@@ -208,7 +196,6 @@ def get_item(
range_key: Optional[str] = None,
consistent_read: bool = False,
attributes_to_get: Optional[Any] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the GetItem operation and returns the result
@@ -219,7 +206,6 @@ def get_item(
range_key=range_key,
consistent_read=consistent_read,
attributes_to_get=attributes_to_get,
- settings=settings,
)
def scan(
@@ -233,7 +219,6 @@ def scan(
exclusive_start_key: Optional[str] = None,
consistent_read: Optional[bool] = None,
index_name: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the scan operation
@@ -249,7 +234,6 @@ def scan(
exclusive_start_key=exclusive_start_key,
consistent_read=consistent_read,
index_name=index_name,
- settings=settings,
)
def query(
@@ -265,7 +249,6 @@ def query(
return_consumed_capacity: Optional[str] = None,
scan_index_forward: Optional[bool] = None,
select: Optional[str] = None,
- settings: OperationSettings = OperationSettings.default,
) -> Dict:
"""
Performs the Query operation and returns the result
@@ -283,7 +266,6 @@ def query(
return_consumed_capacity=return_consumed_capacity,
scan_index_forward=scan_index_forward,
select=select,
- settings=settings,
)
def describe_table(self) -> Dict:
diff --git a/pynamodb/exceptions.py b/pynamodb/exceptions.py
index 39dd5437..1c78c69f 100644
--- a/pynamodb/exceptions.py
+++ b/pynamodb/exceptions.py
@@ -125,15 +125,14 @@ def __init__(self, table_name: str) -> None:
class CancellationReason:
"""
A reason for a transaction cancellation.
+
+ For a list of possible cancellation reasons and their semantics,
+ see `TransactGetItems`_ and `TransactWriteItems`_ in the AWS documentation.
+
+ .. _TransactGetItems: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TransactGetItems.html
+ .. _TransactWriteItems: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_TransactWriteItems.html
"""
- code: Literal[
- 'ConditionalCheckFailed',
- 'ItemCollectionSizeLimitExceeded',
- 'TransactionConflict',
- 'ProvisionedThroughputExceeded',
- 'ThrottlingError',
- 'ValidationError',
- ]
+ code: str
message: Optional[str] = None
diff --git a/pynamodb/models.py b/pynamodb/models.py
index f05ed485..569a9551 100644
--- a/pynamodb/models.py
+++ b/pynamodb/models.py
@@ -43,7 +43,7 @@
from pynamodb.types import HASH, RANGE
from pynamodb.indexes import Index
from pynamodb.pagination import ResultIterator
-from pynamodb.settings import get_settings_value, OperationSettings
+from pynamodb.settings import get_settings_value
from pynamodb import constants
from pynamodb.constants import (
ATTR_NAME, ATTR_TYPE,
@@ -70,13 +70,12 @@ class BatchWrite(Generic[_T]):
"""
A class for batch writes
"""
- def __init__(self, model: Type[_T], auto_commit: bool = True, settings: OperationSettings = OperationSettings.default):
+ def __init__(self, model: Type[_T], auto_commit: bool = True):
self.model = model
self.auto_commit = auto_commit
self.max_operations = BATCH_WRITE_PAGE_LIMIT
self.pending_operations: List[Dict[str, Any]] = []
self.failed_operations: List[Any] = []
- self.settings = settings
def save(self, put_item: _T) -> None:
"""
@@ -146,15 +145,15 @@ def commit(self) -> None:
data = self.model._get_connection().batch_write_item(
put_items=put_items,
delete_items=delete_items,
- settings=self.settings,
)
if data is None:
return
retries = 0
unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
while unprocessed_items:
- sleep_time = random.randint(0, self.model.Meta.base_backoff_ms * (2 ** retries)) / 1000
- time.sleep(sleep_time)
+ # TODO: we should consider using exponential backoff here
+ # TODO: it is somewhat unintuitive that we retry unprocessed items max_retry_attempts times,
+ # since each `batch_write_item` operation is also subject to max_retry_attempts
retries += 1
if retries >= self.model.Meta.max_retry_attempts:
self.failed_operations = unprocessed_items
@@ -166,12 +165,10 @@ def commit(self) -> None:
put_items.append(item.get(PUT_REQUEST).get(ITEM)) # type: ignore
elif DELETE_REQUEST in item:
delete_items.append(item.get(DELETE_REQUEST).get(KEY)) # type: ignore
- log.info("Resending %d unprocessed keys for batch operation after %d seconds sleep",
- len(unprocessed_items), sleep_time)
+ log.info("Resending %d unprocessed keys for batch operation (retry %d)", len(unprocessed_items), retries)
data = self.model._get_connection().batch_write_item(
put_items=put_items,
delete_items=delete_items,
- settings=self.settings,
)
unprocessed_items = data.get(UNPROCESSED_ITEMS, {}).get(self.model.Meta.table_name)
@@ -184,7 +181,6 @@ class MetaProtocol(Protocol):
host: Optional[str]
connect_timeout_seconds: int
read_timeout_seconds: int
- base_backoff_ms: int
max_retry_attempts: int
max_pool_connections: int
extra_headers: Mapping[str, str]
@@ -243,8 +239,6 @@ def __init__(self, name, bases, namespace, discriminator=None) -> None:
setattr(attr_obj, 'connect_timeout_seconds', get_settings_value('connect_timeout_seconds'))
if not hasattr(attr_obj, 'read_timeout_seconds'):
setattr(attr_obj, 'read_timeout_seconds', get_settings_value('read_timeout_seconds'))
- if not hasattr(attr_obj, 'base_backoff_ms'):
- setattr(attr_obj, 'base_backoff_ms', get_settings_value('base_backoff_ms'))
if not hasattr(attr_obj, 'max_retry_attempts'):
setattr(attr_obj, 'max_retry_attempts', get_settings_value('max_retry_attempts'))
if not hasattr(attr_obj, 'max_pool_connections'):
@@ -328,7 +322,6 @@ def batch_get(
items: Iterable[Union[_KeyType, Iterable[_KeyType]]],
consistent_read: Optional[bool] = None,
attributes_to_get: Optional[Sequence[str]] = None,
- settings: OperationSettings = OperationSettings.default
) -> Iterator[_T]:
"""
BatchGetItem for this model
@@ -347,7 +340,6 @@ def batch_get(
keys_to_get,
consistent_read=consistent_read,
attributes_to_get=attributes_to_get,
- settings=settings,
)
for batch_item in page:
yield cls.from_raw_data(batch_item)
@@ -381,7 +373,6 @@ def batch_get(
keys_to_get,
consistent_read=consistent_read,
attributes_to_get=attributes_to_get,
- settings=settings,
)
for batch_item in page:
yield cls.from_raw_data(batch_item)
@@ -391,7 +382,7 @@ def batch_get(
keys_to_get = []
@classmethod
- def batch_write(cls: Type[_T], auto_commit: bool = True, settings: OperationSettings = OperationSettings.default) -> BatchWrite[_T]:
+ def batch_write(cls: Type[_T], auto_commit: bool = True) -> BatchWrite[_T]:
"""
Returns a BatchWrite context manager for a batch operation.
@@ -401,10 +392,9 @@ def batch_write(cls: Type[_T], auto_commit: bool = True, settings: OperationSett
passed here, changes automatically commit on context exit
(whether successful or not).
"""
- return BatchWrite(cls, auto_commit=auto_commit, settings=settings)
+ return BatchWrite(cls, auto_commit=auto_commit)
- def delete(self, condition: Optional[Condition] = None, settings: OperationSettings = OperationSettings.default,
- *, add_version_condition: bool = True) -> Any:
+ def delete(self, condition: Optional[Condition] = None, *, add_version_condition: bool = True) -> Any:
"""
Deletes this object from DynamoDB.
@@ -419,9 +409,9 @@ def delete(self, condition: Optional[Condition] = None, settings: OperationSetti
if add_version_condition and version_condition is not None:
condition &= version_condition
- return self._get_connection().delete_item(hk_value, range_key=rk_value, condition=condition, settings=settings)
+ return self._get_connection().delete_item(hk_value, range_key=rk_value, condition=condition)
- def update(self, actions: List[Action], condition: Optional[Condition] = None, settings: OperationSettings = OperationSettings.default, *, add_version_condition: bool = True) -> Any:
+ def update(self, actions: List[Action], condition: Optional[Condition] = None, *, add_version_condition: bool = True) -> Any:
"""
Updates an item using the UpdateItem operation.
@@ -443,7 +433,7 @@ def update(self, actions: List[Action], condition: Optional[Condition] = None, s
if add_version_condition and version_condition is not None:
condition &= version_condition
- data = self._get_connection().update_item(hk_value, range_key=rk_value, return_values=ALL_NEW, condition=condition, actions=actions, settings=settings)
+ data = self._get_connection().update_item(hk_value, range_key=rk_value, return_values=ALL_NEW, condition=condition, actions=actions)
item_data = data[ATTRIBUTES]
stored_cls = self._get_discriminator_class(item_data)
if stored_cls and stored_cls != type(self):
@@ -451,17 +441,16 @@ def update(self, actions: List[Action], condition: Optional[Condition] = None, s
self.deserialize(item_data)
return data
- def save(self, condition: Optional[Condition] = None, settings: OperationSettings = OperationSettings.default, *, add_version_condition: bool = True) -> Dict[str, Any]:
+ def save(self, condition: Optional[Condition] = None, *, add_version_condition: bool = True) -> Dict[str, Any]:
"""
Save this object to dynamodb
"""
args, kwargs = self._get_save_args(condition=condition, add_version_condition=add_version_condition)
- kwargs['settings'] = settings
data = self._get_connection().put_item(*args, **kwargs)
self.update_local_version_attribute()
return data
- def refresh(self, consistent_read: bool = False, settings: OperationSettings = OperationSettings.default) -> None:
+ def refresh(self, consistent_read: bool = False) -> None:
"""
Retrieves this object's data from dynamodb and syncs this local object
@@ -470,7 +459,7 @@ def refresh(self, consistent_read: bool = False, settings: OperationSettings = O
:raises ModelInstance.DoesNotExist: if the object to be updated does not exist
"""
hk_value, rk_value = self._get_hash_range_key_serialized_values()
- attrs = self._get_connection().get_item(hk_value, range_key=rk_value, consistent_read=consistent_read, settings=settings)
+ attrs = self._get_connection().get_item(hk_value, range_key=rk_value, consistent_read=consistent_read)
item_data = attrs.get(ITEM, None)
if item_data is None:
raise self.DoesNotExist("This item does not exist in the table.")
@@ -541,7 +530,6 @@ def get(
range_key: Optional[_KeyType] = None,
consistent_read: bool = False,
attributes_to_get: Optional[Sequence[Text]] = None,
- settings: OperationSettings = OperationSettings.default
) -> _T:
"""
Returns a single object using the provided keys
@@ -559,7 +547,6 @@ def get(
range_key=range_key,
consistent_read=consistent_read,
attributes_to_get=attributes_to_get,
- settings=settings,
)
if data:
item_data = data.get(ITEM)
@@ -590,7 +577,6 @@ def count(
index_name: Optional[str] = None,
limit: Optional[int] = None,
rate_limit: Optional[float] = None,
- settings: OperationSettings = OperationSettings.default,
) -> int:
"""
Provides a filtered count
@@ -633,7 +619,6 @@ def count(
query_kwargs,
limit=limit,
rate_limit=rate_limit,
- settings=settings,
)
# iterate through results
@@ -655,7 +640,6 @@ def query(
attributes_to_get: Optional[Iterable[str]] = None,
page_size: Optional[int] = None,
rate_limit: Optional[float] = None,
- settings: OperationSettings = OperationSettings.default,
) -> ResultIterator[_T]:
"""
Provides a high level query API
@@ -705,7 +689,6 @@ def query(
map_fn=cls.from_raw_data,
limit=limit,
rate_limit=rate_limit,
- settings=settings,
)
@classmethod
@@ -721,7 +704,6 @@ def scan(
index_name: Optional[str] = None,
rate_limit: Optional[float] = None,
attributes_to_get: Optional[Sequence[str]] = None,
- settings: OperationSettings = OperationSettings.default,
) -> ResultIterator[_T]:
"""
Iterates through all items in the table
@@ -764,7 +746,6 @@ def scan(
map_fn=cls.from_raw_data,
limit=limit,
rate_limit=rate_limit,
- settings=settings,
)
@classmethod
@@ -1028,7 +1009,7 @@ def _get_serialized_keys(self) -> Tuple[_KeyType, _KeyType]:
return self._serialize_keys(hash_key, range_key)
@classmethod
- def _batch_get_page(cls, keys_to_get, consistent_read, attributes_to_get, settings: OperationSettings):
+ def _batch_get_page(cls, keys_to_get, consistent_read, attributes_to_get):
"""
Returns a single page from BatchGetItem
Also returns any unprocessed items
@@ -1039,7 +1020,7 @@ def _batch_get_page(cls, keys_to_get, consistent_read, attributes_to_get, settin
"""
log.debug("Fetching a BatchGetItem page")
data = cls._get_connection().batch_get_item(
- keys_to_get, consistent_read=consistent_read, attributes_to_get=attributes_to_get, settings=settings,
+ keys_to_get, consistent_read=consistent_read, attributes_to_get=attributes_to_get,
)
item_data = data.get(RESPONSES).get(cls.Meta.table_name) # type: ignore
unprocessed_items = data.get(UNPROCESSED_KEYS).get(cls.Meta.table_name, {}).get(KEYS, None) # type: ignore
@@ -1096,7 +1077,6 @@ def _get_connection(cls) -> TableConnection:
connect_timeout_seconds=cls.Meta.connect_timeout_seconds,
read_timeout_seconds=cls.Meta.read_timeout_seconds,
max_retry_attempts=cls.Meta.max_retry_attempts,
- base_backoff_ms=cls.Meta.base_backoff_ms,
max_pool_connections=cls.Meta.max_pool_connections,
extra_headers=cls.Meta.extra_headers,
aws_access_key_id=cls.Meta.aws_access_key_id,
diff --git a/pynamodb/pagination.py b/pynamodb/pagination.py
index 94de4763..daab3adc 100644
--- a/pynamodb/pagination.py
+++ b/pynamodb/pagination.py
@@ -3,7 +3,6 @@
from pynamodb.constants import (CAMEL_COUNT, ITEMS, LAST_EVALUATED_KEY, SCANNED_COUNT,
CONSUMED_CAPACITY, TOTAL, CAPACITY_UNITS)
-from pynamodb.settings import OperationSettings
_T = TypeVar('_T')
@@ -85,7 +84,6 @@ def __init__(
args: Any,
kwargs: Dict[str, Any],
rate_limit: Optional[float] = None,
- settings: OperationSettings = OperationSettings.default,
) -> None:
self._operation = operation
self._args = args
@@ -96,7 +94,6 @@ def __init__(
self._rate_limiter = None
if rate_limit:
self._rate_limiter = RateLimiter(rate_limit)
- self._settings = settings
def __iter__(self) -> Iterator[_T]:
return self
@@ -110,7 +107,7 @@ def __next__(self) -> _T:
if self._rate_limiter:
self._rate_limiter.acquire()
self._kwargs['return_consumed_capacity'] = TOTAL
- page = self._operation(*self._args, settings=self._settings, **self._kwargs)
+ page = self._operation(*self._args, **self._kwargs)
self._last_evaluated_key = page.get(LAST_EVALUATED_KEY)
self._is_last_page = self._last_evaluated_key is None
self._total_scanned_count += page[SCANNED_COUNT]
@@ -166,9 +163,8 @@ def __init__(
map_fn: Optional[Callable] = None,
limit: Optional[int] = None,
rate_limit: Optional[float] = None,
- settings: OperationSettings = OperationSettings.default,
) -> None:
- self.page_iter: PageIterator = PageIterator(operation, args, kwargs, rate_limit, settings)
+ self.page_iter: PageIterator = PageIterator(operation, args, kwargs, rate_limit)
self._map_fn = map_fn
self._limit = limit
self._total_count = 0
diff --git a/pynamodb/settings.py b/pynamodb/settings.py
index 7283dce0..330ca7a3 100644
--- a/pynamodb/settings.py
+++ b/pynamodb/settings.py
@@ -4,7 +4,7 @@
import warnings
from os import getenv
-from typing import Any, Optional, Mapping, ClassVar
+from typing import Any
log = logging.getLogger(__name__)
@@ -12,7 +12,6 @@
'connect_timeout_seconds': 15,
'read_timeout_seconds': 30,
'max_retry_attempts': 3,
- 'base_backoff_ms': 25,
'region': None,
'max_pool_connections': 10,
'extra_headers': None,
@@ -52,23 +51,3 @@ def get_settings_value(key: str) -> Any:
return default_settings_dict[key]
return None
-
-
-class OperationSettings:
- """
- Settings applicable to an individual operation.
- When set, the settings in this object supersede the global and model settings.
- """
- default: ClassVar['OperationSettings']
-
- def __init__(self, *, extra_headers: Optional[Mapping[str, Optional[str]]] = None) -> None:
- """
- Initializes operation settings.
- :param extra_headers: if set, extra headers to add to the HTTP request. The headers are merged
- on top of extra headers derived from settings or models' Meta classes. To delete a header, set its value
- to `None`.
- """
- self.extra_headers = extra_headers
-
-
-OperationSettings.default = OperationSettings()
diff --git a/tests/test_base_connection.py b/tests/test_base_connection.py
index 3006666d..7e5f5223 100644
--- a/tests/test_base_connection.py
+++ b/tests/test_base_connection.py
@@ -2,15 +2,13 @@
Tests for the base connection class
"""
import base64
-import io
import json
-from unittest import mock, TestCase
+import urllib3
+from unittest import mock
from unittest.mock import patch
import botocore.exceptions
-import botocore.httpsession
-import urllib3
-from botocore.awsrequest import AWSPreparedRequest, AWSRequest, AWSResponse
+from botocore.awsrequest import AWSResponse
from botocore.client import ClientError
from botocore.exceptions import BotoCoreError
@@ -18,1664 +16,1602 @@
from pynamodb.connection import Connection
from pynamodb.connection.base import MetaTable
-from pynamodb.constants import LIST
-from pynamodb.constants import MAP
from pynamodb.exceptions import (
- TableError, DeleteError, PutError, ScanError, GetError, UpdateError, TableDoesNotExist)
+ TableError, DeleteError, PutError, ScanError, GetError, UpdateError, TableDoesNotExist, VerboseClientError)
from pynamodb.constants import (
- UNPROCESSED_ITEMS, STRING, BINARY, TABLE_KEY,
+ UNPROCESSED_ITEMS, STRING, BINARY, DEFAULT_ENCODING, TABLE_KEY,
PAY_PER_REQUEST_BILLING_MODE)
from pynamodb.expressions.operand import Path, Value
from pynamodb.expressions.update import SetAction
-from pynamodb.settings import OperationSettings
from .data import DESCRIBE_TABLE_DATA, GET_ITEM_DATA, LIST_TABLE_DATA
PATCH_METHOD = 'pynamodb.connection.Connection._make_api_call'
+TEST_TABLE_NAME = DESCRIBE_TABLE_DATA['Table']['TableName']
+REGION = 'us-east-1'
-class MetaTableTestCase(TestCase):
- """
- Tests for the meta table class
- """
+@pytest.fixture
+def meta_table():
+ return MetaTable(DESCRIBE_TABLE_DATA.get(TABLE_KEY))
- def setUp(self):
- self.meta_table = MetaTable(DESCRIBE_TABLE_DATA.get(TABLE_KEY))
- def test_get_key_names(self):
- key_names = self.meta_table.get_key_names()
- self.assertEqual(key_names, ["ForumName", "Subject"])
+def test_meta_table_get_key_names(meta_table):
+ key_names = meta_table.get_key_names()
+ assert key_names == ["ForumName", "Subject"]
- def test_get_key_names_index(self):
- key_names = self.meta_table.get_key_names("LastPostIndex")
- self.assertEqual(key_names, ["ForumName", "Subject", "LastPostDateTime"])
- def test_get_attribute_type(self):
- assert self.meta_table.get_attribute_type('ForumName') == 'S'
- with pytest.raises(ValueError):
- self.meta_table.get_attribute_type('wrongone')
+def test_meta_table_get_key_names__index(meta_table):
+ key_names = meta_table.get_key_names("LastPostIndex")
+ assert key_names == ["ForumName", "Subject", "LastPostDateTime"]
- def test_has_index_name(self):
- self.assertTrue(self.meta_table.has_index_name("LastPostIndex"))
- self.assertFalse(self.meta_table.has_index_name("NonExistentIndexName"))
+def test_meta_table_get_attribute_type(meta_table):
+ assert meta_table.get_attribute_type('ForumName') == 'S'
+ with pytest.raises(ValueError):
+ meta_table.get_attribute_type('wrongone')
-class ConnectionTestCase(TestCase):
- """
- Tests for the base connection class
- """
- def setUp(self):
- self.test_table_name = 'Thread'
- self.region = 'us-east-1'
+def test_meta_table_has_index_name(meta_table):
+ assert meta_table.has_index_name("LastPostIndex")
+ assert not meta_table.has_index_name("NonExistentIndexName")
+
- def test_create_connection(self):
- """
- Connection()
- """
+def test_connection__create():
+ _ = Connection()
+ conn = Connection(host='http://foohost')
+ assert conn.client
+ assert repr(conn) == "Connection"
+
+
+def test_connection__subsequent_client_is_not_cached_when_credentials_none():
+ with patch('pynamodb.connection.Connection.session') as session_mock:
+ session_mock.create_client.return_value._request_signer._credentials = None
conn = Connection()
- self.assertIsNotNone(conn)
- conn = Connection(host='http://foohost')
- self.assertIsNotNone(conn.client)
- self.assertIsNotNone(conn)
- self.assertEqual(repr(conn), "Connection<{}>".format(conn.host))
-
- def test_subsequent_client_is_not_cached_when_credentials_none(self):
- with patch('pynamodb.connection.Connection.session') as session_mock:
- session_mock.create_client.return_value._request_signer._credentials = None
- conn = Connection()
-
- # make two calls to .client property, expect two calls to create client
- self.assertIsNotNone(conn.client)
- self.assertIsNotNone(conn.client)
-
- session_mock.create_client.assert_has_calls(
- [
- mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY),
- mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY),
- ],
- any_order=True
- )
- def test_subsequent_client_is_cached_when_credentials_truthy(self):
- with patch('pynamodb.connection.Connection.session') as session_mock:
- session_mock.create_client.return_value._request_signer._credentials = True
- conn = Connection()
+ # make two calls to .client property, expect two calls to create client
+ assert conn.client
+ conn.client
- # make two calls to .client property, expect one call to create client
- self.assertIsNotNone(conn.client)
- self.assertIsNotNone(conn.client)
+ session_mock.create_client.assert_has_calls(
+ [
+ mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY),
+ mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY),
+ ],
+ any_order=True
+ )
- self.assertEqual(
- session_mock.create_client.mock_calls.count(mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY)),
- 1
- )
-
- def test_client_is_passed_region_when_set(self):
- with patch('pynamodb.connection.Connection.session') as session_mock:
- session_mock.create_client.return_value._request_signer._credentials = True
- conn = Connection(self.region)
- self.assertIsNotNone(conn.client)
+def test_connection__subsequent_client_is_cached_when_credentials_truthy():
+ with patch('pynamodb.connection.Connection.session') as session_mock:
+ session_mock.create_client.return_value._request_signer._credentials = True
+ conn = Connection()
- self.assertEqual(
- session_mock.create_client.mock_calls.count(mock.call('dynamodb', self.region, endpoint_url=None, config=mock.ANY)),
- 1
- )
+ # make two calls to .client property, expect one call to create client
+ assert conn.client
+ assert conn.client
- def test_create_table(self):
- """
- Connection.create_table
- """
- conn = Connection(self.region)
- kwargs = {
- 'read_capacity_units': 1,
- 'write_capacity_units': 1,
+ assert (
+ session_mock.create_client.mock_calls.count(mock.call('dynamodb', None, endpoint_url=None, config=mock.ANY)) ==
+ 1
+ )
+
+
+def test_connection__client_is_passed_region_when_set():
+ with patch('pynamodb.connection.Connection.session') as session_mock:
+ session_mock.create_client.return_value._request_signer._credentials = True
+ conn = Connection(REGION)
+
+ assert conn.client
+
+ assert (
+ session_mock.create_client.mock_calls.count(mock.call('dynamodb', REGION, endpoint_url=None, config=mock.ANY)) ==
+ 1
+ )
+
+
+def test_connection_create_table():
+ """
+ Connection.create_table
+ """
+ conn = Connection(REGION)
+ kwargs = {
+ 'read_capacity_units': 1,
+ 'write_capacity_units': 1,
+ }
+ with pytest.raises(ValueError):
+ conn.create_table(TEST_TABLE_NAME, **kwargs)
+
+ kwargs['attribute_definitions'] = [
+ {
+ 'attribute_name': 'key1',
+ 'attribute_type': 'S'
+ },
+ {
+ 'attribute_name': 'key2',
+ 'attribute_type': 'S'
}
- self.assertRaises(ValueError, conn.create_table, self.test_table_name, **kwargs)
- kwargs['attribute_definitions'] = [
+ ]
+ with pytest.raises(ValueError):
+ conn.create_table(TEST_TABLE_NAME, **kwargs)
+
+ kwargs['key_schema'] = [
+ {
+ 'attribute_name': 'key1',
+ 'key_type': 'hash'
+ },
+ {
+ 'attribute_name': 'key2',
+ 'key_type': 'range'
+ }
+ ]
+ params = {
+ 'TableName': TEST_TABLE_NAME,
+ 'ProvisionedThroughput': {
+ 'WriteCapacityUnits': 1,
+ 'ReadCapacityUnits': 1
+ },
+ 'AttributeDefinitions': [
{
- 'attribute_name': 'key1',
- 'attribute_type': 'S'
+ 'AttributeType': 'S',
+ 'AttributeName': 'key1'
},
{
- 'attribute_name': 'key2',
- 'attribute_type': 'S'
+ 'AttributeType': 'S',
+ 'AttributeName': 'key2'
}
- ]
- self.assertRaises(ValueError, conn.create_table, self.test_table_name, **kwargs)
- kwargs['key_schema'] = [
+ ],
+ 'KeySchema': [
{
- 'attribute_name': 'key1',
- 'key_type': 'hash'
+ 'KeyType': 'HASH',
+ 'AttributeName': 'key1'
},
{
- 'attribute_name': 'key2',
- 'key_type': 'range'
+ 'KeyType': 'RANGE',
+ 'AttributeName': 'key2'
}
]
- params = {
- 'TableName': 'Thread',
- 'ProvisionedThroughput': {
+ }
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(TableError):
+ conn.create_table(TEST_TABLE_NAME, **kwargs)
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn.create_table(
+ TEST_TABLE_NAME,
+ **kwargs
+ )
+ assert req.call_args[0][1] == params
+
+ kwargs['global_secondary_indexes'] = [
+ {
+ 'index_name': 'alt-index',
+ 'key_schema': [
+ {
+ 'KeyType': 'HASH',
+ 'AttributeName': 'AltKey'
+ }
+ ],
+ 'projection': {
+ 'ProjectionType': 'KEYS_ONLY'
+ },
+ 'provisioned_throughput': {
+ 'ReadCapacityUnits': 1,
'WriteCapacityUnits': 1,
- 'ReadCapacityUnits': 1
},
- 'AttributeDefinitions': [
- {
- 'AttributeType': 'S',
- 'AttributeName': 'key1'
- },
+ }
+ ]
+ params['GlobalSecondaryIndexes'] = [{'IndexName': 'alt-index', 'Projection': {'ProjectionType': 'KEYS_ONLY'},
+ 'KeySchema': [{'AttributeName': 'AltKey', 'KeyType': 'HASH'}],
+ 'ProvisionedThroughput': {'ReadCapacityUnits': 1,
+ 'WriteCapacityUnits': 1}}]
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn.create_table(
+ TEST_TABLE_NAME,
+ **kwargs
+ )
+ # Ensure that the hash key is first when creating indexes
+ assert req.call_args[0][1]['GlobalSecondaryIndexes'][0]['KeySchema'][0]['KeyType'] == 'HASH'
+ assert req.call_args[0][1] == params
+ del(kwargs['global_secondary_indexes'])
+ del(params['GlobalSecondaryIndexes'])
+
+ kwargs['local_secondary_indexes'] = [
+ {
+ 'index_name': 'alt-index',
+ 'projection': {
+ 'ProjectionType': 'KEYS_ONLY'
+ },
+ 'key_schema': [
{
- 'AttributeType': 'S',
- 'AttributeName': 'key2'
+ 'AttributeName': 'AltKey', 'KeyType': 'HASH'
}
],
+ 'provisioned_throughput': {
+ 'ReadCapacityUnits': 1,
+ 'WriteCapacityUnits': 1
+ }
+ }
+ ]
+ params['LocalSecondaryIndexes'] = [
+ {
+ 'Projection': {
+ 'ProjectionType': 'KEYS_ONLY'
+ },
'KeySchema': [
{
'KeyType': 'HASH',
- 'AttributeName': 'key1'
- },
- {
- 'KeyType': 'RANGE',
- 'AttributeName': 'key2'
+ 'AttributeName': 'AltKey'
}
- ]
+ ],
+ 'IndexName': 'alt-index'
}
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(TableError, conn.create_table, self.test_table_name, **kwargs)
+ ]
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn.create_table(
+ TEST_TABLE_NAME,
+ **kwargs
+ )
+ assert req.call_args[0][1] == params
+
+ kwargs['stream_specification'] = {
+ 'stream_enabled': True,
+ 'stream_view_type': 'NEW_IMAGE'
+ }
+ params['StreamSpecification'] = {
+ 'StreamEnabled': True,
+ 'StreamViewType': 'NEW_IMAGE'
+ }
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn.create_table(
+ TEST_TABLE_NAME,
+ **kwargs
+ )
+ assert req.call_args[0][1] == params
+
+ kwargs['billing_mode'] = PAY_PER_REQUEST_BILLING_MODE
+ params['BillingMode'] = PAY_PER_REQUEST_BILLING_MODE
+ del params['ProvisionedThroughput']
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn.create_table(
+ TEST_TABLE_NAME,
+ **kwargs
+ )
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn.create_table(
- self.test_table_name,
- **kwargs
- )
- self.assertEqual(req.call_args[0][1], params)
- kwargs['global_secondary_indexes'] = [
- {
- 'index_name': 'alt-index',
- 'key_schema': [
- {
- 'KeyType': 'HASH',
- 'AttributeName': 'AltKey'
- }
- ],
- 'projection': {
- 'ProjectionType': 'KEYS_ONLY'
- },
- 'provisioned_throughput': {
- 'ReadCapacityUnits': 1,
- 'WriteCapacityUnits': 1,
- },
- }
- ]
- params['GlobalSecondaryIndexes'] = [{'IndexName': 'alt-index', 'Projection': {'ProjectionType': 'KEYS_ONLY'},
- 'KeySchema': [{'AttributeName': 'AltKey', 'KeyType': 'HASH'}],
- 'ProvisionedThroughput': {'ReadCapacityUnits': 1,
- 'WriteCapacityUnits': 1}}]
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn.create_table(
- self.test_table_name,
- **kwargs
- )
- # Ensure that the hash key is first when creating indexes
- self.assertEqual(req.call_args[0][1]['GlobalSecondaryIndexes'][0]['KeySchema'][0]['KeyType'], 'HASH')
- self.assertEqual(req.call_args[0][1], params)
- del(kwargs['global_secondary_indexes'])
- del(params['GlobalSecondaryIndexes'])
+def test_connection_delete_table():
+ """
+ Connection.delete_table
+ """
+ params = {'TableName': TEST_TABLE_NAME}
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn = Connection(REGION)
+ conn.delete_table(TEST_TABLE_NAME)
+ kwargs = req.call_args[0][1]
+ assert kwargs == params
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ conn = Connection(REGION)
+ with pytest.raises(TableError):
+ conn.delete_table(TEST_TABLE_NAME)
+
+
+def test_connection_update_table():
+ """
+ Connection.update_table
+ """
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn = Connection(REGION)
+ params = {
+ 'ProvisionedThroughput': {
+ 'WriteCapacityUnits': 2,
+ 'ReadCapacityUnits': 2
+ },
+ 'TableName': TEST_TABLE_NAME,
+ }
+ conn.update_table(
+ TEST_TABLE_NAME,
+ read_capacity_units=2,
+ write_capacity_units=2
+ )
+ assert req.call_args[0][1] == params
+
+ with pytest.raises(ValueError):
+ conn.update_table(TEST_TABLE_NAME, read_capacity_units=2)
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ conn = Connection(REGION)
+ with pytest.raises(TableError):
+ conn.update_table(TEST_TABLE_NAME, read_capacity_units=2, write_capacity_units=2)
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = None
+ conn = Connection(REGION)
- kwargs['local_secondary_indexes'] = [
+ global_secondary_index_updates = [
{
- 'index_name': 'alt-index',
- 'projection': {
- 'ProjectionType': 'KEYS_ONLY'
- },
- 'key_schema': [
- {
- 'AttributeName': 'AltKey', 'KeyType': 'HASH'
- }
- ],
- 'provisioned_throughput': {
- 'ReadCapacityUnits': 1,
- 'WriteCapacityUnits': 1
- }
+ "index_name": "foo-index",
+ "read_capacity_units": 2,
+ "write_capacity_units": 2
}
]
- params['LocalSecondaryIndexes'] = [
- {
- 'Projection': {
- 'ProjectionType': 'KEYS_ONLY'
- },
- 'KeySchema': [
- {
- 'KeyType': 'HASH',
- 'AttributeName': 'AltKey'
+ params = {
+ 'TableName': TEST_TABLE_NAME,
+ 'ProvisionedThroughput': {
+ 'ReadCapacityUnits': 2,
+ 'WriteCapacityUnits': 2,
+ },
+ 'GlobalSecondaryIndexUpdates': [
+ {
+ 'Update': {
+ 'IndexName': 'foo-index',
+ 'ProvisionedThroughput': {
+ 'ReadCapacityUnits': 2,
+ 'WriteCapacityUnits': 2,
+ }
}
- ],
- 'IndexName': 'alt-index'
- }
- ]
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn.create_table(
- self.test_table_name,
- **kwargs
- )
- self.assertEqual(req.call_args[0][1], params)
+ }
- kwargs['stream_specification'] = {
- 'stream_enabled': True,
- 'stream_view_type': 'NEW_IMAGE'
- }
- params['StreamSpecification'] = {
- 'StreamEnabled': True,
- 'StreamViewType': 'NEW_IMAGE'
+ ]
}
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn.create_table(
- self.test_table_name,
- **kwargs
- )
- self.assertEqual(req.call_args[0][1], params)
+ conn.update_table(
+ TEST_TABLE_NAME,
+ read_capacity_units=2,
+ write_capacity_units=2,
+ global_secondary_index_updates=global_secondary_index_updates
+ )
+ assert req.call_args[0][1] == params
- kwargs['billing_mode'] = PAY_PER_REQUEST_BILLING_MODE
- params['BillingMode'] = PAY_PER_REQUEST_BILLING_MODE
- del params['ProvisionedThroughput']
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn.create_table(
- self.test_table_name,
- **kwargs
- )
- self.assertEqual(req.call_args[0][1], params)
- def test_delete_table(self):
- """
- Connection.delete_table
- """
- params = {'TableName': 'Thread'}
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn = Connection(self.region)
- conn.delete_table(self.test_table_name)
- kwargs = req.call_args[0][1]
- self.assertEqual(kwargs, params)
+def test_connection_describe_table():
+ """
+ Connection.describe_table
+ """
+ with patch(PATCH_METHOD) as req:
+ req.return_value = DESCRIBE_TABLE_DATA
+ conn = Connection(REGION)
+ conn.describe_table(TEST_TABLE_NAME)
+ assert req.call_args[0][1] == {'TableName': TEST_TABLE_NAME}
+ with pytest.raises(TableDoesNotExist):
with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- conn = Connection(self.region)
- self.assertRaises(TableError, conn.delete_table, self.test_table_name)
-
- def test_update_table(self):
- """
- Connection.update_table
- """
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn = Connection(self.region)
- params = {
- 'ProvisionedThroughput': {
- 'WriteCapacityUnits': 2,
- 'ReadCapacityUnits': 2
- },
- 'TableName': 'Thread'
- }
- conn.update_table(
- self.test_table_name,
- read_capacity_units=2,
- write_capacity_units=2
- )
- self.assertEqual(req.call_args[0][1], params)
+ req.side_effect = ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}}, "DescribeTable")
+ conn = Connection(REGION)
+ conn.describe_table(TEST_TABLE_NAME)
- self.assertRaises(ValueError, conn.update_table, self.test_table_name, read_capacity_units=2)
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- conn = Connection(self.region)
- self.assertRaises(
- TableError,
- conn.update_table,
- self.test_table_name,
- read_capacity_units=2,
- write_capacity_units=2)
+def test_connection_list_tables():
+ """
+ Connection.list_tables
+ """
+ with patch(PATCH_METHOD) as req:
+ req.return_value = LIST_TABLE_DATA
+ conn = Connection(REGION)
+ conn.list_tables(exclusive_start_table_name='Thread')
+ assert req.call_args[0][1] == {'ExclusiveStartTableName': 'Thread'}
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = LIST_TABLE_DATA
+ conn = Connection(REGION)
+ conn.list_tables(limit=3)
+ assert req.call_args[0][1] == {'Limit': 3}
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = LIST_TABLE_DATA
+ conn = Connection(REGION)
+ conn.list_tables()
+ assert req.call_args[0][1] == {}
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ conn = Connection(REGION)
+ with pytest.raises(TableError):
+ conn.list_tables()
- with patch(PATCH_METHOD) as req:
- req.return_value = None
- conn = Connection(self.region)
- global_secondary_index_updates = [
- {
- "index_name": "foo-index",
- "read_capacity_units": 2,
- "write_capacity_units": 2
+@pytest.mark.filterwarnings("ignore:Legacy conditional")
+def test_connection_delete_item():
+ """
+ Connection.delete_item
+ """
+ conn = Connection(REGION)
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(DeleteError):
+ conn.delete_item(TEST_TABLE_NAME, "foo", "bar")
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.delete_item(
+ TEST_TABLE_NAME,
+ "Amazon DynamoDB",
+ "How do I update multiple items?")
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
+ },
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
}
- ]
- params = {
- 'TableName': 'Thread',
- 'ProvisionedThroughput': {
- 'ReadCapacityUnits': 2,
- 'WriteCapacityUnits': 2,
+ },
+ 'TableName': TEST_TABLE_NAME}
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.delete_item(
+ TEST_TABLE_NAME,
+ "Amazon DynamoDB",
+ "How do I update multiple items?",
+ return_values='ALL_NEW'
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
},
- 'GlobalSecondaryIndexUpdates': [
- {
- 'Update': {
- 'IndexName': 'foo-index',
- 'ProvisionedThroughput': {
- 'ReadCapacityUnits': 2,
- 'WriteCapacityUnits': 2,
- }
- }
- }
-
- ]
- }
- conn.update_table(
- self.test_table_name,
- read_capacity_units=2,
- write_capacity_units=2,
- global_secondary_index_updates=global_secondary_index_updates
- )
- self.assertEqual(req.call_args[0][1], params)
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
+ }
+ },
+ 'TableName': TEST_TABLE_NAME,
+ 'ReturnValues': 'ALL_NEW'
+ }
+ assert req.call_args[0][1] == params
- def test_describe_table(self):
- """
- Connection.describe_table
- """
- with patch(PATCH_METHOD) as req:
- req.return_value = DESCRIBE_TABLE_DATA
- conn = Connection(self.region)
- conn.describe_table(self.test_table_name)
- self.assertEqual(req.call_args[0][1], {'TableName': 'Thread'})
-
- with self.assertRaises(TableDoesNotExist):
- with patch(PATCH_METHOD) as req:
- req.side_effect = ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}}, "DescribeTable")
- conn = Connection(self.region)
- conn.describe_table(self.test_table_name)
-
- def test_list_tables(self):
- """
- Connection.list_tables
- """
- with patch(PATCH_METHOD) as req:
- req.return_value = LIST_TABLE_DATA
- conn = Connection(self.region)
- conn.list_tables(exclusive_start_table_name='Thread')
- self.assertEqual(req.call_args[0][1], {'ExclusiveStartTableName': 'Thread'})
+ with pytest.raises(ValueError):
+ conn.delete_item(TEST_TABLE_NAME, "foo", "bar", return_values='bad_values')
- with patch(PATCH_METHOD) as req:
- req.return_value = LIST_TABLE_DATA
- conn = Connection(self.region)
- conn.list_tables(limit=3)
- self.assertEqual(req.call_args[0][1], {'Limit': 3})
+ with pytest.raises(ValueError):
+ conn.delete_item(TEST_TABLE_NAME, "foo", "bar", return_consumed_capacity='badvalue')
- with patch(PATCH_METHOD) as req:
- req.return_value = LIST_TABLE_DATA
- conn = Connection(self.region)
- conn.list_tables()
- self.assertEqual(req.call_args[0][1], {})
+ with pytest.raises(ValueError):
+ conn.delete_item(TEST_TABLE_NAME, "foo", "bar", return_item_collection_metrics='badvalue')
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- conn = Connection(self.region)
- self.assertRaises(TableError, conn.list_tables)
-
- @pytest.mark.filterwarnings("ignore:Legacy conditional")
- def test_delete_item(self):
- """
- Connection.delete_item
- """
- conn = Connection(self.region)
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.delete_item(
+ TEST_TABLE_NAME,
+ "Amazon DynamoDB",
+ "How do I update multiple items?",
+ return_consumed_capacity='TOTAL'
+ )
+ params = {
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
+ },
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
+ }
+ },
+ 'TableName': TEST_TABLE_NAME,
+ 'ReturnConsumedCapacity': 'TOTAL'
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.delete_item(
+ TEST_TABLE_NAME,
+ "Amazon DynamoDB",
+ "How do I update multiple items?",
+ return_item_collection_metrics='SIZE'
+ )
+ params = {
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
+ },
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
+ }
+ },
+ 'TableName': TEST_TABLE_NAME,
+ 'ReturnItemCollectionMetrics': 'SIZE',
+ 'ReturnConsumedCapacity': 'TOTAL'
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.delete_item(
+ TEST_TABLE_NAME,
+ "Amazon DynamoDB",
+ "How do I update multiple items?",
+ condition=Path('ForumName').does_not_exist(),
+ return_item_collection_metrics='SIZE'
+ )
+ params = {
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
+ },
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
+ }
+ },
+ 'ConditionExpression': 'attribute_not_exists (#0)',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'TableName': TEST_TABLE_NAME,
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ReturnItemCollectionMetrics': 'SIZE'
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(DeleteError, conn.delete_item, self.test_table_name, "foo", "bar")
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.delete_item(
- self.test_table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?")
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
+def test_connection_get_item():
+ """
+ Connection.get_item
+ """
+ conn = Connection(REGION)
+ table_name = 'Thread'
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = GET_ITEM_DATA
+ item = conn.get_item(table_name, "Amazon DynamoDB", "How do I update multiple items?")
+ assert item == GET_ITEM_DATA
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(GetError):
+ conn.get_item(table_name, "Amazon DynamoDB", "How do I update multiple items?")
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = GET_ITEM_DATA
+ conn.get_item(
+ table_name,
+ "Amazon DynamoDB",
+ "How do I update multiple items?",
+ attributes_to_get=['ForumName']
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ProjectionExpression': '#0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'Key': {
+ 'ForumName': {
+ 'S': 'Amazon DynamoDB'
},
- 'TableName': self.test_table_name}
- self.assertEqual(req.call_args[0][1], params)
+ 'Subject': {
+ 'S': 'How do I update multiple items?'
+ }
+ },
+ 'ConsistentRead': False,
+ 'TableName': 'Thread'
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.delete_item(
- self.test_table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?",
- return_values='ALL_NEW'
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
- },
- 'TableName': self.test_table_name,
- 'ReturnValues': 'ALL_NEW'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- self.assertRaises(
- ValueError,
- conn.delete_item,
- self.test_table_name,
- "foo",
- "bar",
- return_values='bad_values')
-
- self.assertRaises(
- ValueError,
- conn.delete_item,
- self.test_table_name,
- "foo",
- "bar",
- return_consumed_capacity='badvalue')
-
- self.assertRaises(
- ValueError,
- conn.delete_item,
- self.test_table_name,
- "foo",
- "bar",
- return_item_collection_metrics='badvalue')
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.delete_item(
- self.test_table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?",
- return_consumed_capacity='TOTAL'
- )
- params = {
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
+@pytest.mark.filterwarnings("ignore")
+def test_connection_update_item():
+ """
+ Connection.update_item
+ """
+ conn = Connection()
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with pytest.raises(ValueError):
+ conn.update_item(TEST_TABLE_NAME, 'foo-key')
+
+ with pytest.raises(ValueError):
+ conn.update_item(TEST_TABLE_NAME, 'foo', actions=[])
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.update_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ return_consumed_capacity='TOTAL',
+ return_item_collection_metrics='NONE',
+ return_values='ALL_NEW',
+ actions=[Path('Subject').set('foo-subject')],
+ condition=Path('Forum').does_not_exist(),
+ range_key='foo-range-key',
+ )
+ params = {
+ 'ReturnValues': 'ALL_NEW',
+ 'ReturnItemCollectionMetrics': 'NONE',
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Key': {
+ 'ForumName': {
+ 'S': 'foo-key'
},
- 'TableName': self.test_table_name,
- 'ReturnConsumedCapacity': 'TOTAL'
- }
- self.assertEqual(req.call_args[0][1], params)
+ 'Subject': {
+ 'S': 'foo-range-key'
+ }
+ },
+ 'ConditionExpression': 'attribute_not_exists (#0)',
+ 'UpdateExpression': 'SET #1 = :0',
+ 'ExpressionAttributeNames': {
+ '#0': 'Forum',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'foo-subject'
+ }
+ },
+ 'TableName': TEST_TABLE_NAME
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.delete_item(
- self.test_table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?",
- return_item_collection_metrics='SIZE'
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ # attributes are missing
+ with pytest.raises(ValueError):
+ conn.update_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ range_key='foo-range-key',
)
- params = {
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
- },
- 'TableName': self.test_table_name,
- 'ReturnItemCollectionMetrics': 'SIZE',
- 'ReturnConsumedCapacity': 'TOTAL'
- }
- self.assertEqual(req.call_args[0][1], params)
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.delete_item(
- self.test_table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?",
- condition=Path('ForumName').does_not_exist(),
- return_item_collection_metrics='SIZE'
- )
- params = {
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.update_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ actions=[Path('Subject').set('Bar')],
+ condition=(Path('ForumName').does_not_exist() & (Path('Subject') == 'Foo')),
+ range_key='foo-range-key',
+ )
+ params = {
+ 'Key': {
+ 'ForumName': {
+ 'S': 'foo-key'
},
- 'ConditionExpression': 'attribute_not_exists (#0)',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
+ 'Subject': {
+ 'S': 'foo-range-key'
+ }
+ },
+ 'ConditionExpression': '(attribute_not_exists (#0) AND #1 = :0)',
+ 'UpdateExpression': 'SET #1 = :1',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'Foo'
},
- 'TableName': self.test_table_name,
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ReturnItemCollectionMetrics': 'SIZE'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- def test_get_item(self):
- """
- Connection.get_item
- """
- conn = Connection(self.region)
- table_name = 'Thread'
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+ ':1': {
+ 'S': 'Bar'
+ }
+ },
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': TEST_TABLE_NAME,
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = GET_ITEM_DATA
- item = conn.get_item(table_name, "Amazon DynamoDB", "How do I update multiple items?")
- self.assertEqual(item, GET_ITEM_DATA)
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(UpdateError):
+ conn.update_item(TEST_TABLE_NAME, 'foo-key', range_key='foo-range-key', actions=[SetAction(Path('bar'), Value('foobar'))])
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- GetError,
- conn.get_item,
- table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?"
- )
- with patch(PATCH_METHOD) as req:
- req.return_value = GET_ITEM_DATA
- conn.get_item(
- table_name,
- "Amazon DynamoDB",
- "How do I update multiple items?",
- attributes_to_get=['ForumName']
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ProjectionExpression': '#0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
+def test_connection_put_item():
+ """
+ Connection.put_item
+ """
+ conn = Connection(REGION)
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(TableError):
+ conn.put_item('foo-key', TEST_TABLE_NAME, return_values='ALL_NEW', attributes={'ForumName': 'foo-value'})
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.put_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ range_key='foo-range-key',
+ return_consumed_capacity='TOTAL',
+ return_item_collection_metrics='SIZE',
+ return_values='ALL_NEW',
+ attributes={'ForumName': 'foo-value'}
+ )
+ params = {
+ 'ReturnValues': 'ALL_NEW',
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ReturnItemCollectionMetrics': 'SIZE',
+ 'TableName': TEST_TABLE_NAME,
+ 'Item': {
+ 'ForumName': {
+ 'S': 'foo-value'
},
- 'Key': {
- 'ForumName': {
- 'S': 'Amazon DynamoDB'
- },
- 'Subject': {
- 'S': 'How do I update multiple items?'
- }
+ 'Subject': {
+ 'S': 'foo-range-key'
+ }
+ }
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(PutError):
+ conn.put_item(TEST_TABLE_NAME, 'foo-key', range_key='foo-range-key', attributes={'ForumName': 'foo-value'})
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.put_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ range_key='foo-range-key',
+ attributes={'ForumName': 'foo-value'}
+ )
+ params = {'TableName': TEST_TABLE_NAME,
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Item': {'ForumName': {'S': 'foo-value'}, 'Subject': {'S': 'foo-range-key'}}}
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.put_item(
+ TEST_TABLE_NAME,
+ 'foo-key',
+ range_key='foo-range-key',
+ attributes={'ForumName': 'foo-value'}
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Item': {
+ 'ForumName': {
+ 'S': 'foo-value'
+ },
+ 'Subject': {
+ 'S': 'foo-range-key'
+ }
+ },
+ 'TableName': TEST_TABLE_NAME
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.put_item(
+ TEST_TABLE_NAME,
+ 'item1-hash',
+ range_key='item1-range',
+ attributes={'foo': {'S': 'bar'}},
+ condition=(Path('Forum').does_not_exist() & (Path('Subject') == 'Foo'))
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': TEST_TABLE_NAME,
+ 'ConditionExpression': '(attribute_not_exists (#0) AND #1 = :0)',
+ 'ExpressionAttributeNames': {
+ '#0': 'Forum',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'Foo'
+ }
+ },
+ 'Item': {
+ 'ForumName': {
+ 'S': 'item1-hash'
+ },
+ 'foo': {
+ 'S': 'bar'
+ },
+ 'Subject': {
+ 'S': 'item1-range'
+ }
+ }
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.put_item(
+ TEST_TABLE_NAME,
+ 'item1-hash',
+ range_key='item1-range',
+ attributes={'foo': {'S': 'bar'}},
+ condition=(Path('ForumName') == 'item1-hash')
+ )
+ params = {
+ 'TableName': TEST_TABLE_NAME,
+ 'ConditionExpression': '#0 = :0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'item1-hash'
+ }
+ },
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'Item': {
+ 'ForumName': {
+ 'S': 'item1-hash'
},
- 'ConsistentRead': False,
- 'TableName': 'Thread'
+ 'foo': {
+ 'S': 'bar'
+ },
+ 'Subject': {
+ 'S': 'item1-range'
+ }
}
- self.assertEqual(req.call_args[0][1], params)
+ }
+ assert req.call_args[0][1] == params
- @pytest.mark.filterwarnings("ignore")
- def test_update_item(self):
- """
- Connection.update_item
- """
- conn = Connection()
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
- self.assertRaises(ValueError, conn.update_item, self.test_table_name, 'foo-key')
+def test_connection_transact_write_items():
+ conn = Connection()
+ with patch(PATCH_METHOD) as req:
+ conn.transact_write_items([], [], [], [])
+ assert req.call_args[0][0] == 'TransactWriteItems'
+ assert req.call_args[0][1] == {
+ 'TransactItems': [],
+ 'ReturnConsumedCapacity': 'TOTAL'
+ }
- self.assertRaises(ValueError, conn.update_item, self.test_table_name, 'foo', actions=[])
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.update_item(
- self.test_table_name,
- 'foo-key',
- return_consumed_capacity='TOTAL',
- return_item_collection_metrics='NONE',
- return_values='ALL_NEW',
- actions=[Path('Subject').set('foo-subject')],
- condition=Path('Forum').does_not_exist(),
- range_key='foo-range-key',
- )
- params = {
- 'ReturnValues': 'ALL_NEW',
- 'ReturnItemCollectionMetrics': 'NONE',
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Key': {
- 'ForumName': {
- 'S': 'foo-key'
- },
- 'Subject': {
- 'S': 'foo-range-key'
- }
- },
- 'ConditionExpression': 'attribute_not_exists (#0)',
- 'UpdateExpression': 'SET #1 = :0',
- 'ExpressionAttributeNames': {
- '#0': 'Forum',
- '#1': 'Subject'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'foo-subject'
- }
- },
- 'TableName': 'Thread'
- }
- self.assertEqual(req.call_args[0][1], params)
+def test_connection_transact_get_items():
+ conn = Connection()
+ with patch(PATCH_METHOD) as req:
+ conn.transact_get_items([])
+ assert req.call_args[0][0] == 'TransactGetItems'
+ assert req.call_args[0][1] == {
+ 'TransactItems': [],
+ 'ReturnConsumedCapacity': 'TOTAL'
+ }
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- # attributes are missing
- with self.assertRaises(ValueError):
- conn.update_item(
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
- )
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.update_item(
- self.test_table_name,
- 'foo-key',
- actions=[Path('Subject').set('Bar')],
- condition=(Path('ForumName').does_not_exist() & (Path('Subject') == 'Foo')),
- range_key='foo-range-key',
- )
- params = {
- 'Key': {
- 'ForumName': {
- 'S': 'foo-key'
- },
- 'Subject': {
- 'S': 'foo-range-key'
- }
- },
- 'ConditionExpression': '(attribute_not_exists (#0) AND #1 = :0)',
- 'UpdateExpression': 'SET #1 = :1',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName',
- '#1': 'Subject'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'Foo'
- },
- ':1': {
- 'S': 'Bar'
- }
- },
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': 'Thread'
+def test_connection_batch_write_item():
+ """
+ Connection.batch_write_item
+ """
+ items = []
+ conn = Connection()
+ table_name = 'Thread'
+ for i in range(10):
+ items.append(
+ {"ForumName": "FooForum", "Subject": "thread-{}".format(i)}
+ )
+ with pytest.raises(ValueError):
+ conn.batch_write_item(table_name)
+
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_write_item(
+ table_name,
+ put_items=items,
+ return_item_collection_metrics='SIZE',
+ return_consumed_capacity='TOTAL'
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ReturnItemCollectionMetrics': 'SIZE',
+ 'RequestItems': {
+ 'Thread': [
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
+ ]
}
- self.assertEqual(req.call_args[0][1], params)
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- UpdateError,
- conn.update_item,
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
- actions=[SetAction(Path('bar'), Value('foobar'))],
- )
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_write_item(
+ table_name,
+ put_items=items
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'RequestItems': {
+ 'Thread': [
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
+ {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
+ ]
+ }
+ }
+ assert req.call_args[0][1] == params
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(PutError):
+ conn.batch_write_item(table_name, delete_items=items)
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_write_item(
+ table_name,
+ delete_items=items
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'RequestItems': {
+ 'Thread': [
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
+ ]
+ }
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_write_item(
+ table_name,
+ delete_items=items,
+ return_consumed_capacity='TOTAL',
+ return_item_collection_metrics='SIZE'
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ReturnItemCollectionMetrics': 'SIZE',
+ 'RequestItems': {
+ 'Thread': [
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
+ {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
+ ]
+ }
+ }
+ assert req.call_args[0][1] == params
- def test_put_item(self):
- """
- Connection.put_item
- """
- conn = Connection(self.region)
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- TableError,
- conn.put_item,
- 'foo-key',
- self.test_table_name,
- return_values='ALL_NEW',
- attributes={'ForumName': 'foo-value'}
- )
+def test_connection_batch_get_item():
+ """
+ Connection.batch_get_item
+ """
+ items = []
+ conn = Connection()
+ table_name = 'Thread'
+ for i in range(10):
+ items.append(
+ {"ForumName": "FooForum", "Subject": "thread-{}".format(i)}
+ )
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.put_item(
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(GetError):
+ conn.batch_get_item(
+ table_name,
+ items,
+ consistent_read=True,
return_consumed_capacity='TOTAL',
- return_item_collection_metrics='SIZE',
- return_values='ALL_NEW',
- attributes={'ForumName': 'foo-value'}
- )
- params = {
- 'ReturnValues': 'ALL_NEW',
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ReturnItemCollectionMetrics': 'SIZE',
- 'TableName': self.test_table_name,
- 'Item': {
- 'ForumName': {
- 'S': 'foo-value'
- },
- 'Subject': {
- 'S': 'foo-range-key'
- }
- }
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- PutError,
- conn.put_item,
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
- attributes={'ForumName': 'foo-value'}
- )
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.put_item(
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
- attributes={'ForumName': 'foo-value'}
+ attributes_to_get=['ForumName']
)
- params = {'TableName': self.test_table_name,
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Item': {'ForumName': {'S': 'foo-value'}, 'Subject': {'S': 'foo-range-key'}}}
- self.assertEqual(req.call_args[0][1], params)
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.put_item(
- self.test_table_name,
- 'foo-key',
- range_key='foo-range-key',
- attributes={'ForumName': 'foo-value'}
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Item': {
- 'ForumName': {
- 'S': 'foo-value'
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_get_item(
+ table_name,
+ items,
+ consistent_read=True,
+ return_consumed_capacity='TOTAL',
+ attributes_to_get=['ForumName']
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'RequestItems': {
+ 'Thread': {
+ 'ConsistentRead': True,
+ 'ProjectionExpression': '#0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
},
- 'Subject': {
- 'S': 'foo-range-key'
- }
- },
- 'TableName': self.test_table_name
+ 'Keys': [
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
+ ]
+ }
}
- self.assertEqual(req.call_args[0][1], params)
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.put_item(
- self.test_table_name,
- 'item1-hash',
- range_key='item1-range',
- attributes={'foo': {'S': 'bar'}},
- condition=(Path('Forum').does_not_exist() & (Path('Subject') == 'Foo'))
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': self.test_table_name,
- 'ConditionExpression': '(attribute_not_exists (#0) AND #1 = :0)',
- 'ExpressionAttributeNames': {
- '#0': 'Forum',
- '#1': 'Subject'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'Foo'
- }
- },
- 'Item': {
- 'ForumName': {
- 'S': 'item1-hash'
- },
- 'foo': {
- 'S': 'bar'
- },
- 'Subject': {
- 'S': 'item1-range'
- }
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.batch_get_item(
+ table_name,
+ items
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'RequestItems': {
+ 'Thread': {
+ 'Keys': [
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
+ {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
+ ]
}
}
- self.assertEqual(req.call_args[0][1], params)
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.put_item(
- self.test_table_name,
- 'item1-hash',
- range_key='item1-range',
- attributes={'foo': {'S': 'bar'}},
- condition=(Path('ForumName') == 'item1-hash')
- )
- params = {
- 'TableName': self.test_table_name,
- 'ConditionExpression': '#0 = :0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
+
+def test_connection_query():
+ """
+ Connection.query
+ """
+ conn = Connection()
+ table_name = 'Thread'
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with pytest.raises(ValueError, match="Table Thread has no index: NonExistentIndexName"):
+ conn.query(table_name, "FooForum", limit=1, index_name='NonExistentIndexName')
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.query(
+ table_name,
+ "FooForum",
+ Path('Subject').startswith('thread'),
+ scan_index_forward=True,
+ return_consumed_capacity='TOTAL',
+ select='ALL_ATTRIBUTES'
+ )
+ params = {
+ 'ScanIndexForward': True,
+ 'Select': 'ALL_ATTRIBUTES',
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'KeyConditionExpression': '(#0 = :0 AND begins_with (#1, :1))',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'FooForum'
},
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'item1-hash'
- }
+ ':1': {
+ 'S': 'thread'
+ }
+ },
+ 'TableName': 'Thread'
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.query(
+ table_name,
+ "FooForum",
+ Path('Subject').startswith('thread')
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'KeyConditionExpression': '(#0 = :0 AND begins_with (#1, :1))',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'FooForum'
},
- 'ReturnConsumedCapacity': 'TOTAL',
- 'Item': {
- 'ForumName': {
- 'S': 'item1-hash'
- },
- 'foo': {
- 'S': 'bar'
- },
- 'Subject': {
- 'S': 'item1-range'
- }
+ ':1': {
+ 'S': 'thread'
}
- }
- self.assertEqual(req.call_args[0][1], params)
-
- def test_transact_write_items(self):
- conn = Connection()
- with patch(PATCH_METHOD) as req:
- conn.transact_write_items([], [], [], [])
- self.assertEqual(req.call_args[0][0], 'TransactWriteItems')
- self.assertDictEqual(
- req.call_args[0][1], {
- 'TransactItems': [],
- 'ReturnConsumedCapacity': 'TOTAL'
+ },
+ 'TableName': 'Thread'
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.query(
+ table_name,
+ "FooForum",
+ limit=1,
+ index_name='LastPostIndex',
+ attributes_to_get=['ForumName'],
+ exclusive_start_key="FooForum",
+ consistent_read=True
+ )
+ params = {
+ 'Limit': 1,
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ConsistentRead': True,
+ 'ExclusiveStartKey': {
+ 'ForumName': {
+ 'S': 'FooForum'
}
- )
-
- def test_transact_get_items(self):
- conn = Connection()
- with patch(PATCH_METHOD) as req:
- conn.transact_get_items([])
- self.assertEqual(req.call_args[0][0], 'TransactGetItems')
- self.assertDictEqual(
- req.call_args[0][1], {
- 'TransactItems': [],
- 'ReturnConsumedCapacity': 'TOTAL'
+ },
+ 'IndexName': 'LastPostIndex',
+ 'ProjectionExpression': '#0',
+ 'KeyConditionExpression': '#0 = :0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'FooForum'
}
- )
-
- def test_batch_write_item(self):
- """
- Connection.batch_write_item
- """
- items = []
- conn = Connection()
- table_name = 'Thread'
- for i in range(10):
- items.append(
- {"ForumName": "FooForum", "Subject": "thread-{}".format(i)}
- )
- self.assertRaises(
- ValueError,
- conn.batch_write_item,
- table_name)
-
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_write_item(
- table_name,
- put_items=items,
- return_item_collection_metrics='SIZE',
- return_consumed_capacity='TOTAL'
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ReturnItemCollectionMetrics': 'SIZE',
- 'RequestItems': {
- 'Thread': [
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
- ]
+ },
+ 'TableName': 'Thread'
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.query(
+ table_name,
+ "FooForum",
+ select='ALL_ATTRIBUTES',
+ exclusive_start_key="FooForum"
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'ExclusiveStartKey': {
+ 'ForumName': {
+ 'S': 'FooForum'
}
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_write_item(
- table_name,
- put_items=items
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'RequestItems': {
- 'Thread': [
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
- {'PutRequest': {'Item': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
- ]
+ },
+ 'KeyConditionExpression': '#0 = :0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'FooForum'
}
- }
- self.assertEqual(req.call_args[0][1], params)
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- PutError,
- conn.batch_write_item,
- table_name,
- delete_items=items
- )
+ },
+ 'TableName': 'Thread',
+ 'Select': 'ALL_ATTRIBUTES'
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_write_item(
- table_name,
- delete_items=items
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'RequestItems': {
- 'Thread': [
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
- ]
- }
- }
- self.assertEqual(req.call_args[0][1], params)
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_write_item(
- table_name,
- delete_items=items,
- return_consumed_capacity='TOTAL',
- return_item_collection_metrics='SIZE'
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ReturnItemCollectionMetrics': 'SIZE',
- 'RequestItems': {
- 'Thread': [
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}}}},
- {'DeleteRequest': {'Key': {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}}}
- ]
+def test_connection_scan():
+ """
+ Connection.scan
+ """
+ conn = Connection()
+ table_name = 'Thread'
+
+ conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.scan(
+ table_name,
+ segment=0,
+ total_segments=22,
+ consistent_read=True
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': table_name,
+ 'Segment': 0,
+ 'TotalSegments': 22,
+ 'ConsistentRead': True
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.scan(
+ table_name,
+ segment=0,
+ total_segments=22,
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': table_name,
+ 'Segment': 0,
+ 'TotalSegments': 22,
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.scan(
+ table_name,
+ return_consumed_capacity='TOTAL',
+ exclusive_start_key="FooForum",
+ limit=1,
+ segment=2,
+ total_segments=4,
+ attributes_to_get=['ForumName'],
+ index_name='LastPostIndex',
+ )
+ params = {
+ 'ProjectionExpression': '#0',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName'
+ },
+ 'ExclusiveStartKey': {
+ "ForumName": {
+ "S": "FooForum"
}
- }
- self.assertEqual(req.call_args[0][1], params)
-
- def test_batch_get_item(self):
- """
- Connection.batch_get_item
- """
- items = []
- conn = Connection()
- table_name = 'Thread'
- for i in range(10):
- items.append(
- {"ForumName": "FooForum", "Subject": "thread-{}".format(i)}
- )
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
-
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- GetError,
- conn.batch_get_item,
- table_name,
- items,
- consistent_read=True,
- return_consumed_capacity='TOTAL',
- attributes_to_get=['ForumName']
- )
+ },
+ 'TableName': table_name,
+ 'Limit': 1,
+ 'Segment': 2,
+ 'TotalSegments': 4,
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'IndexName': 'LastPostIndex'
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_get_item(
- table_name,
- items,
- consistent_read=True,
- return_consumed_capacity='TOTAL',
- attributes_to_get=['ForumName']
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'RequestItems': {
- 'Thread': {
- 'ConsistentRead': True,
- 'ProjectionExpression': '#0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
- },
- 'Keys': [
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
- ]
- }
- }
- }
- self.assertEqual(req.call_args[0][1], params)
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.scan(
+ table_name,
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': table_name
+ }
+ assert req.call_args[0][1] == params
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.batch_get_item(
- table_name,
- items
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'RequestItems': {
- 'Thread': {
- 'Keys': [
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-0'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-1'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-2'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-3'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-4'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-5'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-6'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-7'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-8'}},
- {'ForumName': {'S': 'FooForum'}, 'Subject': {'S': 'thread-9'}}
- ]
- }
+ with patch(PATCH_METHOD) as req:
+ req.return_value = {}
+ conn.scan(
+ table_name,
+ Path('ForumName').startswith('Foo') & Path('Subject').contains('Foo')
+ )
+ params = {
+ 'ReturnConsumedCapacity': 'TOTAL',
+ 'TableName': table_name,
+ 'FilterExpression': '(begins_with (#0, :0) AND contains (#1, :1))',
+ 'ExpressionAttributeNames': {
+ '#0': 'ForumName',
+ '#1': 'Subject'
+ },
+ 'ExpressionAttributeValues': {
+ ':0': {
+ 'S': 'Foo'
+ },
+ ':1': {
+ 'S': 'Foo'
}
}
- self.assertEqual(req.call_args[0][1], params)
-
- def test_query(self):
- """
- Connection.query
- """
- conn = Connection()
- table_name = 'Thread'
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
-
- with pytest.raises(ValueError, match="Table Thread has no index: NonExistentIndexName"):
- conn.query(table_name, "FooForum", limit=1, index_name='NonExistentIndexName')
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.query(
- table_name,
- "FooForum",
- Path('Subject').startswith('thread'),
- scan_index_forward=True,
- return_consumed_capacity='TOTAL',
- select='ALL_ATTRIBUTES'
- )
- params = {
- 'ScanIndexForward': True,
- 'Select': 'ALL_ATTRIBUTES',
- 'ReturnConsumedCapacity': 'TOTAL',
- 'KeyConditionExpression': '(#0 = :0 AND begins_with (#1, :1))',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName',
- '#1': 'Subject'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'FooForum'
- },
- ':1': {
- 'S': 'thread'
- }
+ }
+ assert req.call_args[0][1] == params
+
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(ScanError):
+ conn.scan(table_name)
+
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection__make_api_call__wraps_verbose_client_error_create(send_mock):
+ response = AWSResponse(
+ url='',
+ status_code=500,
+ raw='', # todo: use stream, like `botocore.tests.RawResponse`?
+ headers={'X-Amzn-RequestId': 'abcdef'},
+ )
+ response._content = json.dumps({
+ '__type': 'InternalServerError',
+ 'message': 'There is a problem',
+ 'code': 'InternalServerError',
+ }).encode('utf-8')
+ send_mock.return_value = response
+
+ c = Connection(max_retry_attempts=0)
+
+ with pytest.raises(VerboseClientError) as excinfo:
+ c._make_api_call('CreateTable', {'TableName': 'MyTable'})
+ assert (
+ 'An error occurred (InternalServerError) on request (abcdef) on table (MyTable) when calling the CreateTable operation: There is a problem'
+ in str(excinfo.value)
+ )
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection__make_api_call__wraps_verbose_client_error_batch(send_mock):
+ response = AWSResponse(
+ url='',
+ status_code=500,
+ raw='', # todo: use stream, like `botocore.tests.RawResponse`?
+ headers={'X-Amzn-RequestId': 'abcdef'},
+ )
+ response._content = json.dumps({
+ '__type': 'InternalServerError',
+ 'message': 'There is a problem',
+ 'code': 'InternalServerError',
+ }).encode('utf-8')
+ send_mock.return_value = response
+
+ c = Connection(max_retry_attempts=0)
+
+ with pytest.raises(VerboseClientError) as excinfo:
+ c._make_api_call('BatchGetItem', {
+ 'RequestItems': {
+ 'table_one': {
+ "Keys": [
+ {"ID": {"S": "1"}},
+ {"ID": {"S": "2"}},
+ ]
},
- 'TableName': 'Thread'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.query(
- table_name,
- "FooForum",
- Path('Subject').startswith('thread')
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'KeyConditionExpression': '(#0 = :0 AND begins_with (#1, :1))',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName',
- '#1': 'Subject'
+ 'table_two': {
+ "Keys": [
+ {"ID": {"S": "3"}}
+ ],
},
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'FooForum'
+ },
+ })
+ assert (
+ 'An error occurred (InternalServerError) on request (abcdef) on table (table_one,table_two) when calling the BatchGetItem operation: There is a problem'
+ in str(excinfo.value)
+ )
+
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection__make_api_call__wraps_verbose_client_error_transact(send_mock):
+ response = AWSResponse(
+ url='',
+ status_code=500,
+ raw='', # todo: use stream, like `botocore.tests.RawResponse`?
+ headers={'X-Amzn-RequestId': 'abcdef'},
+ )
+ response._content = json.dumps({
+ '__type': 'InternalServerError',
+ 'message': 'There is a problem',
+ 'code': 'InternalServerError',
+ }).encode('utf-8')
+ send_mock.return_value = response
+
+ c = Connection(max_retry_attempts=0)
+
+ with pytest.raises(VerboseClientError) as excinfo:
+ c._make_api_call('TransactWriteItems', {
+ 'ClientRequestToken': "some_token",
+ 'TransactItems': [
+ {
+ 'Put': {
+ 'Item': {'id': {'S': 'item_id_one'}},
+ 'TableName': 'table_one',
},
- ':1': {
- 'S': 'thread'
- }
- },
- 'TableName': 'Thread'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.query(
- table_name,
- "FooForum",
- limit=1,
- index_name='LastPostIndex',
- attributes_to_get=['ForumName'],
- exclusive_start_key="FooForum",
- consistent_read=True
- )
- params = {
- 'Limit': 1,
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ConsistentRead': True,
- 'ExclusiveStartKey': {
- 'ForumName': {
- 'S': 'FooForum'
- }
- },
- 'IndexName': 'LastPostIndex',
- 'ProjectionExpression': '#0',
- 'KeyConditionExpression': '#0 = :0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'FooForum'
- }
- },
- 'TableName': 'Thread'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.query(
- table_name,
- "FooForum",
- select='ALL_ATTRIBUTES',
- exclusive_start_key="FooForum"
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'ExclusiveStartKey': {
- 'ForumName': {
- 'S': 'FooForum'
- }
- },
- 'KeyConditionExpression': '#0 = :0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
},
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'FooForum'
- }
- },
- 'TableName': 'Thread',
- 'Select': 'ALL_ATTRIBUTES'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- def test_scan(self):
- """
- Connection.scan
- """
- conn = Connection()
- table_name = 'Thread'
-
- conn.add_meta_table(MetaTable(DESCRIBE_TABLE_DATA[TABLE_KEY]))
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.scan(
- table_name,
- segment=0,
- total_segments=22,
- consistent_read=True
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': table_name,
- 'Segment': 0,
- 'TotalSegments': 22,
- 'ConsistentRead': True
- }
- self.assertDictEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.scan(
- table_name,
- segment=0,
- total_segments=22,
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': table_name,
- 'Segment': 0,
- 'TotalSegments': 22,
- }
- self.assertDictEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.scan(
- table_name,
- return_consumed_capacity='TOTAL',
- exclusive_start_key="FooForum",
- limit=1,
- segment=2,
- total_segments=4,
- attributes_to_get=['ForumName'],
- index_name='LastPostIndex',
- )
- params = {
- 'ProjectionExpression': '#0',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName'
- },
- 'ExclusiveStartKey': {
- "ForumName": {
- "S": "FooForum"
+ {
+ 'Update': {
+ 'Key': {'id': {'S': 'item_id_two'}},
+ 'TableName': 'table_two',
}
},
- 'TableName': table_name,
- 'Limit': 1,
- 'Segment': 2,
- 'TotalSegments': 4,
- 'ReturnConsumedCapacity': 'TOTAL',
- 'IndexName': 'LastPostIndex'
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.scan(
- table_name,
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': table_name
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.return_value = {}
- conn.scan(
- table_name,
- Path('ForumName').startswith('Foo') & Path('Subject').contains('Foo')
- )
- params = {
- 'ReturnConsumedCapacity': 'TOTAL',
- 'TableName': table_name,
- 'FilterExpression': '(begins_with (#0, :0) AND contains (#1, :1))',
- 'ExpressionAttributeNames': {
- '#0': 'ForumName',
- '#1': 'Subject'
- },
- 'ExpressionAttributeValues': {
- ':0': {
- 'S': 'Foo'
- },
- ':1': {
- 'S': 'Foo'
- }
- }
- }
- self.assertEqual(req.call_args[0][1], params)
-
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(
- ScanError,
- conn.scan,
- table_name)
-
- def test_make_api_call__happy_path(self):
- response = AWSResponse(
- url='https://www.example.com',
- status_code=200,
- raw=urllib3.HTTPResponse(
- body=io.BytesIO(json.dumps({}).encode('utf-8')),
- preload_content=False,
- ),
- headers={'x-amzn-RequestId': 'abcdef'},
- )
-
- c = Connection()
-
- with patch.object(botocore.httpsession.URLLib3Session, 'send', return_value=response):
- c._make_api_call('CreateTable', {'TableName': 'MyTable'})
-
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call_throws_verbose_error_after_backoff(self, client_mock):
- response = AWSResponse(
- url='http://lyft.com',
- status_code=500,
- raw='', # todo: use stream, like `botocore.tests.RawResponse`?
- headers={'x-amzn-RequestId': 'abcdef'},
- )
- response._content = json.dumps({'message': 'There is a problem', '__type': 'InternalServerError'}).encode('utf-8')
- client_mock._endpoint.http_session.send.return_value = response
-
- c = Connection()
-
- with self.assertRaises(ClientError):
- try:
- c._make_api_call('CreateTable', {'TableName': 'MyTable'})
- except Exception as e:
- self.assertEqual(
- str(e),
- 'An error occurred (InternalServerError) on request (abcdef) on table (MyTable) when calling the CreateTable operation: There is a problem'
- )
- raise
-
- @mock.patch('random.randint')
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call_throws_verbose_error_after_backoff_later_succeeds(self, client_mock, rand_int_mock):
- # mock response
- bad_response = mock.Mock(spec=AWSResponse)
- bad_response.status_code = 500
- bad_response.headers = {'x-amzn-RequestId': 'abcdef'}
- bad_response.text = json.dumps({'message': 'There is a problem', '__type': 'InternalServerError'})
- bad_response.content = bad_response.text.encode()
-
- good_response_content = {'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'}}
- good_response = mock.Mock(spec=AWSResponse)
- good_response.status_code = 200
- good_response.headers = {}
- good_response.text = json.dumps(good_response_content)
- good_response.content = good_response.text.encode()
-
- send_mock = client_mock._endpoint.http_session.send
- send_mock.side_effect = [
- bad_response,
- bad_response,
- good_response,
- ]
-
- rand_int_mock.return_value = 1
-
- c = Connection()
-
- self.assertEqual(good_response_content, c._make_api_call('CreateTable', {'TableName': 'MyTable'}))
- self.assertEqual(len(send_mock.mock_calls), 3)
-
- assert rand_int_mock.call_args_list == [mock.call(0, 25), mock.call(0, 50)]
+ ],
+ })
+ assert (
+ 'An error occurred (InternalServerError) on request (abcdef) on table (table_one,table_two) when calling the TransactWriteItems operation: There is a problem'
+ in str(excinfo.value)
+ )
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection__make_api_call_throws_verbose_error_after_backoff_later_succeeds(send_mock):
+ # mock response
+ bad_response = mock.Mock(spec=AWSResponse)
+ bad_response.status_code = 500
+ bad_response.headers = {'x-amzn-RequestId': 'abcdef'}
+ bad_response.text = json.dumps({'message': 'There is a problem', '__type': 'InternalServerError'})
+ bad_response.content = bad_response.text.encode()
+
+ good_response_content = {
+ 'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'},
+ 'ResponseMetadata': {'HTTPHeaders': {}, 'HTTPStatusCode': 200, 'RetryAttempts': 2},
+ }
+ good_response = mock.Mock(spec=AWSResponse)
+ good_response.status_code = 200
+ good_response.headers = {}
+ good_response.text = json.dumps(good_response_content)
+ good_response.content = good_response.text.encode()
+
+ send_mock.side_effect = [
+ bad_response,
+ bad_response,
+ good_response,
+ ]
+
+
+ c = Connection()
+
+ assert c._make_api_call('CreateTable', {'TableName': 'MyTable'}) == good_response_content
+ assert len(send_mock.mock_calls) == 3
+
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection_make_api_call__retries_properly(send_mock):
+ deserializable_response = AWSResponse(
+ url='',
+ status_code=200,
+ headers={},
+ raw='',
+ )
+ deserializable_response._content = json.dumps({'hello': 'world'}).encode('utf-8')
+
+ bad_response = AWSResponse(
+ url='',
+ status_code=503,
+ headers={},
+ raw='',
+ )
+ bad_response._content = 'not_json'.encode('utf-8')
+
+ send_mock.side_effect = [
+ bad_response,
+ botocore.exceptions.ReadTimeoutError(endpoint_url='http://lyft.com'),
+ bad_response,
+ deserializable_response,
+ ]
+ c = Connection(max_retry_attempts=3)
+
+ c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
+ assert len(send_mock.mock_calls) == 4
+
+
+def test_connection__botocore_config():
+ c = Connection(connect_timeout_seconds=5, read_timeout_seconds=10, max_pool_connections=20)
+ assert c.client._client_config.connect_timeout == 5
+ assert c.client._client_config.read_timeout == 10
+ assert c.client._client_config.max_pool_connections == 20
+
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection_make_api_call___extra_headers(send_mock):
+ good_response = mock.Mock(spec=AWSResponse, status_code=200, headers={}, text='{}', content=b'{}')
+
+ send_mock.return_value = good_response
+
+ c = Connection(extra_headers={'foo': 'bar'}, max_retry_attempts=0)
+ c._make_api_call(
+ 'DescribeTable',
+ {'TableName': 'MyTable'},
+ )
+
+ assert send_mock.call_count == 1
+ request = send_mock.call_args[0][0]
+ assert request.headers.get('foo').decode() == 'bar'
+
+
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection_make_api_call__throws_when_retries_exhausted(send_mock):
+ send_mock.side_effect = [
+ botocore.exceptions.ConnectionError(error="problems"),
+ botocore.exceptions.ConnectionError(error="problems"),
+ botocore.exceptions.ConnectionError(error="problems"),
+ botocore.exceptions.ReadTimeoutError(endpoint_url="http://lyft.com"),
+ ]
+ c = Connection(max_retry_attempts=3)
+
+ with pytest.raises(botocore.exceptions.ReadTimeoutError):
+ c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call_retries_properly(self, client_mock):
- deserializable_response = AWSResponse(
- url='',
- status_code=200,
- headers={},
- raw='',
- )
- deserializable_response._content = json.dumps({'hello': 'world'}).encode('utf-8')
+ assert len(send_mock.mock_calls) == 4
- bad_response = AWSResponse(
- url='',
- status_code=503,
- headers={},
- raw='',
- )
- bad_response._content = 'not_json'.encode('utf-8')
- prepared_request = AWSRequest('GET', 'http://lyft.com').prepare()
-
- send_mock = client_mock._endpoint.http_session.send
- send_mock.side_effect = [
- bad_response,
- botocore.exceptions.ReadTimeoutError(endpoint_url='http://lyft.com'),
- bad_response,
- deserializable_response,
- ]
- c = Connection()
- c._max_retry_attempts_exception = 3
- c._create_prepared_request = mock.Mock()
- c._create_prepared_request.return_value = prepared_request
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection_make_api_call__throws_retry_disabled(send_mock):
+ send_mock.side_effect = [
+ botocore.exceptions.ReadTimeoutError(endpoint_url='http://lyft.com'),
+ ]
+ c = Connection(read_timeout_seconds=11, max_retry_attempts=0)
+ with pytest.raises(botocore.exceptions.ReadTimeoutError):
c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
- self.assertEqual(len(send_mock.mock_calls), 4)
-
- for call in send_mock.mock_calls:
- self.assertEqual(call[1][0], prepared_request)
-
- def test_connection__timeout(self):
- c = Connection(connect_timeout_seconds=5, read_timeout_seconds=10, max_pool_connections=20)
- assert c.client._client_config.connect_timeout == 5
- assert c.client._client_config.read_timeout == 10
- assert c.client._client_config.max_pool_connections == 20
-
- def test_sign_request(self):
- request = AWSRequest(method='POST', url='http://localhost:8000/', headers={}, data={'foo': 'bar'})
- c = Connection(region='us-west-1')
- c._sign_request(request)
- assert 'X-Amz-Date' in request.headers
- assert 'Authorization' in request.headers
- assert 'us-west-1' in request.headers['Authorization']
- assert request.headers['Authorization'].startswith('AWS4-HMAC-SHA256')
-
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call___extra_headers(self, client_mock):
- good_response = mock.Mock(spec=AWSResponse, status_code=200, headers={}, text='{}', content=b'{}')
-
- send_mock = client_mock._endpoint.http_session.send
- send_mock.return_value = good_response
-
- client_mock._convert_to_request_dict.return_value = {'method': 'POST', 'url': '', 'headers': {}, 'body': '', 'context': {}}
-
- mock_req = mock.Mock(spec=AWSPreparedRequest, headers={})
- create_request_mock = client_mock._endpoint.prepare_request
- create_request_mock.return_value = mock_req
-
- c = Connection(extra_headers={'foo': 'bar', 'abc': '123'})
- c._make_api_call(
- 'DescribeTable',
- {'TableName': 'MyTable'},
- settings=OperationSettings(extra_headers={'abc': 'xyz'}),
- )
- assert send_mock.call_count == 1
- request = send_mock.call_args[0][0]
- assert request.headers.get('foo') == 'bar'
- assert request.headers.get('abc') == 'xyz'
-
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call_throws_when_retries_exhausted(self, client_mock):
- prepared_request = AWSRequest('GET', 'http://lyft.com').prepare()
-
- send_mock = client_mock._endpoint.http_session.send
- send_mock.side_effect = [
- botocore.exceptions.ConnectionError(error="problems"),
- botocore.exceptions.ConnectionError(error="problems"),
- botocore.exceptions.ConnectionError(error="problems"),
- botocore.exceptions.ReadTimeoutError(endpoint_url="http://lyft.com"),
- ]
- c = Connection()
- c._max_retry_attempts_exception = 3
- c._create_prepared_request = mock.Mock()
- c._create_prepared_request.return_value = prepared_request
-
- with self.assertRaises(botocore.exceptions.ReadTimeoutError):
- c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
-
- self.assertEqual(len(send_mock.mock_calls), 4)
- for call in send_mock.mock_calls:
- self.assertEqual(call[1][0], prepared_request)
-
- @mock.patch('random.randint')
- @mock.patch('pynamodb.connection.Connection.client')
- def test_make_api_call_throws_retry_disabled(self, client_mock, rand_int_mock):
- prepared_request = AWSRequest('GET', 'http://lyft.com').prepare()
-
- send_mock = client_mock._endpoint.http_session.send
- send_mock.side_effect = [
- botocore.exceptions.ReadTimeoutError(endpoint_url='http://lyft.com'),
- ]
- c = Connection(read_timeout_seconds=11, base_backoff_ms=3, max_retry_attempts=0)
- c._create_prepared_request = mock.Mock()
- c._create_prepared_request.return_value = prepared_request
+ assert len(send_mock.mock_calls) == 1
- assert c._base_backoff_ms == 3
- with self.assertRaises(botocore.exceptions.ReadTimeoutError):
- c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
- self.assertEqual(len(send_mock.mock_calls), 1)
- rand_int_mock.assert_not_called()
+@mock.patch('urllib3.connectionpool.HTTPConnectionPool.urlopen')
+def test_connection_make_api_call__throws_conn_closed(urlopen_mock):
+ urlopen_mock.side_effect = [
+ urllib3.exceptions.ProtocolError(),
+ ]
+ c = Connection(read_timeout_seconds=11, max_retry_attempts=0)
- for call in send_mock.mock_calls:
- self.assertEqual(call[1][0], prepared_request)
+ with pytest.raises(botocore.exceptions.ConnectionClosedError):
+ c._make_api_call('DescribeTable', {'TableName': 'MyTable'})
- def test_handle_binary_attributes_for_unprocessed_items(self):
- binary_blob = b'\x00\xFF\x00\xFF'
- unprocessed_items = [
- {
+@mock.patch('botocore.httpsession.URLLib3Session.send')
+def test_connection_make_api_call__binary_attributes(send_mock):
+ binary_blob = b'\x00\xFF\x00\xFF'
+ resp_text = json.dumps({
+ UNPROCESSED_ITEMS: {
+ 'someTable': [{
'PutRequest': {
'Item': {
- 'name': {
- STRING: 'daniel'
- },
- 'picture': {
- BINARY: base64.b64encode(binary_blob).decode()
- },
- 'map': {
- MAP: {
- 'picture': {
- BINARY: base64.b64encode(binary_blob).decode()
- },
- },
- },
- 'list': {
- LIST: [
- {
- BINARY: base64.b64encode(binary_blob).decode()
- },
- ],
- },
+ 'name': {STRING: 'daniel'},
+ 'picture': {BINARY: base64.b64encode(binary_blob).decode(DEFAULT_ENCODING)},
}
}
- }
- for _ in range(5)
- ]
-
- actual = Connection._handle_binary_attributes({UNPROCESSED_ITEMS: {'someTable': unprocessed_items}})
-
- assert actual == {
- UNPROCESSED_ITEMS: {
- 'someTable': [
- {
- 'PutRequest': {
- 'Item': {
- 'name': {STRING: 'daniel'},
- 'picture': {BINARY: binary_blob},
- 'map': {MAP: {'picture': {BINARY: binary_blob}}},
- 'list': {LIST: [{BINARY: binary_blob}]},
- }
- }
- }
- for _ in range(5)
- ]
- }
+ }],
}
+ })
- def test_handle_binary_attributes_for_unprocessed_keys(self):
- binary_blob = b'\x00\xFF\x00\xFF'
- unprocessed_keys = {
- 'UnprocessedKeys': {
- 'MyTable': {
- 'AttributesToGet': ['ForumName'],
- 'Keys': [
- {
- 'ForumName': {'S': 'FooForum'},
- 'Subject': {'B': base64.b64encode(binary_blob).decode()}
- },
- {
- 'ForumName': {'S': 'FooForum'},
- 'Subject': {'S': 'thread-1'}
- }
- ],
- 'ConsistentRead': False
- },
- 'MyOtherTable': {
- 'AttributesToGet': ['ForumName'],
- 'Keys': [
- {
- 'ForumName': {'S': 'FooForum'},
- 'Subject': {'B': base64.b64encode(binary_blob).decode()}
- },
- {
- 'ForumName': {'S': 'FooForum'},
- 'Subject': {'S': 'thread-1'}
- }
- ],
- 'ConsistentRead': False
- }
+ resp = mock.Mock(
+ spec=AWSResponse,
+ status_code=200,
+ headers={},
+ content=resp_text.encode(),
+ )
+
+ send_mock.return_value = resp
+
+ resp = Connection()._make_api_call('BatchWriteItem', {})
+
+ assert resp['UnprocessedItems']['someTable'] == [{
+ 'PutRequest': {
+ 'Item': {
+ 'name': {STRING: 'daniel'},
+ 'picture': {BINARY: binary_blob}
}
}
+ }]
- actual = Connection._handle_binary_attributes(unprocessed_keys)
- assert actual['UnprocessedKeys']['MyTable']['Keys'][0]['Subject']['B'] == binary_blob
- assert actual['UnprocessedKeys']['MyOtherTable']['Keys'][0]['Subject']['B'] == binary_blob
-
- def test_update_time_to_live_fail(self):
- conn = Connection(self.region)
- with patch(PATCH_METHOD) as req:
- req.side_effect = BotoCoreError
- self.assertRaises(TableError, conn.update_time_to_live, 'test table', 'my_ttl')
+def test_connection_update_time_to_live__fail():
+ conn = Connection(REGION)
+ with patch(PATCH_METHOD) as req:
+ req.side_effect = BotoCoreError
+ with pytest.raises(TableError):
+ conn.update_time_to_live('test table', 'my_ttl')
diff --git a/tests/test_model.py b/tests/test_model.py
index cd236c9a..9bd12db6 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -518,14 +518,12 @@ def fake_dynamodb(*args):
assert UserModel.Meta.connect_timeout_seconds, 15
assert UserModel.Meta.read_timeout_seconds == 30
assert UserModel.Meta.max_retry_attempts == 3
- assert UserModel.Meta.base_backoff_ms == 25
assert UserModel.Meta.max_pool_connections == 10
assert UserModel._connection.connection._connect_timeout_seconds == 15
assert UserModel._connection.connection._read_timeout_seconds == 30
assert UserModel._connection.connection._max_retry_attempts_exception == 3
assert UserModel._connection.connection._max_pool_connections == 10
- assert UserModel._connection.connection._base_backoff_ms == 25
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA