Remove snake_to_camel_case and just use constants. (#865)
jpinner-lyft authored Oct 10, 2020
1 parent 2e2a37c commit b42c326
Showing 6 changed files with 74 additions and 89 deletions.
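The change itself is mechanical: call sites that built dictionary keys at runtime by passing a CamelCase constant through snake_to_camel_case now spell out the equivalent snake_case string literal. A rough sketch of the before/after pattern (not a line from the diff; it assumes the constants carry DynamoDB's wire names, e.g. ATTR_NAME == 'AttributeName'):

    # Hypothetical illustration of the refactor pattern in this commit.
    ATTR_NAME = 'AttributeName'   # assumed value from pynamodb.constants
    ATTR_TYPE = 'AttributeType'   # assumed value from pynamodb.constants

    attr = {'attribute_name': 'forum_name', 'attribute_type': 'S'}  # internal snake_case dict

    # Before: the lookup key was derived at runtime,
    # e.g. snake_to_camel_case(ATTR_NAME) -> 'attribute_name' (helper now removed).
    # After: the snake_case key is written as a plain literal.
    entry = {
        ATTR_NAME: attr.get('attribute_name'),  # 'AttributeName': 'forum_name'
        ATTR_TYPE: attr.get('attribute_type'),  # 'AttributeType': 'S'
    }
    print(entry)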
2 changes: 1 addition & 1 deletion docs/release_notes.rst
@@ -38,7 +38,7 @@ Other changes in this release:
* Remove ``ListAttribute.remove_indexes`` (added in v4.3.2) and document usage of remove for list elements (#838)
* Add the attribute name to error messages when deserialization fails (#815)
* Add the table name to error messages for transactional operations (#835)
* Move ``pynamodb.connection.util.pythonic`` to ``pynamodb.util.snake_to_camel_case`` (#753)
* Remove ``pynamodb.connection.util.pythonic`` (#753) and (#865)
* Remove ``ModelContextManager`` class (#861)

Contributors to this release:
33 changes: 16 additions & 17 deletions pynamodb/connection/base.py
@@ -54,7 +54,6 @@
from pynamodb.settings import get_settings_value
from pynamodb.signals import pre_dynamodb_send, post_dynamodb_send
from pynamodb.types import HASH, RANGE
from pynamodb.util import snake_to_camel_case

BOTOCORE_EXCEPTIONS = (BotoCoreError, ClientError)
RATE_LIMITING_ERROR_CODES = ['ProvisionedThroughputExceededException', 'ThrottlingException']
@@ -587,8 +586,8 @@ def create_table(
raise ValueError("attribute_definitions argument is required")
for attr in attribute_definitions:
attrs_list.append({
ATTR_NAME: attr.get(snake_to_camel_case(ATTR_NAME)),
ATTR_TYPE: attr.get(snake_to_camel_case(ATTR_TYPE))
ATTR_NAME: attr.get('attribute_name'),
ATTR_TYPE: attr.get('attribute_type')
})
operation_kwargs[ATTR_DEFINITIONS] = attrs_list

@@ -603,10 +602,10 @@
global_secondary_indexes_list = []
for index in global_secondary_indexes:
index_kwargs = {
INDEX_NAME: index.get(snake_to_camel_case(INDEX_NAME)),
KEY_SCHEMA: sorted(index.get(snake_to_camel_case(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
PROJECTION: index.get(snake_to_camel_case(PROJECTION)),
PROVISIONED_THROUGHPUT: index.get(snake_to_camel_case(PROVISIONED_THROUGHPUT))
INDEX_NAME: index.get('index_name'),
KEY_SCHEMA: sorted(index.get('key_schema'), key=lambda x: x.get(KEY_TYPE)),
PROJECTION: index.get('projection'),
PROVISIONED_THROUGHPUT: index.get('provisioned_throughput')
}
if billing_mode == PAY_PER_REQUEST_BILLING_MODE:
del index_kwargs[PROVISIONED_THROUGHPUT]
@@ -618,25 +617,25 @@
key_schema_list = []
for item in key_schema:
key_schema_list.append({
ATTR_NAME: item.get(snake_to_camel_case(ATTR_NAME)),
KEY_TYPE: str(item.get(snake_to_camel_case(KEY_TYPE))).upper()
ATTR_NAME: item.get('attribute_name'),
KEY_TYPE: str(item.get('key_type')).upper()
})
operation_kwargs[KEY_SCHEMA] = sorted(key_schema_list, key=lambda x: x.get(KEY_TYPE))

local_secondary_indexes_list = []
if local_secondary_indexes:
for index in local_secondary_indexes:
local_secondary_indexes_list.append({
INDEX_NAME: index.get(snake_to_camel_case(INDEX_NAME)),
KEY_SCHEMA: sorted(index.get(snake_to_camel_case(KEY_SCHEMA)), key=lambda x: x.get(KEY_TYPE)),
PROJECTION: index.get(snake_to_camel_case(PROJECTION)),
INDEX_NAME: index.get('index_name'),
KEY_SCHEMA: sorted(index.get('key_schema'), key=lambda x: x.get(KEY_TYPE)),
PROJECTION: index.get('projection'),
})
operation_kwargs[LOCAL_SECONDARY_INDEXES] = local_secondary_indexes_list

if stream_specification:
operation_kwargs[STREAM_SPECIFICATION] = {
STREAM_ENABLED: stream_specification[snake_to_camel_case(STREAM_ENABLED)],
STREAM_VIEW_TYPE: stream_specification[snake_to_camel_case(STREAM_VIEW_TYPE)]
STREAM_ENABLED: stream_specification['stream_enabled'],
STREAM_VIEW_TYPE: stream_specification['stream_view_type']
}

try:
@@ -699,10 +698,10 @@ def update_table(
for index in global_secondary_index_updates:
global_secondary_indexes_list.append({
UPDATE: {
INDEX_NAME: index.get(snake_to_camel_case(INDEX_NAME)),
INDEX_NAME: index.get('index_name'),
PROVISIONED_THROUGHPUT: {
READ_CAPACITY_UNITS: index.get(snake_to_camel_case(READ_CAPACITY_UNITS)),
WRITE_CAPACITY_UNITS: index.get(snake_to_camel_case(WRITE_CAPACITY_UNITS))
READ_CAPACITY_UNITS: index.get('read_capacity_units'),
WRITE_CAPACITY_UNITS: index.get('write_capacity_units')
}
}
})
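These snake_case literals match the keys that callers already pass to Connection.create_table and Connection.update_table, so the accepted argument shape is unchanged. A hedged usage sketch (table and attribute names are made up, and the local endpoint is an assumption):

    from pynamodb.connection import Connection

    conn = Connection(host='http://localhost:8000')  # assumed local DynamoDB endpoint
    conn.create_table(
        'Thread',  # hypothetical table name
        attribute_definitions=[
            {'attribute_name': 'forum_name', 'attribute_type': 'S'},
        ],
        key_schema=[
            {'attribute_name': 'forum_name', 'key_type': 'HASH'},
        ],
        read_capacity_units=1,
        write_capacity_units=1,
    )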
9 changes: 4 additions & 5 deletions pynamodb/indexes.py
@@ -14,7 +14,6 @@
from pynamodb.expressions.condition import Condition
from pynamodb.pagination import ResultIterator
from pynamodb.types import HASH, RANGE
from pynamodb.util import snake_to_camel_case

if TYPE_CHECKING:
from pynamodb.models import Model
@@ -156,8 +155,8 @@ def _get_schema(cls) -> Dict:
schema = []
for attr_name, attr_cls in cls._get_attributes().items():
attr_definitions.append({
snake_to_camel_case(ATTR_NAME): attr_cls.attr_name,
snake_to_camel_case(ATTR_TYPE): attr_cls.attr_type
'attribute_name': attr_cls.attr_name,
'attribute_type': attr_cls.attr_type
})
if attr_cls.is_hash_key:
schema.append({
@@ -170,8 +169,8 @@
KEY_TYPE: RANGE
})
return {
snake_to_camel_case(KEY_SCHEMA): schema,
snake_to_camel_case(ATTR_DEFINITIONS): attr_definitions
'key_schema': schema,
'attribute_definitions': attr_definitions
}

@classmethod
105 changes: 52 additions & 53 deletions pynamodb/models.py
@@ -53,7 +53,6 @@
COUNT, ITEM_COUNT, KEY, UNPROCESSED_ITEMS, STREAM_VIEW_TYPE,
STREAM_SPECIFICATION, STREAM_ENABLED, BILLING_MODE, PAY_PER_REQUEST_BILLING_MODE
)
from pynamodb.util import snake_to_camel_case

_T = TypeVar('_T', bound='Model')
_KeyType = Any
@@ -415,11 +414,11 @@ def update(self, actions: Sequence[Action], condition: Optional[Condition] = Non
if version_condition is not None:
condition &= version_condition
kwargs: Dict[str, Any] = {
snake_to_camel_case(RETURN_VALUES): ALL_NEW,
'return_values': ALL_NEW,
}

if snake_to_camel_case(RANGE_KEY) in save_kwargs:
kwargs[snake_to_camel_case(RANGE_KEY)] = save_kwargs[snake_to_camel_case(RANGE_KEY)]
if 'range_key' in save_kwargs:
kwargs['range_key'] = save_kwargs['range_key']

kwargs.update(condition=condition)
kwargs.update(actions=actions)
@@ -482,8 +481,8 @@ def get_operation_kwargs_from_instance(
)
if not is_update:
kwargs.update(save_kwargs)
elif snake_to_camel_case(RANGE_KEY) in save_kwargs:
kwargs[snake_to_camel_case(RANGE_KEY)] = save_kwargs[snake_to_camel_case(RANGE_KEY)]
elif 'range_key' in save_kwargs:
kwargs['range_key'] = save_kwargs['range_key']
return self._get_connection().get_operation_kwargs(*args, **kwargs)

@classmethod
@@ -771,32 +770,32 @@ def create_table(
"""
if not cls.exists():
schema = cls._get_schema()
if hasattr(cls.Meta, snake_to_camel_case(READ_CAPACITY_UNITS)):
schema[snake_to_camel_case(READ_CAPACITY_UNITS)] = cls.Meta.read_capacity_units
if hasattr(cls.Meta, snake_to_camel_case(WRITE_CAPACITY_UNITS)):
schema[snake_to_camel_case(WRITE_CAPACITY_UNITS)] = cls.Meta.write_capacity_units
if hasattr(cls.Meta, snake_to_camel_case(STREAM_VIEW_TYPE)):
schema[snake_to_camel_case(STREAM_SPECIFICATION)] = {
snake_to_camel_case(STREAM_ENABLED): True,
snake_to_camel_case(STREAM_VIEW_TYPE): cls.Meta.stream_view_type
if hasattr(cls.Meta, 'read_capacity_units'):
schema['read_capacity_units'] = cls.Meta.read_capacity_units
if hasattr(cls.Meta, 'write_capacity_units'):
schema['write_capacity_units'] = cls.Meta.write_capacity_units
if hasattr(cls.Meta, 'stream_view_type'):
schema['stream_specification'] = {
'stream_enabled': True,
'stream_view_type': cls.Meta.stream_view_type
}
if hasattr(cls.Meta, snake_to_camel_case(BILLING_MODE)):
schema[snake_to_camel_case(BILLING_MODE)] = cls.Meta.billing_mode
if hasattr(cls.Meta, 'billing_mode'):
schema['billing_mode'] = cls.Meta.billing_mode
if read_capacity_units is not None:
schema[snake_to_camel_case(READ_CAPACITY_UNITS)] = read_capacity_units
schema['read_capacity_units'] = read_capacity_units
if write_capacity_units is not None:
schema[snake_to_camel_case(WRITE_CAPACITY_UNITS)] = write_capacity_units
schema['write_capacity_units'] = write_capacity_units
if billing_mode is not None:
schema[snake_to_camel_case(BILLING_MODE)] = billing_mode
schema['billing_mode'] = billing_mode
index_data = cls._get_indexes()
schema[snake_to_camel_case(GLOBAL_SECONDARY_INDEXES)] = index_data.get(snake_to_camel_case(GLOBAL_SECONDARY_INDEXES))
schema[snake_to_camel_case(LOCAL_SECONDARY_INDEXES)] = index_data.get(snake_to_camel_case(LOCAL_SECONDARY_INDEXES))
index_attrs = index_data.get(snake_to_camel_case(ATTR_DEFINITIONS))
attr_keys = [attr.get(snake_to_camel_case(ATTR_NAME)) for attr in schema.get(snake_to_camel_case(ATTR_DEFINITIONS))]
schema['global_secondary_indexes'] = index_data.get('global_secondary_indexes')
schema['local_secondary_indexes'] = index_data.get('local_secondary_indexes')
index_attrs = index_data.get('attribute_definitions')
attr_keys = [attr.get('attribute_name') for attr in schema.get('attribute_definitions')]
for attr in index_attrs:
attr_name = attr.get(snake_to_camel_case(ATTR_NAME))
attr_name = attr.get('attribute_name')
if attr_name not in attr_keys:
schema[snake_to_camel_case(ATTR_DEFINITIONS)].append(attr)
schema['attribute_definitions'].append(attr)
attr_keys.append(attr_name)
cls._get_connection().create_table(
**schema
@@ -841,24 +840,24 @@ def _get_schema(cls):
Returns the schema for this table
"""
schema: Dict[str, List] = {
snake_to_camel_case(ATTR_DEFINITIONS): [],
snake_to_camel_case(KEY_SCHEMA): []
'attribute_definitions': [],
'key_schema': []
}
for attr_name, attr_cls in cls.get_attributes().items():
if attr_cls.is_hash_key or attr_cls.is_range_key:
schema[snake_to_camel_case(ATTR_DEFINITIONS)].append({
snake_to_camel_case(ATTR_NAME): attr_cls.attr_name,
snake_to_camel_case(ATTR_TYPE): attr_cls.attr_type
schema['attribute_definitions'].append({
'attribute_name': attr_cls.attr_name,
'attribute_type': attr_cls.attr_type
})
if attr_cls.is_hash_key:
schema[snake_to_camel_case(KEY_SCHEMA)].append({
snake_to_camel_case(KEY_TYPE): HASH,
snake_to_camel_case(ATTR_NAME): attr_cls.attr_name
schema['key_schema'].append({
'key_type': HASH,
'attribute_name': attr_cls.attr_name
})
elif attr_cls.is_range_key:
schema[snake_to_camel_case(KEY_SCHEMA)].append({
snake_to_camel_case(KEY_TYPE): RANGE,
snake_to_camel_case(ATTR_NAME): attr_cls.attr_name
schema['key_schema'].append({
'key_type': RANGE,
'attribute_name': attr_cls.attr_name
})
return schema
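For a concrete sense of the shape being built here, this is roughly what _get_schema would return for a hypothetical two-key model (example model only, not from this repository):

    from pynamodb.attributes import UnicodeAttribute
    from pynamodb.models import Model

    class Thread(Model):  # hypothetical model
        class Meta:
            table_name = 'Thread'
        forum_name = UnicodeAttribute(hash_key=True)
        subject = UnicodeAttribute(range_key=True)

    # Expected shape (snake_case keys, matching the code above), roughly:
    # {'attribute_definitions': [{'attribute_name': 'forum_name', 'attribute_type': 'S'},
    #                            {'attribute_name': 'subject', 'attribute_type': 'S'}],
    #  'key_schema': [{'key_type': 'HASH', 'attribute_name': 'forum_name'},
    #                 {'key_type': 'RANGE', 'attribute_name': 'subject'}]}
    print(Thread._get_schema())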

@@ -869,35 +868,35 @@ def _get_indexes(cls):
"""
if cls._indexes is None:
cls._indexes = {
snake_to_camel_case(GLOBAL_SECONDARY_INDEXES): [],
snake_to_camel_case(LOCAL_SECONDARY_INDEXES): [],
snake_to_camel_case(ATTR_DEFINITIONS): []
'global_secondary_indexes': [],
'local_secondary_indexes': [],
'attribute_definitions': []
}
cls._index_classes = {}
for name, index in getmembers(cls, lambda o: isinstance(o, Index)):
cls._index_classes[index.Meta.index_name] = index
schema = index._get_schema()
idx = {
snake_to_camel_case(INDEX_NAME): index.Meta.index_name,
snake_to_camel_case(KEY_SCHEMA): schema.get(snake_to_camel_case(KEY_SCHEMA)),
snake_to_camel_case(PROJECTION): {
'index_name': index.Meta.index_name,
'key_schema': schema.get('key_schema'),
'projection': {
PROJECTION_TYPE: index.Meta.projection.projection_type,
},

}
if isinstance(index, GlobalSecondaryIndex):
if getattr(cls.Meta, 'billing_mode', None) != PAY_PER_REQUEST_BILLING_MODE:
idx[snake_to_camel_case(PROVISIONED_THROUGHPUT)] = {
idx['provisioned_throughput'] = {
READ_CAPACITY_UNITS: index.Meta.read_capacity_units,
WRITE_CAPACITY_UNITS: index.Meta.write_capacity_units
}
cls._indexes[snake_to_camel_case(ATTR_DEFINITIONS)].extend(schema.get(snake_to_camel_case(ATTR_DEFINITIONS)))
cls._indexes['attribute_definitions'].extend(schema.get('attribute_definitions'))
if index.Meta.projection.non_key_attributes:
idx[snake_to_camel_case(PROJECTION)][NON_KEY_ATTRIBUTES] = index.Meta.projection.non_key_attributes
idx['projection'][NON_KEY_ATTRIBUTES] = index.Meta.projection.non_key_attributes
if isinstance(index, GlobalSecondaryIndex):
cls._indexes[snake_to_camel_case(GLOBAL_SECONDARY_INDEXES)].append(idx)
cls._indexes['global_secondary_indexes'].append(idx)
else:
cls._indexes[snake_to_camel_case(LOCAL_SECONDARY_INDEXES)].append(idx)
cls._indexes['local_secondary_indexes'].append(idx)
return cls._indexes

def _get_save_args(self, attributes=True, null_check=True):
@@ -919,9 +918,9 @@ def _get_save_args(self, attributes=True, null_check=True):
range_key = attribute_values.pop(range_key_attribute.attr_name, {}).get(range_key_attribute.attr_type)
args = (hash_key, )
if range_key is not None:
kwargs[snake_to_camel_case(RANGE_KEY)] = range_key
kwargs['range_key'] = range_key
if attributes:
kwargs[snake_to_camel_case(ATTRIBUTES)] = attribute_values
kwargs['attributes'] = attribute_values
return args, kwargs

def _handle_version_attribute(self, serialized_attributes, actions=None):
@@ -941,16 +940,16 @@ def _handle_version_attribute(self, serialized_attributes, actions=None):
version_condition = version_attribute == version_attribute_value
if actions:
actions.append(version_attribute.add(1))
elif snake_to_camel_case(ATTRIBUTES) in serialized_attributes:
serialized_attributes[snake_to_camel_case(ATTRIBUTES)][version_attribute.attr_name] = self._serialize_value(
elif 'attributes' in serialized_attributes:
serialized_attributes['attributes'][version_attribute.attr_name] = self._serialize_value(
version_attribute, version_attribute_value + 1
)
else:
version_condition = version_attribute.does_not_exist()
if actions:
actions.append(version_attribute.set(1))
elif snake_to_camel_case(ATTRIBUTES) in serialized_attributes:
serialized_attributes[snake_to_camel_case(ATTRIBUTES)][version_attribute.attr_name] = self._serialize_value(
elif 'attributes' in serialized_attributes:
serialized_attributes['attributes'][version_attribute.attr_name] = self._serialize_value(
version_attribute, 1
)

12 changes: 0 additions & 12 deletions pynamodb/util.py

This file was deleted.
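The deleted module contained the snake_to_camel_case helper. Its exact source isn't reproduced here, but judging from the substitutions in this commit (snake_to_camel_case(ATTR_NAME) becoming 'attribute_name', and so on), it mapped DynamoDB's CamelCase parameter names to PynamoDB's internal snake_case keys, despite what its name suggests. An approximate reconstruction, offered only as a sketch:

    import re

    def snake_to_camel_case(value: str) -> str:
        # Approximation, not the deleted source: insert '_' between a lowercase
        # letter and a following uppercase letter or digit, then lowercase.
        return re.sub('([a-z])([A-Z0-9])', r'\1_\2', value).lower()

    assert snake_to_camel_case('AttributeName') == 'attribute_name'
    assert snake_to_camel_case('ReadCapacityUnits') == 'read_capacity_units'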

2 changes: 1 addition & 1 deletion tests/test_model.py
@@ -2684,7 +2684,7 @@ def test_model_with_maps_with_nulls_retrieve_from_db(self):
self.assertIsNone(item.person.age)
self.assertIsNone(item.person.is_male)

def test_model_with_maps_with_snake_to_camel_case_attributes(self):
def test_model_with_maps_with_snake_case_attributes(self):
fake_db = self.database_mocker(
OfficeEmployee,
OFFICE_EMPLOYEE_MODEL_TABLE_DATA,
