Skip to content

Commit

Permalink
black
Browse files Browse the repository at this point in the history
  • Loading branch information
richardkiss committed May 20, 2022
1 parent fae87ba commit e92f68f
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 14 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

setup(
name="clvm",
packages=["clvm",],
packages=["clvm"],
author="Chia Network, Inc.",
author_email="hello@chia.net",
url="https://github.com/Chia-Network/clvm",
Expand Down
28 changes: 15 additions & 13 deletions tests/serialize_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,12 @@
import unittest

from clvm import to_sexp_f
from clvm.serialize import (_atom_from_stream, sexp_from_stream, sexp_buffer_from_stream, atom_to_byte_iterator)
from clvm.serialize import (
_atom_from_stream,
sexp_from_stream,
sexp_buffer_from_stream,
atom_to_byte_iterator,
)


TEXT = b"the quick brown fox jumps over the lazy dogs"
Expand All @@ -13,12 +18,12 @@ def __init__(self, b):
self.buf = b

def read(self, n):
    """Return exactly *n* bytes, space-padded once the buffer runs dry.

    Consumes bytes one at a time from ``self.buf`` (so the buffer
    shrinks across calls); if fewer than *n* bytes remain, the result
    is padded with b" " so the caller always receives *n* bytes —
    simulating an endless stream for the deserializer under test.
    """
    ret = b""
    while n > 0 and len(self.buf) > 0:
        ret += self.buf[0:1]
        self.buf = self.buf[1:]
        n -= 1
    # Pad once (the scraped diff showed this line twice — old and new
    # quoting style; padding twice would return more than n bytes).
    ret += b" " * n
    return ret


Expand All @@ -35,9 +40,9 @@ def has_backrefs(blob: bytes) -> bool:
obj_count = 1
while obj_count > 0:
b = f.read(1)[0]
if b == 0xfe:
if b == 0xFE:
return True
if b == 0xff:
if b == 0xFF:
obj_count += 1
else:
_atom_from_stream(f, b, lambda x: x)
Expand Down Expand Up @@ -69,7 +74,6 @@ def check_serde(self, s):
if has_backrefs(b2) or len(b2) < len(b):
# if we have any backrefs, ensure they actually save space
self.assertTrue(len(b2) < len(b))
print("%d bytes before %d after %d saved" % (len(b), len(b2), len(b) - len(b2)))
io_b2 = io.BytesIO(b2)
self.assertRaises(ValueError, lambda: sexp_from_stream(io_b2, to_sexp_f))
io_b2 = io.BytesIO(b2)
Expand All @@ -79,7 +83,6 @@ def check_serde(self, s):
self.assertEqual(b, b3)
return b2


def test_zero(self):
v = to_sexp_f(b"\x00")
self.assertEqual(v.as_bin(), b"\x00")
Expand Down Expand Up @@ -115,7 +118,7 @@ def test_long_blobs(self):
def test_blob_limit(self):
    # Serializing an atom that reports an over-limit length must raise
    # ValueError from the byte iterator rather than emitting bytes forever.
    # (LargeAtom is a project test fixture defined elsewhere in this file.)
    with self.assertRaises(ValueError):
        for b in atom_to_byte_iterator(LargeAtom()):
            print("%02x" % b)

def test_very_long_blobs(self):
for size in [0x40, 0x2000, 0x100000, 0x8000000]:
Expand All @@ -136,7 +139,7 @@ def test_very_deep_tree(self):
self.check_serde(s)

def test_deserialize_empty(self):
    # An empty stream is not a valid serialization: deserializing it
    # must raise ValueError instead of returning a value or hanging.
    bytes_in = b""
    with self.assertRaises(ValueError):
        sexp_from_stream(io.BytesIO(bytes_in), to_sexp_f)

Expand All @@ -146,7 +149,7 @@ def test_deserialize_empty(self):
def test_deserialize_truncated_size(self):
    # fe means the total number of bytes in the length-prefix is 7
    # one for each bit set. 5 bytes is too few
    # NOTE(review): the trailing-space count inside this literal was
    # garbled by extraction — confirm against the repository that it
    # supplies 5 bytes after the 0xfe prefix byte.
    bytes_in = b"\xfe     "
    with self.assertRaises(ValueError):
        sexp_from_stream(io.BytesIO(bytes_in), to_sexp_f)

Expand All @@ -156,7 +159,7 @@ def test_deserialize_truncated_size(self):
def test_deserialize_truncated_blob(self):
    # this is a complete length prefix. The blob is supposed to be 63 bytes
    # the blob itself is truncated though, it's less than 63 bytes
    # NOTE(review): the trailing-space count inside this literal was
    # garbled by extraction — confirm against the repository; it only
    # matters that fewer than 63 payload bytes follow the 0xbf prefix.
    bytes_in = b"\xbf   "

    with self.assertRaises(ValueError):
        sexp_from_stream(io.BytesIO(bytes_in), to_sexp_f)
def test_deserialize_large_blob(self):
    # The 0xfe prefix declares a 7-byte length field; the six 0xff bytes
    # that follow encode an absurdly large blob length.
    # we don't support blobs this large, and we should fail immediately when
    # exceeding the max blob size, rather than trying to read this many
    # bytes from the stream
    bytes_in = b"\xfe" + b"\xff" * 6

    with self.assertRaises(ValueError):
        sexp_from_stream(InfiniteStream(bytes_in), to_sexp_f)
Expand All @@ -185,7 +188,6 @@ def test_deserialize_generator(self):
assert len(b) == 19124

def test_deserialize_bomb(self):

def make_bomb(depth):
bomb = TEXT
for _ in range(depth):
Expand Down

0 comments on commit e92f68f

Please sign in to comment.