test.py
import lzip
import os
import pathlib
import tempfile
dirname = pathlib.Path(__file__).resolve().parent
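
# Note: judging by the assertions below, the test fixtures are a 254-byte file
# "test_data" and its lzip-compressed counterpart "test_data.lz" (198 bytes once
# encoded); every expected size in this script follows from those two fixtures.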

# decompress_file_iter
length = 0
for chunk in lzip.decompress_file_iter(dirname / "test_data.lz"):
    length += len(chunk)
assert length == 254
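
# Note: the "with word size" variants below pass word_size=100. Since the 254
# decompressed bytes are not a multiple of 100, the trailing 54 bytes are
# reported through lzip.RemainingBytesError (available as error.buffer) rather
# than being yielded or returned, which is what the try/except blocks verify.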

# decompress_file_iter with word size
length = 0
try:
    for chunk in lzip.decompress_file_iter(dirname / "test_data.lz", word_size=100):
        length += len(chunk)
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 254

# decompress_file
assert len(lzip.decompress_file(dirname / "test_data.lz")) == 254

# decompress_file with word size
length = 0
try:
    length += len(lzip.decompress_file(dirname / "test_data.lz", word_size=100))
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 54
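
# Note: the expected totals above suggest that, unlike the iterator variants,
# the one-shot functions raise before returning anything when word_size does not
# divide the decompressed length, so only error.buffer (54 bytes) is counted.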

# decompress_buffer_iter
with open(dirname / "test_data.lz", "rb") as input_file:
    buffer = input_file.read()
length = 0
for chunk in lzip.decompress_buffer_iter(buffer):
    length += len(chunk)
assert length == 254

# decompress_buffer_iter with word size
with open(dirname / "test_data.lz", "rb") as input_file:
    buffer = input_file.read()
length = 0
try:
    for chunk in lzip.decompress_buffer_iter(buffer, word_size=100):
        length += len(chunk)
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 254

# decompress_buffer
with open(dirname / "test_data.lz", "rb") as input_file:
    buffer = input_file.read()
assert len(lzip.decompress_buffer(buffer)) == 254

# decompress_buffer with word size
with open(dirname / "test_data.lz", "rb") as input_file:
    buffer = input_file.read()
length = 0
try:
    length += len(lzip.decompress_buffer(buffer, word_size=100))
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 54
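
# Note: the URL variants below read the same archive through a file:// URI
# (pathlib's as_uri()), so the expected sizes match the file and buffer cases.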

# decompress_url_iter
length = 0
for chunk in lzip.decompress_url_iter((dirname / "test_data.lz").as_uri()):
    length += len(chunk)
assert length == 254

# decompress_url_iter with word size
length = 0
try:
    for chunk in lzip.decompress_url_iter((dirname / "test_data.lz").as_uri(), word_size=100):
        length += len(chunk)
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 254

# decompress_url
assert len(lzip.decompress_url((dirname / "test_data.lz").as_uri())) == 254

# decompress_url with word size
length = 0
try:
    length += len(lzip.decompress_url((dirname / "test_data.lz").as_uri(), word_size=100))
except lzip.RemainingBytesError as error:
    assert len(error.buffer) == 54
    length += len(error.buffer)
assert length == 54
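
# Note: the remaining sections exercise the compression side. The assertions
# below imply that the 254-byte "test_data" fixture compresses to a 198-byte
# lzip archive, whether it is encoded incrementally (BufferEncoder, FileEncoder)
# or in a single call (compress_to_buffer, compress_to_file).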

# BufferEncoder
with open(dirname / "test_data", "rb") as input_file:
    buffer = input_file.read()
length = 0
encoder = lzip.BufferEncoder()
length += len(encoder.compress(buffer[:100]))
length += len(encoder.compress(buffer[100:200]))
length += len(encoder.compress(buffer[200:]))
length += len(encoder.finish())
assert length == 198

# compress_to_buffer
with open(dirname / "test_data", "rb") as input_file:
    buffer = input_file.read()
assert len(lzip.compress_to_buffer(buffer)) == 198

# FileEncoder
with open(dirname / "test_data", "rb") as input_file:
    buffer = input_file.read()
with tempfile.TemporaryDirectory() as temporary_directory:
    path = pathlib.Path(temporary_directory) / "test_data.lz"
    with lzip.FileEncoder(path) as encoder:
        encoder.compress(buffer[:100])
        encoder.compress(buffer[100:200])
        encoder.compress(buffer[200:])
    with open(path, "rb") as encoded_file:
        assert len(encoded_file.read()) == 198

# compress_to_file
with open(dirname / "test_data", "rb") as input_file:
    buffer = input_file.read()
with tempfile.TemporaryDirectory() as temporary_directory:
    path = pathlib.Path(temporary_directory) / "test_data.lz"
    lzip.compress_to_file(path, buffer)
    with open(path, "rb") as encoded_file:
        assert len(encoded_file.read()) == 198
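
# Note: the final check round-trips a random buffer (os.urandom) larger than
# 65536 bytes. Its length, 2 * 3 * 5 * 7 * 8 * 20 * 1024 bytes, is a multiple of
# every word_size tried below, so no RemainingBytesError is expected and each
# decoded result must equal the original buffer byte for byte.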

# generate large (> 65536) random file, compress, decompress
print("generate random buffer")
buffer = os.urandom(2 * 3 * 5 * 7 * 8 * 20 * 1024)
print("encode")
encoded_buffer = lzip.compress_to_buffer(buffer)
print("decode (word_size = 1)")
decoded_buffer = lzip.decompress_buffer(encoded_buffer)
assert buffer == decoded_buffer
for word_size in (2, 3, 5, 7, 8, 20, 1024):
    print(f"decode (word_size = {word_size})")
    decoded_buffer = b""
    for chunk in lzip.decompress_buffer_iter(encoded_buffer, word_size=word_size):
        decoded_buffer += chunk
    assert buffer == decoded_buffer