Skip to content

Commit

Permalink
[dns] possible fix for pointer corruption
Browse files Browse the repository at this point in the history
  • Loading branch information
meitinger committed Jun 29, 2024
1 parent 27f9902 commit 4e28f52
Showing 1 changed file with 57 additions and 13 deletions.
70 changes: 57 additions & 13 deletions mitmproxy/dns.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,14 @@ class Question(serializable.SerializableDataclass):
name: str
type: int
class_: int
_orig_data: bytes = None

def __setattr__(self, name, value):
# We try to preserve the request data for pointer safety,
# unless something gets changed by a hook.
if name != "_orig_data":
self._orig_data = None
return super().__setattr__(name, value)

def __str__(self) -> str:
return self.name
Expand All @@ -57,6 +65,14 @@ class ResourceRecord(serializable.SerializableDataclass):
class_: int
ttl: int
data: bytes
_orig_data: bytes = None

def __setattr__(self, name, value):
# We try to preserve the request data for pointer safety,
# unless something gets changed by a hook.
if name != "_orig_data":
self._orig_data = None
return super().__setattr__(name, value)

def __str__(self) -> str:
try:
Expand Down Expand Up @@ -332,10 +348,19 @@ def unpack_domain_name() -> str:

for i in range(0, len_questions):
try:
start_data = offset
name = unpack_domain_name()
type, class_ = Question.HEADER.unpack_from(buffer, offset)
offset += Question.HEADER.size
msg.questions.append(Question(name=name, type=type, class_=class_))
end_data = offset
msg.questions.append(
Question(
name=name,
type=type,
class_=class_,
_orig_data=buffer[start_data:end_data],
)
)
except struct.error as e:
raise struct.error(f"question #{i}: {str(e)}")

Expand All @@ -345,6 +370,7 @@ def unpack_rrs(
nonlocal buffer, offset
for i in range(0, count):
try:
start_data = offset
name = unpack_domain_name()
type, class_, ttl, len_data = ResourceRecord.HEADER.unpack_from(
buffer, offset
Expand All @@ -356,20 +382,32 @@ def unpack_rrs(
f"unpack requires a data buffer of {len_data} bytes"
)
data = buffer[offset:end_data]
if 0b11000000 in data:
# the resource record might contain a compressed domain name; if so, uncompress it in advance
# RDATA might contain compressed domain names.
# We handle simple records (CNAME, PTR, NS) here.
# Complex records (MX, ...) aren't handled yet;
# for those we try to preserve the request data,
# which should keep the pointers intact.
if type in (types.NS, types.CNAME, types.PTR):
try:
(
rr_name,
rr_name_len,
) = domain_names.unpack_from_with_compression(
buffer, offset, cached_names
)
if rr_name_len == len_data:
data = domain_names.pack(rr_name)
if rr_name_len != len_data:
raise struct.error(
"compressed length doesn't match RR size"
)
data = domain_names.pack(rr_name)
except struct.error:
# most likely an invalid record, but proceed anyway
pass
section.append(ResourceRecord(name, type, class_, ttl, data))
section.append(
ResourceRecord(
name, type, class_, ttl, data, buffer[start_data:end_data]
)
)
offset += len_data
except struct.error as e:
raise struct.error(f"{section_name} #{i}: {str(e)}")
Expand Down Expand Up @@ -421,14 +459,20 @@ def packed(self) -> bytes:
)
# TODO implement compression
for question in self.questions:
data.extend(domain_names.pack(question.name))
data.extend(Question.HEADER.pack(question.type, question.class_))
if question._orig_data is None:
data.extend(domain_names.pack(question.name))
data.extend(Question.HEADER.pack(question.type, question.class_))
else:
data.extend(question._orig_data)
for rr in (*self.answers, *self.authorities, *self.additionals):
data.extend(domain_names.pack(rr.name))
data.extend(
ResourceRecord.HEADER.pack(rr.type, rr.class_, rr.ttl, len(rr.data))
)
data.extend(rr.data)
if rr._orig_data is None:
data.extend(domain_names.pack(rr.name))
data.extend(
ResourceRecord.HEADER.pack(rr.type, rr.class_, rr.ttl, len(rr.data))
)
data.extend(rr.data)
else:
data.extend(rr._orig_data)
return bytes(data)

def to_json(self) -> dict:
Expand Down

0 comments on commit 4e28f52

Please sign in to comment.