# coding: utf-8
# PyDERASN -- Python ASN.1 DER/BER codec with abstract structures
-# Copyright (C) 2017-2018 Sergey Matveev <stargrave@stargrave.org>
+# Copyright (C) 2017-2019 Sergey Matveev <stargrave@stargrave.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
+from copy import deepcopy
from datetime import datetime
from string import ascii_letters
from string import digits
decode_path_strat = lists(integers(), max_size=3).map(
lambda decode_path: tuple(str(dp) for dp in decode_path)
)
+ctx_dummy = dictionaries(integers(), integers(), min_size=2, max_size=4).example()
class TestHex(TestCase):
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
repr(err.exception)
obj = ObjectIdentifier(value)
self.assertTrue(obj.ready)
+ self.assertFalse(obj.ber_encoded)
repr(obj)
list(obj.pps())
pprint(obj, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
ObjectIdentifier((2, 999, 3)),
)
+ @given(data_strategy())
+ def test_nonnormalized_first_arc(self, d):
+ tampered = (
+ ObjectIdentifier.tag_default +
+ len_encode(2) +
+ b'\x80' +
+ ObjectIdentifier((1, 0)).encode()[-1:]
+ )
+ obj, _ = ObjectIdentifier().decode(tampered, ctx={"bered": True})
+ self.assertTrue(obj.ber_encoded)
+ with assertRaisesRegex(self, DecodeError, "non normalized arc encoding"):
+ ObjectIdentifier().decode(tampered)
+
+ @given(data_strategy())
+ def test_nonnormalized_arcs(self, d):
+ arcs = d.draw(lists(
+ integers(min_value=0, max_value=100),
+ min_size=1,
+ max_size=5,
+ ))
+ dered = ObjectIdentifier((1, 0) + tuple(arcs)).encode()
+ _, tlen, lv = tag_strip(dered)
+ _, llen, v = len_decode(lv)
+ v_no_first_arc = v[1:]
+ idx_for_tamper = d.draw(integers(
+ min_value=0,
+ max_value=len(v_no_first_arc) - 1,
+ ))
+ tampered = list(bytearray(v_no_first_arc))
+ for _ in range(d.draw(integers(min_value=1, max_value=3))):
+ tampered.insert(idx_for_tamper, 0x80)
+ tampered = bytes(bytearray(tampered))
+ tampered = (
+ ObjectIdentifier.tag_default +
+ len_encode(len(tampered)) +
+ tampered
+ )
+ obj, _ = ObjectIdentifier().decode(tampered, ctx={"bered": True})
+ self.assertTrue(obj.ber_encoded)
+ with assertRaisesRegex(self, DecodeError, "non normalized arc encoding"):
+ ObjectIdentifier().decode(tampered)
+
@composite
def enumerated_values_strategy(draw, schema=None, do_expl=False):
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
base_klass = UTF8String
+cyrillic_letters = text(
+ alphabet="".join(six_unichr(i) for i in list(range(0x0410, 0x044f + 1))),
+ min_size=1,
+ max_size=5,
+)
+
+
class UnicodeDecodeErrorMixin(object):
- @given(text(
- alphabet="".join(six_unichr(i) for i in list(range(0x0410, 0x044f + 1))),
- min_size=1,
- max_size=5,
- ))
+ @given(cyrillic_letters)
def test_unicode_decode_error(self, cyrillic_text):
with self.assertRaises(DecodeError):
self.base_klass(cyrillic_text)
):
base_klass = PrintableString
+ def text_alphabet(self):
+ return ascii_letters + digits + " '()+,-./:=?"
+
+ @given(text(alphabet=sorted(set(whitespace) - set(" ")), min_size=1, max_size=5))
+ def test_non_printable(self, non_printable_text):
+ with assertRaisesRegex(self, DecodeError, "non-printable"):
+ self.base_klass(non_printable_text)
+
+ @given(
+ sets(integers(min_value=0, max_value=10), min_size=2, max_size=2),
+ integers(min_value=0),
+ decode_path_strat,
+ )
+ def test_invalid_bounds_while_decoding(self, ints, offset, decode_path):
+ value, bound_min = list(sorted(ints))
+
+ class String(self.base_klass):
+ bounds = (bound_min, bound_min)
+ with self.assertRaises(DecodeError) as err:
+ String().decode(
+ self.base_klass(b"1" * value).encode(),
+ offset=offset,
+ decode_path=decode_path,
+ )
+ repr(err.exception)
+ self.assertEqual(err.exception.offset, offset)
+ self.assertEqual(err.exception.decode_path, decode_path)
+
class TestTeletexString(
UnicodeDecodeErrorMixin,
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
t, _, lv = tag_strip(seq_encoded)
_, _, v = len_decode(lv)
seq_encoded_lenindef = t + LENINDEF + v + EOC
+ ctx_copied = deepcopy(ctx_dummy)
+ ctx_copied["bered"] = True
seq_decoded_lenindef, tail_lenindef = seq.decode(
seq_encoded_lenindef + tail_junk,
- ctx={"bered": True},
+ ctx=ctx_copied,
)
+ del ctx_copied["bered"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertTrue(seq_decoded_lenindef.lenindef)
self.assertTrue(seq_decoded_lenindef.bered)
with self.assertRaises(DecodeError):
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
seq = Seq()
seq["erste"] = PrintableString("test")
self.assertSequenceEqual(seq.encode(), hexdec("3006130474657374"))
+ # Asterisk is actually not allowable
+ PrintableString._allowable_chars |= set(b"*")
seq["erste"] = PrintableString("test*")
self.assertSequenceEqual(seq.encode(), hexdec("30071305746573742a"))
+ PrintableString._allowable_chars -= set(b"*")
class Seq(Sequence):
schema = (
pprint(seq_sequenced, big_blobs=True, with_decode_path=True)
defines_by_path = []
- seq_integered, _ = Seq().decode(seq_integered_raw)
+ ctx_copied = deepcopy(ctx_dummy)
+ seq_integered, _ = Seq().decode(
+ seq_integered_raw,
+ ctx=ctx_copied,
+ )
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNone(seq_integered["value"].defined)
defines_by_path.append(
(("type",), ((("value",), {
type_sequenced: SeqInner(),
}),))
)
+ ctx_copied["defines_by_path"] = defines_by_path
seq_integered, _ = Seq().decode(
seq_integered_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_integered["value"].defined)
self.assertEqual(seq_integered["value"].defined[0], type_integered)
self.assertEqual(seq_integered["value"].defined[1], Integer(123))
list(seq_integered.pps())
pprint(seq_integered, big_blobs=True, with_decode_path=True)
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]
("value", DecodePathDefBy(type_sequenced), "typeInner"),
((("valueInner",), {type_innered: Pairs()}),),
))
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]
type_octet_stringed: OctetString(),
}),),
))
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]