# coding: utf-8
# PyDERASN -- Python ASN.1 DER/BER codec with abstract structures
-# Copyright (C) 2017-2018 Sergey Matveev <stargrave@stargrave.org>
+# Copyright (C) 2017-2019 Sergey Matveev <stargrave@stargrave.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
+# published by the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
+from copy import deepcopy
from datetime import datetime
from string import ascii_letters
from string import digits
decode_path_strat = lists(integers(), max_size=3).map(
lambda decode_path: tuple(str(dp) for dp in decode_path)
)
+ctx_dummy = dictionaries(integers(), integers(), min_size=2, max_size=4).example()
class TestHex(TestCase):
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
self.assertTrue(obj.ber_encoded)
self.assertFalse(obj.lenindef)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertFalse(obj.lenindef)
+ self.assertTrue(obj.bered)
@given(
integers(min_value=1).map(tag_ctxc),
self.assertFalse(obj.lenindef)
self.assertFalse(obj.ber_encoded)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.expl_lenindef)
+ self.assertFalse(obj.lenindef)
+ self.assertFalse(obj.ber_encoded)
+ self.assertTrue(obj.bered)
self.assertSequenceEqual(tail, junk)
repr(obj)
list(obj.pps())
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
self.assertTrue(obj.ber_encoded)
self.assertEqual(obj.lenindef, lenindef_expected)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertEqual(obj.lenindef, lenindef_expected)
+ self.assertTrue(obj.bered)
self.assertEqual(len(encoded), obj.tlvlen)
@given(
self.assertTrue(obj.ber_encoded)
self.assertTrue(obj.lenindef)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertTrue(obj.lenindef)
+ self.assertTrue(obj.bered)
@composite
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
self.assertTrue(obj.ber_encoded)
self.assertEqual(obj.lenindef, lenindef_expected)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertEqual(obj.lenindef, lenindef_expected)
+ self.assertTrue(obj.bered)
self.assertEqual(len(encoded), obj.tlvlen)
@given(
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
repr(err.exception)
obj = ObjectIdentifier(value)
self.assertTrue(obj.ready)
+ self.assertFalse(obj.ber_encoded)
repr(obj)
list(obj.pps())
pprint(obj, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
ObjectIdentifier((2, 999, 3)),
)
+    @given(data_strategy())
+    def test_nonnormalized_first_arc(self, d):
+        # Hand-craft an OID whose first arc is encoded with a redundant
+        # leading 0x80 continuation octet -- a non-minimal ("non normalized")
+        # BER arc encoding of the single octet that encodes OID (1, 0).
+        tampered = (
+            ObjectIdentifier.tag_default +
+            len_encode(2) +
+            b'\x80' +
+            ObjectIdentifier((1, 0)).encode()[-1:]
+        )
+        # Lenient decoding (ctx "bered") must accept it, but flag the
+        # result as BER-encoded so callers know it is not valid DER.
+        obj, _ = ObjectIdentifier().decode(tampered, ctx={"bered": True})
+        self.assertTrue(obj.ber_encoded)
+        self.assertTrue(obj.bered)
+        # The BER flags must survive .copy().
+        obj = obj.copy()
+        self.assertTrue(obj.ber_encoded)
+        self.assertTrue(obj.bered)
+        # Strict (default, DER) decoding must reject the same bytes.
+        with assertRaisesRegex(self, DecodeError, "non normalized arc encoding"):
+            ObjectIdentifier().decode(tampered)
+
+    @given(data_strategy())
+    def test_nonnormalized_arcs(self, d):
+        # Start from a valid DER encoding of (1, 0, *arcs).
+        arcs = d.draw(lists(
+            integers(min_value=0, max_value=100),
+            min_size=1,
+            max_size=5,
+        ))
+        dered = ObjectIdentifier((1, 0) + tuple(arcs)).encode()
+        # Strip tag and length to reach the raw arcs payload.
+        _, tlen, lv = tag_strip(dered)
+        _, llen, v = len_decode(lv)
+        v_no_first_arc = v[1:]
+        # Inject 1..3 redundant 0x80 continuation octets at a random
+        # position, making some arc's encoding non-minimal.
+        idx_for_tamper = d.draw(integers(
+            min_value=0,
+            max_value=len(v_no_first_arc) - 1,
+        ))
+        tampered = list(bytearray(v_no_first_arc))
+        for _ in range(d.draw(integers(min_value=1, max_value=3))):
+            tampered.insert(idx_for_tamper, 0x80)
+        tampered = bytes(bytearray(tampered))
+        # Reassemble a full TLV around the tampered payload.
+        # NOTE(review): the original first-arc octet (v[0]) is not
+        # re-prepended here -- presumably intentional, since the tampered
+        # bytes still decode under BER; verify against upstream intent.
+        tampered = (
+            ObjectIdentifier.tag_default +
+            len_encode(len(tampered)) +
+            tampered
+        )
+        # BER decoding accepts it and the flags survive .copy();
+        # strict DER decoding must reject it.
+        obj, _ = ObjectIdentifier().decode(tampered, ctx={"bered": True})
+        self.assertTrue(obj.ber_encoded)
+        self.assertTrue(obj.bered)
+        obj = obj.copy()
+        self.assertTrue(obj.ber_encoded)
+        self.assertTrue(obj.bered)
+        with assertRaisesRegex(self, DecodeError, "non normalized arc encoding"):
+            ObjectIdentifier().decode(tampered)
+
@composite
def enumerated_values_strategy(draw, schema=None, do_expl=False):
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
base_klass = UTF8String
+# Hypothesis strategy yielding short strings of Cyrillic letters
+# (U+0410..U+044F), used to feed non-ASCII input to ASCII-only string
+# types and assert that they raise DecodeError.
+cyrillic_letters = text(
+    alphabet="".join(six_unichr(i) for i in list(range(0x0410, 0x044f + 1))),
+    min_size=1,
+    max_size=5,
+)
+
+
class UnicodeDecodeErrorMixin(object):
- @given(text(
- alphabet="".join(six_unichr(i) for i in list(range(0x0410, 0x044f + 1))),
- min_size=1,
- max_size=5,
- ))
+ @given(cyrillic_letters)
def test_unicode_decode_error(self, cyrillic_text):
with self.assertRaises(DecodeError):
self.base_klass(cyrillic_text)
base_klass = NumericString
def text_alphabet(self):
- return digits
+ return digits + " "
@given(text(alphabet=ascii_letters, min_size=1, max_size=5))
- def test_non_numeric(self, cyrillic_text):
+ def test_non_numeric(self, non_numeric_text):
with assertRaisesRegex(self, DecodeError, "non-numeric"):
- self.base_klass(cyrillic_text)
+ self.base_klass(non_numeric_text)
@given(
sets(integers(min_value=0, max_value=10), min_size=2, max_size=2),
):
base_klass = PrintableString
+    def text_alphabet(self):
+        # Full PrintableString repertoire: letters, digits, space, and
+        # the handful of punctuation characters the type permits.
+        return ascii_letters + digits + " '()+,-./:=?"
+
+    @given(text(alphabet=sorted(set(whitespace) - set(" ")), min_size=1, max_size=5))
+    def test_non_printable(self, non_printable_text):
+        # Any whitespace other than a plain space is outside the allowed
+        # alphabet and must be rejected at construction time.
+        with assertRaisesRegex(self, DecodeError, "non-printable"):
+            self.base_klass(non_printable_text)
+
+    @given(
+        sets(integers(min_value=0, max_value=10), min_size=2, max_size=2),
+        integers(min_value=0),
+        decode_path_strat,
+    )
+    def test_invalid_bounds_while_decoding(self, ints, offset, decode_path):
+        # Two distinct ints: the smaller becomes the encoded value's
+        # length, the larger becomes both bounds of the schema -- so the
+        # decoded length is always below the minimum bound.
+        value, bound_min = list(sorted(ints))
+
+        class String(self.base_klass):
+            bounds = (bound_min, bound_min)
+        with self.assertRaises(DecodeError) as err:
+            String().decode(
+                self.base_klass(b"1" * value).encode(),
+                offset=offset,
+                decode_path=decode_path,
+            )
+        repr(err.exception)
+        # The error must carry the location where decoding failed.
+        self.assertEqual(err.exception.offset, offset)
+        self.assertEqual(err.exception.decode_path, decode_path)
+
class TestTeletexString(
UnicodeDecodeErrorMixin,
self.assertTrue(obj.ber_encoded)
self.assertFalse(obj.lenindef)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertFalse(obj.lenindef)
+ self.assertTrue(obj.bered)
obj, tail = VisibleString().decode(
hexdec("3A8004034A6F6E040265730000"),
self.assertTrue(obj.ber_encoded)
self.assertTrue(obj.lenindef)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.ber_encoded)
+ self.assertTrue(obj.lenindef)
+ self.assertTrue(obj.bered)
class TestGeneralString(
pprint(obj, big_blobs=True, with_decode_path=True)
self.assertFalse(obj.expled)
obj_encoded = obj.encode()
+ self.additional_symmetric_check(value, obj_encoded)
obj_expled = obj(value, expl=tag_expl)
self.assertTrue(obj_expled.expled)
repr(obj_expled)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
min_datetime = datetime(1900, 1, 1)
max_datetime = datetime(9999, 12, 31)
+    def additional_symmetric_check(self, value, obj_encoded):
+        # Per X.690, DER fractional seconds must not carry trailing
+        # zeroes: if the datetime has microseconds, the encoding must
+        # not end with "0Z".
+        if value.microsecond > 0:
+            self.assertFalse(obj_encoded.endswith(b"0Z"))
+
+    def test_x690_vector_valid(self):
+        # Well-formed GeneralizedTime samples taken from ITU-T X.690;
+        # constructing from them must not raise.
+        for data in ((
+            b"19920521000000Z",
+            b"19920622123421Z",
+            b"19920722132100.3Z",
+        )):
+            GeneralizedTime(data)
+
+    def test_x690_vector_invalid(self):
+        # Malformed X.690 samples: hour 24, a fraction that is all
+        # zeroes, and a fraction with a trailing zero -- each must raise.
+        for data in ((
+            b"19920520240000Z",
+            b"19920622123421.0Z",
+            b"19920722132100.30Z",
+        )):
+            with self.assertRaises(DecodeError) as err:
+                GeneralizedTime(data)
+            repr(err.exception)
+
def test_go_vectors_invalid(self):
for data in ((
b"20100102030405",
junk
)
+    def test_ns_fractions(self):
+        # Microsecond precision (six fractional digits) is the maximum
+        # supported; a seventh digit (100 ns resolution) must raise.
+        GeneralizedTime(b"20010101000000.000001Z")
+        with assertRaisesRegex(self, DecodeError, "only microsecond fractions"):
+            GeneralizedTime(b"20010101000000.0000001Z")
+
class TestUTCTime(TimeMixin, CommonMixin, TestCase):
base_klass = UTCTime
min_datetime = datetime(2000, 1, 1)
max_datetime = datetime(2049, 12, 31)
+    def additional_symmetric_check(self, value, obj_encoded):
+        # UTCTime carries no fractional seconds, so there is nothing
+        # beyond the common symmetric round-trip checks to verify here.
+        pass
+
+    def test_x690_vector_valid(self):
+        # Well-formed UTCTime samples taken from ITU-T X.690;
+        # constructing from them must not raise.
+        for data in ((
+            b"920521000000Z",
+            b"920622123421Z",
+            b"920722132100Z",
+        )):
+            UTCTime(data)
+
+    def test_x690_vector_invalid(self):
+        # Malformed samples: hour 24, and a ten-digit value with the
+        # seconds omitted (this implementation rejects it) -- must raise.
+        for data in ((
+            b"920520240000Z",
+            b"9207221321Z",
+        )):
+            with self.assertRaises(DecodeError) as err:
+                UTCTime(data)
+            repr(err.exception)
+
def test_go_vectors_invalid(self):
for data in ((
b"a10506234540Z",
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
self.assertTrue(obj.lenindef)
self.assertFalse(obj.ber_encoded)
self.assertTrue(obj.bered)
+ obj = obj.copy()
+ self.assertTrue(obj.lenindef)
+ self.assertFalse(obj.ber_encoded)
+ self.assertTrue(obj.bered)
repr(obj)
list(obj.pps())
pprint(obj, big_blobs=True, with_decode_path=True)
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
t, _, lv = tag_strip(seq_encoded)
_, _, v = len_decode(lv)
seq_encoded_lenindef = t + LENINDEF + v + EOC
+ ctx_copied = deepcopy(ctx_dummy)
+ ctx_copied["bered"] = True
seq_decoded_lenindef, tail_lenindef = seq.decode(
seq_encoded_lenindef + tail_junk,
- ctx={"bered": True},
+ ctx=ctx_copied,
)
+ del ctx_copied["bered"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
+ self.assertTrue(seq_decoded_lenindef.lenindef)
+ self.assertTrue(seq_decoded_lenindef.bered)
+ seq_decoded_lenindef = seq_decoded_lenindef.copy()
self.assertTrue(seq_decoded_lenindef.lenindef)
self.assertTrue(seq_decoded_lenindef.bered)
with self.assertRaises(DecodeError):
seq_decoded, _ = seq_with_default.decode(seq_encoded, ctx=ctx)
self.assertTrue(seq_decoded.ber_encoded)
self.assertTrue(seq_decoded.bered)
+ seq_decoded = seq_decoded.copy()
+ self.assertTrue(seq_decoded.ber_encoded)
+ self.assertTrue(seq_decoded.bered)
for name, value in _schema:
self.assertEqual(seq_decoded[name], seq_with_default[name])
self.assertEqual(seq_decoded[name], value)
self.assertFalse(decoded.ber_encoded)
self.assertFalse(decoded.lenindef)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertFalse(decoded.ber_encoded)
+ self.assertFalse(decoded.lenindef)
+ self.assertTrue(decoded.bered)
class Seq(self.base_klass):
schema = (("underlying", OctetString()),)
self.assertFalse(decoded.ber_encoded)
self.assertFalse(decoded.lenindef)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertFalse(decoded.ber_encoded)
+ self.assertFalse(decoded.lenindef)
+ self.assertTrue(decoded.bered)
class TestSequence(SeqMixing, CommonMixin, TestCase):
seq_decoded, _ = Seq().decode(seq_encoded, ctx=ctx)
self.assertTrue(seq_decoded.ber_encoded)
self.assertTrue(seq_decoded.bered)
+ seq_decoded = seq_decoded.copy()
+ self.assertTrue(seq_decoded.ber_encoded)
+ self.assertTrue(seq_decoded.bered)
self.assertSequenceEqual(
[bytes(seq_decoded[str(i)]) for i, t in enumerate(tags)],
[t for t in tags],
list(obj_expled.pps())
pprint(obj_expled, big_blobs=True, with_decode_path=True)
obj_expled_encoded = obj_expled.encode()
+ ctx_copied = deepcopy(ctx_dummy)
obj_decoded, tail = obj_expled.decode(
obj_expled_encoded + tail_junk,
offset=offset,
+ ctx=ctx_copied,
)
+ self.assertDictEqual(ctx_copied, ctx_dummy)
repr(obj_decoded)
list(obj_decoded.pps())
pprint(obj_decoded, big_blobs=True, with_decode_path=True)
)
self.assertTrue(obj_decoded_lenindef.lenindef)
self.assertTrue(obj_decoded_lenindef.bered)
+ obj_decoded_lenindef = obj_decoded_lenindef.copy()
+ self.assertTrue(obj_decoded_lenindef.lenindef)
+ self.assertTrue(obj_decoded_lenindef.bered)
repr(obj_decoded_lenindef)
list(obj_decoded_lenindef.pps())
pprint(obj_decoded_lenindef, big_blobs=True, with_decode_path=True)
self.assertFalse(decoded.ber_encoded)
self.assertFalse(decoded.lenindef)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertFalse(decoded.ber_encoded)
+ self.assertFalse(decoded.lenindef)
+ self.assertTrue(decoded.bered)
class SeqOf(self.base_klass):
schema = OctetString()
self.assertFalse(decoded.ber_encoded)
self.assertFalse(decoded.lenindef)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertFalse(decoded.ber_encoded)
+ self.assertFalse(decoded.lenindef)
+ self.assertTrue(decoded.bered)
class TestSequenceOf(SeqOfMixing, CommonMixin, TestCase):
seq_decoded, _ = Seq().decode(seq_encoded, ctx=ctx)
self.assertTrue(seq_decoded.ber_encoded)
self.assertTrue(seq_decoded.bered)
+ seq_decoded = seq_decoded.copy()
+ self.assertTrue(seq_decoded.ber_encoded)
+ self.assertTrue(seq_decoded.bered)
self.assertSequenceEqual(
[obj.encode() for obj in seq_decoded],
values,
seq = Seq()
seq["erste"] = PrintableString("test")
self.assertSequenceEqual(seq.encode(), hexdec("3006130474657374"))
+ # Asterisk is actually not allowable
+ PrintableString._allowable_chars |= set(b"*")
seq["erste"] = PrintableString("test*")
self.assertSequenceEqual(seq.encode(), hexdec("30071305746573742a"))
+ PrintableString._allowable_chars -= set(b"*")
class Seq(Sequence):
schema = (
chosen_id = oids[chosen]
pp = _pp(asn1_type_name=ObjectIdentifier.asn1_type_name, value=chosen)
self.assertNotIn(chosen_id, pp_console_row(pp))
- self.assertIn(chosen_id, pp_console_row(pp, oids=oids))
+ self.assertIn(
+ chosen_id,
+ pp_console_row(pp, oid_maps=[{'whatever': 'whenever'}, oids]),
+ )
class TestAutoAddSlots(TestCase):
pprint(seq_sequenced, big_blobs=True, with_decode_path=True)
defines_by_path = []
- seq_integered, _ = Seq().decode(seq_integered_raw)
+ ctx_copied = deepcopy(ctx_dummy)
+ seq_integered, _ = Seq().decode(
+ seq_integered_raw,
+ ctx=ctx_copied,
+ )
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNone(seq_integered["value"].defined)
defines_by_path.append(
(("type",), ((("value",), {
type_sequenced: SeqInner(),
}),))
)
+ ctx_copied["defines_by_path"] = defines_by_path
seq_integered, _ = Seq().decode(
seq_integered_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_integered["value"].defined)
self.assertEqual(seq_integered["value"].defined[0], type_integered)
self.assertEqual(seq_integered["value"].defined[1], Integer(123))
list(seq_integered.pps())
pprint(seq_integered, big_blobs=True, with_decode_path=True)
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]
("value", DecodePathDefBy(type_sequenced), "typeInner"),
((("valueInner",), {type_innered: Pairs()}),),
))
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]
type_octet_stringed: OctetString(),
}),),
))
+ ctx_copied["defines_by_path"] = defines_by_path
seq_sequenced, _ = Seq().decode(
seq_sequenced_raw,
- ctx={"defines_by_path": defines_by_path},
+ ctx=ctx_copied,
)
+ del ctx_copied["defines_by_path"]
+ self.assertDictEqual(ctx_copied, ctx_dummy)
self.assertIsNotNone(seq_sequenced["value"].defined)
self.assertEqual(seq_sequenced["value"].defined[0], type_sequenced)
seq_inner = seq_sequenced["value"].defined[1]
decoded, _ = seq.decode(raw, ctx={"allow_default_values": True})
self.assertTrue(decoded.ber_encoded)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertTrue(decoded.ber_encoded)
+ self.assertTrue(decoded.bered)
decoded, _ = seq.decode(raw, ctx={"bered": True})
self.assertTrue(decoded.ber_encoded)
self.assertTrue(decoded.bered)
+ decoded = decoded.copy()
+ self.assertTrue(decoded.ber_encoded)
+ self.assertTrue(decoded.bered)
class TestX690PrefixedType(TestCase):