From 517f9ab0bb42c9d89fade2aa69ec5da9501efc0a Mon Sep 17 00:00:00 2001 From: Sergey Matveev Date: Sat, 8 Feb 2020 12:58:25 +0300 Subject: [PATCH] *Time BER decoding support --- doc/limitations.rst | 2 - doc/news.rst | 10 +- pyderasn.py | 294 +++++++++++++++++++++------- tests/test_pyderasn.py | 432 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 664 insertions(+), 74 deletions(-) diff --git a/doc/limitations.rst b/doc/limitations.rst index 79b1ac3..89b4bd4 100644 --- a/doc/limitations.rst +++ b/doc/limitations.rst @@ -6,6 +6,4 @@ Limitations in any way, except just trying to be decoded in ``ascii``, ``iso-8859-1``, ``utf-8/16/32`` correspondingly * :py:class:`pyderasn.GeneralizedTime` does not support zero year -* :py:class:`pyderasn.UTCTime` and :py:class:`pyderasn.GeneralizedTime` - support only DER-encoding * No REAL, RELATIVE OID, EXTERNAL, INSTANCE OF, EMBEDDED PDV, CHARACTER STRING diff --git a/doc/news.rst b/doc/news.rst index 90c894c..cbaa344 100644 --- a/doc/news.rst +++ b/doc/news.rst @@ -7,6 +7,7 @@ News --- * UTCTime and GeneralizedTime allowed values to have plus sign in them, passing int() check successfully. Prohibit that incorrect behaviour +* UTCTime and GeneralizedTime BER decoding support * Explicitly Check that all ObjectIdentifier arcs are non-negative .. _release6.0: @@ -18,7 +19,7 @@ News * Copies made previously with ``.copy()`` lacked ``.defined`` field, now they are not * All objects are friendly to ``pickle`` libraries -* ``PrintableString`` has ``allow_asterisk`` and ``allow_ampersand`` +* PrintableString has ``allow_asterisk`` and ``allow_ampersand`` properties * ``.hexencode()``, ``.hexdecode()`` and ``.hexdecod()`` methods appeared, working with hexadecimal encoded data @@ -31,9 +32,8 @@ News * Control characters (like newlines) of text fields in pprinted output are escaped * Ability to allow asterisk and ampersand characters - (``allow_asterisk``, ``allow_ampersand`` kwargs) in - ``PrintableString``, that unfortunately could be met - in X.509 certificates + (``allow_asterisk``, ``allow_ampersand`` kwargs) in PrintableString, + that unfortunately could be met in X.509 certificates .. _release5.5: @@ -45,7 +45,7 @@ News 5.4 --- -* Do not shadow underlying ``DecodeError`` during decoding of optional +* Do not shadow underlying DecodeError during decoding of optional sequence's field .. _release5.3: diff --git a/pyderasn.py b/pyderasn.py index 7808029..0d3a663 100755 --- a/pyderasn.py +++ b/pyderasn.py @@ -340,7 +340,8 @@ Let's parse that output, human:: Only applicable to BER encoded data. If object has BER-specific encoding, then ``BER`` will be shown. It does not depend on indefinite length encoding. ``EOC``, ``BOOLEAN``, ``BIT STRING``, ``OCTET STRING`` - (and its derivatives), ``SET``, ``SET OF`` could be BERed. + (and its derivatives), ``SET``, ``SET OF``, ``UTCTime``, ``GeneralizedTime`` + could be BERed. .. _definedby: @@ -490,8 +491,8 @@ constructed primitive types should be parsed successfully. * If object is encoded in BER form (not the DER one), then ``ber_encoded`` attribute is set to True. Only ``BOOLEAN``, ``BIT STRING``, ``OCTET - STRING``, ``OBJECT IDENTIFIER``, ``SEQUENCE``, ``SET``, ``SET OF`` - can contain it. + STRING``, ``OBJECT IDENTIFIER``, ``SEQUENCE``, ``SET``, ``SET OF``, + ``UTCTime``, ``GeneralizedTime`` can contain it. * If object has an indefinite length encoding, then its ``lenindef`` attribute is set to True. 
Only ``BIT STRING``, ``OCTET STRING``, ``SEQUENCE``, ``SET``, ``SEQUENCE OF``, ``SET OF``, ``ANY`` can @@ -659,6 +660,7 @@ from collections import namedtuple from collections import OrderedDict from copy import copy from datetime import datetime +from datetime import timedelta from math import ceil from os import environ from string import ascii_letters @@ -2902,6 +2904,7 @@ class OctetString(Obj): default=None, optional=False, _decoded=(0, 0, 0), + ctx=None, ): """ :param value: set the value. Either binary type, or @@ -3039,7 +3042,7 @@ class OctetString(Obj): self._value, )) - def _decode_chunk(self, lv, offset, decode_path): + def _decode_chunk(self, lv, offset, decode_path, ctx): try: l, llen, v = len_decode(lv) except DecodeError as err: @@ -3066,6 +3069,7 @@ class OctetString(Obj): default=self.default, optional=self.optional, _decoded=(offset, llen, l), + ctx=ctx, ) except DecodeError as err: raise DecodeError( @@ -3096,7 +3100,7 @@ class OctetString(Obj): if t == self.tag: if tag_only: return None - return self._decode_chunk(lv, offset, decode_path) + return self._decode_chunk(lv, offset, decode_path, ctx) if t == self.tag_constructed: if not ctx.get("bered", False): raise DecodeError( @@ -3174,6 +3178,7 @@ class OctetString(Obj): default=self.default, optional=self.optional, _decoded=(offset, llen, vlen + (EOC_LEN if lenindef else 0)), + ctx=ctx, ) except DecodeError as err: raise DecodeError( @@ -4038,6 +4043,7 @@ class PrintableString(AllowableCharsMixin, CommonString): default=None, optional=False, _decoded=(0, 0, 0), + ctx=None, allow_asterisk=False, allow_ampersand=False, ): @@ -4050,7 +4056,7 @@ class PrintableString(AllowableCharsMixin, CommonString): if allow_ampersand: self._allowable_chars |= self._ampersand super(PrintableString, self).__init__( - value, bounds, impl, expl, default, optional, _decoded, + value, bounds, impl, expl, default, optional, _decoded, ctx, ) @property @@ -4136,6 +4142,11 @@ LEN_YYYYMMDDHHMMSSDMZ = len("YYYYMMDDHHMMSSDMZ") LEN_YYYYMMDDHHMMSSZ = len("YYYYMMDDHHMMSSZ") +def fractions2float(fractions_raw): + pureint(fractions_raw) + return float("0." + fractions_raw) + + class VisibleString(CommonString): __slots__ = () tag_default = tag_encode(26) @@ -4143,6 +4154,9 @@ class VisibleString(CommonString): asn1_type_name = "VisibleString" +UTCTimeState = namedtuple("UTCTimeState", OctetStringState._fields + ("ber_raw",)) + + class UTCTime(VisibleString): """``UTCTime`` datetime type @@ -4159,9 +4173,18 @@ class UTCTime(VisibleString): .. warning:: - BER encoding is unsupported. + Pay attention that UTCTime can not hold full year, so all years + having < 50 years are treated as 20xx, 19xx otherwise, according + to X.509 recommendation. + + .. warning:: + + No strict validation of UTC offsets are made, but very crude: + + * minutes are not exceeding 60 + * offset value is not exceeding 14 hours """ - __slots__ = () + __slots__ = ("_ber_raw",) tag_default = tag_encode(23) encoding = "ascii" asn1_type_name = "UTCTime" @@ -4175,6 +4198,7 @@ class UTCTime(VisibleString): optional=False, _decoded=(0, 0, 0), bounds=None, # dummy argument, workability for OctetString.decode + ctx=None, ): """ :param value: set the value. 
Either datetime type, or @@ -4185,13 +4209,15 @@ class UTCTime(VisibleString): :param bool optional: is object ``OPTIONAL`` in sequence """ super(UTCTime, self).__init__( - None, None, impl, expl, default, optional, _decoded, + None, None, impl, expl, None, optional, _decoded, ctx, ) self._value = value + self._ber_raw = None if value is not None: - self._value = self._value_sanitize(value) + self._value, self._ber_raw = self._value_sanitize(value, ctx) + self.ber_encoded = self._ber_raw is not None if default is not None: - default = self._value_sanitize(default) + default, _ = self._value_sanitize(default) self.default = self.__class__( value=default, impl=self.tag, @@ -4199,6 +4225,51 @@ class UTCTime(VisibleString): ) if self._value is None: self._value = default + optional = True + self.optional = optional + + def _strptime_bered(self, value): + year = pureint(value[:2]) + year += 2000 if year < 50 else 1900 + decoded = datetime( + year, # %Y + pureint(value[2:4]), # %m + pureint(value[4:6]), # %d + pureint(value[6:8]), # %H + pureint(value[8:10]), # %M + ) + value = value[10:] + if len(value) == 0: + raise ValueError("no timezone") + offset = 0 + if value[-1] == "Z": + value = value[:-1] + else: + if len(value) < 5: + raise ValueError("invalid UTC offset") + if value[-5] == "-": + sign = -1 + elif value[-5] == "+": + sign = 1 + else: + raise ValueError("invalid UTC offset") + offset = 60 * pureint(value[-2:]) + if offset >= 3600: + raise ValueError("invalid UTC offset minutes") + offset += 3600 * pureint(value[-4:-2]) + if offset > 14 * 3600: + raise ValueError("too big UTC offset") + offset *= sign + value = value[:-5] + if len(value) == 0: + return offset, decoded + if len(value) != 2: + raise ValueError("invalid UTC offset seconds") + seconds = pureint(value) + if seconds >= 60: + raise ValueError("invalid seconds value") + decoded += timedelta(seconds=seconds) + return offset, decoded def _strptime(self, value): # datetime.strptime's format: %y%m%d%H%M%SZ @@ -4206,8 +4277,10 @@ class UTCTime(VisibleString): raise ValueError("invalid UTCTime length") if value[-1] != "Z": raise ValueError("non UTC timezone") + year = pureint(value[:2]) + year += 2000 if year < 50 else 1900 return datetime( - 2000 + int(value[:2]), # %y + year, # %y pureint(value[2:4]), # %m pureint(value[4:6]), # %d pureint(value[6:8]), # %H @@ -4215,26 +4288,71 @@ class UTCTime(VisibleString): pureint(value[10:12]), # %S ) - def _value_sanitize(self, value): + def _dt_sanitize(self, value): + if value.year < 1950 or value.year > 2049: + raise ValueError("UTCTime can hold only 1950-2049 years") + return value.replace(microsecond=0) + + def _value_sanitize(self, value, ctx=None): if isinstance(value, binary_type): try: value_decoded = value.decode("ascii") except (UnicodeEncodeError, UnicodeDecodeError) as err: raise DecodeError("invalid UTCTime encoding: %r" % err) + err = None try: - self._strptime(value_decoded) - except (TypeError, ValueError) as err: - raise DecodeError("invalid UTCTime format: %r" % err) - return value + return self._strptime(value_decoded), None + except (TypeError, ValueError) as _err: + err = _err + if (ctx is not None) and ctx.get("bered", False): + try: + offset, _value = self._strptime_bered(value_decoded) + _value = _value - timedelta(seconds=offset) + return self._dt_sanitize(_value), value_decoded + except (TypeError, ValueError, OverflowError) as _err: + err = _err + raise DecodeError( + "invalid %s format: %r" % (self.asn1_type_name, err), + klass=self.__class__, + ) if 
isinstance(value, self.__class__): - return value._value + return value._value, None if isinstance(value, datetime): - return value.strftime("%y%m%d%H%M%SZ").encode("ascii") + return self._dt_sanitize(value), None raise InvalidValueType((self.__class__, datetime)) + def _pp_value(self): + if self.ready: + value = self._value.isoformat() + if self.ber_encoded: + value += " (%s)" % self._ber_raw + return value + + def __unicode__(self): + if self.ready: + value = self._value.isoformat() + if self.ber_encoded: + value += " (%s)" % self._ber_raw + return value + return text_type(self._pp_value()) + + def __getstate__(self): + return UTCTimeState( + *super(UTCTime, self).__getstate__(), + **{"ber_raw": self._ber_raw} + ) + + def __setstate__(self, state): + super(UTCTime, self).__setstate__(state) + self._ber_raw = state.ber_raw + + def __bytes__(self): + self._assert_ready() + return self._encode_time() + def __eq__(self, their): if isinstance(their, binary_type): - return self._value == their + return self._encode_time() == their if isinstance(their, datetime): return self.todatetime() == their if not isinstance(their, self.__class__): @@ -4245,25 +4363,16 @@ class UTCTime(VisibleString): self._expl == their._expl ) - def todatetime(self): - """Convert to datetime + def _encode_time(self): + return self._value.strftime("%y%m%d%H%M%SZ").encode("ascii") - :returns: datetime + def _encode(self): + self._assert_ready() + value = self._encode_time() + return b"".join((self.tag, len_encode(len(value)), value)) - Pay attention that UTCTime can not hold full year, so all years - having < 50 years are treated as 20xx, 19xx otherwise, according - to X.509 recomendation. - """ - value = self._strptime(self._value.decode("ascii")) - year = value.year % 100 - return datetime( - year=(2000 + year) if year < 50 else (1900 + year), - month=value.month, - day=value.day, - hour=value.hour, - minute=value.minute, - second=value.second, - ) + def todatetime(self): + return self._value def __repr__(self): return pp_console_row(next(self.pps())) @@ -4274,7 +4383,7 @@ class UTCTime(VisibleString): asn1_type_name=self.asn1_type_name, obj_name=self.__class__.__name__, decode_path=decode_path, - value=self.todatetime().isoformat() if self.ready else None, + value=self._pp_value(), optional=self.optional, default=self == self.default, impl=None if self.tag == self.tag_default else tag_decode(self.tag), @@ -4309,13 +4418,19 @@ class GeneralizedTime(UTCTime): .. warning:: - BER encoding is unsupported. + Only microsecond fractions are supported in DER encoding. + :py:exc:`pyderasn.DecodeError` will be raised during decoding of + higher precision values. .. warning:: - Only microsecond fractions are supported. - :py:exc:`pyderasn.DecodeError` will be raised during decoding of - higher precision values. + BER encoded data can loss information (accuracy) during decoding + because of float transformations. + + .. warning:: + + Local times (without explicit timezone specification) are treated + as UTC one, no transformations are made. .. 
warning:: @@ -4325,6 +4440,70 @@ class GeneralizedTime(UTCTime): tag_default = tag_encode(24) asn1_type_name = "GeneralizedTime" + def _dt_sanitize(self, value): + return value + + def _strptime_bered(self, value): + if len(value) < 4 + 3 * 2: + raise ValueError("invalid GeneralizedTime") + decoded = datetime( + pureint(value[:4]), # %Y + pureint(value[4:6]), # %m + pureint(value[6:8]), # %d + pureint(value[8:10]), # %H + ) + value = value[10:] + offset = 0 + if len(value) == 0: + return offset, decoded + if value[-1] == "Z": + value = value[:-1] + else: + for char, sign in (("-", -1), ("+", 1)): + idx = value.rfind(char) + if idx == -1: + continue + offset_raw = value[idx + 1:].replace(":", "") + if len(offset_raw) not in (2, 4): + raise ValueError("invalid UTC offset") + value = value[:idx] + offset = 60 * pureint(offset_raw[2:] or "0") + if offset >= 3600: + raise ValueError("invalid UTC offset minutes") + offset += 3600 * pureint(offset_raw[:2]) + if offset > 14 * 3600: + raise ValueError("too big UTC offset") + offset *= sign + break + if len(value) == 0: + return offset, decoded + decimal_signs = ".," + if value[0] in decimal_signs: + return offset, ( + decoded + timedelta(seconds=3600 * fractions2float(value[1:])) + ) + if len(value) < 2: + raise ValueError("stripped minutes") + decoded += timedelta(seconds=60 * pureint(value[:2])) + value = value[2:] + if len(value) == 0: + return offset, decoded + if value[0] in decimal_signs: + return offset, ( + decoded + timedelta(seconds=60 * fractions2float(value[1:])) + ) + if len(value) < 2: + raise ValueError("stripped seconds") + decoded += timedelta(seconds=pureint(value[:2])) + value = value[2:] + if len(value) == 0: + return offset, decoded + if value[0] not in decimal_signs: + raise ValueError("invalid format after seconds") + return offset, ( + decoded + timedelta(microseconds=10**6 * fractions2float(value[1:])) + ) + def _strptime(self, value): l = len(value) if l == LEN_YYYYMMDDHHMMSSZ: @@ -4364,31 +4543,12 @@ class GeneralizedTime(UTCTime): return decoded raise ValueError("invalid GeneralizedTime length") - def _value_sanitize(self, value): - if isinstance(value, binary_type): - try: - value_decoded = value.decode("ascii") - except (UnicodeEncodeError, UnicodeDecodeError) as err: - raise DecodeError("invalid GeneralizedTime encoding: %r" % err) - try: - self._strptime(value_decoded) - except (TypeError, ValueError) as err: - raise DecodeError( - "invalid GeneralizedTime format: %r" % err, - klass=self.__class__, - ) - return value - if isinstance(value, self.__class__): - return value._value - if isinstance(value, datetime): - encoded = value.strftime("%Y%m%d%H%M%S") - if value.microsecond > 0: - encoded = encoded + (".%06d" % value.microsecond).rstrip("0") - return (encoded + "Z").encode("ascii") - raise InvalidValueType((self.__class__, datetime)) - - def todatetime(self): - return self._strptime(self._value.decode("ascii")) + def _encode_time(self): + value = self._value + encoded = value.strftime("%Y%m%d%H%M%S") + if value.microsecond > 0: + encoded += (".%06d" % value.microsecond).rstrip("0") + return (encoded + "Z").encode("ascii") class GraphicString(CommonString): diff --git a/tests/test_pyderasn.py b/tests/test_pyderasn.py index f2e73b4..d396c64 100644 --- a/tests/test_pyderasn.py +++ b/tests/test_pyderasn.py @@ -18,11 +18,14 @@ from copy import copy from copy import deepcopy from datetime import datetime +from datetime import timedelta from importlib import import_module +from random import random from string import 
ascii_letters from string import digits from string import printable from string import whitespace +from time import mktime from time import time from unittest import TestCase @@ -3955,6 +3958,10 @@ class TestGeneralizedTime(TimeMixin, CommonMixin, TestCase): if value.microsecond > 0: self.assertFalse(obj_encoded.endswith(b"0Z")) + def test_repr_not_ready(self): + unicode(GeneralizedTime()) if PY2 else str(GeneralizedTime()) + repr(GeneralizedTime()) + def test_x690_vector_valid(self): for data in (( b"19920521000000Z", @@ -4007,6 +4014,238 @@ class TestGeneralizedTime(TimeMixin, CommonMixin, TestCase): datetime(2010, 1, 2, 3, 4, 5, 0), ) + def test_go_vectors_valid_ber(self): + for data in (( + b"20100102030405+0607", + b"20100102030405-0607", + )): + GeneralizedTime(data, ctx={"bered": True}) + + def test_utc_offsets(self): + """Some know equal UTC offsets + """ + dts = [ + GeneralizedTime(data.encode("ascii"), ctx={"bered": True}) + for data in ( + "200101011830Z", + "200101012230+04", + "200101011130-0700", + "200101011500-03:30", + ) + ] + self.assertEqual(dts[0], dts[1]) + self.assertEqual(dts[0], dts[2]) + self.assertEqual(dts[0], dts[3]) + + @settings(max_examples=LONG_TEST_MAX_EXAMPLES) + @given(data_strategy()) + def test_valid_ber(self, d): + min_year = 1901 if PY2 else 2 + year = d.draw(integers(min_value=min_year, max_value=9999)) + month = d.draw(integers(min_value=1, max_value=12)) + day = d.draw(integers(min_value=1, max_value=28)) + hours = d.draw(integers(min_value=0, max_value=23)) + data = "%04d%02d%02d%02d" % (year, month, day, hours) + dt = datetime(year, month, day, hours) + fractions_sign = d.draw(sampled_from(" ,.")) + fractions = None + if fractions_sign != " ": + fractions = random() + if d.draw(booleans()): + minutes = d.draw(integers(min_value=0, max_value=59)) + data += "%02d" % minutes + dt += timedelta(seconds=60 * minutes) + if d.draw(booleans()): + seconds = d.draw(integers(min_value=0, max_value=59)) + data += "%02d" % seconds + dt += timedelta(seconds=seconds) + if fractions is not None: + dt += timedelta(microseconds=10**6 * fractions) + elif fractions is not None: + dt += timedelta(seconds=60 * fractions) + elif fractions is not None: + dt += timedelta(seconds=3600 * fractions) + if fractions is not None: + data += fractions_sign + str(fractions)[2:] + if d.draw(booleans()): + data += "Z" + elif d.draw(booleans()): + offset_hour = d.draw(integers(min_value=0, max_value=13)) + sign = 1 + if d.draw(booleans()): + data += "-" + sign = -1 + else: + data += "+" + dt -= timedelta(seconds=sign * 3600 * offset_hour) + data += "%02d" % offset_hour + minutes_separator = d.draw(sampled_from((None, "", ":"))) + if minutes_separator is not None: + offset_minute = d.draw(integers(min_value=0, max_value=59)) + dt -= timedelta(seconds=sign * 60 * offset_minute) + data += "%s%02d" % (minutes_separator, offset_minute) + data = data.encode("ascii") + data = GeneralizedTime.tag_default + len_encode(len(data)) + data + try: + GeneralizedTime().decod(data) + except DecodeError: + dered = False + else: + dered = True + obj = GeneralizedTime().decod(data, ctx={"bered": True}) + if dt.year > 1970: + self.assertEqual( + mktime(obj.todatetime().timetuple()), + mktime(dt.timetuple()), + ) + elif not PY2: + self.assertEqual(obj.todatetime().timestamp(), dt.timestamp()) + self.assertEqual(obj.ber_encoded, not dered) + self.assertEqual(obj.bered, not dered) + self.assertEqual(obj.encode() == data, dered) + repr(obj) + bytes(obj) + str(obj) + + def test_invalid_ber(self): + for data 
in (( + # "00010203040506.07", + "-0010203040506.07", + "0001-203040506.07", + "000102-3040506.07", + "00010203-40506.07", + "0001020304-506.07", + "000102030405-6.07", + "00010203040506.-7", + "+0010203040506.07", + "0001+203040506.07", + "000102+3040506.07", + "00010203+40506.07", + "0001020304+506.07", + "000102030405+6.07", + "00010203040506.+7", + " 0010203040506.07", + "0001 203040506.07", + "000102 3040506.07", + "00010203 40506.07", + "0001020304 506.07", + "000102030405 6.07", + "00010203040506. 7", + "001 0203040506.07", + "00012 03040506.07", + "0001023 040506.07", + "000102034 0506.07", + "00010203045 06.07", + "0001020304056 .07", + "00010203040506.7 ", + "00010203040506.", + "0001020304050607", + + "-0010203040506", + "0001-203040506", + "000102-3040506", + "00010203-40506", + "0001020304-506", + "000102030405-6", + "0001+203040506", + "000102+3040506", + "00010203+40506", + "0001020304+506", + "000102030405+6", + " 0010203040506", + "0001 203040506", + "000102 3040506", + "00010203 40506", + "0001020304 506", + "000102030405 6", + "001 0203040506", + "00012 03040506", + "0001023 040506", + "000102034 0506", + "00010203045 06", + "0001020304056 ", + + "-00102030405.07", + "0001-2030405.07", + "000102-30405.07", + "00010203-405.07", + "0001020304-5.07", + "000102030405.-7", + "+00102030405.07", + "0001+2030405.07", + "00010203+405.07", + "0001020304+5.07", + "000102030405.+7", + " 00102030405.07", + "0001 2030405.07", + "000102 30405.07", + "00010203 405.07", + "0001020304 5.07", + "000102030405. 7", + "001 02030405.07", + "00012 030405.07", + "0001023 0405.07", + "000102034 05.07", + "00010203045 .07", + "000102030405.7 ", + "000102030405.", + + "-001020304.07", + "0001-20304.07", + "000102-304.07", + "00010203-4.07", + "0001020304.-7", + "+001020304.07", + "0001+20304.07", + "00010203+4.07", + "0001020304.+7", + " 001020304.07", + "0001 20304.07", + "000102 304.07", + "00010203 4.07", + "0001020304. 
7", + "001 020304.07", + "00012 0304.07", + "0001023 04.07", + "000102034 .07", + "0001020304.7 ", + "0001020304.", + + "00010203", + "00010203040506Y", + "0001010100+0001", + "0001010100+00:01", + "0001010100+01", + + "00010203040506.07+15", + "00010203040506.07-15", + "00010203040506.07+14:60", + "00010203040506.07+1460", + "00010203040506.07-1460", + "00010203040506.07+00:60", + "00010203040506.07-00:60", + + "00010203040506+15", + "00010203040506-15", + "00010203040506+14:60", + "00010203040506+1460", + "00010203040506-1460", + "00010203040506+00:60", + "00010203040506-00:60", + + "0001020304050.07", + "00010203040.07", + "000102030.07", + "0001020304050", + "00010203040", + "000102030", + )): + with self.assertRaises(DecodeError): + GeneralizedTime(data.encode("ascii"), ctx={"bered": True}) + data = data.replace(".", ",") + with self.assertRaises(DecodeError): + GeneralizedTime(data.encode("ascii"), ctx={"bered": True}) + @given( binary( min_size=(LEN_YYYYMMDDHHMMSSZ - 1) // 2, @@ -4093,6 +4332,10 @@ class TestUTCTime(TimeMixin, CommonMixin, TestCase): def additional_symmetric_check(self, value, obj_encoded): pass + def test_repr_not_ready(self): + unicode(GeneralizedTime()) if PY2 else str(GeneralizedTime()) + repr(UTCTime()) + def test_x690_vector_valid(self): for data in (( b"920521000000Z", @@ -4178,6 +4421,195 @@ class TestUTCTime(TimeMixin, CommonMixin, TestCase): with self.assertRaises(DecodeError): UTCTime(data) + def test_x680_vector_valid_ber(self): + for data, dt in (( + (b"8201021200Z", datetime(1982, 1, 2, 12)), + (b"8201020700-0500", datetime(1982, 1, 2, 12)), + (b"0101021200Z", datetime(2001, 1, 2, 12)), + (b"0101020700-0500", datetime(2001, 1, 2, 12)), + )): + data = UTCTime.tag_default + len_encode(len(data)) + data + obj = UTCTime().decod(data, ctx={"bered": True}) + self.assertEqual(obj, dt) + self.assertEqual(obj.todatetime(), dt) + self.assertTrue(obj.ber_encoded) + self.assertTrue(obj.bered) + self.assertNotEqual(obj.encode(), data) + repr(obj) + + def test_go_vectors_valid_ber(self): + for data in (( + b"910506164540-0700", + b"910506164540+0730", + b"9105062345Z", + b"5105062345Z", + )): + data = UTCTime.tag_default + len_encode(len(data)) + data + obj = UTCTime().decod(data, ctx={"bered": True}) + self.assertTrue(obj.ber_encoded) + self.assertTrue(obj.bered) + self.assertNotEqual(obj.encode(), data) + repr(obj) + + @settings(max_examples=LONG_TEST_MAX_EXAMPLES) + @given(data_strategy()) + def test_valid_ber(self, d): + year = d.draw(integers(min_value=0, max_value=99)) + month = d.draw(integers(min_value=1, max_value=12)) + day = d.draw(integers(min_value=1, max_value=28)) + hours = d.draw(integers(min_value=0, max_value=23)) + minute = d.draw(integers(min_value=0, max_value=59)) + data = "%02d%02d%02d%02d%02d" % (year, month, day, hours, minute) + dt = datetime( + year + (2000 if year < 50 else 1900), + month, + day, + hours, + minute, + ) + dered = False + if d.draw(booleans()): + dered = True + seconds = d.draw(integers(min_value=0, max_value=59)) + data += "%02d" % seconds + dt += timedelta(seconds=seconds) + if d.draw(booleans()): + data += "Z" + else: + dered = False + offset_hour = d.draw(integers(min_value=0, max_value=13)) + offset_minute = d.draw(integers(min_value=0, max_value=59)) + offset = timedelta(seconds=offset_hour * 3600 + offset_minute * 60) + if d.draw(booleans()): + dt += offset + data += "-" + else: + dt -= offset + data += "+" + data += "%02d%02d" % (offset_hour, offset_minute) + data = data.encode("ascii") + data = UTCTime.tag_default 
+ len_encode(len(data)) + data + obj = UTCTime().decod(data, ctx={"bered": True}) + self.assertEqual(obj, dt) + self.assertEqual(obj.todatetime(), dt) + self.assertEqual(obj.ber_encoded, not dered) + self.assertEqual(obj.bered, not dered) + self.assertEqual(obj.encode() == data, dered) + repr(obj) + bytes(obj) + str(obj) + + def test_invalid_ber(self): + for data in (( + # b"0001020304Z", + b"-101020304Z", + b"00-1020304Z", + b"0001-20304Z", + b"000102-304Z", + b"000102-104Z", + b"00000203-4Z", + b"+101020304Z", + b"00+1020304Z", + b"0001+20304Z", + b"000102+304Z", + b"000102+104Z", + b"00000203+4Z", + b" 101020304Z", + b"00 1020304Z", + b"0001 20304Z", + b"000102 304Z", + b"000102 104Z", + b"00000203 4Z", + b"1 01020304Z", + b"001 020304Z", + b"00012 0304Z", + b"0001023 04Z", + b"0001021 04Z", + b"000002034 Z", + b"0013020304Z", + b"0001000304Z", + b"0001320304Z", + b"0001022404Z", + b"0001020360Z", + b"0002300304Z", + b"0001020304", + b"0001020304T", + b"0001020304+", + b"0001020304-", + b"0001020304+0", + b"0001020304+00", + b"0001020304+000", + b"0001020304+000Z", + b"0001020304+0000Z", + b"0001020304+-101", + b"0001020304+01-1", + b"0001020304+0060", + b"0001020304+1401", + b"5001010000+0001", + b"000102030Z", + b"0001020Z", + )): + with self.assertRaises(DecodeError): + UTCTime(data, ctx={"bered": True}) + data = data[:8] + data[8+2:] + with self.assertRaises(DecodeError): + UTCTime(data, ctx={"bered": True}) + + for data in (( + # b"000102030405Z", + b"-10102030405Z", + b"00-102030405Z", + b"0001-2030405Z", + b"000102-30405Z", + b"000102-10405Z", + b"00000203-405Z", + b"0000020304-5Z", + b"+10102030405Z", + b"00+102030405Z", + b"0001+2030405Z", + b"000102+30405Z", + b"000102+10405Z", + b"00000203+405Z", + b"0000020304+5Z", + b" 10102030405Z", + b"00 102030405Z", + b"0001 2030405Z", + b"000102 30405Z", + b"000102 10405Z", + b"00000203 405Z", + b"0000020304 5Z", + b"1 0102030405Z", + b"001 02030405Z", + b"00012 030405Z", + b"0001023 0405Z", + b"0001021 0405Z", + b"000002034 05Z", + b"00000203045 Z", + b"001302030405Z", + b"000100030405Z", + b"000132030405Z", + b"000102240405Z", + b"000102036005Z", + b"000230030405Z", + b"000102030460Z", + b"000102030405", + b"000102030405T", + b"000102030405+", + b"000102030405-", + b"000102030405+0", + b"000102030405+00", + b"000102030405+000", + b"000102030405+000Z", + b"000102030405+0000Z", + b"000102030405+-101", + b"000102030405+01-1", + b"000102030405+0060", + b"000102030405+1401", + b"500101000002+0003", + )): + with self.assertRaises(DecodeError): + UTCTime(data, ctx={"bered": True}) + @given(integers(min_value=0, max_value=49)) def test_pre50(self, year): self.assertEqual( -- 2.44.0