def test_blob_encode(self) -> None:
blob = Blob(4, b"testdata")
encoded = dumps(blob)
- self.assertEqual(
+ self.assertSequenceEqual(
encoded, b"\x0b\x00\x00\x00\x00\x00\x00\x00\x03\x01test\x01data\x80"
)
encoded = b"\x0B\x00\x00\x00\x00\x00\x00\x00\x03\x01test\x01data\x80" + junk
decoded, tail = loads(encoded)
self.assertEqual(decoded.l, 4)
- self.assertEqual(decoded.v, b"testdata")
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(decoded.v, b"testdata")
+ self.assertSequenceEqual(tail, junk)
def test_throws_when_not_enough_data(self) -> None:
encoded = b"\x0B\x00\x00\x00\x00\x00\x00\x00\x03\x01test\x01da"
encoded = b"\x03" + junk
decoded, tail = loads(encoded)
self.assertIs(decoded, True)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(junk_st)
def test_bool_decode_false(self, junk):
encoded = b"\x02" + junk
decoded, tail = loads(encoded)
self.assertIs(decoded, False)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
]
for _float, expected in zip(floats, expecteds):
decoded, tail = loads(_float)
self.assertEqual(decoded, expected)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_float_not_enough_data(self) -> None:
floats: List[bytes] = [
b"\x0c\x91\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
]
for integer, encoded in zip(ints, expected):
- self.assertEqual(dumps(integer), encoded)
+ self.assertSequenceEqual(dumps(integer), encoded)
decoded, tail = loads(encoded)
self.assertEqual(decoded, integer)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_int_negative(self) -> None:
ints: list[int] = [-1, -123, -(1 << 64), -(1 << 130)]
b"\x0D\x91\x03\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
]
for integer, encoded in zip(ints, expected):
- self.assertEqual(dumps(integer), encoded)
+ self.assertSequenceEqual(dumps(integer), encoded)
decoded, tail = loads(encoded)
self.assertEqual(decoded, integer)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_int_zero(self) -> None:
encoded: bytes = dumps(0)
- self.assertEqual(encoded, b"\x0C\x80")
+ self.assertSequenceEqual(encoded, b"\x0C\x80")
decoded, tail = loads(encoded)
self.assertEqual(decoded, 0)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_int_decode_not_enough_data(self) -> None:
encoded: bytes = b"\x0C\x81"
class TestList(TestCase):
def test_list_encode_empty(self) -> None:
encoded = dumps([])
- self.assertEqual(encoded, b"\x08\x00")
+ self.assertSequenceEqual(encoded, b"\x08\x00")
@given(lists(any_st, max_size=4))
def test_list_encode_non_empty(self, test_list: List) -> None:
encoded = dumps(test_list)
- self.assertEqual(
+ self.assertSequenceEqual(
encoded, b"\x08" + b"".join(dumps(i) for i in test_list) + b"\x00"
)
encoded = b"\x08\x00"
decoded, tail = loads(encoded)
self.assertEqual(decoded, [])
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
@given(lists(any_st, max_size=4), junk_st)
def test_list_decode_non_empty(self, test_list: List, junk: bytes) -> None:
encoded = b"\x08" + b"".join(dumps(i) for i in test_list) + b"\x00" + junk
decoded, tail = loads(encoded)
self.assertEqual(decoded, test_list)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(lists(any_st, max_size=4))
def test_no_eoc(self, test_list: List) -> None:
) +
b"\x00"
)
- self.assertEqual(encoded, expected)
+ self.assertSequenceEqual(encoded, expected)
@given(dictionaries(keys=mapkey_st, values=any_st, max_size=4), junk_st)
def test_map_decode(self, test_map, junk):
)
decoded, tail = loads(encoded)
self.assertEqual(decoded, test_map)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(junk_st)
def test_map_empty(self, junk):
test_map = {}
encoded = dumps(test_map) + junk
expected = b"\x09\x00" + junk
- self.assertEqual(encoded, expected)
+ self.assertSequenceEqual(encoded, expected)
decoded, tail = loads(encoded)
self.assertEqual(decoded, test_map)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(lists(mapkey_st, max_size=4), junk_st)
def test_decode_to_set(self, keys, junk):
encoded = dumps(test_map) + junk
decoded, tail = loads(encoded, sets=True)
self.assertEqual(decoded, set(keys))
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
def test_map_throws_when_decoding_unsorted_keys(self):
encoded = b"\x09\xc4key2\x0c\x81\x01\xc4key1\xc6value1\x00"
return
encoded = dumps(test_str)
tag = (TagStr | TagUTF8 | len(test_str.encode("utf-8"))).to_bytes(1, "big")
- self.assertEqual(encoded, tag + test_str.encode("utf-8"))
+ self.assertSequenceEqual(encoded, tag + test_str.encode("utf-8"))
def test_long_utf8(self) -> None:
long_strings = ["a" * 62, "a" * 318, "a" * 65853]
for long_string in long_strings:
encoded = dumps(long_string)
decoded, tail = loads(encoded)
- self.assertEqual(decoded, long_string)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(decoded, long_string)
+ self.assertSequenceEqual(tail, b"")
def test_encode_non_utf8(self) -> None:
bs = b"\x00\x01\x02"
encoded = dumps(bs)
- self.assertEqual(encoded, b"\x83\x00\x01\x02")
+ self.assertSequenceEqual(encoded, b"\x83\x00\x01\x02")
decoded, tail = loads(encoded)
- self.assertEqual(decoded, b"\x00\x01\x02")
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(decoded, b"\x00\x01\x02")
+ self.assertSequenceEqual(tail, b"")
def test_long_non_utf8(self) -> None:
long_bss = [b"\x01" * 62, b"\x01" * 318, b"\x01" * 65853]
for long_bs in long_bss:
encoded = dumps(long_bs)
decoded, tail = loads(encoded)
- self.assertEqual(decoded, long_bs)
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(decoded, long_bs)
+ self.assertSequenceEqual(tail, b"")
def test_throws_when_not_enough_data(self) -> None:
encoded = b"\x85he"
def test_encode_decode_tai64(self, junk: bytes) -> None:
dt = datetime(2023, 10, 1, 12, 0, 0)
encoded = dumps(dt)
- self.assertEqual(encoded, b"\x18\x40\x00\x00\x00\x65\x19\x5f\x65")
+ self.assertSequenceEqual(encoded, b"\x18\x40\x00\x00\x00\x65\x19\x5f\x65")
decoded, tail = loads(encoded + junk)
self.assertEqual(dt, decoded)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(junk_st)
def test_encode_decode_tai64n(self, junk: bytes) -> None:
dt = datetime(2023, 10, 1, 12, 0, 0, 123456)
encoded = dumps(dt) + junk
- self.assertEqual(
+ self.assertSequenceEqual(
encoded, b"\x19\x40\x00\x00\x00\x65\x19\x5f\x65\x07\x5b\xca\x00" + junk
)
decoded, tail = loads(encoded)
self.assertEqual(dt, decoded)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
@given(junk_st)
def test_decode_tai64na(self, junk: bytes) -> None:
)
decoded, tail = loads(encoded)
self.assertEqual(decoded, expected)
- self.assertEqual(tail, junk)
+ self.assertSequenceEqual(tail, junk)
def test_throws_when_not_enough_data_for_tai64(self) -> None:
with self.assertRaises(NotEnoughData):
def test_large_number_of_secs(self) -> None:
decoded, tail = loads(b"\x18\x70\x00\x00\x00\x65\x19\x5f\x65")
self.assertEqual(decoded, Raw(t=24, v=b"p\x00\x00\x00e\x19_e"))
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_nanoseconds_not_convertible_to_microseconds(self) -> None:
decoded, tail = loads(
b"\x19\x40\x00\x00\x00\x65\x19\x5f\x65\x07\x5b\xca\x01"
)
self.assertEqual(decoded, Raw(t=25, v=b"@\x00\x00\x00e\x19_e\x07[\xca\x01"))
- self.assertEqual(tail, b"")
+ self.assertSequenceEqual(tail, b"")
def test_throws_when_first_bit_is_in_use(self) -> None:
with self.assertRaises(DecodeError) as cm:
uuid_str: str = "12345678-1234-5678-1234-567812345678"
uuid_obj: UUID = UUID(uuid_str)
encoded: bytes = dumps(uuid_obj)
- self.assertEqual(encoded, b"\x04\x124Vx\x124Vx\x124Vx\x124Vx")
+ self.assertSequenceEqual(encoded, b"\x04\x124Vx\x124Vx\x124Vx\x124Vx")
def test_uuid_decode(self) -> None:
uuid_str: str = "12345678-1234-5678-1234-567812345678"