Fix token handling
The token was encoded as a UTF-8 string. However, a token is a byte string that is not required to be UTF-8 encodable. This patch changes the token type in the Message class from a string of characters to a string of bytes.
Signed-off-by: Hubert Miś <hubert.mis@gmail.com>
commit 297404ce33
parent 89d51737a9
2 changed files with 6 additions and 11 deletions
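For context: a CoAP token is an opaque sequence of bytes, so forcing it through UTF-8 decoding can fail for perfectly valid tokens. A minimal illustration of the problem this commit fixes (not part of the patch; the token value is invented for the example):

token = b"\xc0\xff\x01\x02"        # valid token bytes, but not valid UTF-8

try:
    token.decode("utf-8")          # the old deserializer behaviour
except UnicodeDecodeError as exc:
    print("decoding fails:", exc)

print("kept as bytes:", token)     # the new behaviour: no decoding at all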
@@ -115,15 +115,15 @@ class Message(object):
         """
         Set the Token of the message.
 
-        :type value: String
+        :type value: Bytes
         :param value: the Token
         :raise AttributeError: if value is longer than 256
         """
         if value is None:
             self._token = value
             return
-        if not isinstance(value, str):
-            value = str(value)
+        if not isinstance(value, bytes):
+            value = bytes(value)
         if len(value) > 256:
             raise AttributeError
         self._token = value
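The setter now coerces non-bytes input with bytes() instead of str(). A standalone sketch of the same logic, reduced to a plain function for illustration (the real code lives in the Message class shown above):

def set_token(value):
    if value is None:
        return None                      # an absent token stays None
    if not isinstance(value, bytes):
        value = bytes(value)             # e.g. a bytearray or a list of ints
    if len(value) > 256:
        raise AttributeError
    return value

print(set_token(b"\x01\x02"))            # already bytes, stored as-is
print(set_token(bytearray(b"\xaa")))     # coerced to bytes
print(set_token([0x01, 0xff]))           # a list of ints also coerces cleanly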
@@ -51,10 +51,7 @@ class Serializer(object):
         message.type = message_type
         message.mid = mid
         if token_length > 0:
-            fmt = "%ss" % token_length
-            s = struct.Struct(fmt)
-            token_value = s.unpack_from(datagram[pos:])[0]
-            message.token = token_value.decode("utf-8")
+            message.token = datagram[pos:pos+token_length]
         else:
             message.token = None
 
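The deserializer now takes the token directly as a byte slice of the datagram instead of unpacking it with struct and decoding it. A self-contained sketch of that slice, using an invented example datagram (header layout per RFC 7252; variable names follow the hunk):

# Example datagram: Ver=1, Type=CON, TKL=2, Code=GET, MID=0x1234, 2-byte token.
datagram = bytes([0x42, 0x01, 0x12, 0x34, 0xc0, 0xff])

token_length = datagram[0] & 0x0F            # TKL is the low nibble of byte 0
pos = 4                                      # token follows the 4-byte fixed header

if token_length > 0:
    token = datagram[pos:pos + token_length]  # raw bytes, no decoding
else:
    token = None

print(token)                                 # b'\xc0\xff'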
@@ -152,10 +149,8 @@ class Serializer(object):
         values = [tmp, message.code, message.mid]
 
         if message.token is not None and tkl > 0:
-
-            for b in str(message.token):
-                fmt += "c"
-                values.append(bytes(b, "utf-8"))
+            fmt += "%ss" % tkl
+            values.append(message.token)
 
         options = Serializer.as_sorted_list(message.options)  # already sorted
         lastoptionnumber = 0
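On the serialization side the whole token is now appended as a single fixed-length bytes field ("%ss" % tkl) instead of one "c" field per character. A sketch of that packing step in isolation; the header fields and format prefix here are invented stand-ins for what the serializer builds before this point:

import struct

token = b"\xc0\xff"
tkl = len(token)

fmt = "!BBH"                                 # stand-in header: first byte, code, MID
values = [(1 << 6) | tkl, 0x01, 0x1234]      # Ver=1, Type=CON, TKL=2 (example values)

if token is not None and tkl > 0:
    fmt += "%ss" % tkl                       # one fixed-length bytes field
    values.append(token)                     # appended as-is, no re-encoding

packed = struct.pack(fmt, *values)
print(packed.hex())                          # 42011234c0ff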