diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a293b6b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+
+/src/dabmsc.egg-info
+/build
diff --git a/bin/decode b/bin/decode
index d81a848..9c5f09e 100755
--- a/bin/decode
+++ b/bin/decode
@@ -20,7 +20,7 @@ parser.add_argument('-f', dest='output', help='outfile file directory')
 args = parser.parse_args()
 
 if args.filename:
-    print 'decoding from', args.filename
+    print('decoding from', args.filename)
     f = open(args.filename, 'rb')
 else:
     f = sys.stdin
@@ -54,17 +54,17 @@ logger.debug("decoding function: %s", f);
 
 for o in f:
     if isinstance(o, Packet):
-        print 'packet:', o
+        print('packet:', o)
     elif isinstance(o, Datagroup):
-        print 'dataroup:', o
+        print('datagroup:', o)
     elif isinstance(o, MotObject):
-        print "=" * 48
-        print '{name} {type} ({size} bytes)'.format(name=o.get_name(), type=o.get_type(), size=len(o.get_body()))
-        print "=" * 48
-        print 'parameters:'
+        print("=" * 48)
+        print('{name} {type} ({size} bytes)'.format(name=o.get_name(), type=o.get_type(), size=len(o.get_body())))
+        print("=" * 48)
+        print('parameters:')
         for p in o.get_parameters():
-            print '\t', repr(p)
-        print
+            print('\t', repr(p))
+        print()
         if args.output:
             import base64
             file_output = open(os.path.join(args.output, base64.urlsafe_b64encode(o.get_name())), 'wb')
diff --git a/setup.py b/setup.py
index e1391a8..6d9f3e3 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-from distutils.core import setup
+from setuptools import setup
 
 setup(name='dabmsc',
       version='1.0.1',
@@ -11,5 +11,6 @@
       download_url='https://github.com/GlobalRadio/python-dabmsc/tarball/1.0.1',
       packages=['msc', 'msc.datagroups', 'msc.packets'],
       package_dir = {'' : 'src'},
-      keywords = ['dab', 'msc', 'radio']
+      keywords = ['dab', 'msc', 'radio'],
+      install_requires = ['bitarray']
      )
diff --git a/src/msc/__init__.py b/src/msc/__init__.py
index 273cf34..4bb9c5d 100644
--- a/src/msc/__init__.py
+++ b/src/msc/__init__.py
@@ -4,9 +4,7 @@
 
 logger = logging.getLogger('msc')
 
-# See Annex E in EN 300 401
-crc16_11021 = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)
-crcfun = crcmod.predefined.mkPredefinedCrcFun('x25')
+crcfun = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)  # See Annex E in EN 300 401
 
 def calculate_crc(data):
     return crcfun(data)
@@ -17,7 +15,7 @@ def hex_to_bitarray(hex):
     return b
 
 def int_to_bitarray(i, n):
-    return bitarray(('{0:0%db}' % n).format(i))
+    return bitarray(('{0:0%db}' % n).format(int(i)))
 
 def bitarray_to_int(bits):
     return int(bits.to01(), 2)
@@ -63,7 +61,7 @@ def __init__(self):
     def next(self, name=None):
 
         # first check the cache
-        if name is not None and self.cache.has_key(name):
+        if name is not None and name in self.cache:
             return self.cache.get(name)
 
         # if we've run out then start recycling from the head
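Usage note on the consolidated CRC above: the predefined 'x25' function is dropped in favour of the single Annex E (EN 300 401) definition, which `calculate_crc` exposes and which `Datagroup.tobytes` appends as the trailing 16 bits. A minimal standalone sketch, assuming only the crcmod and bitarray packages:

import crcmod
from bitarray import bitarray

# CCITT polynomial, zero init, inverted output - as in src/msc/__init__.py
crcfun = crcmod.mkCrcFun(0x11021, 0x0, False, 0xFFFF)

def int_to_bitarray(i, n):
    # fixed-width big-endian bit field, as in src/msc/__init__.py
    return bitarray(('{0:0%db}' % n).format(int(i)))

payload = b'\x00\x01\x02\x03'
bits = bitarray()
bits.frombytes(payload)
bits += int_to_bitarray(crcfun(payload), 16)  # append the 16-bit CRC
wire = bits.tobytes()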
diff --git a/src/msc/datagroups/__init__.py b/src/msc/datagroups/__init__.py
index 961968b..82bd570 100644
--- a/src/msc/datagroups/__init__.py
+++ b/src/msc/datagroups/__init__.py
@@ -1,4 +1,4 @@
-from msc import bitarray_to_hex, int_to_bitarray, crc16_11021, InvalidCrcError, generate_transport_id
+from msc import bitarray_to_hex, int_to_bitarray, calculate_crc, InvalidCrcError, generate_transport_id
 from mot import DirectoryEncoder, SortedHeaderInformation
 from bitarray import bitarray
 import logging
@@ -94,7 +94,7 @@ def _segment(data, strategy):
         bits += int_to_bitarray(0, 3)                   # (0-2): Repetition Count remaining (0 = only broadcast)
         bits += int_to_bitarray(len(segment_data), 13)  # (3-16): SegmentSize
-        segments.append(bits.tobytes() + segment_data)
+        segments.append((bits.tobytes()) + segment_data)
         i += segment_size
@@ -127,7 +127,7 @@ def encode_headermode(objects, segmenting_strategy=None):
         # insert the core parameters into the header
         bits = bitarray()
         bits += int_to_bitarray(len(body_data) if body_data else 0, 28)  # (0-27): BodySize in bytes
-        bits += int_to_bitarray(extension_bits.length() / 8 + 7, 13)     # (28-40): HeaderSize in bytes (core=7 + extension)
+        bits += int_to_bitarray(len(extension_bits) / 8 + 7, 13)         # (28-40): HeaderSize in bytes (core=7 + extension)
         bits += int_to_bitarray(object.get_type().type, 6)               # (41-46): ContentType
         bits += int_to_bitarray(object.get_type().subtype, 9)            # (47-55): ContentSubType
         bits += extension_bits                                           # (56-n): Header extension data
@@ -168,7 +168,7 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy=None):
         # add the core parameters into the header
         entries += int_to_bitarray(len(object.get_body()), 28)           # (0-27): BodySize in bytes
-        entries += int_to_bitarray(extension_bits.length() / 8 + 7, 13)  # (28-40): HeaderSize in bytes (core=7 + extension)
+        entries += int_to_bitarray(len(extension_bits) / 8 + 7, 13)      # (28-40): HeaderSize in bytes (core=7 + extension)
         entries += int_to_bitarray(object.get_type().type, 6)            # (41-46): ContentType
         entries += int_to_bitarray(object.get_type().subtype, 9)         # (47-55): ContentSubType
         entries += extension_bits                                        # (56-n): Header extension data
@@ -183,7 +183,7 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy=None):
     bits = bitarray()
     bits += bitarray('0')                      # (0): CompressionFlag: This bit shall be set to 0
     bits += bitarray('0')                      # (1): RFU
-    bits += int_to_bitarray(len(entries.tobytes()), 30)  # (2-31): DirectorySize: total size of the MOT directory in bytes
+    bits += int_to_bitarray(len(entries.tobytes()) + 13 + len(directory_params.tobytes()), 30)  # (2-31): DirectorySize: total size of the MOT directory in bytes, including the 13 fixed header bytes and the directory parameter bytes
     bits += int_to_bitarray(len(objects), 16)  # (32-47): NumberOfObjects: Total number of objects described by the directory
     bits += int_to_bitarray(0, 24)             # (48-71): DataCarouselPeriod: Max time in tenths of seconds for the data carousel to complete a cycle. Value of zero for undefined
     bits += bitarray('000')                    # (72-74): RFU
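The DirectorySize correction above matters because the field is documented as the total size of the MOT directory, yet the old code counted only the entry bytes. A worked restatement of the new accounting, with illustrative names:

def directory_size(entry_bytes, param_bytes):
    # 13 fixed directory header bytes + directory parameter bytes + header entry bytes
    return 13 + param_bytes + entry_bytes

assert directory_size(0, 0) == 13  # even an empty directory carries its fixed header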
@@ -197,21 +197,40 @@ def encode_directorymode(objects, directory_parameters=None, segmenting_strategy=None):
     bits += entries
 
     # segment and add directory datagroups with a new transport ID
+    continuity_directory = 0
     directory_transport_id = generate_transport_id()
     segments = _segment(bits.tobytes(), segmenting_strategy)
     for i, segment in enumerate(segments):
-        header_group = Datagroup(directory_transport_id, DIRECTORY_UNCOMPRESSED, segment, i, i%16, last=True if i == len(segments) - 1 else False)
+        header_group = Datagroup(directory_transport_id, DIRECTORY_UNCOMPRESSED, segment, i, continuity_directory, last=True if i == len(segments) - 1 else False)
         tmp = bitarray()
         tmp.frombytes(header_group.tobytes())
         tmp.frombytes(header_group.tobytes())
         datagroups.append(header_group)
+        continuity_directory = (continuity_directory + 1) % 16
 
     # add body datagroups
+    continuity_body = 0
     for object in objects:
         segments = _segment(object.get_body(), segmenting_strategy)
         for i, segment in enumerate(segments):
-            body_group = Datagroup(object.get_transport_id(), BODY, segment, i, i%16, last=True if i == len(segments) - 1 else False)
+            body_group = Datagroup(object.get_transport_id(), BODY, segment, i, continuity_body, last=True if i == len(segments) - 1 else False)
             datagroups.append(body_group)
+            continuity_body = (continuity_body + 1) % 16
+
+    # add empty body datagroups to ensure continuity across carousel cycles
+    if continuity_body != 0:
+        # segment header for a zero-length dummy segment
+        bits = bitarray()
+        bits += int_to_bitarray(0, 3)   # (0-2): Repetition Count remaining (0 = only broadcast)
+        bits += int_to_bitarray(0, 13)  # (3-16): SegmentSize
+        dummysegment = bits.tobytes()
+        body_group = Datagroup(generate_transport_id(), BODY, dummysegment, 0, continuity_body, last=True)
+        datagroups.append(body_group)
+        continuity_body = (continuity_body + 1) % 16
+    if continuity_body != 0:
+        continuity_body = 15
+        body_group = Datagroup(generate_transport_id(), BODY, dummysegment, 0, continuity_body, last=True)
+        datagroups.append(body_group)
+
     return datagroups
 
 import select
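The dummy-datagroup logic above keeps the 4-bit continuity index unbroken across carousel cycles: at most two zero-length body datagroups are appended so the stream ends on index 15 and the next cycle restarts cleanly at 0. The index arithmetic in isolation, as a sketch:

def padding_indices(continuity_body):
    # continuity indices of the dummy datagroups appended after the real bodies
    added = []
    if continuity_body != 0:
        added.append(continuity_body)
        continuity_body = (continuity_body + 1) % 16
    if continuity_body != 0:
        added.append(15)
    return added

print(padding_indices(5))   # [5, 15]
print(padding_indices(15))  # [15] - incrementing wraps to 0, no second dummy needed
print(padding_indices(0))   # [] - already aligned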
@@ -233,7 +252,7 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):
 
     if isinstance(data, bitarray):
         i = 0
-        while i < data.length():
+        while i < len(data):
             datagroup = Datagroup.frombits(data, i=i, check_crc=check_crc)
             yield datagroup
             i += (datagroup.size * 8)
@@ -248,19 +267,19 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):
             except:
                 reading = False
                 logger.exception("error")
-            if not buf.length():
+            if not len(buf):
                 logger.debug('buffer is at zero length')
                 return
             i = 0
-            #logger.debug('chunking buffer of length %d bytes', buf.length()/8)
-            length = buf.length()/8
+            #logger.debug('chunking buffer of length %d bytes', len(buf)/8)
+            length = len(buf)/8
             if length < 9: continue
             size = int(buf[59:72].to01(), 2)
             if length < size:
                 #logger.debug('buffer still not at right size for datagroup size of %d bytes', size)
                 continue
-            while i < buf.length():
+            while i < len(buf):
                 try:
                     datagroup = Datagroup.frombits(buf, i=i, check_crc=check_crc)
                     yield datagroup
                     buf = buf[i:]
                 except IncompleteDatagroupError:
                     break
-                except InvalidCrcError, ice:
+                except InvalidCrcError as ice:
                     if error_callback: error_callback(ice)
                     buf = buf[8:] # attempt to resync?
                     #i += 8
@@ -287,14 +306,14 @@ def decode_datagroups(data, error_callback=None, check_crc=True, resync=True):
                 buf.frombytes(p.data)
 
                 if p.last:
-                    logger.debug('got packet %s - buffer now %d bytes', p, buf.length()/8)
+                    logger.debug('got packet %s - buffer now %d bytes', p, len(buf)/8)
                     try:
                         datagroup = Datagroup.frombits(buf, i=i, check_crc=check_crc)
                         logger.debug('yielding datagroup: %s', datagroup)
                         yield datagroup
-                    except IncompleteDatagroupError, ide:
+                    except IncompleteDatagroupError as ide:
                         if error_callback: error_callback(ide)
-                    except InvalidCrcError, ice:
+                    except InvalidCrcError as ice:
                         if error_callback: error_callback(ice)
                     del buf
                     buf = bitarray()
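The decode path above is generator-based and accepts either a bitarray or a readable binary stream. A minimal consumption sketch (the capture filename is hypothetical):

from msc.datagroups import decode_datagroups

def on_error(e):
    print('skipping bad datagroup:', e)  # e.g. an InvalidCrcError during resync

with open('datagroups.dat', 'rb') as f:  # hypothetical capture file
    for dg in decode_datagroups(f, error_callback=on_error, check_crc=True):
        print(dg)  # Datagroup.__str__ reports type, transport id, segment index, continuity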
@@ -341,24 +360,22 @@ def tobytes(self):
 
         # datagroup header
         bits += bitarray('0')                               # (0): ExtensionFlag - 0=no extension
         bits += bitarray('1' if self.crc_enabled else '0')  # (1): CrcFlag - true if there is a CRC at the end of the datagroup
-        bits += bitarray('0' if self.segment_index is None else '1')  # (2): SegmentFlag - 1=segment header included
-        bits += bitarray('0' if self.transport_id is None else '1')   # (3): UserAccessFlag - true
+        bits += bitarray('1')                               # (2): SegmentFlag - segment header always included
+        bits += bitarray('1')                               # (3): UserAccessFlag - user access field always included
         bits += int_to_bitarray(self._type, 4)              # (4-7): DataGroupType
         bits += int_to_bitarray(self.continuity % 16, 4)    # (8-11): ContinuityIndex
         bits += int_to_bitarray(self.repetition, 4)         # (12-15): RepetitionIndex - remaining = 0 (only this once)
 
         # session header
 
         # segment field
-        if self.segment_index is not None:
-            bits += bitarray('1' if self.last else '0')     # (16): Last - true if the last segment
-            bits += int_to_bitarray(self.segment_index, 15) # (17-32): SegmentNumber
+        bits += bitarray('1' if self.last else '0')         # (16): Last - true if the last segment
+        bits += int_to_bitarray(self.segment_index, 15)     # (17-31): SegmentNumber
 
         # user access field
-        if self.transport_id is not None:
-            bits += bitarray('000')                         # (33-35): RFA
-            bits += bitarray('1')                           # (36): TransportId - true to include Transport ID
-            bits += int_to_bitarray(2, 4)                   # (37-40): LengthIndicator - length of transport Id and End user address fields (will be 2 bytes as only transport ID defined)
-            bits += int_to_bitarray(self._transport_id, 16) # (41-56) transport ID
+        bits += bitarray('000')                             # (32-34): RFA
+        bits += bitarray('1')                               # (35): TransportId - true to include Transport ID
+        bits += int_to_bitarray(2, 4)                       # (36-39): LengthIndicator - length of transport Id and End user address fields (will be 2 bytes as only transport ID defined)
+        bits += int_to_bitarray(self._transport_id, 16)     # (40-55): transport ID
 
         # data field
         tmp = bitarray()
@@ -367,7 +384,7 @@ def tobytes(self):
 
         # CRC
         crc = 0;
-        if self.crc_enabled: crc = crc16_11021(bits.tobytes())
+        if self.crc_enabled: crc = calculate_crc(bits.tobytes())
         bits += int_to_bitarray(crc, 16)
 
         return bits.tobytes()
@@ -375,32 +392,10 @@ def tobytes(self):
     @staticmethod
     def frombits(bits, i=0, check_crc=True):
         """Parse a datagroup from a bitarray, with an optional offset"""
-
-        # use only the slice indicated by the offset
-        bits = bits[i:]
-        ext_flag = bits[0]
-        crc_flag = bits[1]
-        seg_flag = bits[2]
-        uaf_flag = bits[3]
-        header_size = 16
-        if ext_flag:
-            header_size += 16
-        if seg_flag:
-            header_size += 16
-        if uaf_flag:
-            tid_present = bits[header_size+3]
-            uaf_sz = int(bits[header_size+4:header_size+8].to01(), 2)
-            header_size += 8+8*uaf_sz
-        else:
-            tid_present = False
-
-        min_size = header_size
-        if crc_flag:
-            min_size += 16
-        if bits.length() < min_size:
-            raise IncompleteDatagroupError
-
+        # check we have enough header first
+        if (len(bits) - i) < ((9 + 2) * 8): raise IncompleteDatagroupError
+
+        # use only the slice indicated by the offset
+        bits = bits[i:]
+
         # datagroup header
         type = int(bits[4:8].to01(), 2)
         continuity = int(bits[8:12].to01(), 2)
@@ -408,31 +403,22 @@ def frombits(bits, i=0, check_crc=True):
         # session header
 
         # segment field
-        if seg_flag:
-            last = bits[16]
-            segment_index = int(bits[17:32].to01(), 2)
-        else:
-            last = False
-            segment_index = None
-
+        last = bits[16]
+        segment_index = int(bits[17:32].to01(), 2)
+
         # user access field
-        if tid_present:
-            transport_id = int(bits[40:56].to01(), 2)
-        else:
-            transport_id = None
-
-        # extract data and compute CRC
-        if crc_flag:
-            hdr_plus_data = bits[:-16]
-            crc = int(bits[bits.length()-16:].to01(), 2)
-            if check_crc and crc != crc16_11021(hdr_plus_data.tobytes()):
-                raise InvalidCrcError(crc, crc_slice.tobytes())
-        else:
-            hdr_plus_data = bits
+        transport_id = int(bits[40:56].to01(), 2)
+
+        # data segment header
+        size = int(bits[59:72].to01(), 2)  # get size to check we have a complete datagroup
+        if len(bits) < 72 + size * 8 + 16: raise IncompleteDatagroupError
+        data = bits[72 : 72 + (size * 8)]
+        if check_crc:
+            crc = int(bits[72 + len(data) : 72 + len(data) + 16].to01(), 2)
+            calculated = calculate_crc(bits[:72 + len(data)].tobytes())
+            if crc != calculated: raise InvalidCrcError(crc, bits[:72 + len(data) + 16].tobytes())
 
-        datagroup = Datagroup(transport_id, type, hdr_plus_data[header_size:].tobytes(), segment_index, continuity, True, repetition, last)
+        datagroup = Datagroup(transport_id, type, data.tobytes(), segment_index, continuity, True, repetition, last)
         logger.debug('parsed datagroup: %s', datagroup)
 
         return datagroup
@@ -443,7 +429,7 @@ def __str__(self):
         elif self._type == 6: type_description = 'MOT Directory (uncompressed)'
         elif self._type == 7: type_description = 'MOT Directory (compressed)'
         else: type_description = 'unknown'
-        return '[segment=%d bytes], type=%d [%s], transportid=%s, segmentindex=%s, continuity=%d, last=%s' % (len(self._data), self._type, type_description, self._transport_id, self.segment_index, self.continuity, self.last)
+        return '[segment=%d bytes], type=%d [%s], transportid=%d, segmentindex=%d, continuity=%d, last=%s' % (len(self._data), self._type, type_description, self._transport_id, self.segment_index, self.continuity, self.last)
 
     def __repr__(self):
         return '<%s>' % str(self)
@@ -489,5 +475,5 @@ def regenerate(self):
     def __iter__(self):
         return self.iterator
 
-    def next(self):
-        return self.iterator.next()
+    def __next__(self):
+        return next(self.iterator)
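With the fixed layout now shared by tobytes and frombits (7-byte header, 2-byte segmentation header whose size field sits at bits 59-71, payload, 16-bit CRC), a datagroup can be round-tripped. A sketch - the positional constructor arguments mirror the call inside frombits above and are otherwise an assumption:

from bitarray import bitarray
from msc import int_to_bitarray
from msc.datagroups import Datagroup, BODY

# build a segment as _segment does: 2-byte segmentation header + payload
payload = b'\xaa' * 8
seg = bitarray()
seg += int_to_bitarray(0, 3)              # repetition count remaining
seg += int_to_bitarray(len(payload), 13)  # segment size in bytes
segment = seg.tobytes() + payload

dg = Datagroup(0x1234, BODY, segment, 0, 0, True, 0, True)
raw = dg.tobytes()

bits = bitarray()
bits.frombytes(raw)
parsed = Datagroup.frombits(bits, check_crc=True)
print(parsed)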
diff --git a/src/msc/packets/__init__.py b/src/msc/packets/__init__.py
index 9acd105..9717e2f 100644
--- a/src/msc/packets/__init__.py
+++ b/src/msc/packets/__init__.py
@@ -38,8 +38,9 @@ def tobytes(self):
 
         # add the packet data
         tmp = bitarray()
-        tmp.frombytes(self.data)
-        bits += tmp  # (24-n): packet data
+        if len(self.data) > 0:
+            tmp.frombytes(self.data)
+            bits += tmp  # (24-n): packet data
 
         # add packet padding if needed
         bits += bitarray('0'*(self.size - len(self.data) - 5)*8)
@@ -54,7 +55,7 @@ def frombits(bits, i=0, check_crc=True):
         """Parse a packet from a bitarray, with an optional offset"""
 
         size = (int(bits[i+0:i+2].to01(), 2) + 1) * 24
-        if (bits.length() - i) < (size * 8): raise IncompletePacketError('length of bitarray is less than passed data length %d bytes < %d bytes', bits.length() / 8, size)
+        if (len(bits) - i) < (size * 8): raise IncompletePacketError('length of bitarray is less than passed data length %d bytes < %d bytes', len(bits) / 8, size)
         index = int(bits[i+2:i+4].to01(), 2)
         first = bits[i+4]
         last = bits[i+5]
@@ -77,38 +78,63 @@ def __str__(self):
 
     def __repr__(self):
         return '<%s>' % str(self)
 
-def encode_packets(datagroups, address=None, size=None, continuity=None):
+def encode_packets(datagroups, address=None, size=None, continuity=None, padding=False):
     """
     Encode a set of datagroups into packets
     """
 
+    def get_continuity_index(address):
+        index = 0
+        if address in continuity:
+            index = continuity[address]
+            index += 1
+            if index > 3: index = 0
+        continuity[address] = index
+        return index
+
+    def get_required_size(payload_size, max_packet_size):
+        # smallest standard packet size that still fits the payload plus 5 overhead bytes
+        if payload_size > (max_packet_size - 5):
+            return max_packet_size
+        if payload_size > (72 - 5):
+            return Packet.SIZE_96
+        elif payload_size > (48 - 5):
+            return Packet.SIZE_72
+        elif payload_size > (24 - 5):
+            return Packet.SIZE_48
+        else:
+            return Packet.SIZE_24
+
     if not address: address = 1
     if not size: size = Packet.SIZE_96
     if not continuity: continuity = {}
 
     if address < 1 or address > 1024: raise ValueError('packet address must be greater than zero and less than 1024')
     if size not in Packet.sizes: raise ValueError('packet size %d must be one of: %s' % (size, Packet.sizes))
 
     packets = []
 
-    def get_continuity_index(address):
-        index=0
-        if continuity.has_key(address):
-            index = continuity[address]
-            index += 1
-            if index > 3: index = 0
-        continuity[address] = index
-        return index
-
     # encode the datagroups into a continuous datastream
-    for datagroup in datagroups:
-        data = datagroup.tobytes()
-        chunk_size = size - 5
-        for i in range(0, len(data), chunk_size):
-            chunk = data[i:i+chunk_size if i+chunk_size < len(data) else len(data)]
-            packet = Packet(size, address, chunk, True if i == 0 else False, True if i+chunk_size >= len(data) else False, get_continuity_index(address))
-            packets.append(packet)
+    # when padding is requested, repeat the datagroups until the final continuity
+    # index is 3; this can grow the output to several times (up to 4x) the minimum
+    # size, and requires datagroups to be re-iterable (not a one-shot generator)
+    while True:
+        for datagroup in datagroups:
+            data = datagroup.tobytes()
+            chunk_size = size - 5
+            for i in range(0, len(data), chunk_size):
+                chunk = data[i:i+chunk_size if i+chunk_size < len(data) else len(data)]
+                continuity_index = get_continuity_index(address)
+                packet = Packet(get_required_size(len(chunk), size), address, chunk, True if i == 0 else False, True if i+chunk_size >= len(data) else False, continuity_index)
+                packets.append(packet)
+        if not padding or continuity_index == 3:
+            break
+    # alternative: add padding packets so that the continuity index ends on 3
+    # if padding and continuity_index != 3:
+    #     while continuity_index != 3:
+    #         continuity_index += 1
+    #         packet = Packet(size, address, [], True, True, continuity_index)
+    #         packets.append(packet)
 
     return packets
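get_required_size drops each chunk into the smallest standard packet that can carry it (5 of the packet's bytes are overhead), so only the final, short chunk of a datagroup gets a smaller packet. The same decision table as a standalone sketch, with the boundary cases worked out:

SIZES = (24, 48, 72, 96)

def required_size(payload_size, max_packet_size=96):
    # smallest standard packet size carrying payload_size plus 5 overhead bytes
    for s in SIZES:
        if s >= max_packet_size:
            break
        if payload_size <= s - 5:
            return s
    return max_packet_size

assert required_size(19) == 24  # 19 bytes just fit a 24-byte packet
assert required_size(20) == 48  # one byte more forces the next size up
assert required_size(91) == 96
assert required_size(92) == 96  # oversize chunks fall back to the maximum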
@@ -123,16 +149,16 @@ def decode_packets(data, error_callback=None, check_crc=True, resync=True):
 
     if isinstance(data, bitarray):
         logger.debug('decoding packets from bitarray')
         i = 0
-        while i < data.length():
-            while i < data.length():
-                if data.length() < 2: break
+        while i < len(data):
+            while i < len(data):
+                if len(data) < 2: break
                 size = (int(data[i:i+2].to01(), 2) + 1) * 24
-                if data.length() < (size * 8): break
+                if len(data) < (size * 8): break
                 try:
                     packet = Packet.frombits(data, i=i, check_crc=check_crc)
                     yield packet
                     i += (size * 8)
-                except InvalidCrcError, ice:
+                except InvalidCrcError as ice:
                     if error_callback: error_callback(ice)
                     if resync: i += 8
                     else: i += (size * 8)
@@ -142,19 +168,19 @@ def decode_packets(data, error_callback=None, check_crc=True, resync=True):
         r = data.read(1024)
         while len(r):
             buf.frombytes(r)
-            logger.debug('chunking buffer of length %d bytes', buf.length()/8)
+            logger.debug('chunking buffer of length %d bytes', len(buf)/8)
             i = 0
-            while i < buf.length():
-                if buf.length() < 2: break
+            while i < len(buf):
+                if len(buf) < 2: break
                 size = (int(buf[i:i+2].to01(), 2) + 1) * 24
-                if buf.length() < (size * 8): break
+                if len(buf) < (size * 8): break
                 try:
                     packet = Packet.frombits(buf, i=i, check_crc=check_crc)
                     yield packet
                     i += (size * 8)
                 except IncompletePacketError:
                     break
-                except InvalidCrcError, ice:
+                except InvalidCrcError as ice:
                     if error_callback: error_callback(ice)
                     if resync: i += 8
                     else: i += (size * 8)
@@ -169,18 +195,18 @@ def decode_packets(data, error_callback=None, check_crc=True, resync=True):
             b.frombytes(r)
         while len(r):
             buf.frombytes(r)
-            logger.debug('chunking buffer of length %d bytes', buf.length()/8)
+            logger.debug('chunking buffer of length %d bytes', len(buf)/8)
             i = 0
-            while i < buf.length():
-                if buf.length() < 2: break
+            while i < len(buf):
+                if len(buf) < 2: break
                 size = (int(buf[i:i+2].to01(), 2) + 1) * 24
-                if buf.length() < (size * 8): break
+                if len(buf) < (size * 8): break
                 try:
                     packet = Packet.frombits(buf, i=i, check_crc=check_crc)
                     yield packet
                     i += (size * 8)
                 except IncompletePacketError:
                     break
-                except InvalidCrcError, ice:
+                except InvalidCrcError as ice:
                     if error_callback: error_callback(ice)
                     if resync: i += 8
                     else: i += (size * 8)
diff --git a/src/msc/test/test_datagroups.py b/src/msc/test/test_datagroups.py
index add34ea..c537c42 100644
--- a/src/msc/test/test_datagroups.py
+++ b/src/msc/test/test_datagroups.py
@@ -10,7 +10,7 @@ def test_blank_headermode(self):
         """testing header mode with blank image"""
 
         # create MOT object
-        print 'creating MOT object'
+        print('creating MOT object')
         object = MotObject("TestObject", "\x00" * 1024, ContentType.IMAGE_JFIF)
 
         # encode object
diff --git a/src/msc/test/test_transports.py b/src/msc/test/test_transports.py
index 6362d57..1bcd1ed 100644
--- a/src/msc/test/test_transports.py
+++ b/src/msc/test/test_transports.py
@@ -1,5 +1,5 @@
 import unittest
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 
 from msc import calculate_crc
 from mot import MotObject, ContentType
@@ -15,8 +15,8 @@ def test_fromurl(self):
 
     def test_encode(self):
 
-        req = urllib2.Request(url)
-        response = urllib2.urlopen(req)
+        req = urllib.request.Request(url)
+        response = urllib.request.urlopen(req)
         data = response.read()
 
         type = ContentType.IMAGE_JFIF
@@ -29,7 +29,7 @@ def test_encode(self):
         # define callback
         i = iter(datagroups)
         def callback():
-            return i.next()
+            return next(i)
 
         transport = UdpTransport(address=('10.15.81.160', 5555))
         transport.start(callback)
@@ -43,8 +43,8 @@ def test_fromurl(self):
 
     def test_encode_slide_to_file(self):
         url = 'http://owdo.thisisglobal.com/2.0/id/25/logo/320x240.jpg'
-        req = urllib2.Request(url)
-        response = urllib2.urlopen(req)
+        req = urllib.request.Request(url)
+        response = urllib.request.urlopen(req)
         data = response.read()
 
         type = ContentType.IMAGE_JFIF
@@ -57,10 +57,10 @@ def test_encode_slide_to_file(self):
         # define callback
         i = iter(datagroups)
         def callback():
-            return i.next()
+            return next(i)
 
-        import StringIO
-        s = StringIO.StringIO()
+        import io
+        s = io.StringIO()
         transport = FileTransport(s)
         transport.start(callback)
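The tests above exercise the full encode pipeline; put together, and using the new padding flag from encode_packets, it reads roughly as follows (a sketch - in Python 3 the MOT body is presumably bytes rather than str):

from mot import MotObject, ContentType
from msc.datagroups import encode_headermode
from msc.packets import encode_packets, Packet

obj = MotObject("TestObject", b"\x00" * 1024, ContentType.IMAGE_JFIF)
datagroups = encode_headermode([obj])

# padding=True repeats the datagroups until the final continuity index is 3
packets = encode_packets(datagroups, address=1, size=Packet.SIZE_96, padding=True)
wire = b''.join(p.tobytes() for p in packets)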
diff --git a/src/msc/transports.py b/src/msc/transports.py
index af400b2..85a3138 100644
--- a/src/msc/transports.py
+++ b/src/msc/transports.py
@@ -20,7 +20,7 @@ def clock(self): raise NotImplementedError()
 
 class BlockingTransportMixin:
     """Defines a transport where the clock is set from the system clock elapsing"""
 
-    def clock(self): return elapsed_from_clock().next
+    def clock(self): return elapsed_from_clock().__next__
 
 class UdpTransport(NonBlockingTransportMixin):
     """Send data over a UDP socket either as Datagroups or DAB Packets"""
@@ -46,8 +46,8 @@ def fromurl(url, logger=logger):
     * bitrate: transport bitrate in bps (default 16kbps)
     """
 
-    from urlparse import urlparse, parse_qsl
-    if isinstance(url, basestring): url = urlparse(url)
+    from urllib.parse import urlparse, parse_qsl
+    if isinstance(url, str): url = urlparse(url)
     if url.scheme != 'udp': raise ValueError('url must begin with the udp scheme')
     if url.path.find('?') >= 0: kwargs = dict(parse_qsl(url.path[url.path.index('?')+1:]))
     else: kwargs = dict(parse_qsl(url.query))
@@ -106,11 +106,11 @@ def __init__(self, transport):
                 self.transport = transport
             def __iter__(self):
                 return self
-            def next(self):
+            def __next__(self):
                 r = self.transport.elapsed
                 self.transport.elapsed = datetime.timedelta(0)
                 return r
-        return Iter(self).next
+        return Iter(self).__next__
 
     def __str__(self):
         return 'udp://{address}'.format(address=self.address)
@@ -135,8 +135,8 @@ def fromurl(url, logger=logger):
     * bitrate: transport bitrate in bps (default 8kbps)
     """
 
-    from urlparse import urlparse, parse_qsl
-    if isinstance(url, basestring): url = urlparse(url)
+    from urllib.parse import urlparse, parse_qsl
+    if isinstance(url, str): url = urlparse(url)
     if url.scheme != 'file': raise ValueError('url must begin with the file scheme')
     path = url.path[:url.path.index('?')] if url.path.find('?') >= 0 else url.path
     path = path.strip()
@@ -169,10 +169,10 @@ def start(self, callback):
             for d in data:
                 b = d.tobytes()
                 if isinstance(d, Datagroup):
-                    self.f.write(b)
+                    self.f.write(b.decode('latin-1'))  # latin-1 maps all 256 byte values; ascii would fail on bytes > 0x7F
                     self.elapsed += datetime.timedelta(milliseconds=(8 * float(len(b)) * 1000)/self.bitrate)
                 elif isinstance(d, Packet):
-                    self.f.write(b)
+                    self.f.write(b.decode('latin-1'))
                     self.elapsed += datetime.timedelta(milliseconds=24)
                 else: raise TypeError('yarrgh. neither a datagroup nor packet this be: %s', type(d))
             self.f.flush()
@@ -184,12 +184,12 @@ def __init__(self, transport):
                 self.transport = transport
             def __iter__(self):
                 return self
-            def next(self):
+            def __next__(self):
                 r = self.transport.elapsed
                 self.transport.elapsed = datetime.timedelta(0)
                 return r
-        return Iter(self).next
+        return Iter(self).__next__
 
     def __str__(self):
         return 'file://{path}'.format(path=self.path)
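Finally, the transports are driven by a callback returning successive datagroups, as in test_encode_slide_to_file above. A minimal sketch, reusing the datagroups from the previous sketch (the fromurl path is hypothetical):

import io
from msc.transports import FileTransport

i = iter(datagroups)
def callback():
    return next(i)  # the tests drive start() with a finite iterator like this

s = io.StringIO()
transport = FileTransport(s)  # or FileTransport.fromurl('file:///tmp/out.dat?bitrate=8000')
transport.start(callback)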