]> git.etc.gen.nz Git - mythtv-epg-nz.git/commitdiff
Import version 0.7.1 from http://www.jsw.gen.nz/mythtv/mhegepgsnoop-0.7.1.py
author    Andrew Ruthven <andrew@etc.gen.nz>
Sat, 11 Jun 2022 05:21:53 +0000 (17:21 +1200)
committer Andrew Ruthven <andrew@etc.gen.nz>
Sat, 11 Jun 2022 05:21:53 +0000 (17:21 +1200)
bin/mhegepgsnoop.py [new file with mode: 0644]

diff --git a/bin/mhegepgsnoop.py b/bin/mhegepgsnoop.py
new file mode 100644 (file)
index 0000000..bbff13c
--- /dev/null
@@ -0,0 +1,1606 @@
+#!/usr/bin/env python3
+
+VERSION = '0.7.1'
+'''
+mhegepgsnoop.py - Freeview DVB-T MHEG-5 to XMLTV EPG converter
+Version 0.7.1 JSW modified version converted to Python 3
+Copyright (C) 2011  David Moore <dmoo1790@ihug.co.nz>
+Contributors: Bruce Wilson <acaferacer@gmail.com>
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program.  If not, see <http://www.gnu.org/licenses/>.
+'''
+extra_help='''
+Several python modules (sqlite3, zlib, difflib, etc.) are required. There is no error-checking in this
+version to confirm whether these modules are available on your system.
+
+This script started as a Python port of the mheg2xmltv.sh bash script originally created by SolorVox
+and updated by myself. It takes DVB-T MHEG-5 EPG data collected by directly reading data from a DVB demux device,
+or by using dvbsnoop to read data, and creates an XML file suitable for importing into the MythTV EPG. The
+default is to read from the DVB demux device.
+
+It uses fuzzy matching of the channel name extracted from the MHEG data to the callsign, name and xmltvid
+fields in the MythTV database channels table to link the extracted EPG data to MythTV xmltv IDs. This
+means that at least one of callsign, name or xmltvid for each MythTV channel must resemble the MHEG channel
+name for each broadcast channel. Any channels that don't match are dropped, i.e., the EPG data is not
+written to the output xml file. Use the -v option to see a list of the broadcast channel names in the verbose output.
+
+This script does NOT:
+
+       - Require access to the internet. (So you don't get any more data than what is broadcast via MHEG-5.)
+       - Require configuration. Optional settings are command line arguments.
+       - Update MythTV EPG. You need to run mythfilldatabase to import the xml file generated by this script.
+
+Example usage to collect EPG data from default adapter and write to tvguide.xml:
+       mhegepgsnoop.py -o tvguide.xml
+
+Example usage to collect EPG data using dvbsnoop from default adapter and write to tvguide.xml:
+       mhegepgsnoop.py -o tvguide.xml -s
+
+Example usage to collect EPG data from specified demux device and write to tvguide.xml:
+       mhegepgsnoop.py -o tvguide.xml -d "/dev/dvb/adapter1/demux0"
+
+Example usage to collect EPG data using dvbsnoop from adapter 1 and write to tvguide.xml with verbose output:
+       mhegepgsnoop.py -o tvguide.xml -e "-adapter 1" -vs
+
+Example usage to collect EPG data from default adapter and write to tvguide.xml and specify MySQL user & password:
+       mhegepgsnoop.py -o tvguide.xml -m "-u myuser -pmypassword"
+
+Example usage to collect EPG data from specified demux device, write to tvguide.xml, connect to mythconverg using
+Python bindings, verbose output:
+       mhegepgsnoop.py -o tvguide.xml -d "/dev/dvb/adapter1/demux0" -vp
+
+Example usage to collect EPG data from specified demux device, write to tvguide.xml, use a channel map file, verbose output:
+       mhegepgsnoop.py -o tvguide.xml -d "/dev/dvb/adapter1/demux0" -v -f chanmap.txt
+
+Example usage to collect EPG data using from default adapter, write to tvguide.xml, tune to channel number 3:
+       mhegepgsnoop.py -o tvguide.xml -t 3
+
+Example usage to collect EPG data from default adapter, write to tvguide.xml, and strip some text from titles:
+       mhegepgsnoop.py -o tvguide.xml -c "All New |Movie: "
+       mhegepgsnoop.py -o tvguide.xml -c
+'''
+'''
+REVISION HISTORY
+
+0.1    First release
+0.2    Some code clean up. Added messages.
+0.3    Added bindings for Linux DVB API
+0.3.1  Clean up.
+0.3.2  Fixed problem where channel matching over-wrote best match with later, worse match.
+       Bruce Wilson fixed bug in date calculation for months less than 10.
+0.3.3  Add option for channel map file instead of mysql lookup
+0.3.4  Add option to generate program start/stop times in UTC + timezone offset format for users
+       with "auto" timezone setting for xmltv
+0.3.5  Changed/added time options to generate (a) times in <local time> +<offset> format or
+       (b) times in UTC format for users with "auto" timezone setting for xmltv
+0.3.6  Fix bug introduced in midnight corrections due to using non-local time
+0.4    Add option to use MythTV Python bindings for database access
+       Add error trapping
+       Added help text and more examples
+0.4.1  Added xml declaration and doctype headers
+       Changed to cElementTree instead of ElementTree. Supposed to be faster and use less memory.
+0.5    Added tuning option for DVB-T tuners
+0.5.1  Tidy up
+       Add hierarchy parameter to tuning
+       Add version number to header
+       Delete channels not listed in channel map file
+0.5.2  Fixed bug in match_channels. Trapped list index errors in tune.
+0.5.3  Add -c option to clean "All New" from the front of titles.
+0.6    Added argparse because it's easier to handle variable numbers of arguments compared to optparse
+       Put default options in a class to simplify using argparse or optparse
+       Refactored various options into the global options list and removed unnecessary option tests by adding defaults
+       Added regex stripping of unwanted text from titles. Bit of a hack getting it working for Python with/without argparse.
+       Removed many global variables
+       Fixed apparently long standing bug in dvbsnoop code
+0.6.1  Fixed bug in code handling optparse and argparse due to clean_titles2 not being set in some cases.
+       Fixed handling of some tuning parameters with default values of 'a'.
+       Added '-T' option to tune by chanid (which is unique) instead of channum.
+0.6.1 JSW Change selects used to get channels from MythTV to only get DVB-T channels.
+0.6.1 JSW Increase the buffer size used to read from the demux device to prevent buffer overflows.
+0.6.2 JSW Fixed extra + bug in line 734.
+0.7.0 JSW Conversion to Python 3.
+    Added -b option.
+       Replaced build_modules code to speed it up (runs in 1/10th the time).
+       Removed optparse (not needed with Python 3).
+       Prevented hang on tuner open error - it will now timeout and die.
+0.7.1 JSW Fixed a problem with -b option where the database was being accessed when only the map file was being used.
+
+TO DO
+
+Confirm correct function at year end and at daylight savings changes.
+Tidy up code using DVB API.
+Do we need to set flags in dmx_sct_filter_params?
+
+'''
+####### Start code pasted from linuxdvb module ##########################
+"""
+Python bindings for Linux DVB API v5.1
+see headers in linux/dvb/ for reference
+"""
+import ctypes
+
+# the following 41 lines are copied verbatim from the python v4l2 binding
+
+_IOC_NRBITS = 8
+_IOC_TYPEBITS = 8
+_IOC_SIZEBITS = 14
+_IOC_DIRBITS = 2
+
+_IOC_NRSHIFT = 0
+_IOC_TYPESHIFT = _IOC_NRSHIFT + _IOC_NRBITS
+_IOC_SIZESHIFT = _IOC_TYPESHIFT + _IOC_TYPEBITS
+_IOC_DIRSHIFT = _IOC_SIZESHIFT + _IOC_SIZEBITS
+
+_IOC_NONE = 0
+_IOC_WRITE = 1
+_IOC_READ  = 2
+
+
+def _IOC(dir_, type_, nr, size):
+       return (
+               ctypes.c_int32(dir_ << _IOC_DIRSHIFT).value |
+               ctypes.c_int32(ord(type_) << _IOC_TYPESHIFT).value |
+               ctypes.c_int32(nr << _IOC_NRSHIFT).value |
+               ctypes.c_int32(size << _IOC_SIZESHIFT).value)
+
+
+def _IOC_TYPECHECK(t):
+       return ctypes.sizeof(t)
+
+
+def _IO(type_, nr):
+       return _IOC(_IOC_NONE, type_, nr, 0)
+
+
+def _IOW(type_, nr, size):
+       return _IOC(_IOC_WRITE, type_, nr, _IOC_TYPECHECK(size))
+
+
+def _IOR(type_, nr, size):
+       return _IOC(_IOC_READ, type_, nr, _IOC_TYPECHECK(size))
+
+
+def _IOWR(type_, nr, size):
+       return _IOC(_IOC_READ | _IOC_WRITE, type_, nr, _IOC_TYPECHECK(size))
+
+# end code cribbed from v4l2 binding
+
+def _binrange(start, stop):
+       '''returns a list of ints from start to stop in increments of one binary lshift'''
+       out = list()
+       out.append(start)
+       if start == 0:
+               start = 1
+               out.append(start)
+       while start < stop:
+               start = start << 1
+               out.append(start)
+       return out
+
+#
+# frontend 
+#
+
# Frontend types (fe_type_t).  Each name is also bound as a module-level
# integer constant equal to its index in the list.
fe_type = ['FE_QPSK', 'FE_QAM', 'FE_OFDM', 'FE_ATSC']
for _i, _name in enumerate(fe_type):
    globals()[_name] = _i
+
+
# Frontend capability flags (fe_caps_t), mapped bit value -> name.  The
# flag values double from 0 up to 0x800000; the extended-capability group
# resumes at 0x10000000.  Each name is also bound as a module constant.
fe_caps = dict(zip(
    _binrange(0, 0x800000) + _binrange(0x10000000, 0x80000000),
    (
        'FE_IS_STUPID',
        'FE_CAN_INVERSION_AUTO',
        'FE_CAN_FEC_1_2',
        'FE_CAN_FEC_2_3',
        'FE_CAN_FEC_3_4',
        'FE_CAN_FEC_4_5',
        'FE_CAN_FEC_5_6',
        'FE_CAN_FEC_6_7',
        'FE_CAN_FEC_7_8',
        'FE_CAN_FEC_8_9',
        'FE_CAN_FEC_AUTO',
        'FE_CAN_QPSK',
        'FE_CAN_QAM_16',
        'FE_CAN_QAM_32',
        'FE_CAN_QAM_64',
        'FE_CAN_QAM_128',
        'FE_CAN_QAM_256',
        'FE_CAN_QAM_AUTO',
        'FE_CAN_TRANSMISSION_MODE_AUTO',
        'FE_CAN_BANDWIDTH_AUTO',
        'FE_CAN_GUARD_INTERVAL_AUTO',
        'FE_CAN_HIERARCHY_AUTO',
        'FE_CAN_8VSB',
        'FE_CAN_16VSB',
        'FE_HAS_EXTENDED_CAPS',
        'FE_CAN_2G_MODULATION',
        'FE_NEEDS_BENDING',
        'FE_CAN_RECOVER',
        'FE_CAN_MUTE_TS'
    )
))
for _val, _name in fe_caps.items():
    globals()[_name] = _val
+
+
class dvb_frontend_info(ctypes.Structure):
    # Mirror of struct dvb_frontend_info (linux/dvb/frontend.h); filled in
    # by the FE_GET_INFO ioctl.  'caps' is a bitmask of fe_caps values.
    _fields_ = [
        ('name', ctypes.c_char * 128),
        ('type', ctypes.c_uint),
        ('frequency_min', ctypes.c_uint32),
        ('frequency_max', ctypes.c_uint32),
        ('frequency_stepsize', ctypes.c_uint32),
        ('frequency_tolerance', ctypes.c_uint32),
        ('symbol_rate_min', ctypes.c_uint32),
        ('symbol_rate_max', ctypes.c_uint32),
        ('symbol_rate_tolerance', ctypes.c_uint32),
        ('notifier_delay', ctypes.c_uint32),
        ('caps', ctypes.c_uint32)
    ]


class dvb_diseqc_master_cmd(ctypes.Structure):
    # DiSEqC master command: up to 6 message bytes; msg_len is the count used.
    _fields_ = [
        ('msg', ctypes.c_uint8 * 6),
        ('msg_len', ctypes.c_uint8)
    ]


class dvb_diseqc_slave_reply(ctypes.Structure):
    # DiSEqC slave reply: up to 4 reply bytes plus a timeout
    # (units per linux/dvb/frontend.h — not used in this script).
    _fields_ = [
        ('msg', ctypes.c_uint8 * 4),
        ('msg_len', ctypes.c_uint8),
        ('timeout', ctypes.c_int)
    ]
+
+
# SEC LNB voltage selection (fe_sec_voltage_t).
fe_sec_voltage = ['SEC_VOLTAGE_13', 'SEC_VOLTAGE_18', 'SEC_VOLTAGE_OFF']
for _i, _name in enumerate(fe_sec_voltage):
    globals()[_name] = _i


# SEC continuous 22 kHz tone (fe_sec_tone_mode_t).
fe_sec_tone_mode = ['SEC_TONE_ON', 'SEC_TONE_OFF']
for _i, _name in enumerate(fe_sec_tone_mode):
    globals()[_name] = _i


# SEC mini-DiSEqC burst (fe_sec_mini_cmd_t).
fe_sec_mini_cmd = ['SEC_MINI_A', 'SEC_MINI_B']
for _i, _name in enumerate(fe_sec_mini_cmd):
    globals()[_name] = _i
+
+
# Frontend status bits (fe_status_t), mapped bit value -> name.
fe_status = dict(zip(
    _binrange(0x01, 0x40),
    (
        'FE_HAS_SIGNAL',
        'FE_HAS_CARRIER',
        'FE_HAS_VITERBI',
        'FE_HAS_SYNC',
        'FE_HAS_LOCK',
        'FE_TIMEDOUT',
        'FE_REINIT'
    )
))
for _val, _name in fe_status.items():
    globals()[_name] = _val
+
+
# Spectral inversion (fe_spectral_inversion_t).
fe_spectral_inversion = ['INVERSION_OFF', 'INVERSION_ON', 'INVERSION_AUTO']
for _i, _name in enumerate(fe_spectral_inversion):
    globals()[_name] = _i


# Forward error correction rates (fe_code_rate_t).
fe_code_rate = [
    'FEC_NONE', 'FEC_1_2', 'FEC_2_3', 'FEC_3_4', 'FEC_4_5', 'FEC_5_6',
    'FEC_6_7', 'FEC_7_8', 'FEC_8_9', 'FEC_AUTO', 'FEC_3_5', 'FEC_9_10'
]
for _i, _name in enumerate(fe_code_rate):
    globals()[_name] = _i


# Modulation / constellation (fe_modulation_t).
fe_modulation = [
    'QPSK', 'QAM_16', 'QAM_32', 'QAM_64', 'QAM_128', 'QAM_256', 'QAM_AUTO',
    'VSB_8', 'VSB_16', 'PSK_8', 'APSK_16', 'APSK_32', 'DQPSK'
]
for _i, _name in enumerate(fe_modulation):
    globals()[_name] = _i


# OFDM transmission mode (fe_transmit_mode_t).
fe_transmit_mode = [
    'TRANSMISSION_MODE_2K', 'TRANSMISSION_MODE_8K',
    'TRANSMISSION_MODE_AUTO', 'TRANSMISSION_MODE_4K'
]
for _i, _name in enumerate(fe_transmit_mode):
    globals()[_name] = _i


# Channel bandwidth (fe_bandwidth_t).
fe_bandwidth = [
    'BANDWIDTH_8_MHZ', 'BANDWIDTH_7_MHZ', 'BANDWIDTH_6_MHZ', 'BANDWIDTH_AUTO'
]
for _i, _name in enumerate(fe_bandwidth):
    globals()[_name] = _i


# OFDM guard interval (fe_guard_interval_t).
fe_guard_interval = [
    'GUARD_INTERVAL_1_32', 'GUARD_INTERVAL_1_16',
    'GUARD_INTERVAL_1_8', 'GUARD_INTERVAL_1_4', 'GUARD_INTERVAL_AUTO'
]
for _i, _name in enumerate(fe_guard_interval):
    globals()[_name] = _i


# OFDM hierarchy (fe_hierarchy_t).
fe_hierarchy = [
    'HIERARCHY_NONE', 'HIERARCHY_1', 'HIERARCHY_2', 'HIERARCHY_4',
    'HIERARCHY_AUTO'
]
for _i, _name in enumerate(fe_hierarchy):
    globals()[_name] = _i
+
+
class dvb_qpsk_parameters(ctypes.Structure):
    # Tuning parameters for QPSK (satellite) frontends.
    _fields_ = [
        ('symbol_rate', ctypes.c_uint32),
        ('fec_inner', ctypes.c_uint)
    ]


class dvb_qam_parameters(ctypes.Structure):
    # Tuning parameters for QAM (cable) frontends.
    _fields_ = [
        ('symbol_rate', ctypes.c_uint32),
        ('fec_inner', ctypes.c_uint),
        ('modulation', ctypes.c_uint)
    ]


class dvb_vsb_parameters(ctypes.Structure):
    # Tuning parameters for VSB (ATSC) frontends.
    _fields_ = [
        ('modulation', ctypes.c_uint)
    ]


class dvb_ofdm_parameters(ctypes.Structure):
    # Tuning parameters for OFDM (DVB-T) frontends.  Field values come from
    # the fe_* enum constants defined above.
    _fields_ = [
        ('bandwidth', ctypes.c_uint),
        ('code_rate_HP', ctypes.c_uint),
        ('code_rate_LP', ctypes.c_uint),
        ('constellation', ctypes.c_uint),
        ('transmission_mode', ctypes.c_uint),
        ('guard_interval', ctypes.c_uint),
        ('hierarchy_information', ctypes.c_uint)
    ]


class dvb_frontend_parameters(ctypes.Structure):
    # Mirror of struct dvb_frontend_parameters: frequency and inversion plus
    # a union of the per-delivery-system parameter blocks above.  Which union
    # member is valid depends on the frontend type (fe_type).
    class _u(ctypes.Union):
        _fields_ = [
            ('qpsk', dvb_qpsk_parameters),
            ('qam', dvb_qam_parameters),
            ('ofdm', dvb_ofdm_parameters),
            ('vsb', dvb_vsb_parameters)
        ]

    _fields_ = [
        ('frequency', ctypes.c_uint32),
        ('inversion', ctypes.c_uint),
        ('u', _u)
    ]


class dvb_frontend_event(ctypes.Structure):
    # Event record returned by the FE_GET_EVENT ioctl: status bits (fe_status)
    # plus the frontend parameters associated with the event.
    _fields_ = [
        ('status', ctypes.c_uint),
        ('parameters', dvb_frontend_parameters)
    ]
+
+
# S2API (DVB API v5) property commands, in kernel order: the list index is
# the command value, and each name is bound as a module-level constant.
s2api_commands = [
    'DTV_UNDEFINED',
    'DTV_TUNE',
    'DTV_CLEAR',
    'DTV_FREQUENCY',
    'DTV_MODULATION',
    'DTV_BANDWIDTH_HZ',
    'DTV_INVERSION',
    'DTV_DISEQC_MASTER',
    'DTV_SYMBOL_RATE',
    'DTV_INNER_FEC',
    'DTV_VOLTAGE',
    'DTV_TONE',
    'DTV_PILOT',
    'DTV_ROLLOFF',
    'DTV_DISEQC_SLAVE_REPLY',
    'DTV_FE_CAPABILITY_COUNT',
    'DTV_FE_CAPABILITY',
    'DTV_DELIVERY_SYSTEM',
    'DTV_ISDBT_PARTIAL_RECEPTION',
    'DTV_ISDBT_SOUND_BROADCASTING',
    'DTV_ISDBT_SB_SUBCHANNEL_ID',
    'DTV_ISDBT_SB_SEGMENT_IDX',
    'DTV_ISDBT_SB_SEGMENT_COUNT',
    'DTV_ISDBT_LAYERA_FEC',
    'DTV_ISDBT_LAYERA_MODULATION',
    'DTV_ISDBT_LAYERA_SEGMENT_COUNT',
    'DTV_ISDBT_LAYERA_TIME_INTERLEAVING',
    'DTV_ISDBT_LAYERB_FEC',
    'DTV_ISDBT_LAYERB_MODULATION',
    'DTV_ISDBT_LAYERB_SEGMENT_COUNT',
    'DTV_ISDBT_LAYERB_TIME_INTERLEAVING',
    'DTV_ISDBT_LAYERC_FEC',
    'DTV_ISDBT_LAYERC_MODULATION',
    'DTV_ISDBT_LAYERC_SEGMENT_COUNT',
    'DTV_ISDBT_LAYERC_TIME_INTERLEAVING',
    'DTV_API_VERSION',
    'DTV_CODE_RATE_HP',
    'DTV_CODE_RATE_LP',
    'DTV_GUARD_INTERVAL',
    'DTV_TRANSMISSION_MODE',
    'DTV_HIERARCHY',
    'DTV_ISDBT_LAYER_ENABLED',
    'DTV_ISDBS_TS_ID'
]
for _i, _name in enumerate(s2api_commands):
    globals()[_name] = _i
# Highest command number defined above.
DTV_MAX_COMMAND = DTV_ISDBS_TS_ID
+
+
# Pilot tones (fe_pilot_t), DVB-S2.
fe_pilot = ['PILOT_ON', 'PILOT_OFF', 'PILOT_AUTO']
for _i, _name in enumerate(fe_pilot):
    globals()[_name] = _i


# Roll-off factor (fe_rolloff_t), DVB-S2.
fe_rolloff = ['ROLLOFF_35', 'ROLLOFF_20', 'ROLLOFF_25', 'ROLLOFF_AUTO']
for _i, _name in enumerate(fe_rolloff):
    globals()[_name] = _i


# Delivery systems (fe_delivery_system_t).
fe_delivery_system = [
    'SYS_UNDEFINED',
    'SYS_DVBC_ANNEX_AC',
    'SYS_DVBC_ANNEX_B',
    'SYS_DVBT',
    'SYS_DSS',
    'SYS_DVBS',
    'SYS_DVBS2',
    'SYS_DVBH',
    'SYS_ISDBT',
    'SYS_ISDBS',
    'SYS_ISDBC',
    'SYS_ATSC',
    'SYS_ATSCMH',
    'SYS_DMBTH',
    'SYS_CMMB',
    'SYS_DAB',
    'SYS_DCII_C_QPSK',
    'SYS_DCII_I_QPSK',
    'SYS_DCII_Q_QPSK',
    'SYS_DCII_C_OQPSK'
]
for _i, _name in enumerate(fe_delivery_system):
    globals()[_name] = _i
+
+
class dtv_cmds_h(ctypes.Structure):
    # Descriptor for an S2API command (struct dtv_cmds_h).
    # NOTE(review): in the kernel header 'set'/'buffer'/'reserved' are
    # bitfields; here they are declared as whole c_uint32 fields, so the
    # layout differs from the kernel's.  This struct is not used elsewhere
    # in this file, so the discrepancy is currently harmless — confirm
    # before passing it to the kernel.
    _fields_ = [
        ('name', ctypes.c_char_p),
        ('cmd', ctypes.c_uint32),
        ('set', ctypes.c_uint32),
        ('buffer', ctypes.c_uint32),
        ('reserved', ctypes.c_uint32)
    ]
+
+
class dtv_property(ctypes.Structure):
    """Mirror of struct dtv_property (linux/dvb/frontend.h, S2API).

    The kernel declares this struct ``__attribute__((packed))``.  In ctypes,
    ``_pack_`` only takes effect if it is assigned *before* ``_fields_``;
    the original code assigned it afterwards, which ctypes silently ignores,
    leaving the structure with default (padded) layout — on 64-bit platforms
    that adds 4 trailing padding bytes, giving the wrong element stride for
    arrays of properties passed via dtv_properties.props.
    """

    class _u(ctypes.Union):
        class _s(ctypes.Structure):
            _fields_ = [
                ('data', ctypes.c_uint8 * 32),
                ('len', ctypes.c_uint32),
                ('reserved1', ctypes.c_uint32 * 3),
                ('reserved2', ctypes.c_void_p)
            ]

        _fields_ = [
            ('data', ctypes.c_uint32),
            ('buffer', _s)
        ]

    # BUG FIX: must precede _fields_ for ctypes to honour the packing.
    _pack_ = True

    _fields_ = [
        ('cmd', ctypes.c_uint32),
        ('reserved', ctypes.c_uint32 * 3),
        ('u', _u),
        ('result', ctypes.c_int)
    ]
+
+
class dtv_properties(ctypes.Structure):
    # Mirror of struct dtv_properties: 'num' entries pointed to by 'props'.
    # Passed to the FE_SET_PROPERTY / FE_GET_PROPERTY ioctls below.
    _fields_ = [
        ('num', ctypes.c_uint32),
        ('props', ctypes.POINTER(dtv_property))
    ]
+
+
# Frontend ioctl request codes ('o' is the DVB ioctl magic character).
# S2API property interface.
FE_SET_PROPERTY = _IOW('o', 82, dtv_properties)
FE_GET_PROPERTY = _IOR('o', 83, dtv_properties)

DTV_IOCTL_MAX_MSGS = 64
FE_TUNE_MODE_ONESHOT = 0x01

FE_GET_INFO = _IOR('o', 61, dvb_frontend_info)

# DiSEqC control.
FE_DISEQC_RESET_OVERLOAD = _IO('o', 62)
FE_DISEQC_SEND_MASTER_CMD = _IOW('o', 63, dvb_diseqc_master_cmd)
FE_DISEQC_RECV_SLAVE_REPLY = _IOR('o', 64, dvb_diseqc_slave_reply)
FE_DISEQC_SEND_BURST = _IO('o', 65)

# SEC tone / voltage control.
FE_SET_TONE = _IO('o', 66)
FE_SET_VOLTAGE = _IO('o', 67)
FE_ENABLE_HIGH_LNB_VOLTAGE = _IO('o', 68)

# Status and statistics reads.
FE_READ_STATUS = _IOR('o', 69, ctypes.c_uint)
FE_READ_BER = _IOR('o', 70, ctypes.c_uint32)
FE_READ_SIGNAL_STRENGTH = _IOR('o', 71, ctypes.c_uint16)
FE_READ_SNR = _IOR('o', 72, ctypes.c_uint16)
FE_READ_UNCORRECTED_BLOCKS = _IOR('o', 73, ctypes.c_uint32)

# Legacy (pre-S2API) tuning interface.
FE_SET_FRONTEND = _IOW('o', 76, dvb_frontend_parameters)
FE_GET_FRONTEND = _IOR('o', 77, dvb_frontend_parameters)
FE_SET_FRONTEND_TUNE_MODE = _IO('o', 81)
FE_GET_EVENT = _IOR('o', 78, dvb_frontend_event)

FE_DISHNETWORK_SEND_LEGACY_CMD = _IO('o', 80)
+
+#
+# demux
+#
+
# Number of bytes in a demux section filter.
DMX_FILTER_SIZE = 16

class dmx_filter(ctypes.Structure):
    # Section filter (struct dmx_filter): value, mask and mode byte arrays.
    _fields_ = [
        ('filter', ctypes.c_uint8 * DMX_FILTER_SIZE),
        ('mask', ctypes.c_uint8 * DMX_FILTER_SIZE),
        ('mode', ctypes.c_uint8 * DMX_FILTER_SIZE)
    ]

class dmx_sct_filter_params(ctypes.Structure):
    # Parameters for the DMX_SET_FILTER ioctl (struct dmx_sct_filter_params):
    # PID to filter, the section filter, a timeout, and flag bits taken from
    # the DMX_* constants below.
    _fields_ = [
        ('pid', ctypes.c_uint16),
        ('filter', dmx_filter),
        ('timeout', ctypes.c_uint32),
        ('flags', ctypes.c_uint32)
    ]

# Flag bits for dmx_sct_filter_params.flags.
DMX_CHECK_CRC = 0x01
DMX_ONESHOT = 0x02
DMX_IMMEDIATE_START = 0x04
DMX_KERNEL_CLIENT = 0x8000

# Demux ioctl request codes.
DMX_START = _IO('o', 41)
DMX_STOP = _IO('o', 42)
DMX_SET_BUFFER_SIZE = _IO('o', 45)
DMX_SET_FILTER = _IOW('o', 43, dmx_sct_filter_params)
+
+####### End code pasted from linuxdvb module ##########################
+
+try:
+       from xml.etree import cElementTree as ET
+except ImportError:
+       from xml.etree import ElementTree as ET
+import difflib, fileinput, os, re, select, sqlite3, struct, sys, time, unicodedata, zlib
+from subprocess import Popen, PIPE, STDOUT
+import fcntl
+import traceback
+from array import array
+import argparse
+
def main():
    """Collect MHEG-5 EPG data from a DVB-T demux (or via dvbsnoop), match
    broadcast channels to MythTV channels, and write an xmltv output file.

    Exits the process directly via sys.exit() on both success and failure.

    Bug fixes relative to 0.7.1:
      * Tuning by channel ID (-T) now actually tunes: the guards around
        tune() and the frontend close tested only options.tune_ch, so -T
        fetched tuning info and opened the frontend but never tuned it.
      * On frontend-open failure, both tune_ch and tune_chanid are cleared.
      * sqlite3.OptimizedUnicode (an alias for str in Python 3, removed in
        Python 3.12) replaced with str — identical behaviour.
    """
    global c, c2, unsquashed_modules, chanlist, channels, MythDB, options

    lines = []
    unsquashed_modules = []
    chanlist = []
    channels = []
    raw_tuning_info = []

    # Broadcast icon filenames mapped to xmltv programme flags / ratings.
    icons = {'hd.png':'HDTV', 'dolby.png':'dolby', 'ear.png':'teletext'}
    ratings = {'ao.png':'AO', 'g.png':'G', 'pgr.png':'PGR'}

    class defaults:
        # Fallback values for every command line option.
        demux_device = '/dev/dvb/adapter0/demux0'
        extra_args = ''
        mysql_args = "-u root"
        output_file = "/tmp/xmltv.xml" # Output filename
        map_file = False # Channel map filename. Set to FALSE if -f option is not used.
        both = False
        tune_ch = False
        tune_chanid = False
        use_dvbsnoop = False
        verbosity = False
        timezone = False
        UTC = False
        clean_titles = False
        py_bind = False
    defs = defaults()

    options = argparse_parse(defs)

    verbose("Options selected = " + str(options))

    # Test for dvbsnoop
    if options.use_dvbsnoop and which("dvbsnoop") is None:
        print("You need to install dvbsnoop")
        sys.exit(1)

    # Set up database in memory to simplify handling shows crossing midnight
    conn = sqlite3.connect(":memory:")
    # sqlite3.OptimizedUnicode was an alias for str in Python 3 and was
    # removed in Python 3.12; assign str directly (identical behaviour).
    conn.text_factory = str
    conn.row_factory = sqlite3.Row
    c = conn.cursor()
    c2 = conn.cursor()
    c.execute('''create table programs(start, stop, channel, title, desc,
            episode_id, dolby_flag, teletext_flag, hd_flag, rating, start_time, local_start, local_stop)''')

    if options.py_bind:
        try:
            from MythTV import MythDB
        except Exception as inst:
            print('\n' + str(inst))
            print('Unable to load MythTV Python module. Exiting.')
            sys.exit(1)

    (chaninfo_map, chaninfo_db) = get_chan_info(options.map_file, options.py_bind, options.both)

    if options.tune_ch or options.tune_chanid:
        verbose('\nGetting tuning info for channel ' + (('ID ' + str(options.tune_chanid)) if options.tune_chanid else str(options.tune_ch)) + '\n')
        tuning_fields = ['frequency','inversion','bandwidth','hp_code_rate','lp_code_rate','constellation','transmission_mode','guard_interval','hierarchy']
        sql_select = 'select '
        for t in tuning_fields:
            sql_select += t + ','
        sql_select = sql_select.rstrip(',') + ' from channel join dtv_multiplex on channel.mplexid=dtv_multiplex.mplexid where (dtv_multiplex.mod_sys=\'UNDEFINED\' or left(dtv_multiplex.mod_sys, 5)=\'DVB-T\') and '
        sql_select += ('chanid=' + str(options.tune_chanid)) if options.tune_chanid else ('channum=' + str(options.tune_ch))
        tuning_info = get_tune_info(options.py_bind, sql_select, options.tune_ch, tuning_fields)
        # Derive the frontend device from the demux device path, e.g.
        # /dev/dvb/adapter1/demux0 -> /dev/dvb/adapter1/frontend0
        d = options.demux_device.rstrip('/')
        frontend = d[0:d.rfind('/') + 1] + 'frontend' + d[len(d) - 1]
        verbose('\nTuning ' + frontend + ' to channel ' + (('ID ' + str(options.tune_chanid)) if options.tune_chanid else str(options.tune_ch)) + '\n')
        try:
            try:
                fefd = open(frontend, 'rb+')
            except Exception:
                verbose('Unable to open ' + frontend + ' for tuning. Possibly in use. Continuing without tuning.\n')
                # BUG FIX: clear BOTH tuning options so neither the tune()
                # call below nor the close further down is attempted.
                options.tune_ch = False
                options.tune_chanid = False
            # BUG FIX: previously only options.tune_ch was tested here, so
            # tuning requested by channel ID (-T) never actually tuned.
            if options.tune_ch or options.tune_chanid:
                tune(frontend, fefd, tuning_info)
        except Exception as inst:
            print(inst)
            traceback.print_exc()
            print('Unable to tune', frontend, '. Exiting')
            sys.exit(1)

    datablocks = []
    module_numbers = []
    if options.use_dvbsnoop:
        verbose("\nUsing dvbsnoop to collect data\n")
        the_pid = find_pid()
        if the_pid == -1:
            verbose("PID not found\n")
            sys.exit(1)
        else:
            the_pid = find_pid2(the_pid)
        download(the_pid, datablocks, module_numbers)
    else:
        verbose("\nOpening read from device " + options.demux_device + " to collect data\n")
        try:
            # Large (2 MiB) userspace buffer to avoid overflows while the
            # DSM-CC carousel downloads.
            dmxfd = open(options.demux_device, 'rb', 2*1024*1024)
        except IOError as e:
            (errno, strerr) = e.args
            verbose("Could not open demux device " + options.demux_device + ". I/O error " + str(errno) + ": " + strerr + "\n")
            sys.exit(1)
        except:
            verbose("Unexpected error: " + str(sys.exc_info()[0]))
            sys.exit(1)
        retval = fcntl.ioctl(dmxfd, DMX_SET_BUFFER_SIZE, 20*4096)
        if retval != 0:
            verbose('Unable to set DMX_SET_BUFFER_SIZE, returned value = ' + str(retval) + ', aborting')
            sys.exit(1)
        demux_filter = dmx_sct_filter_params()
        the_pid = find_pid3(dmxfd, demux_filter)
        if the_pid != -1:
            download2(the_pid, datablocks, module_numbers, dmxfd, demux_filter)
        else:
            verbose("Could not find DSM-CC carousel PID.\n")
            sys.exit(1)
        dmxfd.close()

    # BUG FIX: also close the frontend when it was tuned via -T (tune_chanid).
    if options.tune_ch or options.tune_chanid:
        fefd.close()

    build_modules(datablocks, module_numbers)
    lines = get_MHEG_data()
    verbose("Extracting EPG info\n\n--- Channel Names ---\n")
    parse_MHEG_data(lines, icons, ratings)
    if options.map_file:
        verbose("\nMatching MHEG channels to MythTV channels using map file\n")
        map_channels(chaninfo_map)
    if not options.map_file or options.both:
        verbose("\nFuzzy matching MHEG channels to MythTV channels\n")
        match_channels(chaninfo_db)
    delete_unmapped_channels()
    verbose("Fixing start/stop times for shows crossing midnight\n")
    fix_midnight()
    verbose("Building XML file: " + options.output_file + "\n")
    build_xml()
    prettyup_xml()
    if options.clean_titles:
        if isinstance(options.clean_titles, bool): # A hack to handle both argparse and optparse
            options.clean_titles = 'All New '
        do_clean_titles(options.clean_titles)
    sys.exit(0)
+
def argparse_parse(defs):
    """Build the command line interface and parse sys.argv.

    defs: object whose attributes supply the default value for every option.
    Returns the argparse.Namespace of parsed options.
    """
    # RawDescriptionHelpFormatter keeps the pre-formatted extra_help text
    # (module-level examples) intact in --help output.
    parser = argparse.ArgumentParser(epilog=extra_help,
                                    formatter_class=argparse.RawDescriptionHelpFormatter,
                                    description='Convert DVB-T MHEG EPG data to xmltv for import into MythTV EPG')
    parser.add_argument('-d', dest='demux_device', action='store', default=defs.demux_device,
                    help='Specify DVB demux device, e.g., "/dev/dvb/adapter1/demux0"')
    parser.add_argument('-e', dest='extra_args', action='store', default=defs.extra_args,
                    help='Extra dvbsnoop arguments, e.g., "-adapter 1"')
    parser.add_argument('-m', dest='mysql_args', action='store', default=defs.mysql_args,
                    help='MySQL arguments, e.g., "-u myuser -pmypassword"' + ' (default "' + defs.mysql_args + '")')
    parser.add_argument('-o', dest='output_file', action='store', default=defs.output_file,
                    help='Output filename (default '+defs.output_file+')')
    parser.add_argument('-v', dest='verbosity', action='store_true', default=defs.verbosity,
                    help='Enable verbose output. Disable or redirect stdout & stderr to file if you get broken pipe error 32.')
    parser.add_argument('-s', dest='use_dvbsnoop', action='store_true', default=defs.use_dvbsnoop,
                    help="Use dvbsnoop instead of direct DVB API access (default FALSE)")
    parser.add_argument('-f', dest='map_file', action='store', default=defs.map_file,
                    help='Channel map filename (tab separated, default ' + str(defs.map_file) + ')')
    parser.add_argument('-b', dest='both', action='store_true', default=defs.both,
                    help='Match channels with both fuzzy matching and then the map file (default False)')
    parser.add_argument('-z', dest='timezone', action='store_true', default=defs.timezone,
                    help='Generate local + offset program start/stop times (default local time)')
    parser.add_argument('-u', dest='UTC', action='store_true', default=defs.UTC,
                    help='Generate UTC start/stop times (default local time)')
    parser.add_argument('-p', dest='py_bind', action='store_true', default=defs.py_bind,
                    help='Use MythTV Python bindings for database access (default FALSE)')
    parser.add_argument('-t', dest='tune_ch', action='store', type=int, default=defs.tune_ch,
                    help='Tune to a specified channel number. DVB-T only. Will not tune the adapter if it appears to be locked by another app (e.g., MythTV) but will continue to try to download the EPG.')
    parser.add_argument('-T', dest='tune_chanid', action='store', type=int, default=defs.tune_chanid,
                    help='Tune to a specified channel ID. DVB-T only. Will not tune the adapter if it appears to be locked by another app (e.g., MythTV) but will continue to try to download the EPG.')
    # nargs='?' with const lets bare "-c" mean "strip the default prefix",
    # while "-c REGEX" supplies a custom pattern.
    parser.add_argument('-c', dest='clean_titles', nargs='?', const='All New ', default=defs.clean_titles,
                    help='''Clean up silly things prepended to titles. Default (just -c with no string specified) removes "All New ".
                    Customize the text to be removed by specifying a matching regular expression string like this: 
                    "Remove this|And this" or this: "Remove this".''')
    args = parser.parse_args()
    return args
+
def get_tune_info(py_bind, sql_select, tune_ch, tuning_fields):
	"""Fetch DVB-T tuning parameters for channel *tune_ch* from mythconverg.

	Runs *sql_select* either through the MythTV Python bindings (when
	py_bind is True) or by shelling out to the mysql command-line client,
	then zips the resulting row with *tuning_fields* into a dict.  The
	'frequency' value is normalised to a string.  Exits the program if
	the row cannot be fetched or has the wrong number of columns.
	"""
	def _fail(inst=None):
		# Single fatal-error path (was duplicated four times).
		if inst is not None:
			print(inst)
		print('Unable to get tuning info for channel ' + str(tune_ch) + '.  Exiting.')
		sys.exit(1)

	if py_bind:
		try:
			db = MythDB()
			dbconn = db.cursor()
			dbconn.execute(sql_select)
			raw_tuning_info = dbconn.fetchone()
			dbconn.close()
		except Exception as inst:
			_fail(inst)
		# fetchone() returns None when no row matched the channel.
		if raw_tuning_info is None or len(raw_tuning_info) != len(tuning_fields):
			_fail()
	else:
		try:
			f = os.popen('mysql -ss ' + options.mysql_args + ' -e "' + sql_select + '" mythconverg')
			raw_tuning_info = f.read().rstrip('\n').split('\t')
			f.close()
		except Exception as inst:
			_fail(inst)
		if len(raw_tuning_info) != len(tuning_fields):
			_fail()
	tuning_info = dict(zip(tuning_fields, raw_tuning_info))
	# Later code does string comparisons/concatenation on the frequency.
	tuning_info['frequency'] = str(tuning_info['frequency'])
	for ch in tuning_info:
		verbose(ch + ': ' + str(tuning_info[ch]) + '\n')
	return tuning_info
+
def get_chan_info(map_file, py_bind, both):
	"""Collect channel (callsign, name, xmltvid) rows for channel matching.

	Returns a tuple (chaninfo_map, chaninfo_db):
	  chaninfo_map - rows from the tab-separated map file (when map_file set)
	  chaninfo_db  - DVB-T channel rows from the mythconverg database,
	                 fetched via the Python bindings (py_bind) or the
	                 mysql command-line client otherwise.
	Exits the program on database errors.
	"""
	chaninfo_map = []
	chaninfo_db = []
	if map_file:
		# Get channel xmltv ids from the map file
		verbose("\nGetting channel xmltv ids from map file " + map_file + "\n")
		with open(map_file) as f:  # context manager: file is closed even on error
			for ch in f:
				verbose(ch)
				chaninfo_map.append(ch.strip("\n").split("\t"))
	if py_bind:
		# Get database channel info using Python bindings
		verbose("\nGetting channel info from MythTV database using Python bindings\n")
		try:
			db = MythDB()
			dbconn = db.cursor()
			# Only filter on c.deleted when the column exists (newer schemas).
			# A missing column no longer aborts the program, matching the
			# tolerant behaviour of the mysql branch below.
			try:
				dbconn.execute("desc channel deleted")
				deleted = '' if dbconn.fetchall() == () else ' and c.deleted is NULL'
			except Exception:
				deleted = ''
			dbconn.execute("select callsign, name, xmltvid from channel c, dtv_multiplex d where c.mplexid is not NULL and c.mplexid = d.mplexid and (d.mod_sys='UNDEFINED' or d.mod_sys like 'DVB-T%')" + deleted)
			chaninfo_db = dbconn.fetchall()
			dbconn.close()
			for ch in chaninfo_db:
				verbose(ch[0] + "\t" + ch[1] + "\t" + ch[2] + "\n")
		except Exception as inst:
			print(inst)
			print("Error accessing mythconverg database using Python bindings.  Exiting.")
			sys.exit(1)
	elif not map_file or both:
		# Get database channel info from the mysql command-line client
		verbose("\nGetting channel info from MythTV database using mysql\n")
		try:
			f = os.popen('mysql -ss ' + options.mysql_args + ' -e \'describe channel deleted\' mythconverg')
			deleted = f.read().splitlines()
			f.close()
			# Empty output means the 'deleted' column doesn't exist in this schema.
			if deleted == []:
				deleted = ''
			else:
				deleted =  ' and c.deleted is NULL'

			f = os.popen('mysql -ss ' + options.mysql_args + ' -e \'select callsign, name, xmltvid from channel c, dtv_multiplex d where c.mplexid is not NULL and c.mplexid = d.mplexid and (d.mod_sys="UNDEFINED" or d.mod_sys like "DVB-T%")' + deleted + '\' mythconverg')
			chaninfo1 = f.read().splitlines()
			f.close()
			for ch in chaninfo1:
				verbose(ch + "\n")
				chaninfo_db.append(ch.split("\t"))
		except Exception as inst:
			print(inst)
			print("Could not access mythconverg database.  Exiting.")
			sys.exit(1)
	return (chaninfo_map, chaninfo_db)
+
def tune(frontend, fefd, tuning_info):
	"""Tune the DVB-T device *frontend* (already-open file descriptor *fefd*).

	tuning_info is the dtv_multiplex dict built by get_tune_info().  If
	the frontend already holds a lock on the requested frequency (e.g.
	another app such as MythTV is using it) it is left alone.  Exits the
	program if the device is not DVB-T or does not lock within 10 s.
	"""
	feinfo = dvb_frontend_info()
	fcntl.ioctl(fefd, FE_GET_INFO, feinfo)
	verbose('Frontend name: ' + str(feinfo.name, 'utf-8') + '\n')
	verbose('Frontend type: ' + fe_type[feinfo.type] + '\n')
	festatus = dvb_frontend_event()
	fcntl.ioctl(fefd, FE_READ_STATUS, festatus)
	# Already locked on the wanted frequency?  Don't retune (could disturb a recording).
	if festatus.status & FE_HAS_LOCK and festatus.parameters.frequency == int(tuning_info['frequency']):
		verbose('Frontend is already locked to frequency: ' + str(festatus.parameters.frequency) + '. Not tuning.\n')
		return
	if feinfo.type != FE_OFDM:	# FE_OFDM is the DVB-T type in the v3 DVB API
		print('Device', frontend, 'does not appear to be DVB-T. Exiting.')
		sys.exit(1)
	feparams = dvb_frontend_parameters()
	fcntl.ioctl(fefd, FE_GET_FRONTEND, feparams)

	# Convert myth tuning info to dvb api format.
	# The fe_* tables hold DVB API constant *names*; each DB value is
	# substring-matched against the names ('8' matches 'BANDWIDTH_8_MHZ',
	# '3/4' -> '3_4' matches 'FEC_3_4', ...) and the matching constant is
	# assigned via exec().
	feparams.frequency = int(tuning_info['frequency'])
	if tuning_info['inversion'] == 'a':
		feparams.inversion = 2	# 'a' = auto; 2 indexes the auto entry of fe_spectral_inversion
	else:
		feparams.inversion = int(tuning_info['inversion'])
	if tuning_info['bandwidth'] == 'a':
		tuning_info['bandwidth'] = 'auto'
	for b in fe_bandwidth:
		if tuning_info['bandwidth'].upper() in b:
			exec('feparams.u.ofdm.bandwidth = ' + b)
	for b in fe_code_rate:
		if tuning_info['hp_code_rate'].replace('/', '_').upper() in b:
			exec('feparams.u.ofdm.code_rate_HP = ' + b)
	for b in fe_code_rate:
		if tuning_info['lp_code_rate'].replace('/', '_').upper() in b:
			exec('feparams.u.ofdm.code_rate_LP = ' + b)
	if tuning_info['transmission_mode'] == 'a':
		tuning_info['transmission_mode'] = 'auto'
	for b in fe_transmit_mode:
		if tuning_info['transmission_mode'].upper() in b:
			exec('feparams.u.ofdm.transmission_mode = ' + b)
	for b in fe_modulation:
		if tuning_info['constellation'].upper() in b:
			exec('feparams.u.ofdm.constellation = ' + b)
	for b in fe_guard_interval:
		if tuning_info['guard_interval'].replace('/', '_').upper() in b:
			exec('feparams.u.ofdm.guard_interval = ' + b)
	for b in fe_hierarchy:
		if tuning_info['hierarchy'].upper() in b:
			exec('feparams.u.ofdm.hierarchy_information = ' + b)

	# Dump the converted parameters before handing them to the driver.
	verbose('\nParameters to be sent to ' + frontend + ':\n')
	verbose('Frequency = ' + str(feparams.frequency) + '\n')
	verbose('Inversion = ' + str(feparams.inversion) + ' = ' + fe_spectral_inversion[feparams.inversion] + '\n')
	verbose('Bandwidth = ' + str(feparams.u.ofdm.bandwidth) + ' = ' + fe_bandwidth[feparams.u.ofdm.bandwidth] + '\n')
	verbose('Transmission mode = ' + str(feparams.u.ofdm.transmission_mode) + ' = ' + fe_transmit_mode[feparams.u.ofdm.transmission_mode] + '\n')
	verbose('HP code rate = ' + str(feparams.u.ofdm.code_rate_HP) + ' = ' + fe_code_rate[feparams.u.ofdm.code_rate_HP] + '\n')
	verbose('LP code rate = ' + str(feparams.u.ofdm.code_rate_LP) + ' = ' + fe_code_rate[feparams.u.ofdm.code_rate_LP] + '\n')
	verbose('Constellation = ' + str(feparams.u.ofdm.constellation) + ' = ' + fe_modulation[feparams.u.ofdm.constellation] + '\n')
	verbose('Guard interval = ' + str(feparams.u.ofdm.guard_interval) + ' = ' + fe_guard_interval[feparams.u.ofdm.guard_interval] + '\n')
	verbose('Hierarchy = ' + str(feparams.u.ofdm.hierarchy_information) + ' = ' + fe_hierarchy[feparams.u.ofdm.hierarchy_information] + '\n')

	# Do it: tune, then poll for lock once a second for up to 10 seconds.
	fcntl.ioctl(fefd, FE_SET_FRONTEND, feparams)
	i = 0
	locked = False
	while i < 10 and not locked:
		fcntl.ioctl(fefd, FE_READ_STATUS, festatus)
		if festatus.status & FE_HAS_LOCK:
			verbose('Frontend has lock\n')
			locked = True
		else:
			verbose('Waiting for frontend to lock\n')
		time.sleep(1)
		i = i + 1
	if not locked:
		print('Frontend:', frontend, 'did not lock on channel within 10 seconds. Exiting.')
		sys.exit(1)

	fcntl.ioctl(fefd, FE_GET_FRONTEND, feparams)
	verbose('\nParameters read back from device. Might not be accurate?\n') # Why are some of these values wrong? Driver/firmware bugs?
	verbose('Frequency = ' + str(feparams.frequency) + '\n')
	verbose('Inversion = ' + str(feparams.inversion) + ' = ' + getFromList(fe_spectral_inversion, feparams.inversion) + '\n')
	verbose('Bandwidth = ' + str(feparams.u.ofdm.bandwidth) + ' = ' + getFromList(fe_bandwidth, feparams.u.ofdm.bandwidth) + '\n')
	verbose('Transmission mode = ' + str(feparams.u.ofdm.transmission_mode) + ' = ' + getFromList(fe_transmit_mode, feparams.u.ofdm.transmission_mode) + '\n')
	verbose('HP code rate = ' + str(feparams.u.ofdm.code_rate_HP) + ' = ' + getFromList(fe_code_rate, feparams.u.ofdm.code_rate_HP) + '\n')
	verbose('LP code rate = ' + str(feparams.u.ofdm.code_rate_LP) + ' = ' + getFromList(fe_code_rate, feparams.u.ofdm.code_rate_LP) + '\n')
	verbose('Constellation = ' + str(feparams.u.ofdm.constellation) + ' = ' + getFromList(fe_modulation, feparams.u.ofdm.constellation) + '\n')
	verbose('Guard interval = ' + str(feparams.u.ofdm.guard_interval) + ' = ' + getFromList(fe_guard_interval, feparams.u.ofdm.guard_interval) + '\n')
	verbose('Hierarchy = ' + str(feparams.u.ofdm.hierarchy_information) + ' = ' + getFromList(fe_hierarchy, feparams.u.ofdm.hierarchy_information) + '\n')
+
def getFromList(theList, index):
	"""Safe list lookup: theList[index] if in range, else a warning string."""
	if 0 <= index < len(theList):
		return theList[index]
	return "Possible bad list index value: " + str(index)
+
def verbose(stuff):
	"""Write *stuff* to stdout immediately, but only in verbose mode."""
	if not options.verbosity:
		return
	sys.stdout.write(stuff)
	sys.stdout.flush()
+
def which(prog):
	"""Return the full path of executable *prog* on $PATH, or None.

	Delegates to shutil.which, which (unlike the previous hand-rolled
	scan using os.path.exists) only accepts regular files, so an
	executable *directory* on $PATH can no longer be returned by mistake.
	"""
	import shutil  # local import: leaves the file's top-level imports untouched
	return shutil.which(prog)
+
def find_pid3(dmxfd, demux_filter):
	"""Find the MHEG-5 object-carousel PID by reading the demux device directly.

	Reads the PAT on PID 0 to obtain a program_map_pid, then reads that
	PMT and returns the elementary PID of the first stream with
	stream_type 11 (0x0B, DSM-CC - the carousel), or -1 if none is found.
	"""
	verbose("Getting program_map_pid from PAT\n")
	program_map_pid = -1
	demux_filter.pid = 0	# PID 0 carries the Program Association Table
	fcntl.ioctl(dmxfd, DMX_SET_FILTER, demux_filter)
	fcntl.ioctl(dmxfd, DMX_START)
	# Wait up to 2 s for the first PAT section to arrive.
	r, w, e = select.select([dmxfd], [] , [], 2)
	if not dmxfd in r:
		dmxfd.close()
		print('Timeout reading from ' + options.demux_device + ', exiting!')
		exit(2)
	buffer = dmxfd.read(3)	# table_id + the two section_length bytes
	table_id = buffer[0]
	b1 = buffer[1]
	b2 = buffer[2]
	sect_len = ((b1 & 0x0F) << 8) | b2	# bottom 12 bits = section_length
	buffer += dmxfd.read(sect_len) # The remaining sect_len bytes in the section start immediately after sect_len.
	program_map_pid = ((buffer[14] & 0x0F) << 8) | buffer[15] # Skip to the second program entry
	# NOTE(review): the fixed offset 14/15 assumes the PAT's first entry is
	# program 0 (network PID), making the second entry the first real
	# service -- confirm this holds on other networks.
	verbose("program_map_pid = " + str(program_map_pid) + "\n")
	fcntl.ioctl(dmxfd, DMX_STOP)

	verbose("Getting carousel_pid from PMT\n")
	carousel_pid = -1
	demux_filter.pid = program_map_pid
	fcntl.ioctl(dmxfd, DMX_SET_FILTER, demux_filter)
	fcntl.ioctl(dmxfd, DMX_START)
	# NOTE(review): unlike the PAT read above, this read has no select()
	# timeout, so it can block indefinitely if no PMT section arrives.
	buffer = dmxfd.read(3)
	table_id = buffer[0]
	b1 = buffer[1]
	b2 = buffer[2]
	sect_len = ((b1 & 0x0F) << 8) | b2
	buffer += dmxfd.read(sect_len)
	program_info_len =((buffer[10] & 0x0F) << 8) | buffer[11]
	p = 11 + 1 + program_info_len	# first entry of the elementary-stream loop
	while p < sect_len - program_info_len - 12:
		stream_type = buffer[p]
		es_len = ((buffer[p+3] & 0x0F) << 8) | buffer[p+4]	# ES_info_length
		if stream_type == 11:	# 0x0B = DSM-CC stream carrying the carousel
			carousel_pid = ((buffer[p+1] & 0x1F) << 8) | buffer[p+2]
			verbose("carousel_pid = " + str(carousel_pid) + "\n")
			break
		p = p + 5 + es_len	# 5 fixed header bytes per ES entry + its descriptors
	fcntl.ioctl(dmxfd, DMX_STOP)
	return carousel_pid
+
def find_pid():
	"""Scan the PAT (PID 0) with dvbsnoop and return the first program_map_PID.

	Returns the PID as a string, or -1 when no Program_map_PID line is seen.
	"""
	pipe = os.popen("dvbsnoop -n 1 -nph -ph 2 " + options.extra_args + " 0")
	try:
		for row in pipe.read().split("\n"):
			if "Program_map_PID:" in row:
				return row.strip().split(" ")[1]
	finally:
		pipe.close()
	return -1
+
def find_pid2(pid):
	"""Scan the PMT at *pid* with dvbsnoop for the MHEG carousel PID.

	Returns the Elementary_PID (as a string) that follows the first
	"Stream_type: 11" line (DSM-CC carousel stream), or -1 if absent.
	"""
	pipe = os.popen("dvbsnoop -n 1 -nph -ph 2 " + options.extra_args + " " + pid)
	saw_type_11 = False
	try:
		for row in pipe.read().split("\n"):
			if "Stream_type: 11" in row:
				saw_type_11 = True
			elif saw_type_11 and "Elementary_PID:" in row:
				return row.strip().split(" ")[1]
	finally:
		pipe.close()
	return -1
+
def download2(pid, datablocks, module_numbers, dmxfd, demux_filter):
	"""Download DSM-CC carousel blocks from *pid* via the demux device.

	Reads raw sections in a loop: table_id 60 (0x3C, DownloadDataBlock)
	sections are appended to *datablocks* as [module_id, block_no, bytes];
	a table_id 59 (0x3B) section with dsmccMessageId 4098 (0x1002, a
	DownloadInfoIndication) announces the block size and per-module sizes,
	from which the expected total block count is computed.  Module ids are
	appended to *module_numbers*.  Returns once every announced block has
	been collected.
	"""
	block_sizes = []
	module_sizes = []
	found_list = False	# True once the DII ("Download Message Block") has been parsed
	finished_download = False

	demux_filter.pid = pid
	fcntl.ioctl(dmxfd, DMX_SET_FILTER, demux_filter)
	fcntl.ioctl(dmxfd, DMX_START)

	while not finished_download:
		buffer = dmxfd.read(3)	# table_id + the two section_length bytes
		table_id = buffer[0]
		b1 = buffer[1]
		b2 = buffer[2]
		sect_syntax_ind = (b1 & 0x80) >> 7
		private_ind = (b1 & 0x40) >> 6
		sect_len = ((b1 & 0x0F) << 8) | b2	# bottom 12 bits = section_length
		buffer += dmxfd.read(sect_len)
		message_id = (buffer[10] << 8) | buffer[11]	# dsmccMessageId
		if table_id == 60:	# 0x3C: DownloadDataBlock (DDB)
			module_id = (buffer[20] << 8) | buffer[21]
			block_no = (buffer[24] << 8) | buffer[25]
			downloaded = False
			# The carousel repeats forever; skip blocks already stored.
			if len(datablocks) > 0 and len([blk for blk in datablocks if blk[0] == module_id and blk[1] == block_no]) > 0:
				downloaded = True
			if not downloaded and module_id != 0: # Module 0 does not contain EPG data and is not compressed
				datablocks.append([module_id, block_no, buffer[26:len(buffer)-4]]) # Last 4 bytes are CRC
				if found_list:
					verbose("Blocks left to download = " + str(total_blocks - len(datablocks)) + "        \r")
				else:
					verbose("Started downloading blocks. Waiting for Download Message Block...\r")

		elif table_id == 59 and message_id == 4098 and not found_list:	# 0x3B + 0x1002: DII
			found_list = True
			dsmcc_header_len = 12 # only if adaptation_length = 0
			block_size = (buffer[8 + dsmcc_header_len + 4] << 8) | buffer[8 + dsmcc_header_len + 5]
			no_of_modules = (buffer[8 + dsmcc_header_len + 18] << 8) | buffer[8 + dsmcc_header_len + 19]
			verbose("\nblock_size = " + str(block_size) + " no_of_modules = " + str(no_of_modules) + "\n")
			p = 8 + dsmcc_header_len + 20	# start of the module description loop
			total_blocks = 0
			for i in range(0, no_of_modules):
				module_id = (buffer[p] << 8) | buffer[p+1]
				a = array('B', buffer[p+2:p+6])
				module_size = struct.unpack(">I", a)[0]	# 32-bit big-endian moduleSize
				module_info_len = buffer[p+7]
				if module_id != 0: # Module 0 does not contain EPG data and is not compressed
					verbose("module_id = " + str(module_id) + " module_size = " + str(module_size) + "\n")
					module_numbers.append(module_id)
					module_sizes.append(module_size)
					# blocks for this module = ceil(module_size / block_size)
					total_blocks = total_blocks + module_size//block_size + (module_size % block_size > 0)
				p = p + 8 + module_info_len
			verbose("\nFound Download Message Block. " + str(total_blocks) + " blocks total to download...\n")

		if found_list and len(datablocks) >= total_blocks:
			finished_download = True

	fcntl.ioctl(dmxfd, DMX_STOP)
+
def download(pid, datablocks, module_numbers):
	"""Download DSM-CC carousel blocks from *pid* by scraping dvbsnoop output.

	Runs dvbsnoop (restarting it if its stream ends early) and parses its
	decoded text: "Table_ID: 59" sections (DSI/DII) announce the block
	size plus module ids/sizes; "Table_ID: 60" sections (DDB) carry the
	payload, appended to *datablocks* as [module_id, block_number,
	hex-text line].  Loops until every announced block has been seen.
	"""
	block_sizes = []
	module_sizes = []
	no_of_blocks = []	# blocks per module, parallel to module_numbers
	DDB = False	# currently parsing a Download Data Block section
	DSIorDII = False	# currently parsing a DSI/DII section
	block_list_found = False
	finished_download = False
	store_data = False	# next line holds the block payload
	while not finished_download:
		args = ["dvbsnoop", "-nph", "-ph", "2"]
		if options.extra_args != '':
			for a in options.extra_args.split(" "):
				args.append(a)
		args.append(pid)
		f = Popen(args, stdout = PIPE)
		for line in f.stdout:
			if line.find(b"Table_ID: 60") != -1:
				# This is a DDB (Data Download Block)
				DDB = True
				DSIorDII = False
			elif line.find(b"Table_ID: 59") != -1:
				# This is a DSI or DII block
				DSIorDII = True
				DDB = False
			if DSIorDII:
				if line.find(b"blockSize:") != -1:
					# Store the DDB block size
					block_size = int(line.split(b" ")[1])
				elif line.find(b"moduleId:") != -1:
					# Store module number...
					module_num = int(line.strip().split(b" ")[1])
				elif line.find(b"moduleSize:") != -1:
					# ...and module size.
					module_size = int(line.strip().split(b" ")[1])
					gotalready = False
					if len(module_numbers) > 0:
						if module_num in module_numbers:
							gotalready = True
					if not gotalready and module_num < 50 and module_num != 0: # Module 0 does not contain EPG data
						module_numbers.append(module_num)
						module_sizes.append(module_size)
						# blocks for this module = ceil(module_size / block_size)
						no_of_blocks.append(module_size//block_size + (module_size % block_size > 0))
			if DDB:
				if line.find(b"moduleId:") != -1:
					# Store the module number
					module_num = int(line.split(b" ")[1])
				elif line.find(b"blockNumber:") != -1:
					# Store the block number
					block_num = int(line.split(b" ")[1])
				elif line.find(b"Block Data:") != -1:
					# Set flag to store the data on the next line
					store_data = True
				elif store_data:
					# Reset the flag and store the data
					store_data = False
					downloaded = False
					# The carousel repeats forever; skip blocks already stored.
					if len(datablocks) > 0 and len([blk for blk in datablocks if blk[0] == module_num and blk[1] == block_num]) > 0:
						downloaded = True
					if not downloaded and module_num != 0: # Module 0 does not contain EPG data
						datablocks.append([module_num, block_num, line.strip()])
						if block_list_found:
							verbose("Blocks left to download = " + str(total_blocks - len(datablocks)) + "   \r")
						else:
							verbose("Started downloading blocks. Waiting for Download Message Block...\r")

				# NOTE(review): this total is only computed once a DDB line has
				# been seen, because the check sits inside the DDB branch.
				if len(module_numbers) > 0 and not block_list_found:
					block_list_found = True
					total_blocks = 0
					for b in no_of_blocks:
						total_blocks = total_blocks + b
					verbose("\nFound Download Message Block. " + str(total_blocks) + " blocks total to download...\n")


			if block_list_found and len(datablocks) >= total_blocks:
				finished_download = True

			if finished_download:
				# Kill dvbsnoop; its stdout then ends, the for loop drains,
				# and the outer while loop exits.
				f.terminate()
+
def build_modules(datablocks, module_numbers):
	"""Assemble downloaded blocks into modules, decompress and parse them.

	*datablocks* holds [module_id, block_no, payload] entries; the
	payload is hex text when dvbsnoop was used, raw bytes otherwise.
	*module_numbers* lists the module ids announced by the DII.  Both are
	sorted and merge-joined (the "balance line" that replaced the old
	per-module scan), then each module is zlib-decompressed (falling back
	to a partial decompression for truncated modules) and handed to
	build_modules2().

	Fixes over the previous version:
	 - a module that failed BOTH the full and the partial decompression
	   previously passed an undefined (first iteration) or stale (later
	   iterations) buffer to build_modules2(); it is now skipped;
	 - an empty *datablocks* no longer raises an uncaught StopIteration.
	"""
	verbose("\nBuilding modules\n")
	module_numbers.sort()
	modules = []
	datablocks.sort(key=lambda a: (a[0], a[1]))
	blki = iter(datablocks)
	blk = next(blki, None)
	for i in module_numbers:
		bytestring = b''
		# Advance to the first block of module i (blocks are sorted).
		while blk is not None and blk[0] != i:
			blk = next(blki, None)
		if blk is None:
			break
		# Concatenate all blocks of module i, in block order.
		while blk is not None and blk[0] == i:
			if options.use_dvbsnoop:
				# dvbsnoop payloads are hex text: convert digit pairs to bytes.
				for c in blk[2].split(b" "):
					if len(c) == 2:
						bytestring += struct.pack("B", int(c, 16))
			else:
				bytestring += blk[2]
			blk = next(blki, None)
		modules.append([i, bytestring])

	verbose("Decompressing modules\n")
	for m, payload in modules:
		if not payload:
			verbose("No data downloaded for module " + str(m) + ".\n")
			continue
		unsquashed = None
		try:
			verbose("Expanding module " + str(m) + " from " + str(len(payload)) + " bytes to ")
			unsquashed = zlib.decompress(payload)
			verbose(str(len(unsquashed)) + " bytes\n")
		except Exception as e:
			verbose("\nException on decompress:\n")
			print(e)
			traceback.print_exc()
			verbose("\nLooks like module " + str(m) + " is incomplete. Attempting partial decompression.\n")
			try:
				# Decompress whatever prefix we have (up to 100 kB at a time).
				decom = zlib.decompressobj()
				unsquashed = decom.decompress(payload, 100000)
				verbose("Possible successful partial decompression of module " + str(m) + "\n")
			except Exception as e:
				verbose("\nException on decompress:\n")
				print(e)
				traceback.print_exc()
				verbose("Failed partial decompression of module " + str(m) + "\n")
		if unsquashed is not None:
			build_modules2(unsquashed)
+
def build_modules2(unsquashed):
	"""Extract candidate EPG strings from one decompressed carousel module.

	Walks the contiguous chain of BIOP messages in *unsquashed* (each
	message's size is a 32-bit big-endian value 8 bytes after 'BIOP'),
	keeps only 'fil' messages containing 'crid://' near the front,
	normalises whitespace/control bytes, splits on the \\x1c record
	separator and appends the resulting list to global unsquashed_modules.

	Fix: a module containing no 'BIOP' marker at all previously raised
	IndexError on listo[0][0]; such modules are now skipped.
	"""
	listo = []
	for ll in re.finditer(b'BIOP', unsquashed):
		v = ll.start() + 8  # Offset of message_size (32 bit uimsbf)
		listo.append([ll.start(), int(struct.unpack(">I", unsquashed[v:v+4])[0]) + 12])
	if not listo:
		return  # no BIOP messages in this module -- nothing to extract

	next_byte = listo[0][0]  # Following is messy to try and avoid possible spurious instances of the string "BIOP"
	stringo = []             # and also avoid some non-EPG messages with string "crid://" buried in them.
	for k in listo:
		# Only accept matches that sit exactly where the previous message ended.
		if k[0] == next_byte:
			end_byte = next_byte + k[1]
			type_id_ofs = next_byte + 13 + ord(unsquashed[next_byte + 12:next_byte + 13]) + 4
			if unsquashed[type_id_ofs:type_id_ofs + 3] == b"fil" and unsquashed[next_byte:next_byte + 130].find(b"crid://") != -1:
				chunk = unsquashed[next_byte:end_byte]
				chunk = re.sub(b'[\x0a\x0d]', b' ', chunk) # Get rid of CR/LF. Not needed in MythTV EPG.
				chunk = re.sub(b'[\x04\x00]', b' ', chunk)
				chunk = re.sub(b'\x1b[Cc]', b' ', chunk)
				crido = chunk.find(b'crid://')  # Find suitable point before which we get rid of \x1c which is used for splitting
				chunk = re.sub(b'\x1c', b'.', chunk[:crido]) + chunk[crido:] # so we don't accidentally split beginning of message
				stringo.extend(chunk.split(b'\x1c'))
			next_byte = end_byte
	unsquashed_modules.append(stringo)
+
def get_MHEG_data():
	"""Flatten global unsquashed_modules into one list of EPG lines.

	Each b'BIOP' message is trimmed to its last five '\\x1d'-separated
	fields.  The fifth-from-last field is a day string (parsed with
	"%a %d %b", e.g. b'Mon 05 Jan'); it is expanded to a synthetic
	b'DAYdayDAYday<YYYYMMDD>' marker line (year inferred, see below)
	emitted ahead of the trimmed BIOP line.  Non-BIOP lines pass through
	unchanged.
	"""
	lines = []
	# Loop through all the module files
	for rawlines in unsquashed_modules:
		# Loop through all the BIOP messages
		for line in rawlines:
			if line.startswith(b'BIOP'):
				fields = line.split(b'\x1d')
				l = len(fields) - 1
				temp = b''
				# Re-assemble the last five fields, walking right to left.
				for i in range(0, 5):
					temp = b'\x1d' + fields[l - i].strip() + temp
					if i == 4:
						# Get the date.  The reference point t2 is 10 days in the
						# past so EPG data spanning Dec/Jan resolves to the right year.
						t = time.localtime(time.time() - 10 * 24 * 3600) # time 10 days before now - use for Dec/Jan issue
						t2 = time.strptime(str(t[0]) + str(t[1]).rjust(2,'0') + str(t[2]).rjust(2,'0'), "%Y%m%d")
						theday = time.strptime(fields[l - i].strip().decode() + " " + time.strftime("%Y", t2), "%a %d %b %Y")
						# Fix for data crossing from one year to the next
						if theday >= t2:
							thedate = time.strftime("%Y%m%d", theday)
						else:
							# Day falls before the reference date, so it belongs to next year.
							thedate = str(t2[0] + 1) + time.strftime("%m%d", theday)
						lines.append(b'DAYdayDAYday' + thedate.encode())
				lines.append(b'BIOP' + temp)
			else:
				lines.append(line)
	return lines
+
def maketime(thedate):
	"""Format the struct_time-style tuple *thedate* as an XMLTV timestamp.

	Depending on the command-line options the result is local time with
	an explicit UTC offset suffix (-z), plain UTC (-u), or plain local
	time (default), always as "YYYYmmddHHMM00".

	Fix: the offset is now zero-padded ('{0:+05}') so offsets below ten
	hours render as e.g. '+0530' rather than the malformed ' +530' the
	old space-padded '{0:+5}' format produced.
	"""
	if options.timezone:	# If times should be local + offset
		if time.daylight and time.localtime(time.mktime(thedate)).tm_isdst:
			offset = -time.altzone	# Seconds WEST of UTC. Negative means EAST of UTC.
		else:
			offset = -time.timezone # Seconds WEST of UTC. Negative means EAST of UTC.
		hh = int(offset/3600.0) # Divide by float to get correct values for negatives
		mm = round((offset - hh*3600)/60)
		offset  = hh * 100 + mm	# e.g. +13:00 -> 1300
		thetime = time.strftime("%Y%m%d%H%M00", time.localtime(time.mktime(thedate)))
		thetime = thetime + ' {0:+05}'.format(offset)
	elif options.UTC:	# If times should be UTC with no offset
		thetime = time.strftime("%Y%m%d%H%M00", time.gmtime(time.mktime(thedate)))
	else:	# If times should be local
		thetime = time.strftime("%Y%m%d%H%M00", time.localtime(time.mktime(thedate)))
	return thetime
+
def parse_MHEG_data(lines, icons, ratings):
	'''Parse raw MHEG-5 carousel records into the sqlite "programs" table.

	lines   -- iterable of bytes records, fields separated by 0x1d
	icons   -- dict mapping marker substring -> flag value (HD/dolby/etc.)
	ratings -- dict mapping marker substring -> rating string

	Side effects: appends newly-seen channel names to the module-level
	``chanlist`` and inserts one row per programme via the module-level
	cursor ``c``.  Relies on ``maketime`` and ``verbose`` defined
	elsewhere in this file.
	'''
	for line in lines:
		fields = line.split(b'\x1d')
		num_fields = len(fields)
		if line.startswith(b'DAYdayDAYday'):
			# Date header record: YYYYMMDD packed at fixed offsets.
			# This date applies to all following programme records.
			date = [int(line[12:16]), int(line[16:18]), int(line[18:20])]

		# Get channel info if line starts with "BIOP" and contains "crid:" and update channels table if channel not already found
		elif fields[0] == b'BIOP' and line.find(b'crid:') != -1:
			chan = str(fields[2], encoding="utf-8")   # Can be funny characters in channel name
			# Membership test replaces the original bare try/except
			# around chanlist.index() -- same effect, no silent catch.
			if chan not in chanlist:
				chanlist.append(chan)
				verbose(chan + "\n")

		# Build the showrec list to create programme info to add to programs table
		elif num_fields > 9 and fields[1]:
			# TODO: Need to think more about handling daylight savings change
			# fields[1]/fields[2] hold start/stop as seconds since midnight.
			start_time, local_start = _times_from_secs(date, fields[1])
			end_time, local_end = _times_from_secs(date, fields[2])

			fields[6] = fields[6].lstrip(b"/")
			# Decode title and description in case there are weird characters in them
			showrec = [start_time, end_time, chan,
				   str(fields[7], encoding="utf-8"),
				   str(fields[8], encoding="utf-8"),
				   str(fields[6], encoding='utf-8')]
			# NOTE(review): the original also stripped leading "/" from
			# fields[8:] here, but those values were never read again
			# (icon/rating searches below use `line`), so that loop was
			# dead code and has been removed.
			# Sort so we are sure of the column order on every run.
			for k, v in sorted(icons.items()):
				showrec.append(v if line.find(k.encode()) != -1 else "blank")
			rating_found = False
			for k, v in ratings.items():
				if line.find(k.encode()) != -1:
					rating_found = True
					showrec.append(v)
			if not rating_found:
				showrec.append("no rating")
			showrec.append(str(fields[4], encoding='utf-8'))
			showrec.append(local_start)
			showrec.append(local_end)
			c.execute("insert into programs values (?,?,?,?,?,?,?,?,?,?,?,?,?)", showrec)

def _times_from_secs(date, secs_field):
	'''Convert a bytes seconds-since-midnight field plus a [Y, M, D] date
	list into an (xmltv_time, local_time) string pair.'''
	secs = int(secs_field)
	hh = secs // 3600
	# round() matches the original minute computation exactly.
	mm = round((secs - hh * 3600) / 60)
	stamp = tuple(date + [hh, mm, 0, 0, 0, -1])
	return (maketime(stamp),
		time.strftime("%Y%m%d%H%M00", time.localtime(time.mktime(stamp))))
+
def match_channels(chaninfo_db):
	'''Fuzzy-match MHEG channel names in ``chanlist`` against MythTV
	channel rows and rewrite matched programme channel names to the
	MythTV xmltv ID.

	Mutates the module-level ``channels`` and ``chanlist`` lists and
	updates the programs table via cursor ``c``.
	'''
	candidates = list(chaninfo_db)
	# Remove channels that are already matched, and their DB rows,
	# so neither side is considered again.
	for known in channels:
		if known[1] in chanlist:
			chanlist.remove(known[1])
		for row in candidates:
			if row[2] == known[0]:
				candidates.remove(row)
				break

	scored = []
	for name in chanlist:
		upper_name = name.upper()
		for row in candidates:
			if not difflib.get_close_matches(upper_name, [s.upper() for s in row]):
				continue
			# Aggregate similarity across every column of the row.
			score = sum(difflib.SequenceMatcher(None, upper_name, s.upper()).ratio()
				    for s in row)
			scored.append([score, name, row[2]])

	# Best matches first when iterating the list.
	scored.sort(key=lambda entry: entry[0], reverse=True)

	for score, name, xmltvid in scored:
		# Skip empty IDs and IDs already claimed by another channel.
		if xmltvid == '' or xmltvid in [existing[0] for existing in channels]:
			continue
		channels.append([xmltvid, name])
		verbose('"' + name + '" matched to MythTV xmltv ID "' + xmltvid + '"\n')
		c.execute('update programs set channel=? where channel=?', (xmltvid, name))
+
def map_channels(chaninfo_map):
	'''Apply an explicit channel-name -> xmltvid mapping.

	Each entry of ``chaninfo_map`` is a (channel_name, xmltvid) pair;
	entries of any other length are ignored.  Mutates the module-level
	``channels`` list and updates the programs table via cursor ``c``.
	'''
	for entry in chaninfo_map:
		if len(entry) != 2:
			continue
		chan, xmltvid = entry
		channels.append([xmltvid, chan])
		verbose('"' + chan + '" matched to MythTV xmltv ID "' + str(xmltvid) + '"\n')
		c.execute('update programs set channel=? where channel=?', (xmltvid, chan))
+
def delete_unmapped_channels():
	'''Delete programmes on channels which have not been mapped or matched.

	Reads the module-level ``channels`` list and deletes via cursor ``c``.
	'''
	ids = [t[0] for t in channels]
	if not ids:
		# "NOT IN ()" is a syntax error in SQLite; with no mapped
		# channels every programme is unmapped, so delete them all.
		c.execute('delete from programs')
		return
	placeholders = ','.join('?' * len(ids))
	c.execute(f'delete from programs where channel not in ({placeholders})', ids)
+
def fix_midnight():
	'''Repair programme records around the midnight boundary.

	The MHEG data splits (or truncates) shows at midnight.  For each show
	whose local stop time is exactly midnight, look for a record on the
	same channel starting at that instant: if it is the same show (same
	title and episode_id) extend the first record's stop time and drop
	the duplicate; otherwise the midnight-ending record is suspect and is
	deleted.  Finally, records starting at local midnight whose broadcast
	start_time field is not "00:00" are deleted as bogus.

	Uses two module-level cursors: ``c`` iterates the selection while
	``c2`` performs the updates/deletes.
	NOTE(review): ``c2`` mutates the table while ``c`` is still stepping
	through its result set -- presumably safe for this workload, but
	order-sensitive; do not reorder these statements.
	'''
	# Find shows supposedly ending at midnight and check and fix the stop times
	c.execute('select * from programs where local_stop like "%000000"')
	for row in c:
		t = (row['stop'], row['channel'])
		# Look for a show starting at midnight same day, same channel
		c2.execute('select * from programs where start=? and channel=?', t)
		r2 = c2.fetchone()
		if r2 != None:
			if r2['title'] == row['title'] and r2['episode_id'] == row['episode_id']:
				# Correct the program stop time if the show has same title and episode_id
				t = (r2['stop'], r2['channel'], row['stop'])
				c2.execute('update programs set stop=? where channel=? and stop=?', t)
				# Delete the bogus duplicate record which starts at midnight
				t = (r2['channel'], r2['start'], r2['stop'])
				c2.execute('delete from programs where channel=? and start=? and stop=?', t)
		else:
			# Didn't find a show starting after this one so delete it because it is suspect.
			# Cannot be sure about shows supposedly ending at midnight if there is no following
			# show supposedly starting at midnight.
			t = (row['channel'], row['start'], row['stop'])
			c2.execute('delete from programs where channel=? and start=? and stop=?', t)

	# Find shows starting at midnight and delete if start_time not "00:00"
	c.execute('delete from programs where local_start like "%000000" and start_time != "00:00"')
+
def build_xml():
	'''Build the XMLTV document from the programs table and write it to
	``options.output_file``.

	Reads the module-level ``channels`` list and cursor ``c``.  The XML
	declaration promises UTF-8, so the file is opened with an explicit
	utf-8 encoding rather than the locale default.
	'''
	root_element = ET.Element("tv")
	root_element.set("date", maketime(time.localtime()))
	root_element.set("generator-info-name", "mhegepgsnoop.py " + VERSION)
	root_element.set("source-info-name", "DVB-T MHEG Stream")

	# Create the channel elements, sorted by display name.
	channels.sort(key = lambda a: a[1].lower())
	for r in channels:
		ch = ET.Element("channel")
		display_name = ET.Element("display-name")
		ch.set("id", r[0])
		display_name.text = r[1]
		ch.append(display_name)
		root_element.append(ch)

	# Create the programme elements
	c.execute('select distinct * from programs order by channel collate nocase, start')
	for r in c:
		try:
			prog = ET.Element("programme")
			root_element.append(prog)
			prog.set("channel", r['channel'])
			prog.set("start", r['start'])
			prog.set("stop", r['stop'])
			title = ET.Element("title")
			title.text = r['title']
			prog.append(title)
			desc = ET.Element("desc")
			desc.text = r['desc']
			prog.append(desc)
			if r['episode_id']:
				episode = ET.Element("episode-num")
				# Named access for consistency (was positional r[5]).
				episode.text = r['episode_id']
				episode.set("system", "dd_progid")
				prog.append(episode)
			if r['hd_flag'] == "HDTV":
				video = ET.Element("video")
				present = ET.Element("present")
				quality = ET.Element("quality")
				present.text = "yes"
				quality.text = "HDTV"
				video.append(present)
				video.append(quality)
				prog.append(video)
			if r['dolby_flag'] == "dolby":
				audio = ET.Element("audio")
				stereo = ET.Element("stereo")
				stereo.text = "dolby"
				audio.append(stereo)
				prog.append(audio)
			if r['teletext_flag'] == "teletext":
				subtitles = ET.Element("subtitles")
				subtitles.set("type", "teletext")
				prog.append(subtitles)
			if r['rating'] != "no rating":
				rating = ET.Element("rating")
				rating.set("system", "Freeview")
				value = ET.Element("value")
				# Named access for consistency (was positional r[9]).
				value.text = r['rating']
				rating.append(value)
				prog.append(rating)
		except Exception:
			# A malformed row still produces a placeholder element so the
			# failure is visible in the output.  Narrowed from a bare
			# "except:" so KeyboardInterrupt/SystemExit are not swallowed.
			prog = ET.Element("programme")
			root_element.append(prog)
			prog.set("foobar", "foobar")

	# Write the xml doc to disk
	with open(options.output_file, "w", encoding="utf-8") as outfile:
		# Add manual declaration and doctype headers because it's tedious to do any other way
		outfile.write('<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE tv SYSTEM "xmltv.dtd">')
		outfile.write(ET.tostring(root_element, encoding="unicode", method="xml"))
+
def prettyup_xml():
	'''Re-indent the single-line XML output so it is easier to read.

	Edits ``options.output_file`` in place via fileinput; print() (with
	its trailing newline) writes each transformed line back, exactly as
	the original did.
	'''
	# (pattern, replacement) pairs, applied to every line in this order.
	rules = [
		("(<[/]*chan)", "\n\t\\1"),
		("(<displ|<icon|<tit|<desc|<epi|<subt)", "\n\t\t\\1"),
		("(<[/]*prog)", "\n\t\\1"),
		("(<[/]*rat)", "\n\t\t\\1"),
		("(<[/]*aud)", "\n\t\t\\1"),
		("(<[/]*vid)", "\n\t\t\\1"),
		("(<val|<pres|<qual|<ster)", "\n\t\t\t\\1"),
		("(</tv|<tv|<!DOCTYPE)", "\n\\1"),
	]
	for line in fileinput.FileInput(options.output_file, inplace=1):
		for pattern, repl in rules:
			line = re.sub(pattern, repl, line)
		print(line)
+
def do_clean_titles(clean_titles):
	'''Strip a user-supplied pattern from the start of <title> elements.

	``clean_titles`` is interpolated into a regex, so it may itself be a
	regular expression (e.g. alternations).  Edits
	``options.output_file`` in place via fileinput.
	'''
	verbose('Stripping "' + clean_titles + '" from titles\n')
	title_re = re.compile('<title>(' + clean_titles + ')')
	for line in fileinput.FileInput(options.output_file, inplace=1):
		# sys.stdout.write avoids the extra newline print() would add.
		sys.stdout.write(title_re.sub('<title>', line))
+
# Script entry point: run main() only when executed directly, not on import.
# main() is defined earlier in this file (outside this chunk).
if __name__ == '__main__':
	main()