From 1a53694f75c0365f3cc4328410ebe4f1063ae0e5 Mon Sep 17 00:00:00 2001
From: yumoqing
Date: Wed, 15 Oct 2025 16:47:47 +0800
Subject: [PATCH] bugfix

---
 iptv/downloadchannels.py  |  73 ++++++++++++++++++++--
 iptv/m3u8test.py          | 125 +++++++++++++++----------------------
 wwwroot/add_channels.dspy |   3 +-
 3 files changed, 119 insertions(+), 82 deletions(-)

diff --git a/iptv/downloadchannels.py b/iptv/downloadchannels.py
index 8f9c8e2..82c8e53 100644
--- a/iptv/downloadchannels.py
+++ b/iptv/downloadchannels.py
@@ -23,12 +23,77 @@ async def download(url):
     clist = m3u.m3uParser(txt)
     return clist
 
+async def check_if_exists(url):
+    env = ServerEnv()
+    dbname = env.get_module_dbname('iptv')
+    db = DBPools()
+    async with db.sqlorContext(dbname) as sor:
+        sql = "select * from iptvchannels where url = ${url}$"
+        recs = await sor.sqlExe(sql, {'url': url})
+        if len(recs) > 0:
+            return True
+    return False
-async def load_url_iptv(media_type,url, dbname):
+async def write_goodchannel(b):
+    debug(f'write goodchannels({b.url})')
+    env = ServerEnv()
+    dbname = env.get_module_dbname('iptv')
+    db = DBPools()
+    async with db.sqlorContext(dbname) as sor:
+        sql = """insert into iptvchannels
+        (
+            id,
+            tv_group,
+            tv_name,
+            logo_url,
+            url,
+            media_type,
+            download_date,
+            del_flg
+        )
+        values
+        (
+            ${id}$,
+            ${tv_group}$,
+            ${tv_name}$,
+            ${logo_url}$,
+            ${url}$,
+            ${media_type}$,
+            ${download_date}$,
+            '0'
+        )"""
+        query = """select * from iptvchannels where url=${url}$"""
+        q = await sor.sqlExe(query, {'url': b['url']})
+        if len(q) == 0:
+            r = copy(b)
+            r['media_type'] = 'iptv'
+            r['id'] = getID()
+            if not r.get('tv_group'):
+                r['tv_group'] = r.get('group-title','')[:500]
+            if not r.get('tv_name'):
+                r['tv_name'] = r.get('name','')[:500]
+            if not r.get('logo_url'):
+                r['logo_url'] = r.get('tvg-logo',None)
+            if r['logo_url'] and len(r['logo_url']) > 1000:
+                r['logo_url'] = None
+            if len(r['url']) >= 1000:
+                return
+            dt = datetime.now()
+            r['download_date'] = '%d-%02d-%02d' % (dt.year,dt.month,dt.day)
+            await sor.sqlExe(sql,r)
+
+async def load_url_iptv(url):
     clist = await download(url)
-    if clist:
-        debug('%d channels' % len(goodchannels))
-        good, bad = await test_channels(clist, if_ok=write_goodchannel)
+    debug(f'clist={clist}')
+    newchannels = []
+    for c in clist or []:
+        b = await check_if_exists(c.url)
+        if not b:
+            newchannels.append(c)
+
+    if len(newchannels) > 0:
+        debug('%d new channels' % len(newchannels))
+        good, bad = await test_channels(newchannels, if_ok=write_goodchannel)
         debug(f'{len(good)} new channels add {len(bad)} channels exists')
     else:
         debug(f'{url} return None')
 
diff --git a/iptv/m3u8test.py b/iptv/m3u8test.py
index 8e9f193..7fc8914 100644
--- a/iptv/m3u8test.py
+++ b/iptv/m3u8test.py
@@ -1,8 +1,7 @@
 import time
 from appPublic.log import debug, exception
 from appPublic.uniqueID import getID
-from sqlor.dbpools import DBPools
 from ahserver.serverenv import ServerEnv
 from aiohttp import (
     client,
     ClientSession
@@ -14,86 +13,56 @@
 from sqlor.dbpools import DBPools
 async def test_channels(channels,if_failed=None, if_ok=None):
     goodchannels = []
     badchannels = []
+    for c in channels:
+        try:
+            t1 = time.time()
+            async with ClientSession() as sess:
+                async with sess.get(c.url) as resp:
+                    if resp.status != 200:
+                        debug(f'{resp.status=}, {type(resp.status)=}')
+                        badchannels.append(c)
+                        if if_failed:
+                            c['errorcode'] = resp.status
+                            await if_failed(c)
+                    else:
+                        goodchannels.append(c)
+                        if if_ok:
+                            debug(f'write good channels')
+                            t2 = time.time()
+                            c['channel_delay'] = t2 - t1
+                            await if_ok(c)
+        except Exception as e:
+            debug(f'{c.url}, {e}')
+            badchannels.append(c)
+            if if_failed:
+                c['errorcode'] = 600
+                await if_failed(c)
+    return goodchannels, badchannels
+
+async def write_badchannel(b):
+    debug(f'write badchannels({b.url})')
     env = ServerEnv()
     dbname = env.get_module_dbname('iptv')
     db = DBPools()
     async with db.sqlorContext(dbname) as sor:
-        for c in channels:
-            try:
-                t1 = time.time()
-                async with ClientSession() as sess:
-                    async with sess.get(c.url) as resp:
-                        debug(f'{resp.status=}, {type(resp.status)=}')
-                        if resp.status != 200:
-                            badchannels.append(c)
-                            if if_ok:
-                                t2 = time.time()
-                                c['channel_delay'] = t2 - t1
-                                await if_ok(sor, c)
-                        else:
-                            goodchannels.append(c)
-                            if if_failed:
-                                c['errorcode'] = resp.status
-                                await if_failed(sor, c)
-            except Exception as e:
-                debug(f'{c.url}, {e}')
-                badchannels.append(c)
-                if if_failed:
-                    await if_failed(sor, c)
-    return goodchannels, badchannels
-
-async def write_badchannel(sor, b):
-    await sor.C('badchannels', {
-        'id':getID(),
-        'channelid': b.id,
-        'errorcode': b.errorcode
-    })
-    sql = "update iptvchannels set del_flg='1' where id=${id}$"
-    await sor.sqlExe(sql, {'id': b.id})
-
-async def write_goodchannel(sor, b):
-    sql = """insert into iptvchannels
-(
-    id,
-    tv_group,
-    tv_name,
-    logo_url,
-    url,
-    media_type,
-    download_date,
-    del_flg
-)
-values
-(
-    ${id}$,
-    ${tv_group}$,
-    ${tv_name}$,
-    ${logo_url}$,
-    ${url}$,
-    ${media_type}$,
-    ${download_date}$,
-    '0'
-)"""
-    query = """select * from iptvchannels where url=${url}$"""
-    q = await sor.sqlExe(query,{'url', b['url']})
-    if len(q) == 0:
-        r = copy(b)
-        r['media_type'] = media_type
-        r['id'] = getID()
-        if not r.get('tv_group'):
-            r['tv_group'] = r.get('group-title','')[:500]
-        if not r.get('tv_name'):
-            r['tv_name'] = r.get('name','')[:500]
-        if not r.get('logo_url'):
-            r['logo_url'] = r.get('tvg-logo',None)
-        if r['logo_url'] and len(r['logo_url']) > 1000:
-            r['logo_url'] = None
-        if len(r['url']) >= 1000:
-            return
-        dt = datetime.now()
-        r['download_date'] = '%d-%02d-%02d' % (dt.year,dt.month,dt.day)
-        await sor.sqlExe(sql,r)
+        await sor.C('badchannels', {
+            'id':getID(),
+            'channelid': b.id,
+            'errorcode': b.errorcode
+        })
+        sql = "update iptvchannels set del_flg='1' where id=${id}$"
+        await sor.sqlExe(sql, {'id': b.id})
+    debug(f'write badchannels finished')
+async def write_goodchannel(b):
+    debug(f'write goodchannels({b.url})')
+    env = ServerEnv()
+    dbname = env.get_module_dbname('iptv')
+    db = DBPools()
+    async with db.sqlorContext(dbname) as sor:
+        sql = "update iptvchannels set channel_delay = ${channel_delay}$ where id=${id}$"
+        await sor.sqlExe(sql, {'id':b.id, 'channel_delay': b.channel_delay})
+
 async def kickout_badchannels():
     db = DBPools()
     env = ServerEnv()
@@ -101,5 +70,7 @@ async def kickout_badchannels():
     channels = []
     async with db.sqlorContext(dbname) as sor:
         channels = await sor.R('iptvchannels', {'del_flg':'0'})
-    good, bad2 = await test_channels(channels, if_failed=write_badchannel)
+    good, bad = await test_channels(channels,
+            if_ok=write_goodchannel,
+            if_failed=write_badchannel)
     debug(f'{len(good)=},{len(bad)}')
diff --git a/wwwroot/add_channels.dspy b/wwwroot/add_channels.dspy
index 06075ea..ba86f3f 100644
--- a/wwwroot/add_channels.dspy
+++ b/wwwroot/add_channels.dspy
@@ -1,7 +1,8 @@
 url = params_kw.url
 debug(f'{params_kw=}, {url=}')
 try:
-    x = await load_url_iptv('iptv', url, 'iptvdb')
+    dbname = get_module_dbname('iptv')
+    x = background_reco(load_url_iptv, url)
     return {
         "widgettype":"Message",
         "options":{
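
Note (not part of the patch): a minimal, self-contained sketch of the reworked test_channels() callback contract, useful for a quick local check. After this change, if_ok receives only the channel (with channel_delay already filled in) and if_failed receives it with errorcode set, instead of both callbacks getting a sor handle. The Chan helper, the sample URLs, and the import path are illustrative assumptions, not part of the project.

import asyncio

from iptv.m3u8test import test_channels  # assumes the repo root is on sys.path


class Chan(dict):
    # Stand-in for a parsed channel: a dict with attribute access, mirroring
    # how the patch uses both c.url and c['errorcode'].
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as e:
            raise AttributeError(name) from e


async def on_ok(c):
    # if_ok callbacks now get only the channel; channel_delay is pre-filled.
    print('good:', c.url, c['channel_delay'])


async def on_failed(c):
    # if_failed callbacks get the channel with errorcode set
    # (the HTTP status, or 600 when the request raised an exception).
    print('bad:', c.url, c['errorcode'])


async def main():
    channels = [
        Chan(url='http://example.com/live.m3u8', name='demo'),     # placeholder URL
        Chan(url='http://127.0.0.1:1/nothing.m3u8', name='dead'),  # unreachable on purpose
    ]
    good, bad = await test_channels(channels, if_ok=on_ok, if_failed=on_failed)
    print(f'{len(good)} good, {len(bad)} bad')


if __name__ == '__main__':
    asyncio.run(main())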