diff --git a/modules/bilibili/bili_api.py b/modules/bilibili/bili_api.py index 227f078a..2ad178a3 100644 --- a/modules/bilibili/bili_api.py +++ b/modules/bilibili/bili_api.py @@ -1,5 +1,4 @@ from datetime import datetime -import traceback from core.builtins import Bot, Image, Plain, Url from core.utils.cooldown import CoolDown @@ -29,7 +28,6 @@ async def get_video_info(msg: Bot.MessageSession, query, get_detail=False): except ValueError as e: if str(e).startswith('412'): await msg.finish(msg.locale.t('bilibili.message.error.rejected')) - traceback.print_exc() view = res['data']['View'] stat = view['stat'] diff --git a/modules/cytoid/profile.py b/modules/cytoid/profile.py index ee6cdfa3..075867b8 100644 --- a/modules/cytoid/profile.py +++ b/modules/cytoid/profile.py @@ -15,11 +15,10 @@ async def cytoid_profile(msg: Bot.MessageSession): await msg.finish(msg.locale.t('cytoid.message.user_unbound', prefix=msg.prefixes[0])) profile_url = 'http://services.cytoid.io/profile/' + query_id try: - profile = json.loads(await get_url(profile_url, status_code=200)) + profile = json.loads(await get_url(profile_url, 200)) except ValueError as e: if str(e).startswith('404'): await msg.finish(msg.locale.t('cytoid.message.user_not_found')) - raise e uid = profile['user']['uid'] nick = profile['user']['name'] avatar = profile['user']['avatar']['large'] diff --git a/modules/maimai/apiquery.py b/modules/maimai/apiquery.py index d54578a9..b15e0eb0 100644 --- a/modules/maimai/apiquery.py +++ b/modules/maimai/apiquery.py @@ -1,5 +1,4 @@ import re -import traceback from core.builtins import Bot, Plain, Image as BImage from core.utils.image import msgchain2image diff --git a/modules/maimai/libraries/chunithm_apidata.py b/modules/maimai/libraries/chunithm_apidata.py index c3b1dce0..e1de55d1 100644 --- a/modules/maimai/libraries/chunithm_apidata.py +++ b/modules/maimai/libraries/chunithm_apidata.py @@ -1,4 +1,3 @@ -import traceback import ujson as json from core.builtins import Bot, Plain @@ 
-32,7 +31,5 @@ async def get_record(msg, payload): await msg.finish(msg.locale.t("chunithm.message.forbidden.eula")) else: await msg.finish(msg.locale.t("chunithm.message.forbidden")) - else: - Logger.error(traceback.format_exc()) if data: return data \ No newline at end of file diff --git a/modules/maimai/libraries/maimaidx_apidata.py b/modules/maimai/libraries/maimaidx_apidata.py index fb032733..b311df21 100644 --- a/modules/maimai/libraries/maimaidx_apidata.py +++ b/modules/maimai/libraries/maimaidx_apidata.py @@ -3,6 +3,7 @@ import shutil import traceback import ujson as json +from config import Config from core.builtins import Bot, Plain, Image from core.logger import Logger from core.utils.cache import random_cache_path @@ -16,13 +17,14 @@ total_list = TotalList() async def update_alias(): try: url = "https://download.fanyu.site/maimai/alias.json" - data = await get_url(url, 200, fmt='json') + data = await get_url(url, 200, fmt='json', logging_err_resp=False) file_path = os.path.join(assets_path, "mai_alias.json") with open(file_path, 'w') as file: json.dump(data, file) - except: - Logger.error(traceback.format_exc()) + except Exception: + if Config('debug'): + Logger.error(traceback.format_exc()) return False return True @@ -31,7 +33,7 @@ async def update_covers(): try: cover_dir = f"{assets_path}/static/mai/cover" url = f"https://www.diving-fish.com/maibot/static.zip" - download_file = await download_to_cache(url, timeout=60) + download_file = await download_to_cache(url, timeout=60, logging_err_resp=False) Logger.info('Maimai covers download completed.') ca = random_cache_path() @@ -43,8 +45,9 @@ async def update_covers(): static_cover_dir = os.path.join(ca, 'mai/cover') if os.path.exists(static_cover_dir): shutil.move(static_cover_dir, cover_dir) - except: - Logger.error(traceback.format_exc()) + except Exception: + if Config('debug'): + Logger.error(traceback.format_exc()) return False os.remove(download_file) @@ -122,8 +125,6 @@ async def 
get_record(msg, payload): await msg.finish(msg.locale.t("maimai.message.forbidden.eula")) else: await msg.finish(msg.locale.t("maimai.message.forbidden")) - else: - Logger.error(traceback.format_exc()) if data: return data diff --git a/modules/mcv/mcv.py b/modules/mcv/mcv.py index 4e58eb27..a974b409 100644 --- a/modules/mcv/mcv.py +++ b/modules/mcv/mcv.py @@ -1,8 +1,9 @@ -import json -import re import datetime +import re +import traceback from google_play_scraper import app as google_play_scraper +import ujson as json from core.builtins import ErrorMessage from core.logger import Logger diff --git a/modules/meme/urban.py b/modules/meme/urban.py index ef56eef2..240d1594 100644 --- a/modules/meme/urban.py +++ b/modules/meme/urban.py @@ -27,7 +27,8 @@ async def urban(term: str, locale: Locale): text = await get_url(url, 200, headers={'accept': '*/*', 'accept-encoding': 'gzip, deflate', 'accept-language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,en-GB;q=0.6', - 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.62'}) + 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.62'}, + request_private_ip=True) Logger.debug(text) data = json.loads(text)['list'] if not data: diff --git a/modules/mod_dl/__init__.py b/modules/mod_dl/__init__.py index f09ddaff..c57dda46 100644 --- a/modules/mod_dl/__init__.py +++ b/modules/mod_dl/__init__.py @@ -1,6 +1,5 @@ import asyncio import re -import traceback from config import Config from core.builtins import Bot @@ -61,16 +60,13 @@ async def main(msg: Bot.MessageSession, mod_name: str, version: str = None): if ver: url += f'&gameVersion={ver}' results = [] - try: - resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers) - if resp: - if not enable_mirror: # 没提供 pagination - if resp["pagination"]["resultCount"] == 0: - return None - for 
mod in resp["data"]: - results.append(("curseforge", mod["name"], mod["id"], None)) - except Exception: - traceback.print_exc() + resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers) + if resp: + if not enable_mirror: # 没提供 pagination + if resp["pagination"]["resultCount"] == 0: + return None + for mod in resp["data"]: + results.append(("curseforge", mod["name"], mod["id"], None)) return results async def get_modrinth_project_version(project_id: str, ver: str): @@ -105,12 +101,9 @@ async def main(msg: Bot.MessageSession, mod_name: str, version: str = None): } url = f'https://api.curseforge.com/v1/mods/{modid}/files?gameVersion={ver}' - try: - resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers) - if resp: - return resp["data"][0] - except Exception: - traceback.print_exc() + resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers) + if resp: + return resp["data"][0] # 搜索 Mod result = await asyncio.gather(*(search_modrinth(mod_name, ver), search_curseforge(mod_name, ver))) diff --git a/modules/phigros/update.py b/modules/phigros/update.py index c1668f3b..27c4dde6 100644 --- a/modules/phigros/update.py +++ b/modules/phigros/update.py @@ -40,7 +40,10 @@ async def update_assets(): try: update = await get_url(json_url, 200) except TimeoutError: - update = await get_url(json_url_mirror, 200) + try: + update = await get_url(json_url_mirror, 200) + except: + return False update_json = json.loads(update) for song in update_json: diff = {} @@ -76,6 +79,8 @@ async def update_assets(): data[row[0].lower()]['AT'] = row[4] os.remove(download_file) + else: + return False with open(file_path, 'w', encoding='utf-8') as f: f.write(json.dumps(data, indent=4, ensure_ascii=False)) shutil.move(file_path, rating_path) diff --git a/modules/tweet/__init__.py b/modules/tweet/__init__.py index e0e11749..b66af5ff 100644 --- a/modules/tweet/__init__.py +++ b/modules/tweet/__init__.py @@ -38,7 +38,7 @@ async def 
_(msg: Bot.MessageSession, tweet: str, use_local=True): await msg.finish(msg.locale.t("error.config.webrender.invalid")) use_local = False - res = await get_url(f'https://react-tweet.vercel.app/api/tweet/{tweet_id}') + res = await get_url(f'https://react-tweet.vercel.app/api/tweet/{tweet_id}', 200) res_json = json.loads(res) if not res_json['data']: await msg.finish(msg.locale.t('tweet.message.not_found')) diff --git a/modules/wiki/utils/wikilib.py b/modules/wiki/utils/wikilib.py index e9e2a2b0..325ac0aa 100644 --- a/modules/wiki/utils/wikilib.py +++ b/modules/wiki/utils/wikilib.py @@ -177,7 +177,7 @@ class WikiLib: break try: - return await get_url(api, status_code=200, headers=self.headers, fmt="json", request_private_ip=request_local, + return await get_url(api, status_code=200, headers=self.headers, fmt="json", request_private_ip=request_local, cookies=cookies) except Exception as e: @@ -257,7 +257,8 @@ class WikiLib: return WikiStatus(available=False, value=False, message=self.locale.t( "wiki.message.utils.wikilib.get_failed.timeout")) except Exception as e: - Logger.debug(traceback.format_exc()) + if Config('debug'): + Logger.error(traceback.format_exc()) if e.args == (403,): message = self.locale.t("wiki.message.utils.wikilib.get_failed.forbidden") elif not re.match(r'^(https?://).*', self.url): @@ -280,7 +281,8 @@ class WikiLib: meta='siteinfo', siprop='general|namespaces|namespacealiases|interwikimap|extensions') except Exception as e: - Logger.debug(traceback.format_exc()) + if Config('debug'): + Logger.error(traceback.format_exc()) message = self.locale.t("wiki.message.utils.wikilib.get_failed.api") + str(e) if self.url.find('moegirl.org.cn') != -1: message += '\n' + self.locale.t("wiki.message.utils.wikilib.get_failed.moegirl") @@ -580,7 +582,6 @@ class WikiLib: invalid_namespace = False async def search_something(srwhat): - Logger.debug(traceback.format_exc()) try: research = await self.research_page(page_info.title, namespace, srwhat=srwhat) if srwhat == 
'text': @@ -590,7 +591,8 @@ class WikiLib: invalid_namespace = research[1] return research except Exception: - Logger.debug(traceback.format_exc()) + if Config('debug'): + Logger.error(traceback.format_exc()) return None, False searches = [] diff --git a/schedulers/mcv_rss.py b/schedulers/mcv_rss.py index 438a70cb..580af7bb 100644 --- a/schedulers/mcv_rss.py +++ b/schedulers/mcv_rss.py @@ -51,7 +51,7 @@ async def get_article(version, use_local=True): get = (web_render_local if use_local else web_render) + 'source?url=' + quote(link) try: - html = await get_url(get, attempt=1) + html = await get_url(get, attempt=1, request_private_ip=True, logging_err_resp=False) soup = BeautifulSoup(html, 'html.parser') @@ -61,7 +61,8 @@ async def get_article(version, use_local=True): else: return link, title.text except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) return '', '' @@ -73,7 +74,7 @@ async def mcv_rss(): url = 'https://piston-meta.mojang.com/mc/game/version_manifest.json' try: verlist = get_stored_list('scheduler', 'mcv_rss') - file = json.loads(await get_url(url, attempt=1)) + file = json.loads(await get_url(url, attempt=1, logging_err_resp=False)) release = file['latest']['release'] snapshot = file['latest']['snapshot'] time_release = 0 @@ -123,7 +124,8 @@ async def mcv_rss(): get_stored_news_title.append(article[1]) update_stored_list('scheduler', 'mcnews', get_stored_news_title) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) @Scheduler.scheduled_job(IntervalTrigger(seconds=180)) @@ -140,14 +142,16 @@ async def mcbv_rss(): verlist.append(version) update_stored_list('scheduler', 'mcbv_rss', verlist) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times)) async def mcv_jira_rss(): try: + url = 'https://bugs.mojang.com/rest/api/2/project/10400/versions' 
verlist = get_stored_list('scheduler', 'mcv_jira_rss') - file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/10400/versions', 200, attempt=1)) + file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False)) releases = [] for v in file: if not v['archived']: @@ -169,14 +173,16 @@ async def mcv_jira_rss(): update_stored_list('scheduler', 'mcv_jira_rss', verlist) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times)) async def mcbv_jira_rss(): try: + url = 'https://bugs.mojang.com/rest/api/2/project/10200/versions' verlist = get_stored_list('scheduler', 'mcbv_jira_rss') - file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/10200/versions', 200, attempt=1)) + file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False)) releases = [] for v in file: if not v['archived']: @@ -193,14 +199,16 @@ async def mcbv_jira_rss(): verlist.append(release) update_stored_list('scheduler', 'mcbv_jira_rss', verlist) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times)) async def mcdv_jira_rss(): try: + url = 'https://bugs.mojang.com/rest/api/2/project/11901/versions' verlist = get_stored_list('scheduler', 'mcdv_jira_rss') - file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/11901/versions', 200, attempt=1)) + file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False)) releases = [] for v in file: if not v['archived']: @@ -217,14 +225,16 @@ async def mcdv_jira_rss(): verlist.append(release) update_stored_list('scheduler', 'mcdv_jira_rss', verlist) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times)) async def mclgv_jira_rss(): try: 
+ url = 'https://bugs.mojang.com/rest/api/2/project/12200/versions' verlist = get_stored_list('scheduler', 'mclgv_jira_rss') - file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/12200/versions', 200, attempt=1)) + file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False)) releases = [] for v in file: if not v['archived']: @@ -241,4 +251,5 @@ async def mclgv_jira_rss(): verlist.append(release) update_stored_list('scheduler', 'mclgv_jira_rss', verlist) except Exception: - traceback.print_exc() + if Config('debug'): + Logger.error(traceback.format_exc()) diff --git a/schedulers/minecraft_news.py b/schedulers/minecraft_news.py index d752b236..b1ed7710 100644 --- a/schedulers/minecraft_news.py +++ b/schedulers/minecraft_news.py @@ -88,7 +88,7 @@ async def feedback_news(): for section in sections: try: alist = get_stored_list('scheduler', 'mcfeedbacknews') - get = await get_url(section['url'], 200, attempt=1, logging_err_resp=False) + get = await get_url(section['url'], 200, attempt=1, request_private_ip=True, logging_err_resp=False) res = json.loads(get) articles = [] for i in res['articles']: