Commit 90b022566b (parent 4769aed84a) by 多羅狼, 2024-02-01 01:36:11 +08:00, committed by GitHub
13 changed files with 64 additions and 57 deletions

View file

@@ -1,5 +1,4 @@
 from datetime import datetime
-import traceback
 from core.builtins import Bot, Image, Plain, Url
 from core.utils.cooldown import CoolDown
@@ -29,7 +28,6 @@ async def get_video_info(msg: Bot.MessageSession, query, get_detail=False):
     except ValueError as e:
         if str(e).startswith('412'):
             await msg.finish(msg.locale.t('bilibili.message.error.rejected'))
-        traceback.print_exc()
     view = res['data']['View']
     stat = view['stat']

View file

@@ -15,11 +15,10 @@ async def cytoid_profile(msg: Bot.MessageSession):
         await msg.finish(msg.locale.t('cytoid.message.user_unbound', prefix=msg.prefixes[0]))
     profile_url = 'http://services.cytoid.io/profile/' + query_id
     try:
-        profile = json.loads(await get_url(profile_url, status_code=200))
+        profile = json.loads(await get_url(profile_url, 200))
     except ValueError as e:
         if str(e).startswith('404'):
             await msg.finish(msg.locale.t('cytoid.message.user_not_found'))
-        raise e
     uid = profile['user']['uid']
     nick = profile['user']['name']
     avatar = profile['user']['avatar']['large']

View file

@@ -1,5 +1,4 @@
 import re
-import traceback
 from core.builtins import Bot, Plain, Image as BImage
 from core.utils.image import msgchain2image

View file

@@ -1,4 +1,3 @@
-import traceback
 import ujson as json
 from core.builtins import Bot, Plain
@@ -32,7 +31,5 @@ async def get_record(msg, payload):
             await msg.finish(msg.locale.t("chunithm.message.forbidden.eula"))
         else:
             await msg.finish(msg.locale.t("chunithm.message.forbidden"))
-    else:
-        Logger.error(traceback.format_exc())
     if data:
         return data

View file

@@ -3,6 +3,7 @@ import shutil
 import traceback
 import ujson as json
+from config import Config
 from core.builtins import Bot, Plain, Image
 from core.logger import Logger
 from core.utils.cache import random_cache_path
@@ -16,12 +17,13 @@ total_list = TotalList()
 async def update_alias():
     try:
         url = "https://download.fanyu.site/maimai/alias.json"
-        data = await get_url(url, 200, fmt='json')
+        data = await get_url(url, 200, fmt='json', logging_err_resp=False)
         file_path = os.path.join(assets_path, "mai_alias.json")
         with open(file_path, 'w') as file:
             json.dump(data, file)
-    except:
+    except Exception:
+        if Config('debug'):
             Logger.error(traceback.format_exc())
         return False
     return True
@@ -31,7 +33,7 @@ async def update_covers():
     try:
         cover_dir = f"{assets_path}/static/mai/cover"
         url = f"https://www.diving-fish.com/maibot/static.zip"
-        download_file = await download_to_cache(url, timeout=60)
+        download_file = await download_to_cache(url, timeout=60, logging_err_resp=False)
         Logger.info('Maimai covers download completed.')
         ca = random_cache_path()
@@ -43,7 +45,8 @@ async def update_covers():
         static_cover_dir = os.path.join(ca, 'mai/cover')
         if os.path.exists(static_cover_dir):
             shutil.move(static_cover_dir, cover_dir)
-    except:
+    except Exception:
+        if Config('debug'):
             Logger.error(traceback.format_exc())
         return False
@@ -122,8 +125,6 @@ async def get_record(msg, payload):
             await msg.finish(msg.locale.t("maimai.message.forbidden.eula"))
         else:
             await msg.finish(msg.locale.t("maimai.message.forbidden"))
-    else:
-        Logger.error(traceback.format_exc())
     if data:
         return data
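
The recurring change in this file is the debug-gated error report: a failed update returns `False` to the caller, and the full traceback is only logged when the bot's `debug` config flag is set. A minimal, self-contained sketch of the same pattern, using the stdlib `logging` module and a plain boolean as stand-ins for the repo's `Logger` and `Config('debug')` (the `fetch` callable is hypothetical):

```python
import logging
import traceback

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

DEBUG = False  # stand-in for the repo's Config('debug') flag


def update_alias_sketch(fetch, path) -> bool:
    """Fetch alias data and write it to disk, reporting success as a bool."""
    try:
        data = fetch()
        with open(path, 'w') as file:
            file.write(data)
    except Exception:
        # Swallow the error in normal operation; only dump the
        # traceback when running in debug mode.
        if DEBUG:
            logger.error(traceback.format_exc())
        return False
    return True


# Usage: a failing fetch yields False quietly unless DEBUG is on.
print(update_alias_sketch(lambda: '{"alias": []}', 'mai_alias.json'))
```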

View file

@@ -1,8 +1,9 @@
-import json
-import re
 import datetime
+import re
 import traceback
 from google_play_scraper import app as google_play_scraper
+import ujson as json
 from core.builtins import ErrorMessage
 from core.logger import Logger

View file

@@ -27,7 +27,8 @@ async def urban(term: str, locale: Locale):
     text = await get_url(url, 200, headers={'accept': '*/*',
                                             'accept-encoding': 'gzip, deflate',
                                             'accept-language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7,en-GB;q=0.6',
-                                            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.62'})
+                                            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.62'},
+                         request_private_ip=True)
     Logger.debug(text)
     data = json.loads(text)['list']
     if not data:

View file

@@ -1,6 +1,5 @@
 import asyncio
 import re
-import traceback
 from config import Config
 from core.builtins import Bot
@@ -61,7 +60,6 @@ async def main(msg: Bot.MessageSession, mod_name: str, version: str = None):
         if ver:
             url += f'&gameVersion={ver}'
         results = []
-        try:
         resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers)
         if resp:
             if not enable_mirror:  # no pagination provided
@@ -69,8 +67,6 @@ async def main(msg: Bot.MessageSession, mod_name: str, version: str = None):
                 return None
             for mod in resp["data"]:
                 results.append(("curseforge", mod["name"], mod["id"], None))
-        except Exception:
-            traceback.print_exc()
         return results

 async def get_modrinth_project_version(project_id: str, ver: str):
@@ -105,12 +101,9 @@ async def main(msg: Bot.MessageSession, mod_name: str, version: str = None):
         }
         url = f'https://api.curseforge.com/v1/mods/{modid}/files?gameVersion={ver}'
-        try:
         resp = await get_url(url, 200, fmt="json", timeout=5, attempt=3, headers=headers)
         if resp:
             return resp["data"][0]
-        except Exception:
-            traceback.print_exc()

     # Search for mods
     result = await asyncio.gather(*(search_modrinth(mod_name, ver), search_curseforge(mod_name, ver)))
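
With the local `try`/`except Exception: traceback.print_exc()` wrappers removed, errors from `get_url` now propagate to the caller instead of being printed and swallowed, while the two search backends still run concurrently via `asyncio.gather`. A runnable sketch of that concurrency shape, with hypothetical stub coroutines standing in for the real Modrinth and CurseForge queries:

```python
import asyncio


async def search_modrinth_stub(name: str) -> list:
    await asyncio.sleep(0.1)  # stands in for the real Modrinth API call
    return [("modrinth", name)]


async def search_curseforge_stub(name: str) -> list:
    await asyncio.sleep(0.1)  # stands in for the real CurseForge API call
    return [("curseforge", name)]


async def search_all(name: str) -> list:
    # Run both backends concurrently. Any exception propagates to the
    # caller rather than being handled (and hidden) inside each backend.
    modrinth, curseforge = await asyncio.gather(
        search_modrinth_stub(name), search_curseforge_stub(name))
    return modrinth + curseforge


print(asyncio.run(search_all("sodium")))
```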

View file

@@ -40,7 +40,10 @@ async def update_assets():
     try:
         update = await get_url(json_url, 200)
     except TimeoutError:
+        try:
             update = await get_url(json_url_mirror, 200)
+        except:
+            return False
     update_json = json.loads(update)
     for song in update_json:
         diff = {}
@@ -76,6 +79,8 @@ async def update_assets():
             data[row[0].lower()]['AT'] = row[4]
         os.remove(download_file)
+    else:
+        return False
     with open(file_path, 'w', encoding='utf-8') as f:
         f.write(json.dumps(data, indent=4, ensure_ascii=False))
     shutil.move(file_path, rating_path)
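
The first hunk wraps the mirror fetch in its own `try`, so that when both the primary URL and the mirror fail, `update_assets` returns `False` rather than raising. A standalone sketch of this primary-then-mirror fallback, using stdlib `urllib` and hypothetical endpoints in place of the repo's `get_url` and its `json_url`/`json_url_mirror`:

```python
import urllib.error
import urllib.request

# Hypothetical endpoints; the module defines its own json_url / json_url_mirror.
JSON_URL = "https://example.com/rating.json"
JSON_URL_MIRROR = "https://mirror.example.com/rating.json"


def fetch_with_mirror(timeout: int = 10):
    """Try the primary source, fall back to the mirror, else return None."""
    try:
        with urllib.request.urlopen(JSON_URL, timeout=timeout) as resp:
            return resp.read().decode()
    except (urllib.error.URLError, TimeoutError):
        try:
            with urllib.request.urlopen(JSON_URL_MIRROR, timeout=timeout) as resp:
                return resp.read().decode()
        except Exception:
            # Both sources failed; signal failure instead of raising.
            return None
```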

View file

@@ -38,7 +38,7 @@ async def _(msg: Bot.MessageSession, tweet: str, use_local=True):
             await msg.finish(msg.locale.t("error.config.webrender.invalid"))
             use_local = False
-    res = await get_url(f'https://react-tweet.vercel.app/api/tweet/{tweet_id}')
+    res = await get_url(f'https://react-tweet.vercel.app/api/tweet/{tweet_id}', 200)
     res_json = json.loads(res)
     if not res_json['data']:
         await msg.finish(msg.locale.t('tweet.message.not_found'))

View file

@@ -177,7 +177,7 @@ class WikiLib:
                 break
         try:
-            return await get_url(api, status_code=200, headers=self.headers, fmt="json", request_private_ip=request_local,
+            await get_url(api, status_code=200, headers=self.headers, fmt="json", request_private_ip=request_local,
                           cookies=cookies)
         except Exception as e:
@@ -257,7 +258,8 @@ class WikiLib:
             return WikiStatus(available=False, value=False, message=self.locale.t(
                 "wiki.message.utils.wikilib.get_failed.timeout"))
         except Exception as e:
-            Logger.debug(traceback.format_exc())
+            if Config('debug'):
+                Logger.error(traceback.format_exc())
             if e.args == (403,):
                 message = self.locale.t("wiki.message.utils.wikilib.get_failed.forbidden")
             elif not re.match(r'^(https?://).*', self.url):
@@ -280,7 +281,8 @@ class WikiLib:
                 meta='siteinfo',
                 siprop='general|namespaces|namespacealiases|interwikimap|extensions')
         except Exception as e:
-            Logger.debug(traceback.format_exc())
+            if Config('debug'):
+                Logger.error(traceback.format_exc())
             message = self.locale.t("wiki.message.utils.wikilib.get_failed.api") + str(e)
             if self.url.find('moegirl.org.cn') != -1:
                 message += '\n' + self.locale.t("wiki.message.utils.wikilib.get_failed.moegirl")
@@ -580,7 +582,6 @@ class WikiLib:
         invalid_namespace = False

         async def search_something(srwhat):
-            Logger.debug(traceback.format_exc())
             try:
                 research = await self.research_page(page_info.title, namespace, srwhat=srwhat)
                 if srwhat == 'text':
@@ -590,7 +591,8 @@ class WikiLib:
                     invalid_namespace = research[1]
                 return research
             except Exception:
-                Logger.debug(traceback.format_exc())
+                if Config('debug'):
+                    Logger.error(traceback.format_exc())
                 return None, False

         searches = []

View file

@@ -51,7 +51,7 @@ async def get_article(version, use_local=True):
     get = (web_render_local if use_local else web_render) + 'source?url=' + quote(link)
     try:
-        html = await get_url(get, attempt=1)
+        html = await get_url(get, attempt=1, request_private_ip=True, logging_err_resp=False)
         soup = BeautifulSoup(html, 'html.parser')
@@ -61,7 +61,8 @@ async def get_article(version, use_local=True):
         else:
             return link, title.text
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())
     return '', ''
@@ -73,7 +74,7 @@ async def mcv_rss():
     url = 'https://piston-meta.mojang.com/mc/game/version_manifest.json'
     try:
         verlist = get_stored_list('scheduler', 'mcv_rss')
-        file = json.loads(await get_url(url, attempt=1))
+        file = json.loads(await get_url(url, attempt=1, logging_err_resp=False))
         release = file['latest']['release']
         snapshot = file['latest']['snapshot']
         time_release = 0
@@ -123,7 +124,8 @@ async def mcv_rss():
             get_stored_news_title.append(article[1])
             update_stored_list('scheduler', 'mcnews', get_stored_news_title)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())

 @Scheduler.scheduled_job(IntervalTrigger(seconds=180))
@@ -140,14 +142,16 @@ async def mcbv_rss():
             verlist.append(version)
             update_stored_list('scheduler', 'mcbv_rss', verlist)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())

 @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times))
 async def mcv_jira_rss():
     try:
+        url = 'https://bugs.mojang.com/rest/api/2/project/10400/versions'
         verlist = get_stored_list('scheduler', 'mcv_jira_rss')
-        file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/10400/versions', 200, attempt=1))
+        file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False))
         releases = []
         for v in file:
             if not v['archived']:
@@ -169,14 +173,16 @@ async def mcv_jira_rss():
             update_stored_list('scheduler', 'mcv_jira_rss', verlist)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())

 @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times))
 async def mcbv_jira_rss():
     try:
+        url = 'https://bugs.mojang.com/rest/api/2/project/10200/versions'
         verlist = get_stored_list('scheduler', 'mcbv_jira_rss')
-        file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/10200/versions', 200, attempt=1))
+        file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False))
         releases = []
         for v in file:
             if not v['archived']:
@@ -193,14 +199,16 @@ async def mcbv_jira_rss():
             verlist.append(release)
             update_stored_list('scheduler', 'mcbv_jira_rss', verlist)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())

 @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times))
 async def mcdv_jira_rss():
     try:
+        url = 'https://bugs.mojang.com/rest/api/2/project/11901/versions'
         verlist = get_stored_list('scheduler', 'mcdv_jira_rss')
-        file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/11901/versions', 200, attempt=1))
+        file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False))
         releases = []
         for v in file:
             if not v['archived']:
@@ -217,14 +225,16 @@ async def mcdv_jira_rss():
             verlist.append(release)
             update_stored_list('scheduler', 'mcdv_jira_rss', verlist)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())

 @Scheduler.scheduled_job(IntervalTrigger(seconds=trigger_times))
 async def mclgv_jira_rss():
     try:
+        url = 'https://bugs.mojang.com/rest/api/2/project/12200/versions'
         verlist = get_stored_list('scheduler', 'mclgv_jira_rss')
-        file = json.loads(await get_url('https://bugs.mojang.com/rest/api/2/project/12200/versions', 200, attempt=1))
+        file = json.loads(await get_url(url, 200, attempt=1, logging_err_resp=False))
         releases = []
         for v in file:
             if not v['archived']:
@@ -241,4 +251,5 @@ async def mclgv_jira_rss():
             verlist.append(release)
             update_stored_list('scheduler', 'mclgv_jira_rss', verlist)
     except Exception:
-        traceback.print_exc()
+        if Config('debug'):
+            Logger.error(traceback.format_exc())
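
Each of these jobs follows the same poll-and-diff shape: fetch the current version list, compare it against a stored list, act only on the new entries, then persist the result. A compact sketch of that shape, using a local JSON file as a stand-in for the bot's `get_stored_list`/`update_stored_list` storage; the `archived` field comes from the Jira payload used above, while `name` is an assumed field of that payload:

```python
import json
import urllib.request

VERSIONS_URL = 'https://bugs.mojang.com/rest/api/2/project/10400/versions'
STATE_FILE = 'mcv_jira_rss.json'  # hypothetical local stand-in for the stored list


def poll_new_releases() -> list:
    """Return version names unseen on previous polls, then persist the union."""
    try:
        with open(STATE_FILE) as f:
            seen = set(json.load(f))
    except FileNotFoundError:
        seen = set()  # first run: nothing seen yet
    with urllib.request.urlopen(VERSIONS_URL, timeout=10) as resp:
        versions = json.load(resp)
    current = [v['name'] for v in versions if not v['archived']]
    new = [name for name in current if name not in seen]
    with open(STATE_FILE, 'w') as f:
        json.dump(sorted(seen | set(current)), f)
    return new
```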

View file

@@ -88,7 +88,7 @@ async def feedback_news():
     for section in sections:
         try:
             alist = get_stored_list('scheduler', 'mcfeedbacknews')
-            get = await get_url(section['url'], 200, attempt=1, logging_err_resp=False)
+            get = await get_url(section['url'], 200, attempt=1, request_private_ip=True, logging_err_resp=False)
             res = json.loads(get)
             articles = []
             for i in res['articles']: