Commit 11be9e095e by yzhh on 2021-11-12 22:25:53 +08:00 (parent 5a23f61704)
63 changed files with 466 additions and 503 deletions


@ -3,12 +3,12 @@ import logging
import os
import re
from aiocqhttp import Event
from config import Config
from core.bots.aiocqhttp.client import bot
from core.bots.aiocqhttp.message import MessageSession, FetchTarget
from core.bots.aiocqhttp.tasks import MessageTaskManager, FinishedTasks
from aiocqhttp import Event
from core.elements import MsgInfo, Session, StartUp, Schedule, EnableDirtyWordCheck, PrivateAssets
from core.loader import ModulesManager
from core.parser.message import parser


@ -1,4 +1,3 @@
from aiocqhttp import CQHttp
bot = CQHttp()


@ -1,10 +1,12 @@
import asyncio
import html
import re
import traceback
from typing import List
from pathlib import Path
from typing import List
from aiocqhttp import MessageSegment
from core.bots.aiocqhttp.client import bot
from core.bots.aiocqhttp.tasks import MessageTaskManager, FinishedTasks
from core.elements import Plain, Image, MessageSession as MS, MsgInfo, Session, Voice, FetchTarget as FT, \
@ -12,7 +14,6 @@ from core.elements import Plain, Image, MessageSession as MS, MsgInfo, Session,
from core.elements.others import confirm_command
from core.logger import Logger
from database import BotDBUtil
import html
def convert2lst(s) -> list:
@ -83,7 +84,8 @@ class MessageSession(MS):
if self.target.targetFrom == 'QQ' or self.target.senderInfo.check_TargetAdmin(
self.target.targetId) or self.target.senderInfo.query.isSuperUser:
return True
get_member_info = await bot.call_action('get_group_member_info', group_id=self.session.target, user_id=self.session.sender)
get_member_info = await bot.call_action('get_group_member_info', group_id=self.session.target,
user_id=self.session.sender)
if get_member_info['role'] in ['owner', 'admin']:
return True
return False
@ -119,7 +121,8 @@ class MessageSession(MS):
async def __aenter__(self):
if self.msg.target.targetFrom == 'QQ|Group':
await bot.send_group_msg(group_id=self.msg.session.target, message=f'[CQ:poke,qq={self.msg.session.sender}]')
await bot.send_group_msg(group_id=self.msg.session.target,
message=f'[CQ:poke,qq={self.msg.session.sender}]')
pass
async def __aexit__(self, exc_type, exc_val, exc_tb):


@ -114,7 +114,7 @@ class MessageSession(MS):
self.msg = msg
async def __aenter__(self):
#await bot.answer_chat_action(self.msg.session.target, 'typing')
# await bot.answer_chat_action(self.msg.session.target, 'typing')
pass
async def __aexit__(self, exc_type, exc_val, exc_tb):


@ -19,6 +19,7 @@ init()
count = 0
@client.event
async def on_ready():
Logger.info('Logged on as ' + str(client.user))


@ -59,6 +59,7 @@ class MessageSession(MS):
async def waitConfirm(self, msgchain=None, quote=True):
ExecutionLockList.remove(self)
def check(m):
return m.channel == self.session.message.channel and m.author == self.session.message.author


@ -7,8 +7,8 @@ from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger
from core.elements import Command, RegexCommand, Option, Schedule, StartUp
from core.loader import ModulesManager
from core.elements.module.meta import *
from core.loader import ModulesManager
class Bind:


@ -11,12 +11,12 @@ import time
import aiohttp
from tenacity import retry, wait_fixed, stop_after_attempt
from config import Config
from core.elements import EnableDirtyWordCheck
from core.logger import Logger
from database.logging_message import DirtyWordCache
from config import Config
def hash_hmac(key, code, sha1):
hmac_code = hmac.new(key.encode(), code.encode(), hashlib.sha1)
@ -30,16 +30,20 @@ def computeMD5hash(my_string):
def parse_data(result: dict):
print(result)
original_content = content = result['content']
status = True
for itemResult in result['results']:
if itemResult['suggestion'] == 'block':
for itemDetail in itemResult['details']:
if 'contexts' in itemDetail:
for itemContext in itemDetail["contexts"]:
content = content.replace(itemContext['context'], '<吃掉了>')
status = False
else:
content = "<全部吃掉了>"
return {original_content: content}
status = False
return {'content': content, 'status': status, 'original': original_content}
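For clarity, a minimal illustrative sketch of the reworked parse_data contract; the input dict below is fabricated to match only the fields this function reads, not a real moderation-API response:

# Illustrative only: an input shaped like the fields parse_data inspects.
sample = {
    'content': 'hello badword',
    'results': [{
        'suggestion': 'block',
        'details': [{'contexts': [{'context': 'badword'}]}],
    }],
}
# New return shape (previously a {original: censored} mapping):
# parse_data(sample) == {'content': 'hello <吃掉了>', 'status': False, 'original': 'hello badword'}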
@retry(stop=stop_after_attempt(3), wait=wait_fixed(3))
@ -47,7 +51,7 @@ async def check(*text) -> list:
'''检查字符串是否合规
:param text: 字符串List/Union
:returns: 经过审核后的字符串不合规部分会被替换为'<吃掉了>'全部不合规则是'<全部吃掉了>'
:returns: 经过审核后的字符串不合规部分会被替换为'<吃掉了>'全部不合规则是'<全部吃掉了>'结构为[{'审核后的字符串': 处理结果True/False默认为True}]
'''
accessKeyId = Config("Check_accessKeyId")
accessKeySecret = Config("Check_accessKeySecret")
@ -55,26 +59,34 @@ async def check(*text) -> list:
if not accessKeyId or not accessKeySecret or not EnableDirtyWordCheck.status:
Logger.warn('Dirty words filter was disabled, skip.')
return text
for x in text:
if x == '':
text.remove(x)
if not text:
return []
query_list = {}
count = 0
for t in text:
if t == '':
query_list.update({count: {t: {'content': t, 'status': True, 'original': t}}})
else:
query_list.update({count: {t: False}})
count += 1
for q in query_list:
for pq in query_list[q]:
cache = DirtyWordCache(pq)
if not cache.need_insert:
query_list.update({q: parse_data(cache.get())})
call_api_list = {}
print(query_list)
for q in query_list:
for pq in query_list[q]:
if not query_list[q][pq]:
call_api_list.update({pq: q})
cache = DirtyWordCache(pq)
if not cache.need_insert:
query_list.update({q: {pq: parse_data(cache.get())}})
call_api_list = {}
for q in query_list:
print(q)
for pq in query_list[q]:
print(pq)
if not query_list[q][pq]:
if pq not in call_api_list:
call_api_list.update({pq: []})
print(call_api_list)
call_api_list[pq].append(q)
print(call_api_list)
call_api_list_ = [x for x in call_api_list]
if call_api_list_:
body = {
@ -126,15 +138,13 @@ async def check(*text) -> list:
print(result)
for item in result['data']:
content = item['content']
query_list.update({call_api_list[content]: parse_data(item)})
for n in call_api_list[content]:
query_list.update({n: parse_data(item)})
DirtyWordCache(content).update(item)
else:
raise ValueError(await resp.text())
results = []
print(query_list)
for x in query_list:
for y in query_list[x]:
results.append(query_list[x][y])
results.append(query_list[x])
return results
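A small usage sketch, not part of the commit, showing how callers are expected to consume the per-item dicts now returned by check(); the sample strings are placeholders and the call still needs the bot's config and database to be set up:

from core.dirty_check import check

async def demo():
    for r in await check('第一句', '第二句'):
        # each item: {'content': censored text, 'status': True if clean, 'original': input text}
        if not r['status']:
            print(f"filtered: {r['original']!r} -> {r['content']!r}")

# e.g. asyncio.run(demo()) from a script with the bot environment configured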


@ -1,11 +1,12 @@
import re
import uuid
from os.path import abspath
import aiohttp
import filetype
from os.path import abspath
from PIL import Image as PImage
from aiohttp_retry import ExponentialRetry, RetryClient
from config import CachePath


@ -1,5 +1,5 @@
import re
from typing import Callable, List, Union
from typing import List
from .meta import *


@ -2,7 +2,7 @@ import importlib
import os
import re
import traceback
from typing import Dict, Union, List, Set
from typing import Dict, Union
from core.elements import Command, Option, Schedule, RegexCommand, StartUp, PrivateAssets
from core.logger import Logger
@ -105,7 +105,7 @@ class ModulesManager:
return d
@staticmethod
def return_specified_type_modules(module_type: [Command, RegexCommand, Schedule, StartUp, Option])\
def return_specified_type_modules(module_type: [Command, RegexCommand, Schedule, StartUp, Option]) \
-> Dict[str, Union[Command, RegexCommand, Schedule, StartUp, Option]]:
d = {}
modules = ModulesManager.return_modules_list_as_dict()


@ -8,12 +8,12 @@ from graia.application.group import Group, Member
from graia.application.message.chain import MessageChain
from config import Config
from core.unused_bots.graia.broadcast import bcc, app
from core.unused_bots.graia.message import MessageSession, FetchTarget
from core.elements import MsgInfo, Session, Command, Schedule, PrivateAssets
from core.loader import ModulesManager
from core.parser.message import parser
from core.scheduler import Scheduler
from core.unused_bots.graia.broadcast import bcc, app
from core.unused_bots.graia.message import MessageSession, FetchTarget
from core.utils import init, load_prompt
PrivateAssets.set(os.path.abspath(os.path.dirname(__file__) + '/assets'))


@ -11,10 +11,10 @@ from graia.broadcast.interrupt import InterruptControl
from graia.broadcast.interrupt.waiter import Waiter
from config import Config
from core.unused_bots.graia.broadcast import app, bcc
from core.elements import Plain as BPlain, Image as BImage, Voice as BVoice, MessageSession as MS, MsgInfo, Session, \
FetchTarget as FT
from core.elements.others import confirm_command
from core.unused_bots.graia.broadcast import app, bcc
from core.utils import slk_converter
from database import BotDBUtil
from database.logging_message import LoggerMSG


@ -11,9 +11,8 @@ import ujson as json
from aiohttp_retry import ExponentialRetry, RetryClient
from core.elements import FetchTarget, PrivateAssets
from core.logger import Logger
from core.loader import load_modules
from core.logger import Logger
def init() -> None:
@ -35,15 +34,14 @@ def init() -> None:
write_tag.close()
async def get_url(url: str, status_code: int = False, headers: dict = None, fmt=None):
'''利用AioHttp获取指定url的内容。
"""利用AioHttp获取指定url的内容。
:param url: 需要获取的url
:param status_code: 指定请求到的状态码若不符则抛出ValueError
:param headers: 请求时使用的http头
:returns: 指定url的内容字符串
'''
"""
async with RetryClient(headers=headers, retry_options=ExponentialRetry(attempts=3)) as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20), headers=headers) as req:
if status_code and req.status != status_code:
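A brief usage sketch for get_url as documented above; the URL and header values are placeholders, and the call must be awaited from inside a coroutine:

async def fetch_example():
    # Returns the response body as text; per the docstring, a mismatched
    # status code raises ValueError.
    return await get_url('https://example.com/api', status_code=200,
                         headers={'User-Agent': 'teahouse-bot'})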


@ -1,12 +1,11 @@
import os
import re
import traceback
import uuid
import re
from typing import List, Union
import ujson as json
import aiohttp
import ujson as json
from tabulate import tabulate
from config import Config


@ -1,12 +1,12 @@
import datetime
from tenacity import retry, stop_after_attempt
from config import Config
from core.elements.message import MessageSession
from core.elements.temp import EnabledModulesCache, SenderInfoCache
from database.orm import DBSession
from database.tables import EnabledModules, SenderInfo, TargetAdmin, CommandTriggerTime, GroupWhiteList
from config import Config
from tenacity import retry, stop_after_attempt
cache = Config('db_cache')
@ -41,6 +41,7 @@ def auto_rollback_error(func):
except Exception as e:
session.rollback()
raise e
return wrapper


@ -1,4 +1,5 @@
import datetime
import ujson as json
from sqlalchemy import create_engine, Column, String, Text, Integer, TIMESTAMP, text
from sqlalchemy.ext.declarative import declarative_base
@ -7,7 +8,6 @@ from tenacity import retry, stop_after_attempt
Base = declarative_base()
DB_LINK = 'sqlite:///database/msg.db'
@ -57,6 +57,7 @@ def auto_rollback_error(func):
except Exception as e:
session.rollback()
raise e
return wrapper
@ -135,4 +136,3 @@ class UnfriendlyActions:
session.add_all([UnfriendlyActionsTable(targetId=self.targetId, senderId=self.senderId, action=action)])
session.commit()
return self.check_mute()

modules/__init__.py Normal file


@ -1,11 +1,10 @@
import os
from core.elements import MessageSession, Plain, Image
from core.component import on_command
from core.elements import MessageSession, Plain, Image
from .getb30 import getb30
from .initialize import arcb30init
b30 = on_command('b30', developers=['OasisAkari'])


@ -1,7 +1,7 @@
import re
from core.elements import MessageSession
from core.component import on_command, on_regex
from core.elements import MessageSession
from .bugtracker import bugtracker_get
bug = on_command('bug', alias='b', developers=['OasisAkari'])


@ -1,5 +1,4 @@
import json
import re
from core.utils import get_url


@ -1,4 +1,3 @@
import asyncio
import os
import sys
import time
@ -7,11 +6,11 @@ import traceback
import psutil
import ujson as json
from core.component import on_command
from core.elements import MessageSession, Command, PrivateAssets, Image, Plain
from core.loader import ModulesManager
from core.component import on_command
from core.utils.image_table import ImageTable, image_table_render, web_render
from core.parser.command import CommandParser, InvalidHelpDocTypeError
from core.utils.image_table import ImageTable, image_table_render, web_render
from database import BotDBUtil
module = on_command('module',
@ -44,7 +43,8 @@ async def config_modules(msg: MessageSession):
for function in modules_:
if function[0] == '_':
continue
if isinstance(modules_[function], Command) and (modules_[function].base or modules_[function].required_superuser):
if isinstance(modules_[function], Command) and (
modules_[function].base or modules_[function].required_superuser):
continue
enable_list.append(function)
else:
@ -73,7 +73,8 @@ async def config_modules(msg: MessageSession):
for function in modules_:
if function[0] == '_':
continue
if isinstance(modules_[function], Command) and (modules_[function].base or modules_[function].required_superuser):
if isinstance(modules_[function], Command) and (
modules_[function].base or modules_[function].required_superuser):
continue
disable_list.append(function)
else:


@ -1,5 +1,5 @@
from core.elements import MessageSession, Image
from core.component import on_command
from core.elements import MessageSession, Image
from database import BotDBUtil
from .profile import cytoid_profile
from .rating import get_rating


@ -1,5 +1,5 @@
from core.elements import MessageSession
from core.component import on_command
from core.elements import MessageSession
from modules.github import repo, user, search
github = on_command('github', alias=['gh'], developers=['Dianliang233'])


@ -60,5 +60,6 @@ Created {time_diff(result['created_at'])} ago | Updated {time_diff(result['updat
await msg.sendMessage([Plain(message), Image(
path=f'https://opengraph.githubassets.com/c9f4179f4d560950b2355c82aa2b7750bffd945744f9b8ea3f93cc24779745a0/{result["full_name"]}')])
except Exception as e:
await msg.sendMessage('发生错误:' + str(e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
await msg.sendMessage('发生错误:' + str(
e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
traceback.print_exc()


@ -25,5 +25,6 @@ async def search(msg: MessageSession):
await msg.sendMessage(message)
except Exception as error:
await msg.sendMessage('发生错误:' + str(error) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
await msg.sendMessage('发生错误:' + str(
error) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
traceback.print_exc()


@ -41,5 +41,6 @@ Account Created {time_diff(result['created_at'])} ago | Latest activity {time_di
await msg.sendMessage(message)
except Exception as error:
await msg.sendMessage('发生错误:' + str(error) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
await msg.sendMessage('发生错误:' + str(
error) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=')
traceback.print_exc()


@ -61,9 +61,8 @@ async def dirty_check(text, *whitelist_check):
if whitelist_check in whitelist:
return False
check = await dirty.check(text)
check = '\n'.join(check)
print(check)
if check.find('<吃掉了>') != -1 or check.find('<全部吃掉了>') != -1:
for x in check:
if not x['status']:
return True
return False


@ -1,14 +1,12 @@
import re
from collections import defaultdict
from modules.maimai.libraries.tool import hash
from modules.maimai.libraries.maimaidx_music import *
from core.component import on_command, on_regex
from core.elements import Plain, Image as BImage, MessageSession
from modules.maimai.libraries.image import *
from modules.maimai.libraries.maimai_best_40 import generate
from core.elements import Plain, Image as BImage, MessageSession
from core.component import on_command, on_regex
from modules.maimai.libraries.maimaidx_music import *
from modules.maimai.libraries.tool import hash
def song_txt(music: Music):


@ -3,7 +3,6 @@ from io import BytesIO
from PIL import ImageFont, ImageDraw, Image
path = 'assets/maimai/static/high_eq_image.png'
fontpath = "assets/maimai/static/msyh.ttc"


@ -1,13 +1,12 @@
# Author: xyb, Diving_Fish
import asyncio
import os
import math
import os
from typing import Optional, Dict, List
import aiohttp
from PIL import Image, ImageDraw, ImageFont, ImageFilter
from modules.maimai.libraries.maimaidx_music import total_list
from modules.maimai.libraries.maimaidx_music import total_list
scoreRank = 'D C B BB BBB A AA AAA S S+ SS SS+ SSS SSS+'.split(' ')
combo = ' FC FC+ AP AP+'.split(' ')
@ -15,8 +14,8 @@ diffs = 'Basic Advanced Expert Master Re:Master'.split(' ')
class ChartInfo(object):
def __init__(self, idNum:str, diff:int, tp:str, achievement:float, ra:int, comboId:int, scoreId:int,
title:str, ds:float, lv:str):
def __init__(self, idNum: str, diff: int, tp: str, achievement: float, ra: int, comboId: int, scoreId: int,
title: str, ds: float, lv: str):
self.idNum = idNum
self.diff = diff
self.tp = tp
@ -57,20 +56,19 @@ class ChartInfo(object):
)
class BestList(object):
def __init__(self, size:int):
def __init__(self, size: int):
self.data = []
self.size = size
def push(self, elem:ChartInfo):
def push(self, elem: ChartInfo):
if len(self.data) >= self.size and elem < self.data[-1]:
return
self.data.append(elem)
self.data.sort()
self.data.reverse()
while(len(self.data) > self.size):
while (len(self.data) > self.size):
del self.data[-1]
def pop(self):
@ -88,7 +86,7 @@ class BestList(object):
class DrawBest(object):
def __init__(self, sdBest:BestList, dxBest:BestList, userName:str, playerRating:int, musicRating:int):
def __init__(self, sdBest: BestList, dxBest: BestList, userName: str, playerRating: int, musicRating: int):
self.sdBest = sdBest
self.dxBest = dxBest
self.userName = self._stringQ2B(userName)
@ -115,7 +113,7 @@ class DrawBest(object):
inside_code = 0x0020
else:
inside_code -= 0xfee0
if inside_code < 0x0020 or inside_code > 0x7e: #转完之后不是半角字符返回原来的字符
if inside_code < 0x0020 or inside_code > 0x7e: # 转完之后不是半角字符返回原来的字符
return uchar
return chr(inside_code)
@ -138,13 +136,13 @@ class DrawBest(object):
return wid
return 1
def _coloumWidth(self, s:str):
def _coloumWidth(self, s: str):
res = 0
for ch in s:
res += self._getCharWidth(ord(ch))
return res
def _changeColumnWidth(self, s:str, len:int) -> str:
def _changeColumnWidth(self, s: str, len: int) -> str:
res = 0
sList = []
for ch in s:
@ -153,7 +151,7 @@ class DrawBest(object):
sList.append(ch)
return ''.join(sList)
def _resizePic(self, img:Image.Image, time:float):
def _resizePic(self, img: Image.Image, time: float):
return img.resize((int(img.size[0] * time), int(img.size[1] * time)))
def _findRaPic(self) -> str:
@ -178,7 +176,7 @@ class DrawBest(object):
num = '09'
return f'UI_CMN_DXRating_S_{num}.png'
def _drawRating(self, ratingBaseImg:Image.Image):
def _drawRating(self, ratingBaseImg: Image.Image):
COLOUMS_RATING = [86, 100, 115, 130, 145]
theRa = self.playerRating
i = 4
@ -191,7 +189,7 @@ class DrawBest(object):
i = i - 1
return ratingBaseImg
def _drawBestList(self, img:Image.Image, sdBest:BestList, dxBest:BestList):
def _drawBestList(self, img: Image.Image, sdBest: BestList, dxBest: BestList):
itemW = 164
itemH = 88
Color = [(69, 193, 36), (255, 186, 1), (255, 90, 102), (134, 49, 200), (217, 197, 233)]
@ -227,7 +225,8 @@ class DrawBest(object):
rankImg = self._resizePic(rankImg, 0.3)
temp.paste(rankImg, (88, 28), rankImg.split()[3])
if chartInfo.comboId:
comboImg = Image.open(self.pic_dir + f'UI_MSS_MBase_Icon_{comboPic[chartInfo.comboId]}_S.png').convert('RGBA')
comboImg = Image.open(self.pic_dir + f'UI_MSS_MBase_Icon_{comboPic[chartInfo.comboId]}_S.png').convert(
'RGBA')
comboImg = self._resizePic(comboImg, 0.45)
temp.paste(comboImg, (119, 27), comboImg.split()[3])
font = ImageFont.truetype('assets/maimai/static/adobe_simhei.otf', 12, encoding='utf-8')
@ -356,7 +355,7 @@ class DrawBest(object):
return self.img
def computeRa(ds: float, achievement:float) -> int:
def computeRa(ds: float, achievement: float) -> int:
baseRa = 15.0
if achievement >= 50 and achievement < 60:
baseRa = 5.0
@ -389,7 +388,8 @@ def computeRa(ds: float, achievement:float) -> int:
async def generate(payload: Dict) -> (Optional[Image.Image], bool):
async with aiohttp.request("POST", "https://www.diving-fish.com/api/maimaidxprober/query/player", json=payload) as resp:
async with aiohttp.request("POST", "https://www.diving-fish.com/api/maimaidxprober/query/player",
json=payload) as resp:
if resp.status == 400:
return None, 400
if resp.status == 403:
@ -403,5 +403,6 @@ async def generate(payload: Dict) -> (Optional[Image.Image], bool):
sd_best.push(ChartInfo.from_json(c))
for c in dx:
dx_best.push(ChartInfo.from_json(c))
pic = DrawBest(sd_best, dx_best, obj["nickname"], obj["rating"] + obj["additional_rating"], obj["rating"]).getDir()
pic = DrawBest(sd_best, dx_best, obj["nickname"], obj["rating"] + obj["additional_rating"],
obj["rating"]).getDir()
return pic, 0


@ -1,7 +1,6 @@
import json
import random
from typing import Dict, List, Optional, Union, Tuple, Any
from copy import deepcopy
from typing import Dict, List, Optional, Union, Tuple, Any
import requests


@ -1,5 +1,5 @@
from core.elements import MessageSession
from core.component import on_command
from core.elements import MessageSession
from .mcv import mcv, mcbv, mcdv
m = on_command(


@ -3,8 +3,8 @@ import traceback
import ujson as json
from core.elements import FetchTarget, IntervalTrigger, PrivateAssets
from core.component import on_schedule
from core.elements import FetchTarget, IntervalTrigger, PrivateAssets
from core.logger import Logger
from core.utils import get_url


@ -1,14 +1,14 @@
import aiohttp
import ujson as json
import traceback
import os
from urllib.parse import quote
import traceback
from datetime import datetime, timedelta
from urllib.parse import quote
import ujson as json
from config import Config
from core.component import on_schedule
from core.elements import FetchTarget, IntervalTrigger, PrivateAssets
from core.component import on_startup, on_schedule
from core.logger import Logger
from core.utils import get_url
@ -27,12 +27,14 @@ class Article:
count = 10
@on_schedule('minecraft_news', developers=['_LittleC_', 'OasisAkari', 'Dianliang233'], recommend_modules=['feedback_news'], trigger=IntervalTrigger(seconds=300), desc='开启后将会推送来自Minecraft官网的新闻。')
@on_schedule('minecraft_news', developers=['_LittleC_', 'OasisAkari', 'Dianliang233'],
recommend_modules=['feedback_news'], trigger=IntervalTrigger(seconds=300), desc='开启后将会推送来自Minecraft官网的新闻。')
async def start_check_news(bot: FetchTarget):
Logger.info('Checking Minecraft news...')
file_ = os.path.abspath(f'{PrivateAssets.path}/mcnews.txt')
baseurl = 'https://www.minecraft.net'
url = quote(f'https://www.minecraft.net/content/minecraft-net/_jcr_content.articles.grid?tileselection=auto&tagsPath=minecraft:article/news,minecraft:article/insider,minecraft:article/culture,minecraft:article/merch,minecraft:stockholm/news,minecraft:stockholm/guides,minecraft:stockholm/deep-dives,minecraft:stockholm/merch,minecraft:stockholm/events,minecraft:stockholm/minecraft-builds,minecraft:stockholm/marketplace&offset=0&pageSize={Article.count}')
url = quote(
f'https://www.minecraft.net/content/minecraft-net/_jcr_content.articles.grid?tileselection=auto&tagsPath=minecraft:article/news,minecraft:article/insider,minecraft:article/culture,minecraft:article/merch,minecraft:stockholm/news,minecraft:stockholm/guides,minecraft:stockholm/deep-dives,minecraft:stockholm/merch,minecraft:stockholm/events,minecraft:stockholm/minecraft-builds,minecraft:stockholm/marketplace&offset=0&pageSize={Article.count}')
webrender = Config('web_render')
get = webrender + 'source?url=' + url
if not webrender:
@ -60,10 +62,13 @@ async def start_check_news(bot: FetchTarget):
Logger.info('Minecraft news checked.')
@on_schedule('feedback_news', developers=['Dianliang233'], recommend_modules=['minecraft_news'], trigger=IntervalTrigger(seconds=300), desc='开启后将会推送来自Minecraft Feedback的更新记录。')
@on_schedule('feedback_news', developers=['Dianliang233'], recommend_modules=['minecraft_news'],
trigger=IntervalTrigger(seconds=300), desc='开启后将会推送来自Minecraft Feedback的更新记录。')
async def feedback_news(bot: FetchTarget):
sections = [{'name': 'beta', 'url': 'https://minecraftfeedback.zendesk.com/api/v2/help_center/en-us/sections/360001185332/articles?per_page=5'},
{'name': 'article', 'url': 'https://minecraftfeedback.zendesk.com/api/v2/help_center/en-us/sections/360001186971/articles?per_page=5'}]
sections = [{'name': 'beta',
'url': 'https://minecraftfeedback.zendesk.com/api/v2/help_center/en-us/sections/360001185332/articles?per_page=5'},
{'name': 'article',
'url': 'https://minecraftfeedback.zendesk.com/api/v2/help_center/en-us/sections/360001186971/articles?per_page=5'}]
for section in sections:
try:
name = section['name']


@ -1,8 +1,8 @@
# copied from kurisu(https://github.com/nh-server/Kurisu/tree/main/cogs/results)
import discord
from core.elements import MessageSession
from core.component import on_command
from core.elements import MessageSession
from . import switch, wiiu_support, wiiu_results, ctr_support, ctr_results


@ -2,8 +2,8 @@ import os
from PIL import Image, ImageDraw, ImageFont
from core.elements import Image as Img, MessageSession
from core.component import on_command
from core.elements import Image as Img, MessageSession
from core.utils import cache_name
assets_path = os.path.abspath('./assets/arcaea')


@ -1,12 +1,12 @@
import ujson as json
from core.component import on_startup
from core.dirty_check import check
from core.elements import FetchTarget
from core.component import on_startup
from core.logger import Logger
from core.scheduler import Scheduler
from core.utils import get_url
from modules.utils.UTC8 import UTC8
from modules.wiki.utils.UTC8 import UTC8
@on_startup('__check_newbie__', required_superuser=True, developers=['OasisAkari'])
@ -17,14 +17,17 @@ async def newbie(bot: FetchTarget):
qq = []
for x in file['query']['logevents'][:]:
qq.append(x['title'])
@Scheduler.scheduled_job('interval', seconds=60)
async def check_newbie():
qqqq = json.loads(await get_url(url))
for xz in qqqq['query']['logevents'][:]:
if xz['title'] not in qq:
s = await check(UTC8(xz['timestamp'], 'onlytime') + '新增新人:\n' + xz['title'])
s = '\n'.join(s)
if s.find("<吃掉了>") != -1 or s.find("<全部吃掉了>") != -1:
prompt = UTC8(xz['timestamp'], 'onlytime') + '新增新人:\n' + xz['title']
s = await check(prompt)
for z in s:
s = z['content']
if not z['status']:
s = s + '\n检测到外来信息介入,请前往日志查看所有消息。' \
'https://minecraft.fandom.com/zh/wiki/Special:%E6%97%A5%E5%BF%97?type=newusers'
await bot.post_message('__check_newbie__', s)


@ -1,8 +1,8 @@
import asyncio
from core.component import on_command
from core.dirty_check import check
from core.elements import MessageSession
from core.component import on_command
from .server import server
s = on_command('server', alias='s', developers=['_LittleC_', 'OasisAkari'])
@ -34,8 +34,8 @@ async def s(msg: MessageSession, address, raw, showplayer, mode):
sendmsg = await server(address, raw, showplayer, mode)
if sendmsg != '':
sendmsg = await check(sendmsg)
sendmsg = '\n'.join(sendmsg)
send = await msg.sendMessage(sendmsg + '\n[90秒后撤回消息]')
for x in sendmsg:
send = await msg.sendMessage(x['content'] + '\n[90秒后撤回消息]')
await msg.sleep(90)
await send.delete()
return sendmsg


@ -75,7 +75,8 @@ async def server(address, raw=False, showplayer=False, mode='j'):
servers.append(serip + ':' + port1)
except Exception:
traceback.print_exc()
servers.append("[JE]\n发生错误调用API时发生错误。\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=")
servers.append(
"[JE]\n发生错误调用API时发生错误。\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title=")
except Exception:
traceback.print_exc()
if raw:


@ -1,7 +1,7 @@
import re
from core.elements import Plain, Image, MessageSession
from core.component import on_command
from core.elements import Plain, Image, MessageSession
from modules.wiki.dbutils import WikiTargetInfo
from .userlib import GetUser


@ -4,7 +4,7 @@ import urllib.parse
import aiohttp
from modules.utils.UTC8 import UTC8
from modules.wiki.utils.UTC8 import UTC8
from modules.wiki.wikilib import wikilib as wiki
from .gender import gender
@ -230,4 +230,5 @@ async def GetUser(wikiurl, username, argv=None):
return '没有找到此用户。'
else:
traceback.print_exc()
return '发生错误:' + str(e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='
return '发生错误:' + str(
e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='


@ -1,50 +0,0 @@
from core.elements import MessageSession
from core.component import on_command
from modules.utils.ab import ab
from modules.utils.newbie import newbie
from modules.utils.rc import rc
from modules.utils.ab_qq import ab_qq
from modules.utils.rc_qq import rc_qq
from modules.wiki.dbutils import WikiTargetInfo
def get_start_wiki(msg: MessageSession):
start_wiki = WikiTargetInfo(msg).get_start_wiki()
return start_wiki
r = on_command('rc', desc='获取默认wiki的最近更改', developers=['OasisAkari'])
@r.handle()
async def rc_loader(msg: MessageSession):
start_wiki = get_start_wiki(msg)
if msg.Feature.forward and msg.target.targetFrom == 'QQ|Group':
nodelist = await rc_qq(start_wiki)
await msg.fake_forward_msg(nodelist)
else:
res = await rc(start_wiki)
await msg.sendMessage(res)
a = on_command('ab', desc='获取默认wiki的最近滥用日志', developers=['OasisAkari'])
@a.handle()
async def ab_loader(msg: MessageSession):
start_wiki = get_start_wiki(msg)
if msg.Feature.forward and msg.target.targetFrom == 'QQ|Group':
nodelist = await ab_qq(start_wiki)
await msg.fake_forward_msg(nodelist)
else:
res = await ab(start_wiki)
await msg.sendMessage(res)
n = on_command('newbie', desc='获取默认wiki的新用户', developers=['OasisAkari'])
@n.handle()
async def newbie_loader(msg: MessageSession):
res = await newbie(get_start_wiki(msg))
await msg.sendMessage(res)


@ -1,31 +0,0 @@
import aiohttp
import ujson as json
from core.dirty_check import check
from modules.utils.UTC8 import UTC8
from modules.wiki.wikilib import wikilib
async def ab(wiki_url):
if wiki_url:
pageurl = await wikilib().get_article_path(wiki_url) + 'Special:AbuseLog'
url = wiki_url + '?action=query&list=abuselog&aflprop=user|title|action|result|filter|timestamp&format=json'
async with aiohttp.ClientSession() as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as req:
if req.status != 200:
return f"请求时发生错误:{req.status}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title="
else:
text1 = await req.text()
file = json.loads(text1)
d = []
for x in file['query']['abuselog'][:5]:
d.append('' + x['title'] + ' - ' + x['user'] + '' + UTC8(x['timestamp'], 'onlytimenoutc') + '\n过滤器名:' + x[
'filter'] + '\n处理结果:' + x['result'])
y = await check(*d)
y = '\n'.join(y)
if y.find('<吃掉了>') != -1 or y.find('<全部吃掉了>') != -1:
return f'{pageurl}\n{y}\n...仅显示前5条内容\n检测到外来信息介入,请前往滥用日志查看所有消息。'
else:
return f'{pageurl}\n{y}\n...仅显示前5条内容'
else:
return '未设定起始Wiki。'


@ -1,74 +0,0 @@
import aiohttp
import ujson as json
from core.dirty_check import check
from modules.utils.UTC8 import UTC8
from modules.wiki.wikilib import wikilib
from config import Config
async def ab_qq(wiki_url):
qq_account = Config("qq_account")
if wiki_url:
article_path = await wikilib().get_article_path(wiki_url)
pageurl = article_path + 'Special:AbuseLog'
url = wiki_url + '?action=query&list=abuselog&aflprop=user|title|action|result|filter|timestamp&afllimit=99&format=json'
async with aiohttp.ClientSession() as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as req:
if req.status != 200:
return f"请求时发生错误:{req.status}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title="
else:
req_text = await req.text()
j = json.loads(req_text)
nodelist = [{
"type": "node",
"data": {
"name": f"滥用过滤器日志地址",
"uin": qq_account,
"content": [
{"type": "text", "data": {"text": pageurl}}]
}
}]
ablist = []
userlist = []
titlelist = []
for x in j["query"]["abuselog"]:
userlist.append(x['user'])
titlelist.append(x['title'])
checked_userlist = await check(*userlist)
count = 0
user_checked_map = {}
for u in checked_userlist:
user_checked_map[userlist[count]] = u
count += 1
checked_titlelist = await check(*titlelist)
count = 0
title_checked_map = {}
for t in checked_titlelist:
title_checked_map[titlelist[count]] = t
count += 1
for x in j["query"]["abuselog"]:
t = []
t.append(f"用户:{user_checked_map[x['user']]}")
t.append(f"过滤器名:{x['filter']}")
t.append(f"页面标题:{title_checked_map[x['title']]}")
t.append(f"操作:{x['action']}")
result = x['result']
if result == '':
result = 'pass'
t.append(f"处理结果:{result}")
t.append(UTC8(x['timestamp'], 'full'))
ablist.append('\n'.join(t))
for x in ablist:
nodelist.append(
{
"type": "node",
"data": {
"name": f"滥用过滤器日志",
"uin": qq_account,
"content": [{"type": "text", "data": {"text": x}}],
}
})
return nodelist
else:
return '未设定起始Wiki。'


@ -1,33 +0,0 @@
import re
import aiohttp
import ujson as json
from core.dirty_check import check
from modules.wiki.wikilib import wikilib
async def newbie(wiki_url):
if wiki_url:
pageurl = await wikilib().get_article_path(wiki_url) + 'Special:Log?type=newusers'
url = wiki_url + '?action=query&list=logevents&letype=newusers&format=json'
async with aiohttp.ClientSession() as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as req:
if req.status != 200:
return f"请求时发生错误:{req.status}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title="
else:
text1 = await req.text()
file = json.loads(text1)
d = []
for x in file['query']['logevents']:
d.append(x['title'])
y = await check(*d)
y = '\n'.join(y)
f = re.findall(r'.*\n.*\n.*\n.*\n.*', str(y))
g = pageurl + '\n' + f[0] + '\n...仅显示前5条内容'
if g.find('<吃掉了>') != -1 or g.find('<全部吃掉了>') != -1:
return g + '\n检测到外来信息介入请前往日志查看所有消息。Special:日志?type=newusers'
else:
return g
else:
return '未设定起始Wiki。'


@ -1,31 +0,0 @@
import aiohttp
import ujson as json
from core.dirty_check import check
from modules.utils.UTC8 import UTC8
from modules.wiki.wikilib import wikilib
async def rc(wiki_url):
if wiki_url:
pageurl = await wikilib().get_article_path(wiki_url) + 'Special:RecentChanges'
url = wiki_url + '?action=query&list=recentchanges&rcprop=title|user|timestamp&rctype=edit|new&format=json'
async with aiohttp.ClientSession() as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as req:
if req.status != 200:
return f"请求时发生错误:{req.status}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title="
else:
text1 = await req.text()
file = json.loads(text1)
d = []
for x in file['query']['recentchanges'][:5]:
d.append(x['title'] + ' - ' + x['user'] + ' ' + UTC8(x['timestamp'], 'onlytime'))
y = await check(*d)
y = '\n'.join(y)
if y.find('<吃掉了>') != -1 or y.find('<全部吃掉了>') != -1:
msg = f'{pageurl}\n{y}\n...仅显示前5条内容\n检测到外来信息介入,请前往最近更改查看所有消息。'
else:
msg = f'{pageurl}\n{y}\n...仅显示前5条内容'
return msg
else:
return '未设定起始Wiki。'


@ -1,106 +0,0 @@
import aiohttp
import ujson as json
import urllib.parse
from core.dirty_check import check
from modules.utils.UTC8 import UTC8
from modules.utils.action_cn import action
from modules.wiki.wikilib import wikilib
from config import Config
async def rc_qq(wiki_url):
qq_account = Config("qq_account")
if wiki_url:
article_path = await wikilib().get_article_path(wiki_url)
pageurl = article_path + 'Special:RecentChanges'
url = wiki_url + '?action=query&list=recentchanges&rcprop=title|user|timestamp|loginfo|comment|redirect|flags|sizes|ids&rclimit=99&rctype=edit|new|log&format=json'
async with aiohttp.ClientSession() as session:
async with session.get(url, timeout=aiohttp.ClientTimeout(total=20)) as req:
if req.status != 200:
return f"请求时发生错误:{req.status}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title="
else:
req_text = await req.text()
j = json.loads(req_text)
nodelist = [{
"type": "node",
"data": {
"name": f"最近更改地址",
"uin": qq_account,
"content": [
{"type": "text", "data": {"text": pageurl}}]
}
}]
rclist = []
userlist = []
titlelist = []
for x in j["query"]["recentchanges"]:
userlist.append(x['user'])
titlelist.append(x['title'])
checked_userlist = await check(*userlist)
count = 0
user_checked_map = {}
for u in checked_userlist:
user_checked_map[userlist[count]] = u
count += 1
checked_titlelist = await check(*titlelist)
count = 0
title_checked_map = {}
for t in checked_titlelist:
title_checked_map[titlelist[count]] = t
count += 1
for x in j["query"]["recentchanges"]:
t = []
t.append(f"用户:{user_checked_map[x['user']]}")
t.append(UTC8(x['timestamp'], 'full'))
if x['type'] == 'edit':
count = x['newlen'] - x['oldlen']
if count > 0:
count = f'+{str(count)}'
else:
count = str(count)
t.append(f"{title_checked_map[x['title']]}{count}")
comment = x['comment']
if comment == '':
comment = '(无摘要内容)'
t.append(comment)
t.append(
f"{article_path}{urllib.parse.quote(title_checked_map[x['title']])}?oldid={x['old_revid']}&diff={x['revid']}")
if x['type'] == 'new':
r = ''
if 'redirect' in x:
r = '(新重定向)'
t.append(f"{title_checked_map[x['title']]}{r}")
comment = x['comment']
if comment == '':
comment = '(无摘要内容)'
t.append(comment)
if x['type'] == 'log':
log = x['logaction'] + '' + title_checked_map[x['title']]
if x['logtype'] in action:
a = action[x['logtype']].get(x['logaction'])
if a is not None:
log = a % title_checked_map[x['title']]
t.append(log)
params = x['logparams']
if 'durations' in params:
t.append('时长:' + params['durations'])
if 'target_title' in params:
t.append('对象页面:' + params['target_title'])
if x['revid'] != 0:
t.append(f"{article_path}{urllib.parse.quote(title_checked_map[x['title']])}")
rclist.append('\n'.join(t))
for x in rclist:
nodelist.append(
{
"type": "node",
"data": {
"name": f"最近更改",
"uin": qq_account,
"content": [{"type": "text", "data": {"text": x}}],
}
})
return nodelist
else:
return '未设定起始Wiki。'


@ -2,8 +2,8 @@ import re
import ujson as json
from core.elements import Plain, Image, MessageSession
from core.component import on_command
from core.elements import Plain, Image, MessageSession
from core.utils import get_url


@ -1,5 +1,5 @@
from core.elements import CronTrigger, FetchTarget
from core.component import on_schedule
from core.elements import CronTrigger, FetchTarget
from core.logger import Logger
from modules.weekly import get_weekly


@ -1,22 +1,26 @@
import traceback
import asyncio
import filetype
import re
import traceback
from typing import Union
import filetype
import ujson as json
from core.elements import MessageSession, Plain, Image, Voice
from core.component import on_command, on_regex, on_option
from core.elements import MessageSession, Plain, Image, Voice
from core.exceptions import AbuseWarning
from core.utils import download_to_cache
from core.utils.image_table import image_table_render, ImageTable
from core.exceptions import AbuseWarning
from database import BotDBUtil
from .dbutils import WikiTargetInfo
from .wikilib_v2 import WikiLib, WhatAreUDoingError, PageInfo
from .getinfobox import get_infobox_pic
from .audit import WikiWhitelistError, audit_allow, audit_remove, audit_list, audit_query
from .dbutils import WikiTargetInfo
from .getinfobox import get_infobox_pic
from .utils.ab import ab
from .utils.ab_qq import ab_qq
from .utils.newbie import newbie
from .utils.rc import rc
from .utils.rc_qq import rc_qq
from .wikilib_v2 import WikiLib, WhatAreUDoingError, PageInfo, InvalidWikiError
wiki = on_command('wiki',
alias={'wiki_start_site': 'wiki set', 'interwiki': 'wiki iw'},
@ -355,6 +359,8 @@ async def query_pages(msg: MessageSession, title: Union[str, list, tuple],
wait_msg_list.append(Plain('\n'.join(wait_plain_slice)))
except WhatAreUDoingError:
raise AbuseWarning('使机器人重定向页面的次数过多。')
except InvalidWikiError as e:
await msg.sendMessage(f'发生错误:' + str(e))
except Exception:
traceback.print_exc()
if msg_list:
@ -372,3 +378,47 @@ async def query_pages(msg: MessageSession, title: Union[str, list, tuple],
confirm = await msg.waitConfirm(wait_msg_list)
if confirm and wait_list:
await query_pages(msg, wait_list)
rc_ = on_command('rc', desc='获取默认wiki的最近更改', developers=['OasisAkari'])
@rc_.handle()
async def rc_loader(msg: MessageSession):
start_wiki = WikiTargetInfo(msg).get_start_wiki()
if start_wiki is None:
return await msg.sendMessage('未设置起始wiki。')
if msg.Feature.forward and msg.target.targetFrom == 'QQ|Group':
nodelist = await rc_qq(start_wiki)
await msg.fake_forward_msg(nodelist)
else:
res = await rc(start_wiki)
await msg.sendMessage(res)
a = on_command('ab', desc='获取默认wiki的最近滥用日志', developers=['OasisAkari'])
@a.handle()
async def ab_loader(msg: MessageSession):
start_wiki = WikiTargetInfo(msg).get_start_wiki()
if start_wiki is None:
return await msg.sendMessage('未设置起始wiki。')
if msg.Feature.forward and msg.target.targetFrom == 'QQ|Group':
nodelist = await ab_qq(start_wiki)
await msg.fake_forward_msg(nodelist)
else:
res = await ab(start_wiki)
await msg.sendMessage(res)
n = on_command('newbie', desc='获取默认wiki的新用户', developers=['OasisAkari'])
@n.handle()
async def newbie_loader(msg: MessageSession):
start_wiki = WikiTargetInfo(msg).get_start_wiki()
if start_wiki is None:
return await msg.sendMessage('未设置起始wiki。')
res = await newbie(start_wiki)
await msg.sendMessage(res)


@ -1,9 +1,10 @@
import re
from .orm import WikiWhitelist
from database import auto_rollback_error, session
from tenacity import retry, stop_after_attempt
from database import auto_rollback_error, session
from .orm import WikiWhitelist
@retry(stop=stop_after_attempt(3))
@auto_rollback_error

modules/wiki/utils/ab.py Normal file

@ -0,0 +1,19 @@
from core.dirty_check import check
from modules.wiki.utils.UTC8 import UTC8
from modules.wiki.wikilib_v2 import WikiLib
async def ab(wiki_url):
wiki = WikiLib(wiki_url)
query = await wiki.get_json(action='query', list='abuselog', aflprop='user|title|action|result|filter|timestamp')
pageurl = wiki.wiki_info.articlepath.replace('$1', 'Special:AbuseLog')
d = []
for x in query['query']['abuselog'][:5]:
d.append('' + x['title'] + ' - ' + x['user'] + '' + UTC8(x['timestamp'], 'onlytimenoutc') + '\n过滤器名:' + x[
'filter'] + '\n处理结果:' + x['result'])
y = await check(*d)
y = '\n'.join(z['content'] for z in y)
if y.find('<吃掉了>') != -1 or y.find('<全部吃掉了>') != -1:
return f'{pageurl}\n{y}\n...仅显示前5条内容\n检测到外来信息介入,请前往滥用日志查看所有消息。'
else:
return f'{pageurl}\n{y}\n...仅显示前5条内容'


@ -0,0 +1,58 @@
from config import Config
from core.dirty_check import check
from modules.wiki.utils.UTC8 import UTC8
from modules.wiki.wikilib_v2 import WikiLib
async def ab_qq(wiki_url):
wiki = WikiLib(wiki_url)
qq_account = Config("qq_account")
query = await wiki.get_json(action='query', list='abuselog', aflprop='user|title|action|result|filter|timestamp',
afllimit=99)
pageurl = wiki.wiki_info.articlepath.replace("$1", 'Special:AbuseLog')
nodelist = [{
"type": "node",
"data": {
"name": f"滥用过滤器日志地址",
"uin": qq_account,
"content": [
{"type": "text", "data": {"text": pageurl}}]
}
}]
ablist = []
userlist = []
titlelist = []
for x in query["query"]["abuselog"]:
userlist.append(x['user'])
titlelist.append(x['title'])
checked_userlist = await check(*userlist)
user_checked_map = {}
for u in checked_userlist:
user_checked_map[u['original']] = u['content']
checked_titlelist = await check(*titlelist)
title_checked_map = {}
for t in checked_titlelist:
title_checked_map[t['original']] = t['content']
for x in query["query"]["abuselog"]:
t = []
t.append(f"用户:{user_checked_map[x['user']]}")
t.append(f"过滤器名:{x['filter']}")
t.append(f"页面标题:{title_checked_map[x['title']]}")
t.append(f"操作:{x['action']}")
result = x['result']
if result == '':
result = 'pass'
t.append(f"处理结果:{result}")
t.append(UTC8(x['timestamp'], 'full'))
ablist.append('\n'.join(t))
for x in ablist:
nodelist.append(
{
"type": "node",
"data": {
"name": f"滥用过滤器日志",
"uin": qq_account,
"content": [{"type": "text", "data": {"text": x}}],
}
})
return nodelist
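For context, this nodelist is consumed by the loaders added in modules/wiki/__init__.py in this same commit; condensed, the handler side looks like this (inside an async command handler):

nodelist = await ab_qq(start_wiki)    # build the forward-message nodes shown above
await msg.fake_forward_msg(nodelist)  # send them as a QQ forward message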


@ -0,0 +1,17 @@
from core.dirty_check import check
from modules.wiki.wikilib_v2 import WikiLib
async def newbie(wiki_url):
wiki = WikiLib(wiki_url)
query = await wiki.get_json(action='query', list='logevents', letype='newusers')
pageurl = wiki.wiki_info.articlepath.replace('$1', 'Special:Log?type=newusers')
d = []
for x in query['query']['logevents'][:5]:
d.append(x['title'])
y = await check(*d)
y = '\n'.join(z['content'] for z in y)
g = f'{pageurl}\n{y}\n...仅显示前5条内容'
if g.find('<吃掉了>') != -1 or g.find('<全部吃掉了>') != -1:
g += '\n检测到外来信息介入请前往日志查看所有消息。Special:日志?type=newusers'
return g

modules/wiki/utils/rc.py Normal file

@ -0,0 +1,19 @@
from core.dirty_check import check
from modules.wiki.utils.UTC8 import UTC8
from modules.wiki.wikilib_v2 import WikiLib
async def rc(wiki_url):
wiki = WikiLib(wiki_url)
query = await wiki.get_json(action='query', list='recentchanges', rcprop='title|user|timestamp', rctype='edit')
pageurl = wiki.wiki_info.articlepath.replace('$1', 'Special:RecentChanges')
d = []
for x in query['query']['recentchanges'][:5]:
d.append(x['title'] + ' - ' + x['user'] + ' ' + UTC8(x['timestamp'], 'onlytime'))
y = await check(*d)
y = '\n'.join(z['content'] for z in y)
if y.find('<吃掉了>') != -1 or y.find('<全部吃掉了>') != -1:
msg = f'{pageurl}\n{y}\n...仅显示前5条内容\n检测到外来信息介入,请前往最近更改查看所有消息。'
else:
msg = f'{pageurl}\n{y}\n...仅显示前5条内容'
return msg


@ -0,0 +1,93 @@
import urllib.parse
from config import Config
from core.dirty_check import check
from modules.wiki.utils.UTC8 import UTC8
from modules.wiki.utils.action_cn import action
from modules.wiki.wikilib_v2 import WikiLib
async def rc_qq(wiki_url):
wiki = WikiLib(wiki_url)
qq_account = Config("qq_account")
query = await wiki.get_json(action='query', list='recentchanges',
rcprop='title|user|timestamp|loginfo|comment|redirect|flags|sizes|ids',
rclimit=99,
rctype='edit|new|log'
)
pageurl = wiki.wiki_info.articlepath.replace("$1", 'Special:RecentChanges')
nodelist = [{
"type": "node",
"data": {
"name": f"最近更改地址",
"uin": qq_account,
"content": [
{"type": "text", "data": {"text": pageurl}}]
}
}]
rclist = []
userlist = []
titlelist = []
for x in query["query"]["recentchanges"]:
userlist.append(x['user'])
titlelist.append(x['title'])
checked_userlist = await check(*userlist)
user_checked_map = {}
for u in checked_userlist:
user_checked_map[u['original']] = u['content']
checked_titlelist = await check(*titlelist)
title_checked_map = {}
for t in checked_titlelist:
title_checked_map[t['original']] = t['content']
for x in query["query"]["recentchanges"]:
t = []
t.append(f"用户:{user_checked_map[x['user']]}")
t.append(UTC8(x['timestamp'], 'full'))
if x['type'] == 'edit':
count = x['newlen'] - x['oldlen']
if count > 0:
count = f'+{str(count)}'
else:
count = str(count)
t.append(f"{title_checked_map[x['title']]}{count}")
comment = x['comment']
if comment == '':
comment = '(无摘要内容)'
t.append(comment)
t.append(
f"{pageurl}{urllib.parse.quote(title_checked_map[x['title']])}?oldid={x['old_revid']}&diff={x['revid']}")
if x['type'] == 'new':
r = ''
if 'redirect' in x:
r = '(新重定向)'
t.append(f"{title_checked_map[x['title']]}{r}")
comment = x['comment']
if comment == '':
comment = '(无摘要内容)'
t.append(comment)
if x['type'] == 'log':
log = x['logaction'] + '' + title_checked_map[x['title']]
if x['logtype'] in action:
a = action[x['logtype']].get(x['logaction'])
if a is not None:
log = a % title_checked_map[x['title']]
t.append(log)
params = x['logparams']
if 'durations' in params:
t.append('时长:' + params['durations'])
if 'target_title' in params:
t.append('对象页面:' + params['target_title'])
if x['revid'] != 0:
t.append(f"{pageurl}{urllib.parse.quote(title_checked_map[x['title']])}")
rclist.append('\n'.join(t))
for x in rclist:
nodelist.append(
{
"type": "node",
"data": {
"name": f"最近更改",
"uin": qq_account,
"content": [{"type": "text", "data": {"text": x}}],
}
})
return nodelist


@ -108,9 +108,8 @@ class wikilib:
if not self.danger_wiki_check():
return False
check = await dirty_check.check(text)
check = '\n'.join(check)
print(check)
if check.find('<吃掉了>') != -1 or check.find('<全部吃掉了>') != -1:
for x in check:
if not x['status']:
return True
return False
@ -450,7 +449,8 @@ class wikilib:
return msgs
except Exception as e:
traceback.print_exc()
return {'status': 'done', 'text': '发生错误:' + str(e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
return {'status': 'done', 'text': '发生错误:' + str(
e) + '\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
async def main(self, api_endpoint_link, page_name, interwiki=None, template=False, headers=None, tryiw=0):
print(api_endpoint_link)
@ -489,10 +489,12 @@ class wikilib:
self.page_name = 'Template:' + self.page_name
self.page_raw = await self.get_page_link()
except asyncio.exceptions.TimeoutError:
return {'status': 'done', 'text': '发生错误:请求页面超时。\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
return {'status': 'done',
'text': '发生错误:请求页面超时。\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
except Exception as e:
traceback.print_exc()
return {'status': 'done', 'text': f'发生错误:{str(e)}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
return {'status': 'done',
'text': f'发生错误:{str(e)}\n错误汇报地址https://github.com/Teahouse-Studios/bot/issues/new?assignees=OasisAkari&labels=bug&template=5678.md&title='}
if 'interwiki' in self.page_raw['query']:
iwp = self.page_raw['query']['interwiki'][0]
match_interwiki = re.match(r'^' + iwp['iw'] + r':(.*)', iwp['title'])


@ -1,18 +1,18 @@
import asyncio
import datetime
import re
import traceback
import urllib.parse
from typing import Union, Dict, List
import html2text
import ujson as json
import re
import urllib.parse
from core.dirty_check import check
from core.logger import Logger
from core.utils import get_url
from core.dirty_check import check
from .dbutils import WikiSiteInfo as DBSiteInfo
from .audit import check_whitelist
from .dbutils import WikiSiteInfo as DBSiteInfo
class InvalidPageIDError(Exception):
@ -168,7 +168,7 @@ class WikiLib:
api_match = self.url.split('//')[0] + api_match
Logger.info(api_match)
wiki_api_link = api_match
except TimeoutError:
except (TimeoutError, asyncio.TimeoutError):
return WikiStatus(available=False, value=False, message='错误:尝试建立连接超时。')
except Exception as e:
traceback.print_exc()
@ -192,6 +192,7 @@ class WikiLib:
meta='siteinfo',
siprop='general|namespaces|namespacealiases|interwikimap|extensions')
except Exception as e:
traceback.print_exc()
return WikiStatus(available=False, value=False, message='从API获取信息时出错' + str(e))
DBSiteInfo(wiki_api_link).update(get_json)
info = self.rearrange_siteinfo(get_json)
@ -292,7 +293,14 @@ class WikiLib:
:param iw_prefix: iw前缀
:return:
"""
try:
await self.fixup_wiki_info()
except InvalidWikiError as e:
if self.url.find('$1') != -1:
link = self.url.replace('$1', title)
else:
link = self.url + title
return PageInfo(title=title, link=link, desc='发生错误:' + str(e), info=self.wiki_info)
if tried_iw > 5:
raise WhatAreUDoingError
if title == '':
@ -430,7 +438,7 @@ class WikiLib:
chk = await check(*checklist)
for x in chk:
print(x)
if x.find("<吃掉了>") != -1 or x.find("<全部吃掉了>") != -1:
if not x['status']:
page_info.status = True
page_info.before_title = '?'
page_info.title = '¿'