Archived
1
0
Fork 0
This repository has been archived on 2024-04-26. You can view files and clone it, but cannot push or open issues or pull requests.
akari-bot/core/utils.py

118 lines
3.4 KiB
Python
Raw Normal View History

2021-04-28 12:21:33 +00:00
import os
2021-04-27 15:01:17 +00:00
import re
2021-08-07 12:55:07 +00:00
import shutil
2021-07-21 17:58:33 +00:00
import traceback
2021-07-27 17:42:47 +00:00
import uuid
2021-08-07 07:56:48 +00:00
from os.path import abspath
2021-04-27 15:01:17 +00:00
2021-02-06 16:30:13 +00:00
import aiohttp
2021-07-21 17:58:33 +00:00
import filetype as ft
2021-02-06 16:30:13 +00:00
2021-08-07 07:56:48 +00:00
from core.logger import Logger
2021-02-06 16:30:13 +00:00
2021-08-07 12:55:07 +00:00
def init_bot():
    """Prepare the bot's runtime environment.

    Resets the ``./cache/`` directory to an empty state and records the
    current git revision (short hash) and the most recent git tag into the
    ``.version`` / ``.version_tag`` files next to the working directory.
    """
    cache_path = os.path.abspath('./cache/')
    # Always start from an empty cache directory on boot.
    if os.path.exists(cache_path):
        shutil.rmtree(cache_path)
    os.mkdir(cache_path)
    # Short commit hash: first 7 characters of `git rev-parse HEAD`.
    with open(os.path.abspath('.version'), 'w') as write_version:
        write_version.write(os.popen('git rev-parse HEAD', 'r').read()[0:7])
    # `git tag -l` output ends with a newline, so the last non-empty line
    # sits at index -2 after splitting.  Guard against a repository with no
    # tags, where the original indexing raised IndexError.
    tag_lines = os.popen('git tag -l', 'r').read().split('\n')
    with open(os.path.abspath('.version_tag'), 'w') as write_tag:
        write_tag.write(tag_lines[-2] if len(tag_lines) >= 2 else '')
2021-07-27 17:42:47 +00:00
"""
2021-04-28 12:21:33 +00:00
async def load_prompt():
2021-06-07 13:49:39 +00:00
author_cache = os.path.abspath('.cache_restart_author')
loader_cache = os.path.abspath('.cache_loader')
if os.path.exists(author_cache):
2021-04-28 12:21:33 +00:00
import json
2021-06-07 13:49:39 +00:00
open_author_cache = open(author_cache, 'r')
cache_json = json.loads(open_author_cache.read())
open_loader_cache = open(loader_cache, 'r')
2021-07-21 17:58:33 +00:00
await sendMessage(cache_json, open_loader_cache.read(), quote=False)
2021-06-07 13:49:39 +00:00
open_loader_cache.close()
open_author_cache.close()
os.remove(author_cache)
os.remove(loader_cache)
2021-07-27 17:42:47 +00:00
"""
2021-04-28 12:21:33 +00:00
2021-08-07 07:56:48 +00:00
2021-02-11 12:41:07 +00:00
async def get_url(url: str, headers=None):
    """Fetch *url* via HTTP GET and return the response body as text.

    A total timeout of 20 seconds is applied to the whole request.
    Optional *headers* are forwarded to the request unchanged.
    """
    request_timeout = aiohttp.ClientTimeout(total=20)
    async with aiohttp.ClientSession() as session:
        async with session.get(url, timeout=request_timeout, headers=headers) as response:
            return await response.text()
2021-04-27 15:01:17 +00:00
2021-06-07 13:49:39 +00:00
def remove_ineffective_text(prefix, lst):
    """Normalize a list of command strings.

    For each string, and for each of the separators newline and space in
    turn: drop empty segments produced by leading/trailing separators, and
    strip leading command-prefix characters (any character contained in
    *prefix*) from the first segment.  Finally remove duplicate entries
    while preserving first-seen order.

    :param prefix: string/iterable of command prefix characters to strip.
    :param lst: list of raw command strings.
    :return: cleaned, de-duplicated list of command strings.
    """
    for sep in ('\n', ' '):  # trim these from both ends of every entry
        cleaned = []
        for item in lst:
            segments = item.split(sep)
            # Strip empty segments at both ends.  The original deleted while
            # iterating, which left leading '' segments behind (and then
            # crashed on segments[0][0]) when leading separators outnumbered
            # the content, e.g. '\n\n\ncmd'.
            while segments and segments[0] == '':
                del segments[0]
            while segments and segments[-1] == '':
                del segments[-1]
            # Drop leading prefix characters from the first segment.  Plain
            # slicing replaces the original re.sub(r'^' + char, ...), which
            # misbehaved whenever the prefix char was a regex metacharacter
            # ('*' raised, '.' matched any first character).  The number of
            # strips stays bounded by the segment count, matching the
            # original loop's iteration budget.
            for _ in range(len(segments)):
                if segments[0] and segments[0][0] in prefix:
                    segments[0] = segments[0][1:]
            cleaned.append(sep.join(segments))
        lst = cleaned
    deduplicated = []  # remove repeated commands, keep first occurrence
    for item in lst:
        if item not in deduplicated:
            deduplicated.append(item)
    return deduplicated
2021-08-07 07:56:48 +00:00
2021-07-21 17:58:33 +00:00
def RemoveDuplicateSpace(text: str):
    """Collapse consecutive space characters in *text* into single spaces.

    Only the plain space character is collapsed; other whitespace such as
    newlines is left untouched.  Leading/trailing spaces are removed as a
    side effect of dropping empty segments.
    """
    segments = [segment for segment in text.split(' ') if segment]
    return ' '.join(segments)
async def download_to_cache(link):
    """Download *link* into ``./cache/`` under a random UUID filename.

    The file extension is inferred from the payload via ``filetype``.

    :param link: URL to download.
    :return: absolute path of the written file on success, ``False`` on any
        failure (network error, unrecognized payload type, write error).
    """
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(link) as resp:
                res = await resp.read()
        kind = ft.match(res)
        if kind is None:
            # filetype could not identify the payload; the original code hit
            # an AttributeError here that the bare except silently turned
            # into False — make the outcome explicit instead.
            return False
        path = abspath(f'./cache/{str(uuid.uuid4())}.{kind.extension}')
        with open(path, 'wb+') as file:
            file.write(res)
        return path
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; this remains deliberately best-effort.
        traceback.print_exc()
        return False
2021-07-27 17:42:47 +00:00
def cache_name():
    """Return an absolute path for a fresh, randomly named cache entry.

    The path points into ``./cache/`` with a UUID4 basename and no
    extension; the file itself is not created.
    """
    return abspath('./cache/{}'.format(uuid.uuid4()))
2021-07-21 17:58:33 +00:00
async def slk_converter(filepath):
    """Encode the voice file at *filepath* to SILK format.

    Delegates the actual encoding to the external ``slk_coder.py`` script in
    a separate process and returns the expected output path (the input path
    with a ``.silk`` suffix appended).
    """
    Logger.info('Start encoding voice...')
    os.system('python slk_coder.py ' + filepath)
    Logger.info('Voice encoded.')
    return filepath + '.silk'