Skip to content

Commit

Permalink
合并拉取请求 #11
Browse files Browse the repository at this point in the history
dev
  • Loading branch information
Cypas authored Jan 12, 2024
2 parents 093f640 + b7c3eeb commit fd06108
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 26 deletions.
18 changes: 11 additions & 7 deletions nonebot_plugin_splatoon3_schedule/image/image_processer_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,13 +262,17 @@ def have_festival(_festivals):
def now_is_festival(_festivals):
    """Return True if a splatfest (festival) is currently in progress.

    Looks only at the first entry of *_festivals* (the current/nearest
    festival): if it carries a match setting and the current China time
    falls inside its start/end window, the festival is live.

    Args:
        _festivals: list of festival dicts with "startTime"/"endTime"
            ISO strings and a "festMatchSetting(s)" field.

    Returns:
        bool: True while the festival window is open, else False.
    """
    now = get_time_now_china()
    # Only the first entry describes the current festival.
    festival = _festivals[0]
    # The API field name varies between schema versions
    # ("festMatchSetting" vs "festMatchSettings"), so accept either.
    setting = festival.get("festMatchSetting") or festival.get("festMatchSettings")
    if setting:
        # Festival has settings; check whether "now" lies in its window.
        st = time_converter(festival["startTime"])
        et = time_converter(festival["endTime"])
        if st < now < et:
            return True
    return False


Expand Down
30 changes: 12 additions & 18 deletions nonebot_plugin_splatoon3_schedule/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,13 @@
from .dataClass import TimeUtil
from ..config import plugin_config

time_format_ymdh = "%Y-%m-%dT%H"
HTTP_TIME_OUT = 5.0  # request timeout, seconds
# Proxy address from plugin config; empty/None means no proxy.
proxy_address = plugin_config.splatoon3_proxy_address
# Shared httpx proxy URL, computed once at import (None disables proxying).
proxies = "http://{}".format(proxy_address) if proxy_address else None

# 背景 rgb颜色
dict_bg_rgb = {
Expand All @@ -29,18 +34,17 @@

def cf_http_get(url: str):
"""cf get"""
global proxy_address
# 实例化一个create_scraper对象
scraper = cfscrape.create_scraper()
# 请求报错,可以加上时延
# scraper = cfscrape.create_scraper(delay = 6)
if proxy_address:
proxies = {
cf_proxies = {
"http": "http://{}".format(proxy_address),
"https": "http://{}".format(proxy_address),
}
# 获取网页内容 代理访问
res = scraper.get(url, proxies=proxies)
res = scraper.get(url, proxies=cf_proxies)
else:
# 获取网页内容
res = scraper.get(url)
Expand All @@ -49,24 +53,14 @@ def cf_http_get(url: str):

async def async_http_get(url: str) -> Response:
    """Asynchronously GET *url*, honoring the module-level proxy setting.

    Args:
        url: target URL.

    Returns:
        httpx.Response for the completed request.
    """
    # `proxies` is computed once at module import; httpx accepts None,
    # so no branching on proxy presence is needed here.
    async with httpx.AsyncClient(proxies=proxies) as client:
        response = await client.get(url, timeout=HTTP_TIME_OUT)
        return response


def http_get(url: str) -> Response:
    """Synchronously GET *url*, honoring the module-level proxy setting.

    Args:
        url: target URL.

    Returns:
        httpx.Response for the completed request.
    """
    # httpx.get accepts proxies=None, so the proxy/no-proxy branch
    # collapses into a single call.
    response = httpx.get(url, proxies=proxies, timeout=HTTP_TIME_OUT)
    return response


Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "nonebot-plugin-splatoon3-schedule"
version = "1.5.1"
description = "一个基于nonebot2框架的splatoon3游戏日程查询插件"
authors = ["cypas <ayano05@outlook.com>"]
readme = "README.md"
Expand Down

0 comments on commit fd06108

Please sign in to comment.