Use aiohttp instead of requests

Branch: master
Author: Lonami Exo
Date:   2019-05-11 21:13:46 +02:00
Parent: 5d5ddb7db2
Commit: d263f315be

2 changed files with 19 additions and 15 deletions

bot.py (30 changed lines)

@@ -3,11 +3,10 @@
 import asyncio
 from os import environ
 from collections import namedtuple
-from json import loads
 import logging
-import requests
+import aiohttp
 from telethon import TelegramClient, events
 from bs4 import BeautifulSoup
@@ -26,15 +25,17 @@ XKCD_JSON_FMT_STR = "https://xkcd.com/{}/info.0.json"
 MAX_SEARCH_RESULTS = 10

 bot = TelegramClient('xkcd', 6, 'eb06d4abfb49dc3eeb1aeb98ae0f581e')
+session = None  # set later

 # blockquote element -> Xkcd
-def parse_blockquote(elem):
+async def parse_blockquote(elem):
     children = list(elem.children)
     title = children[0].text
     link = 'https' + children[-1].text[4:]
     number = link.rsplit('/', 2)[1]
-    info = loads(requests.get(XKCD_JSON_FMT_STR.format(number)).text)
+    async with session.get(XKCD_JSON_FMT_STR.format(number)) as resp:
+        info = await resp.json()
     alt = info['alt']

     # TODO markdown bold the <span> matches
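For readers new to aiohttp, the pattern adopted in the hunk above works like this: a single ClientSession is created up front and reused for every request, session.get() is an async context manager that yields the response, and the body is read with an awaitable such as resp.json(), which also replaces the old json.loads call. A minimal, self-contained sketch of that pattern (the fetch_info helper and comic number 614 are illustrative only, not part of this commit):

import asyncio
import aiohttp

XKCD_JSON_FMT_STR = "https://xkcd.com/{}/info.0.json"

async def fetch_info(session, number):
    # Both the request and the body read are awaited, so the event loop
    # stays free to run other handlers during the HTTP round trip.
    async with session.get(XKCD_JSON_FMT_STR.format(number)) as resp:
        resp.raise_for_status()
        return await resp.json()

async def demo():
    # One session for the whole program; it pools connections internally.
    async with aiohttp.ClientSession() as session:
        info = await fetch_info(session, 614)
        print(info['alt'])

asyncio.get_event_loop().run_until_complete(demo())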
@@ -47,16 +48,17 @@ def parse_blockquote(elem):
     return Xkcd(title, link, text, alt, number)

 # string -> [Xkcd]
-def get_xkcds(text):
+async def get_xkcds(text):
     logger.info("getting %s", text)
     if text == '':
         return []
     # TODO return newest when empty
-    soup = BeautifulSoup(requests.get(URL_FMT_STR.format(text)).text, "html.parser")
+    async with session.get(URL_FMT_STR.format(text)) as resp:
+        soup = BeautifulSoup(await resp.text(), "html.parser")
     bqs = soup.find_all("blockquote")[:MAX_SEARCH_RESULTS]
     logger.info(bqs)
-    return (parse_blockquote(e) for e in bqs)
+    return await asyncio.gather(*(parse_blockquote(e) for e in bqs))

 # Define a few command handlers. These usually take the two arguments bot and
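A consequence of the asyncio.gather call above: get_xkcds used to return a lazy generator of Xkcd values, but it now awaits every parse_blockquote concurrently and returns a concrete list, which is why the caller in the next hunk has to await it. A rough sketch of how gather behaves, with work standing in for parse_blockquote (not code from this repository):

import asyncio

async def work(n):
    # Stand-in for one parse_blockquote call, i.e. one HTTP request.
    await asyncio.sleep(0.1)
    return n * n

async def demo():
    # gather schedules all the coroutines at once and waits for every
    # result, returning them as a list in the same order as the inputs.
    results = await asyncio.gather(*(work(n) for n in range(5)))
    print(results)  # [0, 1, 4, 9, 16]

asyncio.get_event_loop().run_until_complete(demo())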
@@ -84,7 +86,7 @@ async def inlinequery(event):
         title=xkcd.title,
         url=xkcd.link,
         text=MSG_FMT_STR.format(number=xkcd.number, link=xkcd.link, title=xkcd.title, alt=xkcd.alt)
-    ) for xkcd in get_xkcds(event.text)))
+    ) for xkcd in await get_xkcds(event.text)))

     # FIXME get_xkcds returns duplicates, which lead to the same result ID
     # Build a dict by their ID to remove the duplicates
@@ -92,11 +94,13 @@ async def inlinequery(event):
     await event.answer(result)

-def main():
-    bot.start(bot_token=environ['TOKEN'])
-    with bot:
-        bot.run_until_disconnected()
+async def main():
+    global session
+    async with aiohttp.ClientSession() as session:
+        await bot.start(bot_token=environ['TOKEN'])
+        async with bot:
+            await bot.run_until_disconnected()

 if __name__ == '__main__':
-    main()
+    asyncio.get_event_loop().run_until_complete(main())
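The main() rewrite also settles the session's lifetime: the ClientSession is created only once the event loop is running, stored in the module-level session global that parse_blockquote and get_xkcds read, and closed automatically when the bot disconnects. A minimal sketch of that lifetime pattern, using a placeholder handler and URL rather than the real bot code:

import asyncio
import aiohttp

session = None  # assigned once the event loop is running

async def handler(url):
    # Every caller reuses the shared session and its connection pool.
    async with session.get(url) as resp:
        return await resp.text()

async def main():
    global session
    # Creating the session inside a coroutine ties it to the running loop,
    # and the async with block guarantees it is closed when main() returns.
    async with aiohttp.ClientSession() as session:
        html = await handler('https://example.com/')
        print(len(html))

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(main())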

requirements.txt (4 changed lines)

@@ -1,3 +1,3 @@
-telethon~=1.7
-requests~=2.21
+aiohttp~=3.5
 beautifulsoup4~=4.7
+telethon~=1.7