
use asyncio

Lukas Winkler 2018-01-04 20:25:44 +01:00
parent 6604523359
commit f6e9bc2a75
2 changed files with 39 additions and 13 deletions

main.py

@@ -1,8 +1,10 @@
+import asyncio
+import logging
 import sys
 from time import mktime
+import aiohttp
 import feedparser
-import logging
 import yaml
 from raven import Client
 from wallabag_api.wallabag import Wallabag
@@ -46,18 +48,29 @@ if "sentry_url" in config and ("debug" not in config or not config["debug"]):
         )
     )
-token = Wallabag.get_token(**config["wallabag"])
-wall = Wallabag(host=config["wallabag"]["host"], client_secret=config["wallabag"]["client_secret"],
-                client_id=config["wallabag"]["client_id"], token=token)
+async def fetch(session, url):
+    async with session.get(url) as response:
+        return await response.text()
-sites = github_stars.get_starred_repos(config["github_username"], sites)
-for sitetitle, site in sites.items():
+async def main(loop, sites):
+    token = await Wallabag.get_token(**config["wallabag"])
+    async with aiohttp.ClientSession(loop=loop) as session:
+        wall = Wallabag(host=config["wallabag"]["host"], client_secret=config["wallabag"]["client_secret"],
+                        client_id=config["wallabag"]["client_id"], token=token, aio_sess=session)
+        sites = github_stars.get_starred_repos(config["github_username"], sites)
+        await asyncio.gather(*[handle_feed(session, wall, sitetitle, site) for sitetitle, site in sites.items()])
+async def handle_feed(session, wall, sitetitle, site):
     logger.info(sitetitle + ": Downloading feed")
     # r = requests.get(site["url"])
+    rss = await fetch(session, site["url"])
     logger.info(sitetitle + ": Parsing feed")
-    f = feedparser.parse(site["url"])
+    f = feedparser.parse(rss)
     logger.debug(sitetitle + ": finished parsing")
     # feedtitle = f["feed"]["title"]
     if "latest_article" in site:
@@ -82,11 +95,15 @@ for sitetitle, site in sites.items():
             else:
                 title = article.title
             if "debug" not in config or not config["debug"]:
-                wall.post_entries(url=article.link, title=title, tags=tags)
+                await wall.post_entries(url=article.link, title=title, tags=tags)
     else:
         logger.debug(sitetitle + ": no latest_article")
     if f.entries:
         sites[sitetitle]["latest_article"] = f.entries[0].title
-with open("sites.yaml", 'w') as stream:
-    yaml.dump(sites, stream, default_flow_style=False)
+if __name__ == '__main__':
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(main(loop, sites))
+    with open("sites.yaml", 'w') as stream:
+        yaml.dump(sites, stream, default_flow_style=False)
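
Taken together, the main.py changes replace the sequential module-level for-loop with the usual asyncio layout: one shared aiohttp.ClientSession, a small fetch() coroutine that returns the response body, one handle_feed() coroutine per site, and asyncio.gather() so all feeds download concurrently before sites.yaml is written once at the end. The following is a minimal, self-contained sketch of that pattern only; the SITES dict and its URLs are hypothetical placeholders, not part of the repository, and the Wallabag and config handling is omitted.

import asyncio

import aiohttp
import feedparser

# Hypothetical example feeds, standing in for the entries from sites.yaml.
SITES = {
    "Example A": "https://example.com/feed-a.xml",
    "Example B": "https://example.com/feed-b.xml",
}


async def fetch(session, url):
    # Reuse one session for every request and return the body as text.
    async with session.get(url) as response:
        return await response.text()


async def handle_feed(session, sitetitle, url):
    rss = await fetch(session, url)
    feed = feedparser.parse(rss)
    print(sitetitle, "->", len(feed.entries), "entries")


async def main(loop, sites):
    async with aiohttp.ClientSession(loop=loop) as session:
        # gather() schedules every handle_feed() at once, so one slow feed
        # no longer blocks the others.
        await asyncio.gather(*[handle_feed(session, title, url)
                               for title, url in sites.items()])


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main(loop, SITES))

The explicit loop argument and run_until_complete() match the asyncio style current for aiohttp 2.x at the time of this commit; on newer Python, asyncio.run(main(SITES)) would replace the last two lines.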

requirements.txt

@@ -1,6 +1,15 @@
+aiodns==1.1.1
+aiohttp==2.2.5
+async-timeout==2.0.0
+certifi==2017.11.5
+chardet==3.0.4
 feedparser==5.2.1
+idna==2.6
+multidict==3.3.2
+pycares==2.3.0
 PyYAML==3.12
 raven==6.4.0
-requests==2.18.4
-wallabag-api==1.1.0
 beautifulsoup4==4.6.0
+urllib3==1.22
+wallabag-api==1.2.0
+yarl==0.17.0