"""Fetch configured RSS feeds over HTTP and save them to local files."""
from argparse import Namespace
from datetime import datetime
from pathlib import Path

import requests.exceptions
from requests import Session
from requests.adapters import HTTPAdapter
from urllib3.util import Retry

import src.stats as stats
from src.logger import logger as l
def fetch(settings: list, args: Namespace) -> None:
    """Download every configured feed and write it under ``args.directory``.

    Args:
        settings: list of dicts, each with keys ``'src'`` (feed URL) and
            ``'rss'`` (output filename).
        args: parsed CLI arguments; only ``args.directory`` is read.

    Raises:
        TypeError: if a settings entry is not a dict.
        ValueError: if a settings entry lacks ``'src'`` or ``'rss'``.

    Side effects: writes feed files, updates ``stats`` last-modified time.
    """
    logger = l.getChild(__name__)
    logger.info("Starting refreshing feeds")

    # Loop-invariant retry policy: retry transient gateway errors on GET
    # with a small exponential backoff.
    retries = Retry(
        total=3,
        backoff_factor=0.2,
        status_forcelist=[502, 503, 504],
        allowed_methods={'GET'},
    )

    for sets in settings:
        logger.info(f"Working set: {sets}")

        # Validate explicitly: `assert` is stripped under `python -O`,
        # so malformed settings must raise unconditionally.
        if not isinstance(sets, dict):
            raise TypeError(f"settings entry is not a dict: {sets!r}")
        if 'src' not in sets or 'rss' not in sets:
            raise ValueError(f"settings entry missing 'src' or 'rss': {sets!r}")

        # Context manager closes the session's connection pool
        # deterministically (the original leaked it every iteration).
        with Session() as s:
            s.mount(sets['src'], HTTPAdapter(max_retries=retries))
            try:
                r = s.get(sets['src'], timeout=3, stream=True)
                if r.status_code == 200:
                    logger.info(f"Saving to file: {sets['rss']}")
                    with open(Path(args.directory) / sets['rss'], 'wb') as rss:
                        for chunk in r:
                            rss.write(chunk)
                else:
                    # Previously non-200 responses were silently dropped.
                    logger.warning(
                        "Fetch of %s returned status %s",
                        sets['src'], r.status_code,
                    )
            except requests.exceptions.RequestException:
                # Broadened from ConnectionError: a Timeout or RetryError on
                # one feed must not abort the remaining feeds. The original
                # call passed `e` as a %-format arg with no placeholder,
                # losing the exception; logger.exception records the
                # traceback properly.
                logger.exception("Unable to fetch %s", sets['src'])

    # NOTE(review): naive local time — presumably fine for display-only
    # stats, but confirm callers don't expect UTC.
    stats.set_last_modified_at(datetime.now())
    logger.info("Feeds refreshed")