Made sessions retriable and some minor changes (#102)
parent
4ef2bad32e
commit
7e956a6f0c
|
@ -1,12 +1,30 @@
|
|||
import requests
|
||||
import urllib3
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
# Module-wide shared session. get_session() hands this out (or replaces it
# with a caller-supplied session) so the whole app reuses one connection pool.
_session = requests.Session()
# Silence InsecureRequestWarning spam — presumably because users can run
# with certificate verification disabled (session.verify = False).
# NOTE(review): confirm verify=False is actually toggled elsewhere.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def get_session(custom_session=None):
    """Return the module-wide shared session, with retrying adapters mounted.

    Parameters:
        custom_session: optional session-like object (e.g. a cfscrape
            scraper) to adopt as the new shared session.  Its ``verify``
            attribute is copied from the current shared session first, so a
            previously disabled certificate check is preserved.

    Returns:
        The shared ``requests.Session`` (or the adopted ``custom_session``),
        with HTTP/HTTPS adapters that retry transient failures.
    """
    global _session

    if custom_session:
        # Carry the SSL-verification setting over to the replacement session
        # before it becomes the shared one.
        custom_session.verify = _session.verify
        _session = custom_session
    else:
        # Defensive: recreate the session if something reset it to a falsy
        # value; normally the module-level initializer already made one.
        _session = _session or requests.Session()

    # Retry up to 10 times on connect/read errors and on 500/502/504
    # responses, with exponential backoff starting at 0.3s.  Mounted on
    # every call so an adopted custom_session gets retries too.
    retry = Retry(
        total=10,
        read=10,
        connect=10,
        backoff_factor=0.3,
        status_forcelist=(500, 502, 504),
    )
    adapter = HTTPAdapter(max_retries=retry)
    _session.mount('http://', adapter)
    _session.mount('https://', adapter)

    return _session
|
||||
|
|
|
@ -5,8 +5,9 @@ from anime_downloader.sites.anime import BaseEpisode, SearchResult
|
|||
from anime_downloader.sites.baseanimecf import BaseAnimeCF
|
||||
from anime_downloader.sites.exceptions import NotFoundError
|
||||
from anime_downloader import util
|
||||
from anime_downloader.session import get_session
|
||||
|
||||
scraper = cfscrape.create_scraper()
|
||||
scraper = get_session(cfscrape.create_scraper())
|
||||
|
||||
|
||||
class AnimePaheEpisode(BaseEpisode):
|
||||
|
|
|
@ -4,8 +4,9 @@ import logging
|
|||
|
||||
from anime_downloader.sites.anime import BaseAnime
|
||||
from anime_downloader.const import get_random_header
|
||||
from anime_downloader.session import get_session
|
||||
|
||||
scraper = cfscrape.create_scraper()
|
||||
scraper = get_session(cfscrape.create_scraper())
|
||||
|
||||
|
||||
class BaseAnimeCF(BaseAnime):
|
||||
|
|
|
@ -7,9 +7,10 @@ from anime_downloader.sites.anime import BaseEpisode, SearchResult
|
|||
from anime_downloader.sites.baseanimecf import BaseAnimeCF
|
||||
from anime_downloader.sites.exceptions import NotFoundError
|
||||
from anime_downloader.const import get_random_header
|
||||
from anime_downloader.session import get_session
|
||||
|
||||
|
||||
scraper = cfscrape.create_scraper(delay=10)
|
||||
scraper = get_session(cfscrape.create_scraper(delay=10))
|
||||
|
||||
|
||||
class KissanimeEpisode(BaseEpisode):
|
||||
|
|
|
@ -8,7 +8,7 @@ from bs4 import BeautifulSoup
|
|||
import cfscrape
|
||||
import logging
|
||||
|
||||
scraper = cfscrape.create_scraper()
|
||||
scraper = session.get_session(cfscrape.create_scraper())
|
||||
session = session.get_session()
|
||||
|
||||
|
||||
|
|
|
@ -8,8 +8,9 @@ from bs4 import BeautifulSoup
|
|||
from anime_downloader import util
|
||||
from anime_downloader.sites.anime import BaseAnime, BaseEpisode, SearchResult
|
||||
from anime_downloader.const import desktop_headers
|
||||
from anime_downloader.session import get_session
|
||||
|
||||
scraper = cfscrape.create_scraper()
|
||||
scraper = get_session(cfscrape.create_scraper())
|
||||
|
||||
|
||||
class MasteraniEpisode(BaseEpisode):
|
||||
|
|
|
@ -96,7 +96,7 @@ def split_anime(anime, episode_range):
|
|||
|
||||
|
||||
def parse_episode_range(anime, episode_range):
|
||||
if episode_range is None:
|
||||
if not episode_range:
|
||||
episode_range = '1:'
|
||||
if episode_range.endswith(':'):
|
||||
episode_range += str(len(anime) + 1)
|
||||
|
@ -107,7 +107,7 @@ def parse_episode_range(anime, episode_range):
|
|||
|
||||
def parse_ep_str(anime, grammar):
|
||||
episodes = []
|
||||
if grammar is None:
|
||||
if not grammar:
|
||||
return split_anime(anime, parse_episode_range(anime, grammar))
|
||||
|
||||
for episode_grammar in grammar.split(','):
|
||||
|
|
Loading…
Reference in New Issue