parent
e6e8c9b262
commit
7de69b7b54
|
@ -3,6 +3,9 @@
|
|||
import cfscrape
|
||||
import logging
|
||||
from bs4 import BeautifulSoup
|
||||
import tempfile
|
||||
import os
|
||||
import requests
|
||||
|
||||
from anime_downloader import session
|
||||
from anime_downloader.const import get_random_header
|
||||
|
@ -18,9 +21,14 @@ logger = logging.getLogger(__name__)
|
|||
req_session = session.get_session()
|
||||
cf_session = cfscrape.create_scraper(sess=req_session)
|
||||
default_headers = get_random_header()
|
||||
temp_dir = tempfile.mkdtemp(prefix='animedl')
|
||||
|
||||
|
||||
def setup(func):
|
||||
"""
|
||||
setup is a decorator which takes a function
|
||||
and converts it into a request method
|
||||
"""
|
||||
def setup_func(url: str,
|
||||
cf: bool = True,
|
||||
referer: str = None,
|
||||
|
@ -31,11 +39,13 @@ def setup(func):
|
|||
default_headers.update(headers)
|
||||
if referer:
|
||||
default_headers['Referer'] = referer
|
||||
|
||||
logger.debug('-----')
|
||||
logger.debug('{} {}'.format(func.__name__.upper(), url))
|
||||
logger.debug(kwargs)
|
||||
logger.debug(default_headers)
|
||||
logger.debug('-----')
|
||||
|
||||
res = sess.request(func.__name__.upper(),
|
||||
url,
|
||||
headers=default_headers,
|
||||
|
@ -43,6 +53,8 @@ def setup(func):
|
|||
res = sess.get(url, headers=default_headers, **kwargs)
|
||||
res.raise_for_status()
|
||||
# logger.debug(res.text)
|
||||
if logger.getEffectiveLevel() == logging.DEBUG:
|
||||
_log_response_body(res)
|
||||
return res
|
||||
return setup_func
|
||||
|
||||
|
@ -81,5 +93,30 @@ def soupify(res):
|
|||
-------
|
||||
BeautifulSoup.Soup
|
||||
"""
|
||||
soup = BeautifulSoup(res.text, 'html.parser')
|
||||
if isinstance(res, requests.Response):
|
||||
res = res.text
|
||||
soup = BeautifulSoup(res, 'html.parser')
|
||||
return soup
|
||||
|
||||
|
||||
def _log_response_body(res):
    """
    Persist the body of *res* for debugging.

    Writes the response text to a fresh file inside the module-level
    ``temp_dir`` and appends an entry (HTTP method, URL, dump file path)
    to ``temp_dir/data.json`` so a debug session can later be replayed.

    Parameters
    ----------
    res : requests.Response
        Response whose body and request metadata are recorded.
    """
    import json

    # mkstemp atomically creates and opens the file, unlike the
    # deprecated, race-prone tempfile.mktemp the code previously used.
    fd, file = tempfile.mkstemp(dir=temp_dir)
    # Use the module logger (not the root logger) for consistency with
    # the rest of this module.
    logger.debug(file)
    with os.fdopen(fd, 'w') as f:
        f.write(res.text)

    data_file = os.path.join(temp_dir, 'data.json')
    # Seed the index file with an empty list on first use.
    if not os.path.exists(data_file):
        with open(data_file, 'w') as f:
            json.dump([], f)

    with open(data_file, 'r') as f:
        data = json.load(f)
    data.append({
        'method': res.request.method,
        'url': res.url,
        'file': file,
    })
    with open(data_file, 'w') as f:
        json.dump(data, f)
|
||||
|
|
|
@ -56,6 +56,7 @@ class NineAnimeEpisode(AnimeEpisode, sitename='9anime'):
|
|||
|
||||
|
||||
class NineAnime(Anime, sitename='9anime'):
|
||||
sitename = '9anime'
|
||||
QUALITIES = ['360p', '480p', '720p', '1080p']
|
||||
|
||||
@classmethod
|
||||
|
@ -92,7 +93,7 @@ class NineAnime(Anime, sitename='9anime'):
|
|||
def _scrape_episodes(self):
|
||||
soup = helpers.soupify(helpers.get(self.url))
|
||||
ts = soup.find('html')['data-ts']
|
||||
self._episodeClass.ts = ts
|
||||
NineAnimeEpisode.ts = ts
|
||||
logging.debug('data-ts: {}'.format(ts))
|
||||
|
||||
# TODO: !HACK!
|
||||
|
|
Loading…
Reference in New Issue