feat: Add server switching for 9anime

master
Vishnunarayan K I 2019-05-22 22:34:27 +05:30
parent 96e631b175
commit 16de16f209
8 changed files with 46 additions and 29 deletions

.gitignore vendored
View File

@@ -103,3 +103,4 @@ ENV/
.pytest_cache
*.sqlite
/anime_downloader/__main__.py

View File

@@ -4,7 +4,7 @@ verify_ssl = true
name = "pypi"
[packages]
anime-downloader = {editable = true, path = "."}
anime-downloader = {editable = true,path = "."}
[dev-packages]
twine = "*"
@@ -14,3 +14,4 @@ radon = "*"
"flake8" = "*"
httpretty = "*"
pylint = "*"
python-language-server = {extras = ["all"],version = "*"}

View File

@@ -1,11 +1,11 @@
import click
import logging
import os
from anime_downloader import util
from anime_downloader import session
from anime_downloader.sites import get_anime_class
import click
from anime_downloader import session, util
from anime_downloader.__version__ import __version__
from anime_downloader.sites import get_anime_class
logger = logging.Logger(__name__)
@@ -47,7 +47,8 @@ echo = click.echo
@click.option(
'--provider',
help='The anime provider (website) for search.',
type=click.Choice(['9anime', 'kissanime', 'twist.moe', 'animepahe', 'kisscartoon', 'masterani', 'gogoanime'])
type=click.Choice(['9anime', 'kissanime', 'twist.moe',
'animepahe', 'kisscartoon', 'masterani', 'gogoanime'])
)
@click.option(
'--external-downloader', '-xd',
@@ -86,7 +87,6 @@ def command(ctx, anime_url, episode_range, url, player, skip_download, quality,
anime = cls(anime_url, quality=quality,
fallback_qualities=fallback_qualities)
logger.info('Found anime: {}'.format(anime.title))
animes = util.parse_ep_str(anime, episode_range)
@@ -109,18 +109,21 @@ def command(ctx, anime_url, episode_range, url, player, skip_download, quality,
util.play_episode(episode, player=player)
if not skip_download:
downloader_session = session.DownloaderSession()
downloader = 'http'
if external_downloader:
logger.info('Downloading episode {} of {}'.format(
episode.ep_no, anime.title)
)
util.external_download(external_downloader, episode,
file_format, path=download_dir)
continue
downloader = external_downloader
if chunk_size is not None:
chunk_size *= 1e6
chunk_size = int(chunk_size)
episode.download(force=force_download,
path=download_dir,
format=file_format,
range_size=chunk_size)
downloader = downloader_session.get(downloader)
downloader.download(episode.source(),
force=force_download,
path=download_dir,
format=file_format,
range_size=chunk_size)
print()
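A condensed sketch of the per-episode dispatch this hunk appears to introduce. This is a paraphrase under assumptions, not the commit's literal code: the pasted diff carries no add/remove markers, and `episode`, `external_downloader`, `chunk_size`, and `downloader_session` are the CLI-level names used above.

# Sketch (assumption): the new download dispatch, as read from the hunk above.
downloader_name = external_downloader if external_downloader else 'http'

if chunk_size is not None:
    chunk_size = int(chunk_size * 1e6)   # CLI value is in MB -> bytes

downloader = downloader_session.get(downloader_name)   # cached lookup, see session.py below
downloader.download(episode.source(),
                    force=force_download,
                    path=download_dir,
                    format=file_format,
                    range_size=chunk_size)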

View File

@@ -23,6 +23,9 @@ DEFAULT_CONFIG = {
'quality': '720p',
'log_level': 'INFO',
'provider': '9anime',
},
"nineanime": {
"server": "mp4upload",
}
}
@@ -44,11 +47,13 @@ class _Config:
self._CONFIG = self._read_config()
def update(gkey):
if gkey not in self._CONFIG:
self._CONFIG[gkey] = {}
for key, val in DEFAULT_CONFIG[gkey].items():
if key not in self._CONFIG[gkey].keys():
self._CONFIG[gkey][key] = val
for key in ['dl', 'watch']:
for key in DEFAULT_CONFIG.keys():
update(key)
self.write()
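As a rough illustration (not part of the commit), the generalized update() loop above should backfill the new 9anime section into an existing user config on the next run; switching servers is then a matter of editing that one key. Only the key names below come from DEFAULT_CONFIG; the user values are assumed.

# Sketch (assumption): effect of looping over every DEFAULT_CONFIG key instead of ['dl', 'watch'].
DEFAULT_CONFIG = {
    'dl': {'quality': '720p', 'provider': '9anime'},
    'nineanime': {'server': 'mp4upload'},
}
user_config = {'dl': {'quality': '1080p'}}   # pre-existing config with no 'nineanime' section

for gkey in DEFAULT_CONFIG.keys():
    user_config.setdefault(gkey, {})
    for key, val in DEFAULT_CONFIG[gkey].items():
        if key not in user_config[gkey]:
            user_config[gkey][key] = val

# user_config now carries {'nineanime': {'server': 'mp4upload'}} while the user's own
# 'dl' values are untouched; editing that 'server' value switches the 9anime server.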

View File

@@ -14,24 +14,29 @@ class BaseDownloader:
if options is None:
options = {}
self.options = options
# TODO: replace
self.referer = self.options.get('referer', '')
self.chunksize = 16384
self._total_size = None
self.url = None
def check_if_exists(self):
# Added Referer header as kwik needs it.
r = session.get_session().get(
self.url, headers={'referer': self.referer}, stream=True)
self.total_size = int(r.headers['Content-length'])
self._total_size = int(r.headers['Content-length'])
if os.path.exists(self.path):
if abs(os.stat(self.path).st_size - self.total_size) < 10 \
and not self.option['force']:
if abs(os.stat(self.path).st_size - self._total_size) < 10 \
and not self.options['force']:
logger.warning('File already downloaded. Skipping download.')
return
else:
os.remove(self.path)
def download(self, url, path, options=None):
# TODO: Clean this up
self.pre_process()
self.url = url
logger.info(path)
@@ -62,7 +67,7 @@ class BaseDownloader:
def report_chunk_downloaded(self):
self.downloaded += self.chunksize
write_status(self.downloaded, self.total_size, self.start_time)
write_status(self.downloaded, self._total_size, self.start_time)
def write_status(downloaded, total_size, start_time):

View File

@@ -23,10 +23,12 @@ class HTTPDownloader(BaseDownloader):
with open(self.path, 'w'):
pass
r = session.get(self.url, headers={'referer': self.referer}, stream=True)
r = session.get(self.url, headers={
'referer': self.referer}, stream=True)
while self.downloaded < self.total_size:
r = session.get(self.url,
headers=set_range(range_start, range_end, self.referer),
headers=set_range(
range_start, range_end, self.referer),
stream=True)
if r.status_code == 206:
with open(self.path, 'ab') as f:
@@ -43,7 +45,8 @@ class HTTPDownloader(BaseDownloader):
range_end = ''
def _non_range_download(self):
r = session.get(self.url, headers={'referer': self.referer}, stream=True)
r = session.get(self.url, headers={
'referer': self.referer}, stream=True)
if r.status_code == 200:
with open(self.path, 'wb') as f:
@@ -55,9 +58,9 @@ class HTTPDownloader(BaseDownloader):
def set_range(start=0, end='', referer=None):
headers = {
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Gecko/20100101"
"Firefox/56.0",
'referer' : referer
'user-agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) Gecko/20100101"
"Firefox/56.0",
'referer': referer
}
headers['Range'] = 'bytes={}-{}'.format(start, end)

View File

@@ -50,7 +50,7 @@ def get_session(custom_session=None):
return _session
class _DownloaderSession:
class DownloaderSession:
external_downloaders = {
"aria2": {
"executable": "aria2c",
@@ -74,7 +74,7 @@ class _DownloaderSession:
# TODO: Add ability to add downloaders using config
pass
def __getitem__(self, key):
def get(self, key, **options):
# HACK: Because of circular dependency
from anime_downloader import downloader
# HACK: This has to be obtained like this because this variable is
@@ -93,5 +93,3 @@ class _DownloaderSession:
self._cache[key] = downloader.get_downloader('ext')(
options=self.external_downloaders[key])
return self._cache[key]
DownloaderSession = _DownloaderSession()
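A small usage sketch of the renamed class, assuming it is instantiated per run as in the cli.py hunk above; 'aria2' is one of the keys in the external_downloaders table, and the download() call follows the BaseDownloader signature shown earlier. The URL and path are placeholders.

# Sketch (assumption): obtaining a downloader through the new get() API.
from anime_downloader import session

downloader_session = session.DownloaderSession()

# External tools are built from the external_downloaders table and cached in
# self._cache; 'http' (the CLI default) is resolved through the same lookup.
downloader = downloader_session.get('aria2')
downloader.download('https://example.com/episode.mp4', path='episode.mp4')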

View File

@@ -64,6 +64,7 @@ def setup(func):
headers=default_headers,
**kwargs)
res.raise_for_status()
logger.debug(res.url)
# logger.debug(res.text)
if logger.getEffectiveLevel() == logging.DEBUG:
_log_response_body(res)