Fix everything; except the tool doesn't work
parent
d4628bd80b
commit
b360e65163
|
@ -99,3 +99,4 @@ ENV/
|
|||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.vscode
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import click
|
||||
import subprocess
|
||||
from anime_downloader.sites.9anime import Anime, NotFoundError
|
||||
from anime_downloader.sites.nineanime import NineAnime
|
||||
from anime_downloader.sites.anime import NotFoundError
|
||||
|
||||
echo = click.echo
|
||||
|
||||
|
@ -28,8 +29,8 @@ def cli(anime_url, range_, playlist, url, player, no_download, quality, force):
|
|||
Download your favourite anime.
|
||||
"""
|
||||
try:
|
||||
anime = Anime(anime_url, quality=quality,
|
||||
callback=lambda message: print('[INFO] '+message))
|
||||
anime = NineAnime(anime_url, quality=quality,
|
||||
callback=lambda message: print('[INFO] '+message))
|
||||
except NotFoundError as e:
|
||||
echo(e.args[0])
|
||||
return
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
from .anime import BaseAnime, BaseEpisode
|
||||
|
||||
class NineAnime(BaseAnime):
|
||||
# episodes = soup.find_all('ul', ['episodes'])
|
||||
# if episodes == []:
|
||||
# err = 'No episodes found in url "{}"'.format(self.url)
|
||||
# if self._callback:
|
||||
# self._callback(err)
|
||||
# args = [self.url]
|
||||
# raise NotFoundError(err, *args)
|
||||
# episodes = episodes[:int(len(episodes)/3)]
|
||||
#
|
||||
# for x in episodes:
|
||||
# for a in x.find_all('a'):
|
||||
# ep_id = a.get('data-id')
|
||||
# self._episodeIds.append(ep_id)
|
|
@ -6,7 +6,6 @@ import time
|
|||
import sys
|
||||
import os
|
||||
import click
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
|
||||
class AnimeDLError(Exception):
|
||||
|
@ -21,7 +20,7 @@ class NotFoundError(AnimeDLError):
|
|||
pass
|
||||
|
||||
|
||||
class BaseAnime(metaclass=ABCMeta):
|
||||
class BaseAnime():
|
||||
def __init__(self, url, quality='720p', callback=None):
|
||||
|
||||
if quality not in self.QUALITIES:
|
||||
|
@ -53,19 +52,20 @@ class BaseAnime(metaclass=ABCMeta):
|
|||
|
||||
def __getitem__(self, index):
|
||||
ep_id = self._episodeIds[index]
|
||||
return Episode(ep_id, self.quality, callback=self._callback)
|
||||
return self._episodeClass(ep_id, self.quality, callback=self._callback)
|
||||
|
||||
@abstractmethod
|
||||
def _getEpisodeUrls(self, soup):
|
||||
return
|
||||
|
||||
|
||||
class BaseEpisode:
|
||||
_base_url = r'https://9anime.is/ajax/episode/info?id={0}&server=33'
|
||||
QUALITIES = None
|
||||
title = ''
|
||||
stream_url = ''
|
||||
|
||||
def __init__(self, episode_id, quality='720p', callback=None):
|
||||
|
||||
if quality not in QUALITIES:
|
||||
if quality not in self.QUALITIES:
|
||||
raise AnimeDLError('Incorrect quality: "{}"'.format(quality))
|
||||
|
||||
self.episode_id = episode_id
|
||||
|
@ -76,20 +76,7 @@ class BaseEpisode:
|
|||
self.getData()
|
||||
|
||||
def getData(self):
|
||||
url = self._base_url.format(self.episode_id)
|
||||
data = json.loads(requests.get(url).text)
|
||||
url = data['target']
|
||||
title_re = re.compile(r'"og:title" content="(.*)"')
|
||||
image_re = re.compile(r'"og:image" content="(.*)"')
|
||||
|
||||
r = requests.get(url+'&q='+self.quality)
|
||||
soup = BeautifulSoup(r.text, 'html.parser')
|
||||
try:
|
||||
self.stream_url = soup.find_all('source')[0].get('src')
|
||||
self.title = title_re.findall(r.text)[0]
|
||||
self.image = image_re.findall(r.text)[0]
|
||||
except IndexError:
|
||||
raise NotFoundError("Episode not found")
|
||||
raise NotImplementedError
|
||||
|
||||
def download(self, force=False):
|
||||
print('[INFO] Downloading {}'.format(self.title))
|
||||
|
|
|
@ -0,0 +1,50 @@
|
|||
from .anime import BaseAnime, BaseEpisode, AnimeDLError, URLError, NotFoundError
|
||||
import json
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
|
||||
|
||||
class NineAnimeEpisode(BaseEpisode):
    """Episode scraper for 9anime.is.

    Resolves an episode id to a streaming page via the site's AJAX
    endpoint, then extracts the direct stream URL, the title, and the
    cover image from that page.
    """

    QUALITIES = ['360p', '480p', '720p']
    # NOTE(review): the `_` and `ts` query parameters look like hard-coded
    # request-signature/cache-buster values; they may expire — confirm.
    _base_url = r'https://9anime.is/ajax/episode/info?id={0}&server=33&_=1428&ts=1527426000'

    def getData(self):
        """Populate ``stream_url``, ``title`` and ``image`` for this episode.

        Raises:
            NotFoundError: if the streaming page has no ``<source>`` tag
                or the expected Open Graph metadata is absent.
        """
        url = self._base_url.format(self.episode_id)
        data = json.loads(requests.get(url).text)
        # 'target' is the actual streaming-page URL returned by the endpoint.
        url = data['target']
        title_re = re.compile(r'"og:title" content="(.*)"')
        image_re = re.compile(r'"og:image" content="(.*)"')

        r = requests.get(url + '&q=' + self.quality)
        soup = BeautifulSoup(r.text, 'html.parser')
        try:
            self.stream_url = soup.find_all('source')[0].get('src')
            self.title = title_re.findall(r.text)[0]
            self.image = image_re.findall(r.text)[0]
        except IndexError:
            # Any missing piece means the episode page is not usable.
            raise NotFoundError("Episode not found")
|
||||
|
||||
|
||||
|
||||
class NineAnime(BaseAnime):
    """Anime scraper for 9anime.is listing pages.

    Parses the episode list out of a series page and records the
    per-episode ids used by :class:`NineAnimeEpisode`.
    """

    QUALITIES = ['360p', '480p', '720p']
    _episodeClass = NineAnimeEpisode

    def _getEpisodeUrls(self, soup):
        """Collect episode ids from *soup* into ``self._episodeIds``.

        Args:
            soup: parsed HTML of the series page (BeautifulSoup).

        Raises:
            NotFoundError: if the page contains no episode list.
        """
        episodes = soup.find_all('ul', ['episodes'])
        if not episodes:
            err = 'No episodes found in url "{}"'.format(self.url)
            if self._callback:
                self._callback(err)
            raise NotFoundError(err, self.url)
        # NOTE(review): the slice keeps only the first third of the lists —
        # presumably the page repeats the episode list once per mirror
        # server; confirm against the live page layout.
        episodes = episodes[:len(episodes) // 3]

        for section in episodes:
            for anchor in section.find_all('a'):
                self._episodeIds.append(anchor.get('data-id'))
|
Loading…
Reference in New Issue