Add files via upload

master
Arjix 2020-12-10 22:14:30 +02:00 committed by GitHub
parent e21e36abb4
commit fcbb3430f5
2 changed files with 133 additions and 70 deletions

anime_downloader/sites/egyanime.py

@@ -0,0 +1,62 @@
import logging
import re
import urllib.parse

from anime_downloader.sites.anime import Anime, AnimeEpisode, SearchResult
from anime_downloader.sites import helpers

logger = logging.getLogger(__name__)


class EgyAnime(Anime, sitename='egyanime'):
    sitename = 'egyanime'

    @classmethod
    def search(cls, query):
        soup = helpers.soupify(helpers.get('https://www.egyanime.com/a.php', params={'term': query}).text)
        # Every link in the autocomplete response is treated as a search result.
        search_results = [
            SearchResult(
                title=i.text,
                url="https://www.egyanime.com/" + i['href']
            )
            for i in soup.find_all('a', href=True)
        ]
        return search_results

    def _scrape_episodes(self):
        """
        Because of how the website is built, the episode list is only
        reachable through the page of the latest available episode,
        so the episode links are scraped from there.
        """
        soup = helpers.soupify(helpers.get(self.url).text)
        eps = ["https://www.egyanime.com/" + x['href'] for x in soup.select('a.tag.is-dark.is-medium.m-5')]
        return eps

    def _scrape_metadata(self):
        soup = helpers.soupify(helpers.get(self.url).text)
        # Keep only the part of the page title before the Arabic word for "watching".
        self.title = soup.title.text.split('مشاهدة')[0].strip()


class EgyAnimeEpisode(AnimeEpisode, sitename='egyanime'):
    def _get_sources(self):
        soup = helpers.soupify(helpers.get(self.url).text)
        servers = [x['data-link'] for x in soup.select('div.server-watch#server-watch > a')]
        logger.debug('Hosts: ' + str([urllib.parse.urlparse(x).netloc for x in servers]))

        # Map each server link to the matching extractor based on its host.
        sources = []
        for i in servers:
            if 'clipwatching' in i:
                sources.append({
                    'extractor': 'clipwatching',
                    'url': i,
                    'server': 'clipwatching',
                    'version': '1'
                })
            elif 'streamtape' in i:
                sources.append({
                    'extractor': 'streamtape',
                    'url': i,
                    'server': 'streamtape',
                    'version': '1'
                })
        return self.sort_sources(sources)
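
A quick usage sketch, not part of the committed file: it calls the new search() classmethod directly and assumes that SearchResult exposes the title and url it was constructed with; the query string below is only an example.

from anime_downloader.sites.egyanime import EgyAnime

# The scraper turns every <a href> on the search endpoint's response into a SearchResult.
results = EgyAnime.search('naruto')
for result in results:
    print(result.title, result.url)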

anime_downloader/sites/__init__.py

@@ -1,70 +1,71 @@
from importlib import import_module

ALL_ANIME_SITES = [
    # ('filename', 'sitename', 'classname')
    ('_4anime', '4anime', 'Anime4'),
    ('anitube', 'anitube', 'AniTube'),
    ('anime8', 'anime8', 'Anime8'),
    ('animebinge', 'animebinge', 'AnimeBinge'),
    ('animechameleon', 'gurminder', 'AnimeChameleon'),
    ('animedaisuki', 'animedaisuki', 'Animedaisuki'),
    ('animeflix', 'animeflix', 'AnimeFlix'),
    ('animeflv', 'animeflv', 'Animeflv'),
    ('animefreak', 'animefreak', 'AnimeFreak'),
    ('animefree', 'animefree', 'AnimeFree'),
    ('animefrenzy', 'animefrenzy', 'AnimeFrenzy'),
    ('animekisa', 'animekisa', 'AnimeKisa'),
    ('animetake', 'animetake', 'AnimeTake'),
    ('animeonline', 'animeonline360', 'AnimeOnline'),
    ('animeout', 'animeout', 'AnimeOut'),
    ('animerush', 'animerush', 'AnimeRush'),
    ('animesimple', 'animesimple', 'AnimeSimple'),
    ('animesuge', 'animesuge', 'AnimeSuge'),
    ('animevibe', 'animevibe', 'AnimeVibe'),
    ('animixplay', 'animixplay', 'AniMixPlay'),
    ('darkanime', 'darkanime', 'DarkAnime'),
    ('dbanimes', 'dbanimes', 'DBAnimes'),
    ('erairaws', 'erai-raws', 'EraiRaws'),
    ('egyanime', 'egyanime', 'EgyAnime'),
    ('fastani', 'fastani', 'FastAni'),
    ('itsaturday', 'itsaturday', 'Itsaturday'),
    ('justdubs', 'justdubs', 'JustDubs'),
    ('kickass', 'kickass', 'KickAss'),
    ('kissanimex', 'kissanimex', 'KissAnimeX'),
    ('kisscartoon', 'kisscartoon', 'KissCartoon'),
    ('nineanime', '9anime', 'NineAnime'),
    ('nyaa', 'nyaa', 'Nyaa'),
    ('putlockers', 'putlockers', 'PutLockers'),
    ('ryuanime', 'ryuanime', 'RyuAnime'),
    ('subsplease', 'subsplease', 'SubsPlease'),
    ('twistmoe', 'twist.moe', 'TwistMoe'),
    ('tenshimoe', 'tenshi.moe', 'TenshiMoe'),
    ('vidstream', 'vidstream', 'VidStream'),
    ('voiranime', 'voiranime', 'VoirAnime'),
    ('vostfree', 'vostfree', 'VostFree'),
]


def get_anime_class(url):
    """
    Get the anime class corresponding to a url or name.

    See :py:data:`anime_downloader.sites.ALL_ANIME_SITES` to get the possible anime sites.

    Parameters
    ----------
    url: string
        URL of the anime.

    Returns
    -------
    :py:class:`anime_downloader.sites.anime.Anime`
        Concrete implementation of :py:class:`anime_downloader.sites.anime.Anime`
    """
    for site in ALL_ANIME_SITES:
        if site[1] in url:
            try:
                module = import_module(
                    'anime_downloader.sites.{}'.format(site[0])
                )
            except ImportError:
                raise
            return getattr(module, site[2])
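
A similarly hedged sketch of how the new registry entry is resolved: get_anime_class() matches the 'egyanime' sitename as a substring of the URL, imports anime_downloader.sites.egyanime, and returns the EgyAnime class. The URL below is illustrative only.

from anime_downloader.sites import get_anime_class

# 'egyanime' occurs in the host, so the loop above lands on the new entry.
cls = get_anime_class('https://www.egyanime.com/watch/example-show')
print(cls.__name__)  # expected: EgyAnime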