Prepare 2.4.0 release

master
Vishnunarayan K I 2018-06-11 01:26:56 +05:30
parent 077cc402c3
commit 0ffaf2dc4a
5 changed files with 10 additions and 4 deletions

View File

@@ -1 +1 @@
__version__ = '2.3.4dev'
__version__ = '2.4.0'

View File

@@ -96,7 +96,7 @@ def dl(ctx, anime_url, episode_range, url, player, skip_download, quality,
util.print_episodeurl(episode)
if player:
util.play_episode(episode)
util.play_episode(episode, player=player)
if not skip_download:
episode.download(force=force_download,

View File

@@ -33,4 +33,7 @@ class BaseAnimeCF(BaseAnime):
logging.debug('EPISODE IDS: length: {}, ids: {}'.format(
self._len, self._episodeIds))
self._episodeIds = [(no+1, id) for no, id in
enumerate(self._episodeIds)]
return self._episodeIds

View File

@@ -5,6 +5,7 @@ from anime_downloader.sites.exceptions import NotFoundError
from anime_downloader.sites import util
from bs4 import BeautifulSoup
import re
import logging
scraper = cfscrape.create_scraper()
@@ -27,6 +28,7 @@ class KissanimeEpisode(BaseEpisode):
def getData(self):
url = self._base_url+self.episode_id
logging.debug('Calling url: {}'.format(url))
r = scraper.get(url)
soup = BeautifulSoup(r.text, 'html.parser')
@@ -68,7 +70,8 @@ class Kissanime(BaseAnimeCF):
args = [self.url]
raise NotFoundError(err, *args)
return list(reversed(ret))
ret = ret[::-1]
return ret
def _getMetadata(self, soup):
info_div = soup.find('div', {'class': 'barContent'})

View File

@@ -90,7 +90,7 @@ def download_episode(episode, **kwargs):
print()
def play_epiosde(episode, *, player):
def play_episode(episode, *, player):
p = subprocess.Popen([player, episode.stream_url])
p.wait()