more docs

master
Vishnunarayan K I 2018-10-15 22:40:35 +05:30
parent 1b197136dc
commit 757dbb941c
6 changed files with 114 additions and 12 deletions

View File

@ -4,7 +4,7 @@ verify_ssl = true
name = "pypi" name = "pypi"
[packages] [packages]
anime-downloader = {editable = true, path = "."} anime-downloader = {editable = true, path = ".", extras = ["cloudflare"]}
[dev-packages] [dev-packages]
twine = "*" twine = "*"

11
Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{ {
"_meta": { "_meta": {
"hash": { "hash": {
"sha256": "3c226d0611904abe8b253ddaa90b12046c1c6363b5920e508f067e2d5830e130" "sha256": "b39a35608ce09e57197e0564221771979dcb36be0f7866a5c73298da5a98b4d5"
}, },
"pipfile-spec": 6, "pipfile-spec": 6,
"requires": {}, "requires": {},
@ -16,6 +16,9 @@
"default": { "default": {
"anime-downloader": { "anime-downloader": {
"editable": true, "editable": true,
"extras": [
"cloudflare"
],
"path": "." "path": "."
}, },
"beautifulsoup4": { "beautifulsoup4": {
@ -33,6 +36,12 @@
], ],
"version": "==2019.3.9" "version": "==2019.3.9"
}, },
"cfscrape": {
"hashes": [
"sha256:cb9159955d0e6e82cf4ad8cc9b19413e68ebfed1ce98a26e51f62e66d45146f1"
],
"version": "==1.9.5"
},
"chardet": { "chardet": {
"hashes": [ "hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",

View File

@ -29,9 +29,9 @@ class Anime:
Attributes Attributes
---------- ----------
sitename: string sitename: str
name of the site name of the site
title: string title: str
Title of the anime Title of the anime
meta: dict meta: dict
metadata about the anime. [Can be empty] metadata about the anime. [Can be empty]
@ -51,8 +51,15 @@ class Anime:
""" """
Search searches for the anime using the query given. Search searches for the anime using the query given.
query : Parameters
query is ----------
query: str
query is the query keyword to be searched.
Returns
-------
list
List of :py:class:`~anime_downloader.sites.anime.SearchResult`
""" """
return return
@ -70,7 +77,8 @@ class Anime:
if not _skip_online_data: if not _skip_online_data:
logging.info('Extracting episode info from page') logging.info('Extracting episode info from page')
self.get_data() self._episode_urls = self.get_data()
self._len = len(self._episode_urls)
@classmethod @classmethod
def verify_url(self, url): def verify_url(self, url):
@ -87,6 +95,28 @@ class Anime:
return cls.subclasses[sitename] return cls.subclasses[sitename]
def get_data(self): def get_data(self):
"""
get_data is called inside the :code:`__init__` of
:py:class:`~anime_downloader.sites.anime.BaseAnime`. It is used to get
the necessary data about the anime and its episodes.
This function calls
:py:class:`~anime_downloader.sites.anime.BaseAnime._scarpe_episodes`
and
:py:class:`~anime_downloader.sites.anime.BaseAnime._scrape_metadata`
TODO: Refactor this so that classes which need not be soupified don't
have to override this function.
Returns
-------
list
A list of tuples of episodes containing episode name and
episode url.
Ex::
[('1', 'https://9anime.is/.../...', ...)]
"""
self._episode_urls = [] self._episode_urls = []
try: try:
self._scrape_metadata() self._scrape_metadata()
@ -128,10 +158,35 @@ Episode count: {length}
def __str__(self): def __str__(self):
return self.title return self.title
def _scrape_episodes(self): def _scarpe_episodes(self, soup):
"""
_scarpe_episodes is a function which has to be overridden by the
subclasses to scrape the episode urls from the web page.
Parameters
----------
soup: `bs4.BeautifulSoup`
soup is the html of the anime url after passing through
BeautifulSoup.
Returns
-------
:code:`list` of :code:`str`
A list of episode urls.
"""
return return
def _scrape_metadata(self): def _scrape_metadata(self, soup):
"""
_scrape_metadata is a function which has to be overridden by the
subclasses to scrape the metadata of the anime from the web page.
Parameters
----------
soup: :py:class:`bs4.BeautifulSoup`
soup is the html of the anime url after passing through
BeautifulSoup.
"""
return return
@ -234,11 +289,38 @@ class AnimeEpisode:
class SearchResult: class SearchResult:
def __init__(self, title, url, poster): """
SearchResult class holds the search result of a search done by an Anime
class
Parameters
----------
title: str
Title of the anime.
url: str
URL of the anime
poster: str
URL for the poster of the anime.
meta: dict
Additional metadata regarding the anime.
Attributes
----------
title: str
Title of the anime.
url: str
URL of the anime
poster: str
URL for the poster of the anime.
meta: dict
Additional metadata regarding the anime.
"""
def __init__(self, title, url, poster, meta=''):
self.title = title self.title = title
self.url = url self.url = url
self.poster = poster self.poster = poster
self.meta = '' self.meta = meta
def __repr__(self): def __repr__(self):
return '<SearchResult Title: {} URL: {}>'.format(self.title, self.url) return '<SearchResult Title: {} URL: {}>'.format(self.title, self.url)

View File

@ -1,3 +1,11 @@
Writing your own custom site class Writing your own custom site class
********************************** **********************************
:code:`anime_downloader` is built with easy extensibility in mind.
Each of the sites (in the tool) can roughly be classified into two categories.
- Sites which don't use cloudflare DDoS protection. Ex: :py:class:`~anime_downloader.sites.nineanime.NineAnime`
- Sites which use cloudflare DDoS protection. Ex: :py:class:`~anime_downloader.sites.kissanime.KissAnime`
Sites which don't use cloudflare have the base class :py:class:`~anime_downloader.sites.anime.BaseAnime`. Sites which use cloudflare have the base class :py:class:`~anime_downloader.sites.baseanimecf.BaseAnimeCF`.

View File

@ -4,4 +4,6 @@ Base classes
.. automodule:: anime_downloader.sites.anime .. automodule:: anime_downloader.sites.anime
.. autoclass:: anime_downloader.sites.anime.BaseAnime .. autoclass:: anime_downloader.sites.anime.BaseAnime
:members: search :members: search, get_data, _scarpe_episodes, _scrape_metadata
.. autoclass:: anime_downloader.sites.anime.SearchResult

View File

@ -42,6 +42,7 @@ extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.autodoc',
'sphinx.ext.todo', 'sphinx.ext.todo',
'sphinx.ext.viewcode', 'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
] ]
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.