[go: nahoru, domu]

Skip to content

Commit

Permalink
server: remove need_cloud_scraper
Browse files Browse the repository at this point in the history
Most servers don't need cloudscraper for everything. The ones that
"depend" on it need it for searching, but can generally stream and/or
play already-known media without it. So it has now been made more of an
optional dependency that will only be imported when needed.

Also, give a little more detail about which module failed to be imported
when a server fails to be instantiated.
  • Loading branch information
TAAPArthur committed Jul 7, 2024
1 parent fd18399 commit bd8fe70
Show file tree
Hide file tree
Showing 6 changed files with 12 additions and 10 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ Optional dependency breakdown
* beautifulsoup4: required to download images for JNovelClub (only for light novel parts)
* beautifulsoup4: required to enable DB multiverse, FreeWebNovel, Funimation, Nyaa, RemoteServer, Tubi and Webtoons
* beautifulsoup4: required to search for Crunchyroll (manga)
* cloudscraper: required to enable MangaSee and HumbleBundle
* cloudscraper: required to enable searching/updating on MangaSee and HumbleBundle
* cloudscraper: potentially required to access all features of Crunchyroll (manga and anime)
* m3u8 & pycryptodome: required just to download media for Crunchyroll and HiDive (enables more formats for Funimation)

Expand Down
4 changes: 2 additions & 2 deletions amt/media_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ def __init__(self, state=None, server_list=SERVERS, tracker_list=TRACKERS):
if self.settings.is_server_enabled(instance.id, instance.alias, instance.official):
assert instance.id not in instance_map, f"Duplicate server id: {instance.id}"
instance_map[instance.id] = instance
except ImportError:
logging.debug("Could not instantiate %s", cls)
except ImportError as e:
logging.debug("Could not instantiate %s %s", cls, e)

self.session.headers.update({
"Connection": "keep-alive",
Expand Down
10 changes: 6 additions & 4 deletions amt/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ class RequestServer:
settings = None

# If true a cloudscraper object should be given instead of a normal session
need_cloud_scraper = False
maybe_need_cloud_scraper = False
_normal_session = None # the normal session in case a wrapper is used
domain = None
Expand All @@ -43,7 +42,7 @@ class RequestServer:
def __init__(self, session, settings=None):
self.settings = settings
self._normal_session = session
if self.settings.get_always_use_cloudscraper(self.id) or self.need_cloud_scraper:
if self.settings.get_always_use_cloudscraper(self.id):
self.session = self.get_cloudscraper_session(session)
else:
self.session = session
Expand Down Expand Up @@ -115,7 +114,7 @@ def _request(self, post_request, url, force_cloud_scraper=False, start=0, need_a
except SSLError:
if self.settings.get_fallback_to_insecure_connection(self.id) and kwargs.get("verify", True):
self.logger.warning("Retry request insecurely %s", url)
if self.settings.get_always_use_cloudscraper(self.id) or self.need_cloud_scraper: # pragma: no cover
if self.settings.get_always_use_cloudscraper(self.id) or force_cloud_scraper: # pragma: no cover
self.logger.warning("Using insecure connections and cloudscraper are not supported and may result in an error like 'ValueError: Cannot set verify_mode to CERT_NONE when check_hostname is enabled.'")
kwargs["verify"] = False
return self._request(post_request, url, **kwargs)
Expand All @@ -127,7 +126,10 @@ def _request(self, post_request, url, force_cloud_scraper=False, start=0, need_a
continue
if self.maybe_need_cloud_scraper and not force_cloud_scraper and r.status_code in (403, 503):
if session == self._normal_session:
return self._request(post_request, url, force_cloud_scraper=True, **kwargs)
try:
return self._request(post_request, url, force_cloud_scraper=True, **kwargs)
except ImportError:
pass
r.raise_for_status()
end = time.time()

Expand Down
2 changes: 1 addition & 1 deletion amt/servers/crunchyroll.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ class CrunchyrollAnime(GenericCrunchyrollServer):
id = "crunchyroll_anime"
alias = "crunchyroll"
media_type = MediaType.ANIME
need_cloud_scraper = True
maybe_need_cloud_scraper = True

stream_url_regex = re.compile(r"crunchyroll.\w+/watch/(\w*)/.+")
add_series_url_regex = re.compile(r"crunchyroll.\w+/series/(\w*)")
Expand Down
2 changes: 1 addition & 1 deletion amt/servers/humblebundle.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class HumbleBundle(Server):
official = True
has_free_chapters = False
is_premium = True
need_cloud_scraper = True
maybe_need_cloud_scraper = True

domain = "humblebundle.com"
base_url = "https://www.humblebundle.com/home/library"
Expand Down
2 changes: 1 addition & 1 deletion amt/servers/mangasee.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
class Mangasee(Server):
id = "mangasee"
official = False
need_cloud_scraper = True
maybe_need_cloud_scraper = True

domain = "mangasee123.com"
base_url = f"https://{domain}"
Expand Down

0 comments on commit bd8fe70

Please sign in to comment.