diff --git a/scripts/status_check.py b/scripts/status_check.py index 1b39cec..49b3c0e 100644 --- a/scripts/status_check.py +++ b/scripts/status_check.py @@ -27,7 +27,7 @@ from src.exceptions import ( BuilderError, ) from src.patches import Patches -from src.utils import apkmirror_status_check, bs4_parser, handle_request_response, request_header +from src.utils import apkmirror_status_check, bs4_parser, handle_request_response, request_header, request_timeout no_of_col = 8 combo_headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:109.0) Gecko/20100101 Firefox/116.0"} @@ -37,7 +37,8 @@ def apkcombo_scrapper(package_name: str) -> str: """Apkcombo scrapper.""" apkcombo_url = APK_COMBO_GENERIC_URL.format(package_name) try: - r = requests.get(apkcombo_url, headers=combo_headers, allow_redirects=True, timeout=60) + r = requests.get(apkcombo_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, apkcombo_url) soup = BeautifulSoup(r.text, bs4_parser) avatar = soup.find(class_="avatar") if not isinstance(avatar, Tag): @@ -74,7 +75,8 @@ def apkmonk_scrapper(package_name: str) -> str: """APKMonk scrapper.""" apkmonk_url = APK_MONK_APK_URL.format(package_name) icon_logo = APK_MONK_ICON_URL.format(package_name) - r = requests.get(apkmonk_url, headers=combo_headers, allow_redirects=True, timeout=60) + r = requests.get(apkmonk_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, apkmonk_url) if head := BeautifulSoup(r.text, bs4_parser).head: parsed_head = BeautifulSoup(str(head), bs4_parser) href_elements = parsed_head.find_all(href=True) @@ -98,7 +100,8 @@ def apkmirror_scrapper(package_name: str) -> str: def _extracted_from_apkmirror_scrapper(search_url: str) -> str: - r = requests.get(search_url, headers=request_header, timeout=60) + r = requests.get(search_url, headers=request_header, timeout=request_timeout) + handle_request_response(r, search_url) soup = 
BeautifulSoup(r.text, bs4_parser) icon_element = soup.select_one("div.bubble-wrap > img") if not icon_element: @@ -131,7 +134,8 @@ def apkpure_scrapper(package_name: str) -> str: """Scrap Icon from apkpure.""" apkpure_url = APK_PURE_ICON_URL.format(package_name) try: - r = requests.get(apkpure_url, headers=combo_headers, allow_redirects=True, timeout=60) + r = requests.get(apkpure_url, headers=combo_headers, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, apkpure_url) soup = BeautifulSoup(r.text, bs4_parser) search_result = soup.find_all(class_="brand-info-top") for brand_info in search_result: @@ -182,8 +186,8 @@ def generate_markdown_table(data: List[List[str]]) -> str: def main() -> None: """Entrypoint.""" - response = requests.get(revanced_api, timeout=10) - handle_request_response(response) + response = requests.get(revanced_api, timeout=request_timeout) + handle_request_response(response, revanced_api) patches = response.json() diff --git a/src/downloader/apkmirror.py b/src/downloader/apkmirror.py index 1307f73..013f226 100644 --- a/src/downloader/apkmirror.py +++ b/src/downloader/apkmirror.py @@ -8,9 +8,8 @@ from loguru import logger from src.app import APP from src.downloader.download import Downloader from src.downloader.sources import APK_MIRROR_BASE_URL -from src.downloader.utils import status_code_200 from src.exceptions import APKMirrorAPKDownloadError -from src.utils import bs4_parser, contains_any_word, request_header +from src.utils import bs4_parser, contains_any_word, handle_request_response, request_header, request_timeout class ApkMirror(Downloader): @@ -77,13 +76,8 @@ class ApkMirror(Downloader): @staticmethod def _extracted_search_div(url: str, search_class: str) -> Tag: """Extract search div.""" - r = requests.get(url, headers=request_header, timeout=60) - if r.status_code != status_code_200: - msg = f"Unable to connect with {url}. 
Are you blocked by APKMirror or abused apkmirror ?.Reason - {r.text}" - raise APKMirrorAPKDownloadError( - msg, - url=url, - ) + r = requests.get(url, headers=request_header, timeout=request_timeout) + handle_request_response(r, url) soup = BeautifulSoup(r.text, bs4_parser) return soup.find(class_=search_class) # type: ignore[return-value] diff --git a/src/downloader/apkmonk.py b/src/downloader/apkmonk.py index 770275e..c2bf5b8 100644 --- a/src/downloader/apkmonk.py +++ b/src/downloader/apkmonk.py @@ -9,9 +9,8 @@ from scripts.status_check import combo_headers from src.app import APP from src.downloader.download import Downloader from src.downloader.sources import APK_MONK_BASE_URL -from src.downloader.utils import status_code_200 from src.exceptions import APKMonkAPKDownloadError -from src.utils import bs4_parser, request_header +from src.utils import bs4_parser, handle_request_response, request_header, request_timeout class ApkMonk(Downloader): @@ -24,13 +23,8 @@ class ApkMonk(Downloader): :param app: Name of the app """ file_name = f"{app}.apk" - r = requests.get(page, headers=request_header, allow_redirects=True, timeout=60) - if r.status_code != status_code_200: - msg = f"Unable to connect with {page}.Reason - {r.text}" - raise APKMonkAPKDownloadError( - msg, - url=page, - ) + r = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, page) soup = BeautifulSoup(r.text, bs4_parser) download_scripts = soup.find_all("script", type="text/javascript") key_value_pattern = r'\{"pkg":"([^"]+)","key":"([^"]+)"\}' @@ -48,13 +42,8 @@ class ApkMonk(Downloader): url=page, ) request_header["User-Agent"] = combo_headers["User-Agent"] - r = requests.get(url, headers=request_header, allow_redirects=True, timeout=60) - if r.status_code != status_code_200: - msg = f"Unable to connect with {page}.Reason - {r.text}" - raise APKMonkAPKDownloadError( - msg, - url=page, - ) + r = requests.get(url, headers=request_header, 
allow_redirects=True, timeout=request_timeout) + handle_request_response(r, url) final_download_url = r.json()["url"] self._download(final_download_url, file_name) return file_name, final_download_url @@ -67,13 +56,8 @@ class ApkMonk(Downloader): :param main_page: Version of the application to download :return: Version of downloaded apk """ - r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=60) - if r.status_code != status_code_200: - msg = f"Unable to connect with {app.download_source}.Reason - {r.text}" - raise APKMonkAPKDownloadError( - msg, - url=app.download_source, - ) + r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, app.download_source) soup = BeautifulSoup(r.text, bs4_parser) version_table = soup.find_all(class_="striped") for version_row in version_table: @@ -95,7 +79,8 @@ class ApkMonk(Downloader): :param app: Name of the application :return: Version of downloaded apk """ - r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=60) + r = requests.get(app.download_source, headers=request_header, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, app.download_source) soup = BeautifulSoup(r.text, bs4_parser) latest_download_url = soup.find(id="download_button")["href"] # type: ignore[index] return self.extract_download_link(latest_download_url, app.app_name) # type: ignore[arg-type] diff --git a/src/downloader/apksos.py b/src/downloader/apksos.py index 5f0bb9b..3e46fb1 100644 --- a/src/downloader/apksos.py +++ b/src/downloader/apksos.py @@ -7,7 +7,7 @@ from bs4 import BeautifulSoup from src.app import APP from src.downloader.download import Downloader from src.exceptions import APKSosAPKDownloadError -from src.utils import bs4_parser, request_header +from src.utils import bs4_parser, handle_request_response, request_header, request_timeout class ApkSos(Downloader): 
@@ -19,7 +19,8 @@ class ApkSos(Downloader): :param page: Url of the page :param app: Name of the app """ - r = requests.get(page, headers=request_header, allow_redirects=True, timeout=60) + r = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, page) soup = BeautifulSoup(r.text, bs4_parser) download_button = soup.find(class_="col-sm-12 col-md-8 text-center") possible_links = download_button.find_all("a") # type: ignore[union-attr] diff --git a/src/downloader/download.py b/src/downloader/download.py index 7f3b332..f27a132 100644 --- a/src/downloader/download.py +++ b/src/downloader/download.py @@ -44,7 +44,7 @@ class Downloader(object): stream=True, headers=headers, ) - handle_request_response(response) + handle_request_response(response, url) total = int(response.headers.get("content-length", 0)) bar = tqdm( desc=file_name, diff --git a/src/downloader/github.py b/src/downloader/github.py index 0045c35..72b3699 100644 --- a/src/downloader/github.py +++ b/src/downloader/github.py @@ -10,7 +10,7 @@ from src.app import APP from src.config import RevancedConfig from src.downloader.download import Downloader from src.exceptions import DownloadError -from src.utils import handle_request_response, update_changelog +from src.utils import handle_request_response, request_timeout, update_changelog class Github(Downloader): @@ -34,8 +34,8 @@ class Github(Downloader): if self.config.personal_access_token: logger.debug("Using personal access token") headers["Authorization"] = f"token {self.config.personal_access_token}" - response = requests.get(repo_url, headers=headers, timeout=60) - handle_request_response(response) + response = requests.get(repo_url, headers=headers, timeout=request_timeout) + handle_request_response(response, repo_url) if repo_name == "revanced-patches": download_url = response.json()["assets"][1]["browser_download_url"] else: @@ -74,8 +74,8 @@ class Github(Downloader): } if 
config.personal_access_token: headers["Authorization"] = f"token {config.personal_access_token}" - response = requests.get(api_url, headers=headers, timeout=60) - handle_request_response(response) + response = requests.get(api_url, headers=headers, timeout=request_timeout) + handle_request_response(response, api_url) update_changelog(f"{github_repo_owner}/{github_repo_name}", response.json()) assets = response.json()["assets"] try: diff --git a/src/downloader/uptodown.py b/src/downloader/uptodown.py index fda670e..69d76d9 100644 --- a/src/downloader/uptodown.py +++ b/src/downloader/uptodown.py @@ -8,7 +8,7 @@ from loguru import logger from src.app import APP from src.downloader.download import Downloader from src.exceptions import UptoDownAPKDownloadError -from src.utils import bs4_parser, request_header +from src.utils import bs4_parser, handle_request_response, request_header, request_timeout class UptoDown(Downloader): @@ -16,7 +16,8 @@ class UptoDown(Downloader): def extract_download_link(self: Self, page: str, app: str) -> Tuple[str, str]: """Extract download link from uptodown url.""" - r = requests.get(page, headers=request_header, allow_redirects=True, timeout=60) + r = requests.get(page, headers=request_header, allow_redirects=True, timeout=request_timeout) + handle_request_response(r, page) soup = BeautifulSoup(r.text, bs4_parser) download_button = soup.find(id="detail-download-button") if not download_button: diff --git a/src/utils.py b/src/utils.py index 45692bf..9b52c54 100644 --- a/src/utils.py +++ b/src/utils.py @@ -29,6 +29,7 @@ request_header = { } bs4_parser = "html.parser" changelog_file = "changelog.md" +request_timeout = 60 def update_changelog(name: str, response: Dict[str, str]) -> None: @@ -99,7 +100,7 @@ def get_parent_repo() -> str: return "https://github.com/nikhilbadyal/docker-py-revanced" -def handle_request_response(response: Response) -> None: +def handle_request_response(response: Response, url: str) -> None: """The function handles 
the response of a GET request and raises an exception if the response code is not 200. Parameters @@ -108,11 +109,13 @@ The parameter `response` is of type `Response`, which is likely referring to a response object from an HTTP request. This object typically contains information about the response received from the server, such as the status code, headers, and response body. + url: str + The URL on which the request was made. """ response_code = response.status_code if response_code != status_code_200: msg = f"Unable to downloaded assets. Reason - {response.text}" - raise DownloadError(msg) + raise DownloadError(msg, url=url) def slugify(string: str) -> str: