🎨 Use newer style of coding (#287)

This commit is contained in:
Nikhil Badyal
2023-08-15 21:49:22 +05:30
committed by GitHub
parent 4f5806a773
commit bc723fe850
3 changed files with 34 additions and 35 deletions
+24 -25
View File
@@ -2,7 +2,7 @@
 from typing import Any
 import requests
-from bs4 import BeautifulSoup
+from bs4 import BeautifulSoup, Tag
 from loguru import logger
 from scripts.status_check import headers
@@ -16,11 +16,7 @@ class ApkMirror(Downloader):
     def _extract_force_download_link(self, link: str, app: str) -> None:
         """Extract force download link."""
-        r = requests.get(link, headers=headers)
-        if r.status_code != 200:
-            raise AppNotFound(f"Unable to connect with {link} on ApkMirror.")
-        soup = BeautifulSoup(r.text, bs4_parser)
-        notes_divs = soup.find(class_="tab-pane")
+        notes_divs = self._extracted_search_div(link, "tab-pane")
         possible_links = notes_divs.find_all("a")
         for possible_link in possible_links:
             if possible_link.get("href") and "download.php?id=" in possible_link.get(
@@ -38,36 +34,30 @@ class ApkMirror(Downloader):
         :param app: Name of the app
         """
         logger.debug(f"Extracting download link from\n{main_page}")
-        r = requests.get(main_page, headers=headers)
-        if r.status_code != 200:
-            raise AppNotFound(f"Unable to connect with {main_page} on ApkMirror.")
-        soup = BeautifulSoup(r.text, bs4_parser)
-        download_button = soup.find(class_="center")
+        download_button = self._extracted_search_div(main_page, "center")
         download_links = download_button.find_all("a")
-        final_download_link = None
-        for download_link in download_links:
-            if download_link.get("href"):
-                if "download/?key=" in download_link.get("href"):
-                    final_download_link = download_link["href"]
-                    break
-        if not final_download_link:
-            raise AppNotFound(f"Unable to download apk from {main_page}")
-        self._extract_force_download_link(
-            self.config.apk_mirror + final_download_link, app
-        )
+        if final_download_link := next(
+            (
+                download_link["href"]
+                for download_link in download_links
+                if download_link.get("href")
+                and "download/?key=" in download_link.get("href")
+            ),
+            None,
+        ):
+            self._extract_force_download_link(
+                self.config.apk_mirror + final_download_link, app
+            )
+        else:
+            raise AppNotFound(f"Unable to download apk from {main_page}")
     def get_download_page(self, main_page: str) -> str:
         """Function to get the download page in apk_mirror.
-        :param parser: Parser
         :param main_page: Main Download Page in APK mirror(Index)
         :return:
         """
-        r = requests.get(main_page, headers=headers)
-        if r.status_code != 200:
-            raise AppNotFound(f"Unable to connect with {main_page} on ApkMirror.")
-        soup = BeautifulSoup(r.text, bs4_parser)
-        list_widget = soup.find(class_="listWidget")
+        list_widget = self._extracted_search_div(main_page, "listWidget")
         table_rows = list_widget.find_all(class_="table-row")
         sub_url = None
         for row in table_rows:
@@ -80,6 +70,15 @@ class ApkMirror(Downloader):
             raise AppNotFound("Unable to download apk from APKMirror.")
         return f"{self.config.apk_mirror}{sub_url}"

+    @staticmethod
+    def _extracted_search_div(url: str, search_class: str) -> Tag:
+        """Extract search div."""
+        r = requests.get(url, headers=headers)
+        if r.status_code != 200:
+            raise AppNotFound(f"Unable to connect with {url} on ApkMirror.")
+        soup = BeautifulSoup(r.text, bs4_parser)
+        return soup.find(class_=search_class)
+
     def specific_version(self, app: str, version: str) -> None:
         """Function to download the specified version of app from apkmirror.
+3 -3
View File
@@ -8,6 +8,7 @@ from loguru import logger
 from src.config import RevancedConfig
 from src.downloader.download import Downloader
+from src.exceptions import PatchingFailed
 from src.utils import handle_github_response, update_changelog
@@ -82,9 +83,8 @@ class Github(Downloader):
         assets = response.json()["assets"]
         try:
             filter_pattern = re.compile(asset_filter)
-        except re.error:
-            logger.error("Invalid regex pattern provided.")
-            raise Exception()
+        except re.error as e:
+            raise PatchingFailed("Invalid regex pattern provided.") from e
         for asset in assets:
             assets_url = asset["browser_download_url"]
             assets_name = asset["name"]
+5 -5
View File
@@ -1,4 +1,6 @@
 """Revanced Patches."""
+import contextlib
 import json
 from typing import Any, Dict, List, Tuple
@@ -114,10 +116,8 @@ class Patches(object):
             raise AppNotFound(app)
         patches = getattr(self, app_name)
         version = "latest"
-        try:
-            version = next(i["version"] for i in patches if i["version"] != "all")
-        except StopIteration:
-            pass
+        with contextlib.suppress(StopIteration):
+            version = next(i["version"] for i in patches if i["version"] != "all")
         return patches, version

     def include_exclude_patch(
@@ -174,5 +174,5 @@ class PatchLoader:
             with open(file_name) as f:
                 patches = json.load(f)
             return patches
-        except FileNotFoundError:
-            raise PatchesJsonFailed()
+        except FileNotFoundError as e:
+            raise PatchesJsonFailed() from e