🎨 Use newer style of coding (#287)

This commit is contained in:
Nikhil Badyal
2023-08-15 21:49:22 +05:30
committed by GitHub
parent 4f5806a773
commit bc723fe850
3 changed files with 34 additions and 35 deletions
+26 -27
View File
@@ -2,7 +2,7 @@
from typing import Any
import requests
from bs4 import BeautifulSoup
from bs4 import BeautifulSoup, Tag
from loguru import logger
from scripts.status_check import headers
@@ -16,11 +16,7 @@ class ApkMirror(Downloader):
def _extract_force_download_link(self, link: str, app: str) -> None:
"""Extract force download link."""
r = requests.get(link, headers=headers)
if r.status_code != 200:
raise AppNotFound(f"Unable to connect with {link} on ApkMirror.")
soup = BeautifulSoup(r.text, bs4_parser)
notes_divs = soup.find(class_="tab-pane")
notes_divs = self._extracted_search_div(link, "tab-pane")
possible_links = notes_divs.find_all("a")
for possible_link in possible_links:
if possible_link.get("href") and "download.php?id=" in possible_link.get(
@@ -38,36 +34,30 @@ class ApkMirror(Downloader):
:param app: Name of the app
"""
logger.debug(f"Extracting download link from\n{main_page}")
r = requests.get(main_page, headers=headers)
if r.status_code != 200:
raise AppNotFound(f"Unable to connect with {main_page} on ApkMirror.")
soup = BeautifulSoup(r.text, bs4_parser)
download_button = soup.find(class_="center")
download_button = self._extracted_search_div(main_page, "center")
download_links = download_button.find_all("a")
final_download_link = None
for download_link in download_links:
if download_link.get("href"):
if "download/?key=" in download_link.get("href"):
final_download_link = download_link["href"]
break
if not final_download_link:
if final_download_link := next(
(
download_link["href"]
for download_link in download_links
if download_link.get("href")
and "download/?key=" in download_link.get("href")
),
None,
):
self._extract_force_download_link(
self.config.apk_mirror + final_download_link, app
)
else:
raise AppNotFound(f"Unable to download apk from {main_page}")
self._extract_force_download_link(
self.config.apk_mirror + final_download_link, app
)
def get_download_page(self, main_page: str) -> str:
"""Function to get the download page in apk_mirror.
:param parser: Parser
:param main_page: Main Download Page in APK mirror(Index)
:return:
"""
r = requests.get(main_page, headers=headers)
if r.status_code != 200:
raise AppNotFound(f"Unable to connect with {main_page} on ApkMirror.")
soup = BeautifulSoup(r.text, bs4_parser)
list_widget = soup.find(class_="listWidget")
list_widget = self._extracted_search_div(main_page, "listWidget")
table_rows = list_widget.find_all(class_="table-row")
sub_url = None
for row in table_rows:
@@ -80,6 +70,15 @@ class ApkMirror(Downloader):
raise AppNotFound("Unable to download apk from APKMirror.")
return f"{self.config.apk_mirror}{sub_url}"
@staticmethod
def _extracted_search_div(url: str, search_class: str) -> Tag:
    """Fetch *url* and return the first element with CSS class *search_class*.

    Shared helper for the ApkMirror scraping methods, which all follow the
    same pattern: GET the page, parse it, and locate one container div.

    :param url: Full URL of the ApkMirror page to fetch.
    :param search_class: CSS class of the element to locate (e.g. "tab-pane",
        "center", "listWidget").
    :return: The matching BeautifulSoup ``Tag``.
    :raises AppNotFound: If the page cannot be fetched (non-200 response) or
        no element with the given class exists on the page.
    """
    r = requests.get(url, headers=headers)
    if r.status_code != 200:
        raise AppNotFound(f"Unable to connect with {url} on ApkMirror.")
    soup = BeautifulSoup(r.text, bs4_parser)
    div = soup.find(class_=search_class)
    # soup.find returns None when the class is absent; the annotation
    # promises a Tag, so fail loudly instead of letting callers hit an
    # AttributeError on None.find_all(...).
    if div is None:
        raise AppNotFound(f"Unable to find {search_class!r} on {url}.")
    return div
def specific_version(self, app: str, version: str) -> None:
"""Function to download the specified version of app from apkmirror.
+3 -3
View File
@@ -8,6 +8,7 @@ from loguru import logger
from src.config import RevancedConfig
from src.downloader.download import Downloader
from src.exceptions import PatchingFailed
from src.utils import handle_github_response, update_changelog
@@ -82,9 +83,8 @@ class Github(Downloader):
assets = response.json()["assets"]
try:
filter_pattern = re.compile(asset_filter)
except re.error:
logger.error("Invalid regex pattern provided.")
raise Exception()
except re.error as e:
raise PatchingFailed("Invalid regex pattern provided.") from e
for asset in assets:
assets_url = asset["browser_download_url"]
assets_name = asset["name"]
+5 -5
View File
@@ -1,4 +1,6 @@
"""Revanced Patches."""
import contextlib
import json
from typing import Any, Dict, List, Tuple
@@ -114,10 +116,8 @@ class Patches(object):
raise AppNotFound(app)
patches = getattr(self, app_name)
version = "latest"
try:
with contextlib.suppress(StopIteration):
version = next(i["version"] for i in patches if i["version"] != "all")
except StopIteration:
pass
return patches, version
def include_exclude_patch(
@@ -174,5 +174,5 @@ class PatchLoader:
with open(file_name) as f:
patches = json.load(f)
return patches
except FileNotFoundError:
raise PatchesJsonFailed()
except FileNotFoundError as e:
raise PatchesJsonFailed() from e