mirror of
https://github.com/sotam0316/docker-py-revanced.git
synced 2026-04-25 03:48:37 +09:00
♻️ Cleanup
This commit is contained in:
@@ -1,450 +1,28 @@
|
|||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
import sys
|
||||||
from atexit import register
|
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
|
||||||
from pathlib import Path
|
|
||||||
from queue import PriorityQueue
|
|
||||||
from shutil import rmtree
|
|
||||||
from subprocess import PIPE, Popen
|
|
||||||
from time import perf_counter
|
|
||||||
from typing import Any, Dict, List, Tuple, Type
|
|
||||||
|
|
||||||
import requests
|
|
||||||
from environs import Env
|
from environs import Env
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
from requests import Session
|
|
||||||
from selectolax.lexbor import LexborHTMLParser
|
from src.downloader import Downloader
|
||||||
from tqdm import tqdm
|
from src.parser import ArgParser
|
||||||
|
from src.patches import Patches
|
||||||
|
|
||||||
env = Env()
|
env = Env()
|
||||||
temp_folder = Path("apks")
|
|
||||||
session = Session()
|
|
||||||
session.headers["User-Agent"] = "anything"
|
|
||||||
supported_apps = [
|
|
||||||
"youtube",
|
|
||||||
"youtube_music",
|
|
||||||
"twitter",
|
|
||||||
"reddit",
|
|
||||||
"tiktok",
|
|
||||||
"warnwetter",
|
|
||||||
"spotify",
|
|
||||||
]
|
|
||||||
apps = env.list("PATCH_APPS", supported_apps)
|
|
||||||
build_extended = env.bool("BUILD_EXTENDED", False)
|
|
||||||
extended_apps = ["youtube", "youtube_music"]
|
|
||||||
keystore_name = env.str("KEYSTORE_FILE_NAME", "revanced.keystore")
|
|
||||||
apk_mirror = "https://www.apkmirror.com"
|
|
||||||
github = "https://www.github.com"
|
|
||||||
normal_cli_jar = "revanced-cli.jar"
|
|
||||||
normal_patches_jar = "revanced-patches.jar"
|
|
||||||
normal_integrations_apk = "revanced-integrations.apk"
|
|
||||||
cli_jar = f"inotia00-{normal_cli_jar}" if build_extended else normal_cli_jar
|
|
||||||
patches_jar = f"inotia00-{normal_patches_jar}" if build_extended else normal_patches_jar
|
|
||||||
integrations_apk = (
|
|
||||||
f"inotia00-{normal_integrations_apk}" if build_extended else normal_integrations_apk
|
|
||||||
)
|
|
||||||
apk_mirror_urls = {
|
|
||||||
"reddit": f"{apk_mirror}/apk/redditinc/reddit/",
|
|
||||||
"twitter": f"{apk_mirror}/apk/twitter-inc/twitter/",
|
|
||||||
"tiktok": f"{apk_mirror}/apk/tiktok-pte-ltd/tik-tok-including-musical-ly/",
|
|
||||||
"warnwetter": f"{apk_mirror}/apk/deutscher-wetterdienst/warnwetter/",
|
|
||||||
"youtube": f"{apk_mirror}/apk/google-inc/youtube/",
|
|
||||||
"youtube_music": f"{apk_mirror}/apk/google-inc/youtube-music/",
|
|
||||||
}
|
|
||||||
apk_mirror_version_urls = {
|
|
||||||
"reddit": f"{apk_mirror_urls.get('reddit')}reddit",
|
|
||||||
"twitter": f"{apk_mirror_urls.get('twitter')}twitter",
|
|
||||||
"tiktok": f"{apk_mirror_urls.get('tiktok')}tik-tok-including-musical-ly",
|
|
||||||
"warnwetter": f"{apk_mirror_urls.get('warnwetter')}warnwetter",
|
|
||||||
"youtube": f"{apk_mirror_urls.get('youtube')}youtube",
|
|
||||||
"youtube_music": f"{apk_mirror_urls.get('youtube_music')}youtube-music",
|
|
||||||
}
|
|
||||||
upto_down = ["spotify"]
|
|
||||||
|
|
||||||
|
|
||||||
class Downloader(object):
    """Class-level download helper for APKs and revanced build assets.

    All downloads go through the module-wide ``session`` and are written
    into ``temp_folder``; per-file timings are pushed onto a queue so
    ``report()`` can log them once everything has finished.
    """

    # Bytes per streamed chunk: 2**21 * 5 = 10 MiB.
    _CHUNK_SIZE = 2**21 * 5
    # Holds (elapsed_seconds, file_name) tuples, drained by report().
    _QUEUE = PriorityQueue()
    # Count of downloads started and not yet reported.
    _QUEUE_LENGTH = 0

    @classmethod
    def _download(cls, url: str, file_name: str) -> None:
        """Stream ``url`` into ``temp_folder/file_name`` with a tqdm bar."""
        logger.debug(f"Trying to download {file_name} from {url}")
        cls._QUEUE_LENGTH += 1
        start = perf_counter()
        resp = session.get(url, stream=True)
        # A missing Content-Length header yields total=0 (indeterminate bar).
        total = int(resp.headers.get("content-length", 0))
        bar = tqdm(
            desc=file_name,
            total=total,
            unit="iB",
            unit_scale=True,
            unit_divisor=1024,
            colour="green",
        )
        with temp_folder.joinpath(file_name).open("wb") as dl_file, bar:
            for chunk in resp.iter_content(cls._CHUNK_SIZE):
                size = dl_file.write(chunk)
                bar.update(size)
        # Record how long this file took so report() can log it later.
        cls._QUEUE.put((perf_counter() - start, file_name))
        logger.debug(f"Downloaded {file_name}")

    @classmethod
    def extract_download_link(cls, page: str, app: str) -> None:
        """Follow apkmirror's two-step download flow from ``page`` and fetch the APK."""
        logger.debug(f"Extracting download link from\n{page}")
        parser = LexborHTMLParser(session.get(page).text)

        # First hop: the highlighted download button on the variant page.
        resp = session.get(
            apk_mirror + parser.css_first("a.accent_bg").attributes["href"]
        )
        parser = LexborHTMLParser(resp.text)

        # Second hop: the direct link inside the notes paragraph.
        href = parser.css_first(
            "p.notes:nth-child(3) > span:nth-child(1) > a:nth-child(1)"
        ).attributes["href"]
        cls._download(apk_mirror + href, f"{app}.apk")
        logger.debug("Finished Extracting link and downloading")

    @classmethod
    def get_download_page(cls, parser, main_page):
        """Return the URL of the plain-APK variant on an apkmirror listing.

        Exits the program when no ".apkm-badge" entry labelled APK exists.
        """
        apm = parser.css(".apkm-badge")
        sub_url = ""
        for is_apm in apm:
            if "APK" in is_apm.text():
                parser = is_apm.parent
                sub_url = parser.css_first(".accent_color").attributes["href"]
                break
        if sub_url == "":
            logger.exception(
                f"Unable to find any apk on apkmirror_specific_version on {main_page}"
            )
            sys.exit(-1)
        download_url = apk_mirror + sub_url
        return download_url

    @classmethod
    def upto_down_downloader(cls, app: str) -> str:
        """Download Spotify from uptodown; returns the version string shown there."""
        page = "https://spotify.en.uptodown.com/android/download"
        parser = LexborHTMLParser(session.get(page).text)
        main_page = parser.css_first("#detail-download-button")
        download_url = main_page.attributes["data-url"]
        app_version = parser.css_first(".version").text()
        cls._download(download_url, "spotify.apk")
        # NOTE(review): message says apkmirror but this path is uptodown.
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version in rt")
        return app_version

    @classmethod
    def apkmirror_specific_version(cls, app: str, version: str) -> str:
        """Download a pinned ``version`` of ``app`` from apkmirror.

        NOTE(review): returns the dash-separated URL form of the version,
        not the dotted input — confirm callers expect this.
        """
        logger.debug(f"Trying to download {app},specific version {version}")
        # apkmirror encodes versions with dashes in its URLs.
        version = version.replace(".", "-")
        main_page = f"{apk_mirror_version_urls.get(app)}-{version}-release/"
        parser = LexborHTMLParser(session.get(main_page).text)
        download_page = cls.get_download_page(parser, main_page)
        cls.extract_download_link(download_page, app)
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version")
        return version

    @classmethod
    def apkmirror_latest_version(cls, app: str) -> str:
        """Download the newest apkmirror release of ``app``; returns its version."""
        logger.debug(f"Trying to download {app}'s latest version from apkmirror")
        page = apk_mirror_urls.get(app)
        if not page:
            logger.debug("Invalid app")
            sys.exit(1)
        parser = LexborHTMLParser(session.get(page).text)
        main_page = parser.css_first(".appRowVariantTag>.accent_color").attributes[
            "href"
        ]
        # Slice the version out of the href: from the first digit up to
        # the character before the final "release" token.
        int_version = re.search(r"\d", main_page).start()
        extra_release = main_page.rfind("release") - 1
        version = main_page[int_version:extra_release]
        version = version.replace("-", ".")
        main_page = f"{apk_mirror}{main_page}"
        parser = LexborHTMLParser(session.get(main_page).text)
        download_page = cls.get_download_page(parser, main_page)
        cls.extract_download_link(download_page, app)
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version in rt")
        return version

    @classmethod
    def repository(cls, owner: str, name: str, file_name: str) -> None:
        """Download the latest GitHub release asset of owner/name as ``file_name``."""
        logger.debug(f"Trying to download {name} from github")
        repo_url = f"https://api.github.com/repos/{owner}/{name}/releases/latest"
        r = requests.get(
            repo_url, headers={"Content-Type": "application/vnd.github.v3+json"}
        )
        # revanced-patches publishes the wanted jar as its second asset.
        if name == "revanced-patches":
            download_url = r.json()["assets"][1]["browser_download_url"]
        else:
            download_url = r.json()["assets"][0]["browser_download_url"]
        cls._download(download_url, file_name=file_name)

    @classmethod
    def report(cls) -> None:
        """Log the timing of each finished download.

        Blocks on the queue until every started download has been reported.
        """
        started = False
        while True:
            item = cls._QUEUE.get()
            logger.debug(f"{item[1]} downloaded in {item[0]:.2f} seconds.")
            cls._QUEUE.task_done()
            cls._QUEUE_LENGTH -= 1

            if not started:
                started = True
            elif started and not cls._QUEUE_LENGTH:
                break
|
|
||||||
|
|
||||||
|
|
||||||
class Patches(object):
    """Registry of available revanced patches, grouped per supported app.

    Fetches ``patches.json`` from the revanced repository (and, for extended
    builds, from inotia00's fork for the YouTube apps) and stores one list of
    patch dicts per app as an instance attribute ("_reddit", "_yt", ...).
    """

    def __init__(self) -> None:
        logger.debug("fetching all patches")
        resp = session.get(
            "https://raw.githubusercontent.com/revanced/revanced-patches/main/patches.json"
        )
        patches = resp.json()

        # package id -> (display name, attribute holding that app's patches)
        revanced_app_ids = {
            "com.reddit.frontpage": ("reddit", "_reddit"),
            "com.ss.android.ugc.trill": ("tiktok", "_tiktok"),
            "com.twitter.android": ("twitter", "_twitter"),
            "de.dwd.warnapp": ("warnwetter", "_warnwetter"),
            "com.spotify.music": ("spotify", "_spotify"),
        }

        # Start every app with an empty patch list.
        for app_name in (revanced_app_ids[x][1] for x in revanced_app_ids):
            setattr(self, app_name, [])

        for patch in patches:
            for compatible_package, version in [
                (x["name"], x["versions"]) for x in patch["compatiblePackages"]
            ]:
                if compatible_package in revanced_app_ids:
                    app_name = revanced_app_ids[compatible_package][1]
                    p = {x: patch[x] for x in ["name", "description"]}
                    p["app"] = compatible_package
                    # Last listed version is taken as the supported one.
                    p["version"] = version[-1] if version else "all"
                    getattr(self, app_name).append(p)

        if build_extended:
            url = "https://raw.githubusercontent.com/inotia00/revanced-patches/revanced-extended/patches.json"
        else:
            # NOTE(review): identical to the URL fetched above, so a
            # non-extended build downloads the same patches.json twice —
            # confirm this is intended.
            url = "https://raw.githubusercontent.com/revanced/revanced-patches/main/patches.json"

        resp_extended = session.get(url)
        extended_patches = resp_extended.json()
        # The YouTube apps come from the (possibly extended) second source.
        revanced_extended_app_ids = {
            "com.google.android.youtube": ("youtube", "_yt"),
            "com.google.android.apps.youtube.music": ("youtube-music", "_ytm"),
        }
        for app_name in (
            revanced_extended_app_ids[x][1] for x in revanced_extended_app_ids
        ):
            setattr(self, app_name, [])

        for patch in extended_patches:
            for compatible_package, version in [
                (x["name"], x["versions"]) for x in patch["compatiblePackages"]
            ]:
                if compatible_package in revanced_extended_app_ids:
                    app_name = revanced_extended_app_ids[compatible_package][1]
                    p = {x: patch[x] for x in ["name", "description"]}
                    p["app"] = compatible_package
                    p["version"] = version[-1] if version else "all"
                    getattr(self, app_name).append(p)

        # Log a per-app patch-count summary for both groups.
        for app_name, app_id in revanced_extended_app_ids.values():
            n_patches = len(getattr(self, app_id))
            logger.debug(f"Total patches in {app_name} are {n_patches}")
        for app_name, app_id in revanced_app_ids.values():
            n_patches = len(getattr(self, app_id))
            logger.debug(f"Total patches in {app_name} are {n_patches}")

    def get(self, app: str) -> Tuple[List[Dict[str, str]], str]:
        """Return ``(patches, recommended_version)`` for *app*.

        A recommended version is only derived for the YouTube apps (the first
        patch carrying an explicit version); other apps get an empty string.
        Exits the program for an unknown app name.
        """
        logger.debug("Getting patches for %s" % app)
        app_names = {
            "reddit": "_reddit",
            "tiktok": "_tiktok",
            "twitter": "_twitter",
            "warnwetter": "_warnwetter",
            "youtube": "_yt",
            "youtube_music": "_ytm",
            "spotify": "_spotify",
        }
        if not (app_name := app_names.get(app)):
            logger.debug("Invalid app name")
            sys.exit(-1)
        patches = getattr(self, app_name)
        version = ""
        if app in ("youtube", "youtube_music"):
            version = next(i["version"] for i in patches if i["version"] != "all")
            logger.debug(f"Recommended Version for patching {app} is {version}")
        else:
            logger.debug("No recommended version.")
        return patches, version
|
|
||||||
|
|
||||||
|
|
||||||
class ArgParser(object):
    """Collects include/exclude patch flags and invokes the revanced CLI."""

    def __init__(self) -> None:
        # CLI arguments accumulated as ["-i"/"-e", <patch name>] pairs.
        self._PATCHES: List[str] = []
        # Names of patches explicitly excluded, kept for reporting.
        self._EXCLUDED: List[str] = []

    def include(self, name: str) -> None:
        """Mark patch *name* to be included in the build."""
        self._PATCHES.extend(["-i", name])

    def exclude(self, name: str) -> None:
        """Mark patch *name* to be excluded from the build."""
        self._PATCHES.extend(["-e", name])
        self._EXCLUDED.append(name)

    def get_excluded_patches(self) -> List[Any]:
        """Return the list of patch names excluded so far."""
        return self._EXCLUDED

    def run(self, app: str, version: str, is_experimental: bool = False) -> None:
        """Run the revanced CLI (``java -jar``) to patch *app* at *version*.

        Uses the inotia00 ("extended") tooling when BUILD_EXTENDED is set and
        the app supports it; streams the CLI's stdout into the debug log.
        """
        logger.debug(f"Sending request to revanced cli for building {app} revanced")
        cli = normal_cli_jar
        patches = normal_patches_jar
        integrations = normal_integrations_apk
        if build_extended and app in extended_apps:
            cli = cli_jar
            patches = patches_jar
            integrations = integrations_apk
        args = [
            "-jar",
            cli,
            "-a",
            app + ".apk",
            "-b",
            patches,
            "-m",
            integrations,
            "-o",
            f"Re-{app}-{version}-output.apk",
            "--keystore",
            keystore_name,
        ]
        if is_experimental:
            logger.debug("Using experimental features")
            args.append("--experimental")
        # Every flag value sits at an odd index; resolve each inside apks/.
        args[1::2] = map(lambda i: temp_folder.joinpath(i), args[1::2])

        if self._PATCHES:
            args.extend(self._PATCHES)

        start = perf_counter()
        process = Popen(["java", *args], stdout=PIPE)
        for line in process.stdout:
            # BUG FIX: the old call passed leftover print() kwargs
            # (flush=True, end="") to loguru, which treats extra kwargs as
            # str.format fields and would raise on CLI output containing
            # braces. Log the plain decoded line instead.
            logger.debug(line.decode().rstrip())
        process.wait()
        logger.debug(
            f"Patching completed for app {app} in {perf_counter() - start:.2f} "
            f"seconds."
        )
|
|
||||||
|
|
||||||
|
|
||||||
@register
def close() -> None:
    """Exit hook: shut down the shared HTTP session and drop the CLI cache dir."""
    session.close()
    cache_dir = Path("revanced-cache")
    if cache_dir.is_dir():
        rmtree(cache_dir)
|
|
||||||
|
|
||||||
|
|
||||||
def check_java() -> None:
    """Abort the program unless a Java 17 runtime is available on PATH.

    Runs ``java -version`` and inspects the banner; exits with status -1
    when Java is missing or not version 17.
    """
    logger.debug("Checking if java is available")
    # `java -version` prints its banner to stderr; fold it into stdout.
    jd = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
    # BUG FIX: previously str(jd)[1:-1] searched the bytes repr (with
    # literal \n escapes) instead of the actual text; decode it properly.
    jd = jd.decode("utf-8", errors="replace")
    if "Runtime Environment" not in jd:
        logger.debug("Java Must be installed")
        # sys.exit instead of the site-provided `exit` builtin, which is
        # not guaranteed to exist in every interpreter invocation.
        sys.exit(-1)
    if "17" not in jd:
        logger.debug("Java 17 Must be installed")
        sys.exit(-1)
    logger.debug("Cool!! Java is available")
|
|
||||||
|
|
||||||
|
|
||||||
def pre_requisite():
    """Verify the Java toolchain, then build and return the patch registry."""
    check_java()
    return Patches()
|
|
||||||
|
|
||||||
|
|
||||||
def download_revanced(downloader: Type[Downloader]) -> None:
    """Concurrently fetch the revanced tooling (cli, integrations, patches)
    plus VancedMicroG; extended builds also pull the inotia00 variants.
    """
    assets = [
        ("revanced", "revanced-cli", normal_cli_jar),
        ("revanced", "revanced-integrations", normal_integrations_apk),
        ("revanced", "revanced-patches", normal_patches_jar),
        ("inotia00", "VancedMicroG", "VancedMicroG.apk"),
    ]
    if build_extended:
        assets.extend(
            [
                ("inotia00", "revanced-cli", cli_jar),
                ("inotia00", "revanced-integrations", integrations_apk),
                ("inotia00", "revanced-patches", patches_jar),
            ]
        )
    with ThreadPoolExecutor() as executor:
        for owner, repo, file_name in assets:
            executor.submit(downloader.repository, owner, repo, file_name)
    logger.info("Downloaded revanced microG ,cli, integrations and patches.")
|
|
||||||
|
|
||||||
|
|
||||||
def upto_down_downloader(app: str, downloader: Type[Downloader]) -> str:
    """Delegate the uptodown download of *app*; returns the fetched version."""
    version = downloader.upto_down_downloader(app)
    return version
|
|
||||||
|
|
||||||
|
|
||||||
def download_from_apkmirror(
    version: str, app: str, downloader: Type[Downloader]
) -> str:
    """Download *app* from apkmirror, pinned to *version* unless it is
    empty or "latest"; returns the version actually fetched.
    """
    wants_pinned = bool(version) and version != "latest"
    if wants_pinned:
        return downloader.apkmirror_specific_version(app, version)
    return downloader.apkmirror_latest_version(app)
|
|
||||||
|
|
||||||
|
|
||||||
def download_apk_to_patch(version: str, app: str, downloader: Type[Downloader]) -> str:
    """Fetch the APK for *app*: uptodown-hosted apps go there, the rest to
    apkmirror; returns the version that was downloaded.
    """
    hosted_on_uptodown = app in upto_down
    if hosted_on_uptodown:
        return upto_down_downloader(app, downloader)
    return download_from_apkmirror(version, app, downloader)
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
def main() -> None:
|
||||||
patches = pre_requisite()
|
patches = Patches(env)
|
||||||
downloader = Downloader
|
downloader = Downloader()
|
||||||
download_revanced(downloader)
|
downloader.download_revanced()
|
||||||
|
|
||||||
def get_patches() -> None:
|
logger.info(f"Will Patch only {patches.apps}")
|
||||||
logger.debug(f"Excluding patches for app {app}")
|
for app in patches.apps:
|
||||||
if build_extended and app in extended_apps:
|
|
||||||
excluded_patches = env.list(f"EXCLUDE_PATCH_{app}_EXTENDED".upper(), [])
|
|
||||||
else:
|
|
||||||
excluded_patches = env.list(f"EXCLUDE_PATCH_{app}".upper(), [])
|
|
||||||
for patch in app_patches:
|
|
||||||
arg_parser.include(patch["name"]) if patch[
|
|
||||||
"name"
|
|
||||||
] not in excluded_patches else arg_parser.exclude(patch["name"])
|
|
||||||
excluded = arg_parser.get_excluded_patches()
|
|
||||||
if excluded:
|
|
||||||
logger.debug(f"Excluded patches {excluded} for {app}")
|
|
||||||
else:
|
|
||||||
logger.debug(f"No excluded patches for {app}")
|
|
||||||
|
|
||||||
def get_patches_version() -> Any:
|
|
||||||
experiment = False
|
|
||||||
total_patches, recommended_version = patches.get(app=app)
|
|
||||||
env_version = env.str(f"{app}_VERSION".upper(), None)
|
|
||||||
if env_version:
|
|
||||||
logger.debug(f"Picked {app} version {env_version} from env.")
|
|
||||||
if env_version == "latest" or env_version > recommended_version:
|
|
||||||
experiment = True
|
|
||||||
recommended_version = env_version
|
|
||||||
return total_patches, recommended_version, experiment
|
|
||||||
|
|
||||||
logger.info(f"Will Patch only {apps}")
|
|
||||||
for app in apps:
|
|
||||||
try:
|
try:
|
||||||
arg_parser = ArgParser()
|
arg_parser = ArgParser(patches)
|
||||||
logger.debug("Trying to build %s" % app)
|
logger.debug("Trying to build %s" % app)
|
||||||
app_patches, version, is_experimental = get_patches_version()
|
app_patches, version, is_experimental = patches.get_patches_version(app)
|
||||||
version = download_apk_to_patch(version, app, downloader)
|
version = downloader.download_apk_to_patch(version, app)
|
||||||
get_patches()
|
patches.get_patches(app, arg_parser)
|
||||||
logger.debug(f"Downloaded {app}, version {version}")
|
logger.debug(f"Downloaded {app}, version {version}")
|
||||||
arg_parser.run(app=app, version=version, is_experimental=is_experimental)
|
arg_parser.run(app=app, version=version, is_experimental=is_experimental)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|||||||
@@ -0,0 +1,203 @@
|
|||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from pathlib import Path
|
||||||
|
from queue import PriorityQueue
|
||||||
|
from time import perf_counter
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from environs import Env
|
||||||
|
from loguru import logger
|
||||||
|
from requests import Session
|
||||||
|
from selectolax.lexbor import LexborHTMLParser
|
||||||
|
from tqdm import tqdm
|
||||||
|
|
||||||
|
env = Env()
|
||||||
|
temp_folder = Path("apks")
|
||||||
|
session = Session()
|
||||||
|
session.headers["User-Agent"] = "anything"
|
||||||
|
supported_apps = [
|
||||||
|
"youtube",
|
||||||
|
"youtube_music",
|
||||||
|
"twitter",
|
||||||
|
"reddit",
|
||||||
|
"tiktok",
|
||||||
|
"warnwetter",
|
||||||
|
"spotify",
|
||||||
|
]
|
||||||
|
apps = env.list("PATCH_APPS", supported_apps)
|
||||||
|
build_extended = env.bool("BUILD_EXTENDED", False)
|
||||||
|
extended_apps = ["youtube", "youtube_music"]
|
||||||
|
keystore_name = env.str("KEYSTORE_FILE_NAME", "revanced.keystore")
|
||||||
|
apk_mirror = "https://www.apkmirror.com"
|
||||||
|
github = "https://www.github.com"
|
||||||
|
normal_cli_jar = "revanced-cli.jar"
|
||||||
|
normal_patches_jar = "revanced-patches.jar"
|
||||||
|
normal_integrations_apk = "revanced-integrations.apk"
|
||||||
|
cli_jar = f"inotia00-{normal_cli_jar}" if build_extended else normal_cli_jar
|
||||||
|
patches_jar = f"inotia00-{normal_patches_jar}" if build_extended else normal_patches_jar
|
||||||
|
integrations_apk = (
|
||||||
|
f"inotia00-{normal_integrations_apk}" if build_extended else normal_integrations_apk
|
||||||
|
)
|
||||||
|
apk_mirror_urls = {
|
||||||
|
"reddit": f"{apk_mirror}/apk/redditinc/reddit/",
|
||||||
|
"twitter": f"{apk_mirror}/apk/twitter-inc/twitter/",
|
||||||
|
"tiktok": f"{apk_mirror}/apk/tiktok-pte-ltd/tik-tok-including-musical-ly/",
|
||||||
|
"warnwetter": f"{apk_mirror}/apk/deutscher-wetterdienst/warnwetter/",
|
||||||
|
"youtube": f"{apk_mirror}/apk/google-inc/youtube/",
|
||||||
|
"youtube_music": f"{apk_mirror}/apk/google-inc/youtube-music/",
|
||||||
|
}
|
||||||
|
apk_mirror_version_urls = {
|
||||||
|
"reddit": f"{apk_mirror_urls.get('reddit')}reddit",
|
||||||
|
"twitter": f"{apk_mirror_urls.get('twitter')}twitter",
|
||||||
|
"tiktok": f"{apk_mirror_urls.get('tiktok')}tik-tok-including-musical-ly",
|
||||||
|
"warnwetter": f"{apk_mirror_urls.get('warnwetter')}warnwetter",
|
||||||
|
"youtube": f"{apk_mirror_urls.get('youtube')}youtube",
|
||||||
|
"youtube_music": f"{apk_mirror_urls.get('youtube_music')}youtube-music",
|
||||||
|
}
|
||||||
|
upto_down = ["spotify"]
|
||||||
|
|
||||||
|
|
||||||
|
class Downloader(object):
    """Instance-based download helper for APKs and revanced build assets.

    Downloads go through the module-wide ``session`` into ``temp_folder``;
    each instance keeps its own queue of (elapsed_seconds, file_name)
    timings.
    """

    def __init__(self):
        # Bytes per streamed chunk: 2**21 * 5 = 10 MiB.
        self._CHUNK_SIZE = 2**21 * 5
        # Holds (elapsed_seconds, file_name) per finished download.
        self._QUEUE: PriorityQueue[Tuple] = PriorityQueue()
        # Count of downloads started and not yet reported.
        self._QUEUE_LENGTH = 0

    def _download(self, url: str, file_name: str) -> None:
        """Stream ``url`` into ``temp_folder/file_name`` with a tqdm bar."""
        logger.debug(f"Trying to download {file_name} from {url}")
        self._QUEUE_LENGTH += 1
        start = perf_counter()
        resp = session.get(url, stream=True)
        # A missing Content-Length header yields total=0 (indeterminate bar).
        total = int(resp.headers.get("content-length", 0))
        bar = tqdm(
            desc=file_name,
            total=total,
            unit="iB",
            unit_scale=True,
            unit_divisor=1024,
            colour="green",
        )
        with temp_folder.joinpath(file_name).open("wb") as dl_file, bar:
            for chunk in resp.iter_content(self._CHUNK_SIZE):
                size = dl_file.write(chunk)
                bar.update(size)
        # Record how long this file took.
        self._QUEUE.put((perf_counter() - start, file_name))
        logger.debug(f"Downloaded {file_name}")

    def extract_download_link(self, page: str, app: str) -> None:
        """Follow apkmirror's two-step download flow from ``page`` and fetch the APK."""
        logger.debug(f"Extracting download link from\n{page}")
        parser = LexborHTMLParser(session.get(page).text)

        # First hop: the highlighted download button on the variant page.
        resp = session.get(
            apk_mirror + parser.css_first("a.accent_bg").attributes["href"]
        )
        parser = LexborHTMLParser(resp.text)

        # Second hop: the direct link inside the notes paragraph.
        href = parser.css_first(
            "p.notes:nth-child(3) > span:nth-child(1) > a:nth-child(1)"
        ).attributes["href"]
        self._download(apk_mirror + href, f"{app}.apk")
        logger.debug("Finished Extracting link and downloading")

    def get_download_page(self, parser, main_page):
        """Return the URL of the plain-APK variant on an apkmirror listing.

        Exits the program when no ".apkm-badge" entry labelled APK exists.
        """
        apm = parser.css(".apkm-badge")
        sub_url = ""
        for is_apm in apm:
            if "APK" in is_apm.text():
                parser = is_apm.parent
                sub_url = parser.css_first(".accent_color").attributes["href"]
                break
        if sub_url == "":
            logger.exception(
                f"Unable to find any apk on apkmirror_specific_version on {main_page}"
            )
            sys.exit(-1)
        download_url = apk_mirror + sub_url
        return download_url

    def __upto_down_downloader(self, app: str) -> str:
        """Name-mangled worker: download Spotify from uptodown, return its version."""
        page = "https://spotify.en.uptodown.com/android/download"
        parser = LexborHTMLParser(session.get(page).text)
        main_page = parser.css_first("#detail-download-button")
        download_url = main_page.attributes["data-url"]
        app_version = parser.css_first(".version").text()
        self._download(download_url, "spotify.apk")
        # NOTE(review): message says apkmirror but this path is uptodown.
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version in rt")
        return app_version

    def apkmirror_specific_version(self, app: str, version: str) -> str:
        """Download a pinned ``version`` of ``app`` from apkmirror.

        NOTE(review): returns the dash-separated URL form of the version,
        not the dotted input — confirm callers expect this.
        """
        logger.debug(f"Trying to download {app},specific version {version}")
        # apkmirror encodes versions with dashes in its URLs.
        version = version.replace(".", "-")
        main_page = f"{apk_mirror_version_urls.get(app)}-{version}-release/"
        parser = LexborHTMLParser(session.get(main_page).text)
        download_page = self.get_download_page(parser, main_page)
        self.extract_download_link(download_page, app)
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version")
        return version

    def apkmirror_latest_version(self, app: str) -> str:
        """Download the newest apkmirror release of ``app``; returns its version."""
        logger.debug(f"Trying to download {app}'s latest version from apkmirror")
        page = apk_mirror_urls.get(app)
        if not page:
            logger.debug("Invalid app")
            sys.exit(1)
        parser = LexborHTMLParser(session.get(page).text)
        main_page = parser.css_first(".appRowVariantTag>.accent_color").attributes[
            "href"
        ]
        # Slice the version out of the href: from the first digit up to
        # the character before the final "release" token.
        int_version = re.search(r"\d", main_page).start()
        extra_release = main_page.rfind("release") - 1
        version = main_page[int_version:extra_release]
        version = version.replace("-", ".")
        main_page = f"{apk_mirror}{main_page}"
        parser = LexborHTMLParser(session.get(main_page).text)
        download_page = self.get_download_page(parser, main_page)
        self.extract_download_link(download_page, app)
        logger.debug(f"Downloaded {app} apk from apkmirror_specific_version in rt")
        return version

    def repository(self, owner: str, name: str, file_name: str) -> None:
        """Download the latest GitHub release asset of owner/name as ``file_name``."""
        logger.debug(f"Trying to download {name} from github")
        repo_url = f"https://api.github.com/repos/{owner}/{name}/releases/latest"
        r = requests.get(
            repo_url, headers={"Content-Type": "application/vnd.github.v3+json"}
        )
        # revanced-patches publishes the wanted jar as its second asset.
        if name == "revanced-patches":
            download_url = r.json()["assets"][1]["browser_download_url"]
        else:
            download_url = r.json()["assets"][0]["browser_download_url"]
        self._download(download_url, file_name=file_name)

    def download_revanced(self) -> None:
        """Concurrently fetch cli/integrations/patches (+ extended variants) and microG."""
        assets = (
            ("revanced", "revanced-cli", normal_cli_jar),
            ("revanced", "revanced-integrations", normal_integrations_apk),
            ("revanced", "revanced-patches", normal_patches_jar),
            ("inotia00", "VancedMicroG", "VancedMicroG.apk"),
        )
        if build_extended:
            assets += (
                ("inotia00", "revanced-cli", cli_jar),
                ("inotia00", "revanced-integrations", integrations_apk),
                ("inotia00", "revanced-patches", patches_jar),
            )
        with ThreadPoolExecutor() as executor:
            executor.map(lambda repo: self.repository(*repo), assets)
        logger.info("Downloaded revanced microG ,cli, integrations and patches.")

    def upto_down_downloader(self, app: str) -> str:
        """Public wrapper around the name-mangled uptodown downloader."""
        return self.__upto_down_downloader(app)

    def download_from_apkmirror(self, version: str, app: str) -> str:
        """Download from apkmirror: pinned ``version`` unless empty or "latest"."""
        if version and version != "latest":
            return self.apkmirror_specific_version(app, version)
        else:
            return self.apkmirror_latest_version(app)

    def download_apk_to_patch(self, version: str, app: str) -> str:
        """Entry point: route uptodown-hosted apps there, others to apkmirror."""
        if app in upto_down:
            return self.upto_down_downloader(app)
        else:
            return self.download_from_apkmirror(version, app)
|
||||||
+102
@@ -0,0 +1,102 @@
|
|||||||
|
from subprocess import PIPE, Popen
|
||||||
|
from time import perf_counter
|
||||||
|
from typing import Any, List
|
||||||
|
|
||||||
|
from environs import Env
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
env = Env()
|
||||||
|
|
||||||
|
|
||||||
|
class ArgParser(object):
    """Collects include/exclude patch flags and drives the revanced CLI.

    Build configuration (extended flag, artifact names, mirror URLs) lives
    on the class; per-build state (selected patches, the owning patcher)
    lives on the instance.
    """

    build_extended = env.bool("BUILD_EXTENDED", False)
    extended_apps = ["youtube", "youtube_music"]
    keystore_name = env.str("KEYSTORE_FILE_NAME", "revanced.keystore")
    apk_mirror = "https://www.apkmirror.com"
    github = "https://www.github.com"
    normal_cli_jar = "revanced-cli.jar"
    normal_patches_jar = "revanced-patches.jar"
    normal_integrations_apk = "revanced-integrations.apk"
    # Extended ("inotia00") builds use differently named artifacts.
    cli_jar = f"inotia00-{normal_cli_jar}" if build_extended else normal_cli_jar
    patches_jar = (
        f"inotia00-{normal_patches_jar}" if build_extended else normal_patches_jar
    )
    integrations_apk = (
        f"inotia00-{normal_integrations_apk}"
        if build_extended
        else normal_integrations_apk
    )
    apk_mirror_urls = {
        "reddit": f"{apk_mirror}/apk/redditinc/reddit/",
        "twitter": f"{apk_mirror}/apk/twitter-inc/twitter/",
        "tiktok": f"{apk_mirror}/apk/tiktok-pte-ltd/tik-tok-including-musical-ly/",
        "warnwetter": f"{apk_mirror}/apk/deutscher-wetterdienst/warnwetter/",
        "youtube": f"{apk_mirror}/apk/google-inc/youtube/",
        "youtube_music": f"{apk_mirror}/apk/google-inc/youtube-music/",
    }
    apk_mirror_version_urls = {
        "reddit": f"{apk_mirror_urls.get('reddit')}reddit",
        "twitter": f"{apk_mirror_urls.get('twitter')}twitter",
        "tiktok": f"{apk_mirror_urls.get('tiktok')}tik-tok-including-musical-ly",
        "warnwetter": f"{apk_mirror_urls.get('warnwetter')}warnwetter",
        "youtube": f"{apk_mirror_urls.get('youtube')}youtube",
        "youtube_music": f"{apk_mirror_urls.get('youtube_music')}youtube-music",
    }
    upto_down = ["spotify"]

    def __init__(self, patcher):
        # CLI arguments accumulated as ["-i"/"-e", <patch name>] pairs.
        self._PATCHES: List[str] = []
        # Names of patches explicitly excluded, kept for reporting.
        self._EXCLUDED: List[str] = []
        # The patcher supplies temp_folder (see run()).
        self.patcher = patcher
        self.keystore_name = env.str("KEYSTORE_FILE_NAME", "revanced.keystore")

    def include(self, name: str) -> None:
        """Mark patch *name* to be included in the build."""
        self._PATCHES.extend(["-i", name])

    def exclude(self, name: str) -> None:
        """Mark patch *name* to be excluded from the build."""
        self._PATCHES.extend(["-e", name])
        self._EXCLUDED.append(name)

    def get_excluded_patches(self) -> List[Any]:
        """Return the list of patch names excluded so far."""
        return self._EXCLUDED

    def run(self, app: str, version: str, is_experimental: bool = False) -> None:
        """Run the revanced CLI (``java -jar``) to patch *app* at *version*.

        Uses the inotia00 ("extended") tooling when BUILD_EXTENDED is set and
        the app supports it; streams the CLI's stdout into the debug log.
        """
        logger.debug(f"Sending request to revanced cli for building {app} revanced")
        cli = self.normal_cli_jar
        patches = self.normal_patches_jar
        integrations = self.normal_integrations_apk
        if self.build_extended and app in self.extended_apps:
            cli = self.cli_jar
            patches = self.patches_jar
            integrations = self.integrations_apk
        args = [
            "-jar",
            cli,
            "-a",
            app + ".apk",
            "-b",
            patches,
            "-m",
            integrations,
            "-o",
            f"Re-{app}-{version}-output.apk",
            "--keystore",
            self.keystore_name,
        ]
        if is_experimental:
            logger.debug("Using experimental features")
            args.append("--experimental")
        # Every flag value sits at an odd index; resolve each inside the
        # patcher's working folder.
        args[1::2] = map(lambda i: self.patcher.temp_folder.joinpath(i), args[1::2])

        if self._PATCHES:
            args.extend(self._PATCHES)

        start = perf_counter()
        process = Popen(["java", *args], stdout=PIPE)
        for line in process.stdout:
            # BUG FIX: the old call passed leftover print() kwargs
            # (flush=True, end="") to loguru, which treats extra kwargs as
            # str.format fields and would raise on CLI output containing
            # braces. Log the plain decoded line instead.
            logger.debug(line.decode().rstrip())
        process.wait()
        logger.debug(
            f"Patching completed for app {app} in {perf_counter() - start:.2f} "
            f"seconds."
        )
|
||||||
+151
@@ -0,0 +1,151 @@
|
|||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from typing import Any, Dict, List, Tuple
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
from requests import Session
|
||||||
|
|
||||||
|
supported_apps = [
|
||||||
|
"youtube",
|
||||||
|
"youtube_music",
|
||||||
|
"twitter",
|
||||||
|
"reddit",
|
||||||
|
"tiktok",
|
||||||
|
"warnwetter",
|
||||||
|
"spotify",
|
||||||
|
]
|
||||||
|
session = Session()
|
||||||
|
|
||||||
|
|
||||||
|
class Patches(object):
|
||||||
|
def check_java(self) -> None:
|
||||||
|
logger.debug("Checking if java is available")
|
||||||
|
jd = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
|
||||||
|
jd = str(jd)[1:-1]
|
||||||
|
if "Runtime Environment" not in jd:
|
||||||
|
logger.debug("Java Must be installed")
|
||||||
|
exit(-1)
|
||||||
|
if "17" not in jd:
|
||||||
|
logger.debug("Java 17 Must be installed")
|
||||||
|
exit(-1)
|
||||||
|
logger.debug("Cool!! Java is available")
|
||||||
|
|
||||||
|
def __init__(self, env) -> None:
|
||||||
|
self.env = env
|
||||||
|
|
||||||
|
self.apps = env.list("PATCH_APPS", supported_apps)
|
||||||
|
self.build_extended = env.bool("BUILD_EXTENDED", False)
|
||||||
|
|
||||||
|
self.check_java()
|
||||||
|
logger.debug("fetching all patches")
|
||||||
|
resp = session.get(
|
||||||
|
"https://raw.githubusercontent.com/revanced/revanced-patches/main/patches.json"
|
||||||
|
)
|
||||||
|
patches = resp.json()
|
||||||
|
|
||||||
|
revanced_app_ids = {
|
||||||
|
"com.reddit.frontpage": ("reddit", "_reddit"),
|
||||||
|
"com.ss.android.ugc.trill": ("tiktok", "_tiktok"),
|
||||||
|
"com.twitter.android": ("twitter", "_twitter"),
|
||||||
|
"de.dwd.warnapp": ("warnwetter", "_warnwetter"),
|
||||||
|
"com.spotify.music": ("spotify", "_spotify"),
|
||||||
|
}
|
||||||
|
|
||||||
|
for app_name in (revanced_app_ids[x][1] for x in revanced_app_ids):
|
||||||
|
setattr(self, app_name, [])
|
||||||
|
|
||||||
|
for patch in patches:
|
||||||
|
for compatible_package, version in [
|
||||||
|
(x["name"], x["versions"]) for x in patch["compatiblePackages"]
|
||||||
|
]:
|
||||||
|
if compatible_package in revanced_app_ids:
|
||||||
|
app_name = revanced_app_ids[compatible_package][1]
|
||||||
|
p = {x: patch[x] for x in ["name", "description"]}
|
||||||
|
p["app"] = compatible_package
|
||||||
|
p["version"] = version[-1] if version else "all"
|
||||||
|
getattr(self, app_name).append(p)
|
||||||
|
if self.build_extended:
|
||||||
|
url = "https://raw.githubusercontent.com/inotia00/revanced-patches/revanced-extended/patches.json"
|
||||||
|
else:
|
||||||
|
url = "https://raw.githubusercontent.com/revanced/revanced-patches/main/patches.json"
|
||||||
|
|
||||||
|
resp_extended = session.get(url)
|
||||||
|
extended_patches = resp_extended.json()
|
||||||
|
revanced_extended_app_ids = {
|
||||||
|
"com.google.android.youtube": ("youtube", "_yt"),
|
||||||
|
"com.google.android.apps.youtube.music": ("youtube-music", "_ytm"),
|
||||||
|
}
|
||||||
|
for app_name in (
|
||||||
|
revanced_extended_app_ids[x][1] for x in revanced_extended_app_ids
|
||||||
|
):
|
||||||
|
setattr(self, app_name, [])
|
||||||
|
|
||||||
|
for patch in extended_patches:
|
||||||
|
for compatible_package, version in [
|
||||||
|
(x["name"], x["versions"]) for x in patch["compatiblePackages"]
|
||||||
|
]:
|
||||||
|
if compatible_package in revanced_extended_app_ids:
|
||||||
|
app_name = revanced_extended_app_ids[compatible_package][1]
|
||||||
|
p = {x: patch[x] for x in ["name", "description"]}
|
||||||
|
p["app"] = compatible_package
|
||||||
|
p["version"] = version[-1] if version else "all"
|
||||||
|
getattr(self, app_name).append(p)
|
||||||
|
|
||||||
|
for app_name, app_id in revanced_extended_app_ids.values():
|
||||||
|
n_patches = len(getattr(self, app_id))
|
||||||
|
logger.debug(f"Total patches in {app_name} are {n_patches}")
|
||||||
|
for app_name, app_id in revanced_app_ids.values():
|
||||||
|
n_patches = len(getattr(self, app_id))
|
||||||
|
logger.debug(f"Total patches in {app_name} are {n_patches}")
|
||||||
|
|
||||||
|
def get(self, app: str) -> Tuple[List[Dict[str, str]], str]:
|
||||||
|
logger.debug("Getting patches for %s" % app)
|
||||||
|
app_names = {
|
||||||
|
"reddit": "_reddit",
|
||||||
|
"tiktok": "_tiktok",
|
||||||
|
"twitter": "_twitter",
|
||||||
|
"warnwetter": "_warnwetter",
|
||||||
|
"youtube": "_yt",
|
||||||
|
"youtube_music": "_ytm",
|
||||||
|
"spotify": "_spotify",
|
||||||
|
}
|
||||||
|
if not (app_name := app_names.get(app)):
|
||||||
|
logger.debug("Invalid app name")
|
||||||
|
sys.exit(-1)
|
||||||
|
patches = getattr(self, app_name)
|
||||||
|
version = ""
|
||||||
|
if app in ("youtube", "youtube_music"):
|
||||||
|
version = next(i["version"] for i in patches if i["version"] != "all")
|
||||||
|
logger.debug(f"Recommended Version for patching {app} is {version}")
|
||||||
|
else:
|
||||||
|
logger.debug("No recommended version.")
|
||||||
|
return patches, version
|
||||||
|
|
||||||
|
def get_patches(self, app, arg_parser) -> None:
|
||||||
|
logger.debug(f"Excluding patches for app {app}")
|
||||||
|
if self.build_extended and app in self.extended_apps:
|
||||||
|
excluded_patches = self.env.list(
|
||||||
|
f"EXCLUDE_PATCH_{app}_EXTENDED".upper(), []
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
excluded_patches = self.env.list(f"EXCLUDE_PATCH_{app}".upper(), [])
|
||||||
|
for patch in self.app_patches:
|
||||||
|
arg_parser.include(patch["name"]) if patch[
|
||||||
|
"name"
|
||||||
|
] not in excluded_patches else arg_parser.exclude(patch["name"])
|
||||||
|
excluded = arg_parser.get_excluded_patches()
|
||||||
|
if excluded:
|
||||||
|
logger.debug(f"Excluded patches {excluded} for {app}")
|
||||||
|
else:
|
||||||
|
logger.debug(f"No excluded patches for {app}")
|
||||||
|
|
||||||
|
def get_patches_version(self, app) -> Any:
|
||||||
|
experiment = False
|
||||||
|
total_patches, recommended_version = self.get(app=app)
|
||||||
|
env_version = self.env.str(f"{app}_VERSION".upper(), None)
|
||||||
|
if env_version:
|
||||||
|
logger.debug(f"Picked {app} version {env_version} from env.")
|
||||||
|
if env_version == "latest" or env_version > recommended_version:
|
||||||
|
experiment = True
|
||||||
|
recommended_version = env_version
|
||||||
|
return total_patches, recommended_version, experiment
|
||||||
Reference in New Issue
Block a user