Mirror of https://github.com/AuxXxilium/arc.git (synced 2024-11-23 21:49:52 +07:00)
functions: remove unused
Signed-off-by: AuxXxilium <info@auxxxilium.tech>
This commit is contained in: commit 8f6a18f6dd (parent d48a970cd8)
@@ -108,181 +108,5 @@ def makeqr(data, file, location, output):
    except:
        pass


@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodels(platforms=None):
    """
    Get Syno Models.
    """
    import json, requests, urllib3
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util.retry import Retry  # type: ignore

    adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
    session = requests.Session()
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    if platforms is not None and platforms != "":
        PS = platforms.lower().replace(",", " ").split()
    else:
        PS = []

    models = []
    try:
        req = session.get("https://autoupdate.synology.com/os/v2", timeout=10, verify=False)
        req.encoding = "utf-8"
        data = json.loads(req.text)

        for I in data["channel"]["item"]:
            if not I["title"].startswith("DSM"):
                continue
            for J in I["model"]:
                arch = J["mUnique"].split("_")[1]
                name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
                if len(PS) > 0 and arch.lower() not in PS:
                    continue
                if any(name == B["name"] for B in models):
                    continue
                models.append({"name": name, "arch": arch})

        models = sorted(models, key=lambda k: (k["arch"], k["name"]))

    except:
        pass

    models.sort(key=lambda x: (x["arch"], x["name"]))
    print(json.dumps(models, indent=4))
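
For context, a minimal sketch of how the removed getmodels command could be exercised with click's test runner; this snippet is not part of the commit, and the platform names passed to -p are only illustrative:

    from click.testing import CliRunner
    result = CliRunner().invoke(cli, ["getmodels", "-p", "apollolake geminilake"])  # "cli" is the group defined earlier in this file
    print(result.output)  # a JSON list such as [{"name": "DS918+", "arch": "apollolake"}, ...]
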


@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodelsbykb(platforms=None):
    """
    Get Syno Models.
    """
    import json, requests, urllib3
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util.retry import Retry  # type: ignore

    adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
    session = requests.Session()
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    if platforms is not None and platforms != "":
        PS = platforms.lower().replace(",", " ").split()
    else:
        PS = []

    models = []
    try:
        import re
        from bs4 import BeautifulSoup

        url = "https://kb.synology.com/en-us/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
        # url = "https://kb.synology.cn/zh-cn/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
        req = session.get(url, timeout=10, verify=False)
        req.encoding = "utf-8"
        bs = BeautifulSoup(req.text, "html.parser")
        p = re.compile(r"data: (.*?),$", re.MULTILINE | re.DOTALL)
        data = json.loads(p.search(bs.find("script", string=p).prettify()).group(1))
        model = "(.*?)"  # (.*?): all, FS6400: one
        p = re.compile(
            r"<td>{}<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td>".format(model),
            re.MULTILINE | re.DOTALL,
        )
        it = p.finditer(data["preload"]["content"].replace("\n", "").replace("\t", ""))
        for i in it:
            d = i.groups()
            if len(d) == 6:
                d = (model,) + d  # prepend the fixed model name as a tuple; "model + d" (str + tuple) would raise TypeError
            if len(PS) > 0 and d[5].lower() not in PS:
                continue
            models.append({"name": d[0].split("<br")[0], "arch": d[5].lower()})
    except:
        pass

    models.sort(key=lambda x: (x["arch"], x["name"]))
    print(json.dumps(models, indent=4))
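
The command above scrapes the model/CPU table out of the Synology knowledge-base page with BeautifulSoup and a regular expression over the embedded "data:" script block. A minimal sketch of invoking it (not part of the commit; the platform codes are only examples):

    from click.testing import CliRunner
    result = CliRunner().invoke(cli, ["getmodelsbykb", "-p", "r1000 v1000"])
    print(result.output)  # same JSON shape as getmodels: [{"name": ..., "arch": ...}, ...]
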


@cli.command()
@click.option("-m", "--model", type=str, required=True, help="The model of Syno.")
@click.option("-v", "--version", type=str, required=True, help="The version of Syno.")
def getpats4mv(model, version):
    import json, requests, urllib3
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util.retry import Retry  # type: ignore

    adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
    session = requests.Session()
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    pats = {}
    try:
        urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
        urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
        # urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
        # urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"

        major = "&major={}".format(version.split('.')[0]) if len(version.split('.')) > 0 else ""
        minor = "&minor={}".format(version.split('.')[1]) if len(version.split('.')) > 1 else ""
        req = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), major, minor), timeout=10, verify=False)
        req.encoding = "utf-8"
        data = json.loads(req.text)

        build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
        build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
        buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
        V = __fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
        if V not in pats:
            pats[V] = {}
            pats[V]['url'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
            pats[V]['sum'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']

        from_ver = 0
        for I in data['info']['pubVers']:
            if from_ver == 0 or I['build'] < from_ver: from_ver = I['build']

        for I in data['info']['productVers']:
            if not I['version'].startswith(version): continue
            if major == "" or minor == "":
                majorTmp = "&major={}".format(I['version'].split('.')[0]) if len(I['version'].split('.')) > 0 else ""
                minorTmp = "&minor={}".format(I['version'].split('.')[1]) if len(I['version'].split('.')) > 1 else ""
                reqTmp = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), majorTmp, minorTmp), timeout=10, verify=False)
                reqTmp.encoding = "utf-8"
                dataTmp = json.loads(reqTmp.text)

                build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
                build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
                buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
                V = __fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
                if V not in pats:
                    pats[V] = {}
                    pats[V]['url'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
                    pats[V]['sum'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']

            for J in I['versions']:
                to_ver = J['build']
                reqSteps = session.get("{}&product={}&from_ver={}&to_ver={}".format(urlSteps, model.replace("+", "%2B"), from_ver, to_ver), timeout=10, verify=False)
                if reqSteps.status_code != 200: continue
                reqSteps.encoding = "utf-8"
                dataSteps = json.loads(reqSteps.text)
                for S in dataSteps['upgrade_steps']:
                    if 'full_patch' not in S or S['full_patch'] is False: continue
                    if 'build_ver' not in S or not S['build_ver'].startswith(version): continue
                    V = __fullversion("{}-{}-{}".format(S['build_ver'], S['build_num'], S['nano']))
                    if V not in pats:
                        pats[V] = {}
                        pats[V]['url'] = S['files'][0]['url'].split('?')[0]
                        pats[V]['sum'] = S['files'][0]['checksum']
    except:
        pass

    pats = {k: pats[k] for k in sorted(pats.keys(), reverse=True)}
    print(json.dumps(pats, indent=4))
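
A minimal sketch of invoking the removed getpats4mv command (not part of the commit; the model and version values are only illustrative). The output is a JSON object keyed by the full version string returned by __fullversion, each entry holding the .pat download URL and its checksum:

    from click.testing import CliRunner
    result = CliRunner().invoke(cli, ["getpats4mv", "-m", "DS918+", "-v", "7.2"])
    print(result.output)  # e.g. {"<major.minor.micro>-<build>-<nano>": {"url": "https://.../<file>.pat", "sum": "<checksum>"}, ...}
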


if __name__ == "__main__":
    cli()
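
All three removed commands build the same retry-enabled requests session inline. For reference, a self-contained sketch of that pattern, importing Retry from urllib3 directly rather than through requests.packages as the removed code does (that import path is my assumption, not part of the commit):

    import requests, urllib3
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retry = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
    session = requests.Session()
    session.mount("http://", HTTPAdapter(max_retries=retry))
    session.mount("https://", HTTPAdapter(max_retries=retry))
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    print(session.get("https://autoupdate.synology.com/os/v2", timeout=10, verify=False).status_code)
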