build: update

Signed-off-by: AuxXxilium <info@auxxxilium.tech>
AuxXxilium 2024-08-30 20:49:08 +02:00
parent 5c70679969
commit 31b2a6c3ef
5 changed files with 585 additions and 183 deletions


@@ -1,5 +1,5 @@
#
# Copyright (C) 2023 AuxXxilium <https://github.com/AuxXxilium> and Ing <https://github.com/wjz304>
# Copyright (C) 2023 AuxXxilium <https://github.com/AuxXxilium>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
@@ -33,7 +33,26 @@ jobs:
sudo timedatectl set-timezone "Europe/Berlin"
sudo apt update
sudo apt install -y jq gawk cpio gettext libelf-dev qemu-utils busybox dialog curl sed zip unzip python3
sudo apt install -y \
jq \
build-essential \
libtool \
pkgconf \
libzstd-dev \
liblzma-dev \
libssl-dev \
gawk \
cpio \
gettext \
libelf-dev \
qemu-utils \
busybox \
dialog \
curl \
sed \
zip \
unzip \
python3
sudo snap install yq
- name: Clean Files
@@ -42,6 +61,12 @@ jobs:
sudo rm -rf ./files
sudo rm -rf ./dsm
- name: Get data
run: |
sudo pip install -r scripts/requirements.txt
sudo python scripts/func.py getmodels -w "arc-dsm/ws" -j "models.json"
sudo python scripts/func.py getpats -w "arc-dsm/ws" -j "pats.json"
- name: Update
run: |
sudo ./update.sh
@@ -49,7 +74,7 @@
- name: Check and Push
if: inputs.push == true
run: |
git pull
git fetch
git add .
git commit -m "dsm: update $(date +%Y-%m-%d" "%H:%M:%S)"
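# Note: the nested quotes keep the space inside a single date format argument,
# so the message reads e.g. "dsm: update 2024-08-30 20:49:08"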
git push -f

include/func.py (new executable file, 163 additions)

@@ -0,0 +1,163 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2023 AuxXxilium <https://github.com/AuxXxilium> and Ing <https://github.com/wjz304>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
import os, sys, glob, json, yaml, click, shutil, tarfile, kmodule, requests
from openpyxl import Workbook
@click.group()
def cli():
"""
Command-line helpers for Arc.
"""
pass
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of Arc.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
def getmodels(workpath, jsonpath):
models = {}
with open("{}/platforms.yml".format(workpath), "r") as f:
P_data = yaml.safe_load(f)
P_platforms = P_data.get("platforms", [])
for P in P_platforms:
productvers = {}
for V in P_platforms[P]["productvers"]:
if P_platforms[P]["productvers"][V].get("kpre", "") != "":
productvers[V] = (P_platforms[P]["productvers"][V].get("kpre", "") + "-" + P_platforms[P]["productvers"][V].get("kver", ""))
else:
productvers[V] = P_platforms[P]["productvers"][V].get("kver", "")
models[P] = {"productvers": productvers, "models": []}
req = requests.get("https://autoupdate.synology.com/os/v2")
req.encoding = "utf-8"
data = json.loads(req.text)
for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
continue
for J in I["model"]:
arch = J["mUnique"].split("_")[1].lower()
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if arch not in models.keys():
continue
if name in (A for B in models for A in models[B]["models"]):
continue
models[arch]["models"].append(name)
if jsonpath:
with open(jsonpath, "w") as f:
json.dump(models, f, indent=4, ensure_ascii=False)
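# Shape sketch of the resulting models.json (illustrative; platform keys and
# kernel strings come from platforms.yml, model lists from the Synology feed):
#   {"apollolake": {"productvers": {"7.2": "4.4.302"}, "models": ["DS918+", ...]}, ...}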
@cli.command()
@click.option("-w", "--workpath", type=str, required=True, help="The workpath of Arc.")
@click.option("-j", "--jsonpath", type=str, required=True, help="The output path of jsonfile.")
def getpats(workpath, jsonpath):
def __fullversion(ver):
out = ver
arr = ver.split('-')
if len(arr) > 0:
a = arr[0].split('.')[0] if len(arr[0].split('.')) > 0 else '0'
b = arr[0].split('.')[1] if len(arr[0].split('.')) > 1 else '0'
c = arr[0].split('.')[2] if len(arr[0].split('.')) > 2 else '0'
d = arr[1] if len(arr) > 1 else '00000'
e = arr[2] if len(arr) > 2 else '0'
out = '{}.{}.{}-{}-{}'.format(a,b,c,d,e)
return out
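# Illustrative behavior: missing micro/build/nano parts are zero-padded so
# version strings compare consistently, e.g.
#   __fullversion("7.2-64570")     -> "7.2.0-64570-0"
#   __fullversion("7.2.1-69057-5") -> "7.2.1-69057-5"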
platforms = []
models = []
with open("{}/platforms.yml".format(workpath), "r") as f:
data = yaml.safe_load(f)
platforms = data.get("platforms", [])
req = requests.get("https://autoupdate.synology.com/os/v2")
req.encoding = "utf-8"
data = json.loads(req.text)
for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
continue
for J in I["model"]:
arch = J["mUnique"].split("_")[1].lower()
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if arch not in platforms:
continue
if name in models:
continue
models.append(name)
pats = {}
for M in models:
pats[M] = {}
version = '7'
urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
#urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
#urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"
major = "&major={}".format(version.split('.')[0]) if len(version.split('.')) > 0 else ""
minor = "&minor={}".format(version.split('.')[1]) if len(version.split('.')) > 1 else ""
req = requests.get("{}&product={}{}{}".format(urlInfo, M.replace("+", "%2B"), major, minor))
req.encoding = "utf-8"
data = json.loads(req.text)
build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats[M]:
pats[M][V]={}
pats[M][V]['url'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[M][V]['sum'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
from_ver=0
for I in data['info']['pubVers']:
if from_ver == 0 or I['build'] < from_ver: from_ver = I['build']
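# after this loop, from_ver holds the lowest published build; it is used
# below as the baseline for the findUpgradeSteps queries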
for I in data['info']['productVers']:
if not I['version'].startswith(version): continue
if major == "" or minor == "":
majorTmp = "&major={}".format(I['version'].split('.')[0]) if len(I['version'].split('.')) > 0 else ""
minorTmp = "&minor={}".format(I['version'].split('.')[1]) if len(I['version'].split('.')) > 1 else ""
reqTmp = requests.get("{}&product={}{}{}".format(urlInfo, M.replace("+", "%2B"), majorTmp, minorTmp))
reqTmp.encoding = "utf-8"
dataTmp = json.loads(reqTmp.text)
build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats[M]:
pats[M][V]={}
pats[M][V]['url'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[M][V]['sum'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
for J in I['versions']:
to_ver=J['build']
reqSteps = requests.get("{}&product={}&from_ver={}&to_ver={}".format(urlSteps, M.replace("+", "%2B"), from_ver, to_ver))
if reqSteps.status_code != 200: continue
reqSteps.encoding = "utf-8"
dataSteps = json.loads(reqSteps.text)
for S in dataSteps['upgrade_steps']:
if not 'full_patch' in S or S['full_patch'] is False: continue
if not 'build_ver' in S or not S['build_ver'].startswith(version): continue
V=__fullversion("{}-{}-{}".format(S['build_ver'], S['build_num'], S['nano']))
if not V in pats[M]:
pats[M][V] = {}
pats[M][V]['url'] = S['files'][0]['url'].split('?')[0]
pats[M][V]['sum'] = S['files'][0]['checksum']
if jsonpath:
with open(jsonpath, "w") as f:
json.dump(pats, f, indent=4, ensure_ascii=False)
if __name__ == "__main__":
cli()
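# Typical invocations, as wired into the workflow above:
#   sudo python scripts/func.py getmodels -w "arc-dsm/ws" -j "models.json"
#   sudo python scripts/func.py getpats -w "arc-dsm/ws" -j "pats.json"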


@@ -31,6 +31,18 @@ def validate_required_param(ctx, param, value):
raise click.MissingParameter(param_decls=[param.name])
return value
def __fullversion(ver):
out = ver
arr = ver.split('-')
if len(arr) > 0:
a = arr[0].split('.')[0] if len(arr[0].split('.')) > 0 else '0'
b = arr[0].split('.')[1] if len(arr[0].split('.')) > 1 else '0'
c = arr[0].split('.')[2] if len(arr[0].split('.')) > 2 else '0'
d = arr[1] if len(arr) > 1 else '00000'
e = arr[2] if len(arr) > 2 else '0'
out = '{}.{}.{}-{}-{}'.format(a,b,c,d,e)
return out
@cli.command()
@click.option("-d", "--data", type=str, callback=mutually_exclusive_options, is_eager=True, help="The data of QRCode.")
@@ -42,55 +54,59 @@ def makeqr(data, file, location, output):
"""
Generate a QRCode.
"""
import fcntl, struct
import qrcode
from PIL import Image
try:
import fcntl, struct
import qrcode
from PIL import Image
FBIOGET_VSCREENINFO = 0x4600
FBIOPUT_VSCREENINFO = 0x4601
FBIOGET_FSCREENINFO = 0x4602
FBDEV = "/dev/fb0"
if data is not None:
qr = qrcode.QRCode(version=1, box_size=10, error_correction=qrcode.constants.ERROR_CORRECT_H, border=4,)
qr.add_data(data)
qr.make(fit=True)
img = qr.make_image(fill_color="grey", back_color="black")
img = img.convert("RGBA")
pixels = img.load()
for i in range(img.size[0]):
for j in range(img.size[1]):
if pixels[i, j] == (255, 255, 255, 255):
pixels[i, j] = (255, 255, 255, 0)
FBIOGET_VSCREENINFO = 0x4600
FBIOPUT_VSCREENINFO = 0x4601
FBIOGET_FSCREENINFO = 0x4602
FBDEV = "/dev/fb0"
if data is not None:
qr = qrcode.QRCode(version=1, box_size=10, error_correction=qrcode.constants.ERROR_CORRECT_H, border=4,)
qr.add_data(data)
qr.make(fit=True)
img = qr.make_image(fill_color="grey", back_color="black")
img = img.convert("RGBA")
pixels = img.load()
for i in range(img.size[0]):
for j in range(img.size[1]):
if pixels[i, j] == (255, 255, 255, 255):
pixels[i, j] = (255, 255, 255, 0)
if os.path.exists(os.path.join(WORK_PATH, "logo.png")):
icon = Image.open(os.path.join(WORK_PATH, "logo.png"))
icon = icon.convert("RGBA")
img.paste(icon.resize((int(img.size[0] / 5), int(img.size[1] / 5))), (int((img.size[0] - int(img.size[0] / 5)) / 2), int((img.size[1] - int(img.size[1] / 5)) / 2),),)
if os.path.exists(os.path.join(WORK_PATH, "logo.png")):
icon = Image.open(os.path.join(WORK_PATH, "logo.png"))
icon = icon.convert("RGBA")
img.paste(icon.resize((int(img.size[0] / 5), int(img.size[1] / 5))), (int((img.size[0] - int(img.size[0] / 5)) / 2), int((img.size[1] - int(img.size[1] / 5)) / 2),),)
if file is not None:
img = Image.open(file)
# img = img.convert("RGBA")
# pixels = img.load()
# for i in range(img.size[0]):
# for j in range(img.size[1]):
# if pixels[i, j] == (255, 255, 255, 255):
# pixels[i, j] = (255, 255, 255, 0)
if file is not None:
img = Image.open(file)
# img = img.convert("RGBA")
# pixels = img.load()
# for i in range(img.size[0]):
# for j in range(img.size[1]):
# if pixels[i, j] == (255, 255, 255, 255):
# pixels[i, j] = (255, 255, 255, 0)
(xres, yres) = (1920, 1080)
with open(FBDEV, "rb") as fb:
vi = fcntl.ioctl(fb, FBIOGET_VSCREENINFO, bytes(160))
res = struct.unpack("I" * 40, vi)
if res[0] != 0 and res[1] != 0:
(xres, yres) = (res[0], res[1])
xqr, yqr = (int(xres / 8), int(xres / 8))
img = img.resize((xqr, yqr))
(xres, yres) = (1920, 1080)
with open(FBDEV, "rb") as fb:
vi = fcntl.ioctl(fb, FBIOGET_VSCREENINFO, bytes(160))
res = struct.unpack("I" * 40, vi)
if res[0] != 0 and res[1] != 0:
(xres, yres) = (res[0], res[1])
xqr, yqr = (int(xres / 8), int(xres / 8))
img = img.resize((xqr, yqr))
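# Linux fb_var_screeninfo starts with xres and yres (two 32-bit fields), so
# the first two unpacked values give the framebuffer resolution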
alpha = Image.new("RGBA", (xres, yres), (0, 0, 0, 0))
if int(location) not in range(0, 8):
location = 0
loc = (img.size[0] * int(location), alpha.size[1] - img.size[1])
alpha.paste(img, loc)
alpha.save(output)
alpha = Image.new("RGBA", (xres, yres), (0, 0, 0, 0))
if int(location) not in range(0, 8):
location = 0
loc = (img.size[0] * int(location), alpha.size[1] - img.size[1])
alpha.paste(img, loc)
alpha.save(output)
except:
pass
@cli.command()
@@ -99,8 +115,15 @@ def getmodels(platforms=None):
"""
Get Syno Models.
"""
import re, json
import requests
import json, requests, urllib3
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if platforms is not None and platforms != "":
PS = platforms.lower().replace(",", " ").split()
@@ -108,25 +131,210 @@ def getmodels(platforms=None):
PS = []
models = []
req = requests.get("https://autoupdate.synology.com/os/v2")
req.encoding = "utf-8"
data = json.loads(req.text)
try:
req = session.get("https://autoupdate.synology.com/os/v2", timeout=10, verify=False)
req.encoding = "utf-8"
data = json.loads(req.text)
for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
continue
for J in I["model"]:
arch = J["mUnique"].split("_")[1]
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if len(PS) > 0 and arch.lower() not in PS:
for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
continue
if any(name == B["name"] for B in models):
continue
models.append({"name": name, "arch": arch})
for J in I["model"]:
arch = J["mUnique"].split("_")[1]
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if len(PS) > 0 and arch.lower() not in PS:
continue
if any(name == B["name"] for B in models):
continue
models.append({"name": name, "arch": arch})
models = sorted(models, key=lambda k: (k["arch"], k["name"]))
models = sorted(models, key=lambda k: (k["arch"], k["name"]))
except:
pass
models.sort(key=lambda x: (x["arch"], x["name"]))
print(json.dumps(models, indent=4))
@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodelsbykb(platforms=None):
"""
Get Syno Models.
"""
import json, requests, urllib3
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if platforms is not None and platforms != "":
PS = platforms.lower().replace(",", " ").split()
else:
PS = []
models = []
try:
import re
from bs4 import BeautifulSoup
url="https://kb.synology.com/en-us/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
#url = "https://kb.synology.cn/zh-cn/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
req = session.get(url, timeout=10, verify=False)
req.encoding = "utf-8"
bs = BeautifulSoup(req.text, "html.parser")
p = re.compile(r"data: (.*?),$", re.MULTILINE | re.DOTALL)
data = json.loads(p.search(bs.find("script", string=p).prettify()).group(1))
model = "(.*?)" # (.*?): all, FS6400: one
p = re.compile(r"<td>{}<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td>".format(model), re.MULTILINE | re.DOTALL,)
it = p.finditer(data["preload"]["content"].replace("\n", "").replace("\t", ""))
for i in it:
d = i.groups()
if len(d) == 6:
d = (model,) + d
if len(PS) > 0 and d[5].lower() not in PS:
continue
models.append({"name": d[0].split("<br")[0], "arch": d[5].lower()})
except:
pass
models.sort(key=lambda x: (x["arch"], x["name"]))
print(json.dumps(models, indent=4))
@cli.command()
@click.option("-m", "--model", type=str, required=True, help="The model of Syno.")
@click.option("-v", "--version", type=str, required=True, help="The version of Syno.")
def getpats4mv(model, version):
import json, requests, urllib3
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
pats = {}
try:
urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
#urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
#urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"
major = "&major={}".format(version.split('.')[0]) if len(version.split('.')) > 0 else ""
minor = "&minor={}".format(version.split('.')[1]) if len(version.split('.')) > 1 else ""
req = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), major, minor), timeout=10, verify=False)
req.encoding = "utf-8"
data = json.loads(req.text)
build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats:
pats[V]={}
pats[V]['url'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[V]['sum'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
from_ver=0
for I in data['info']['pubVers']:
if from_ver == 0 or I['build'] < from_ver: from_ver = I['build']
for I in data['info']['productVers']:
if not I['version'].startswith(version): continue
if major == "" or minor == "":
majorTmp = "&major={}".format(I['version'].split('.')[0]) if len(I['version'].split('.')) > 0 else ""
minorTmp = "&minor={}".format(I['version'].split('.')[1]) if len(I['version'].split('.')) > 1 else ""
reqTmp = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), majorTmp, minorTmp), timeout=10, verify=False)
reqTmp.encoding = "utf-8"
dataTmp = json.loads(reqTmp.text)
build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats:
pats[V]={}
pats[V]['url'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[V]['sum'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']
for J in I['versions']:
to_ver=J['build']
reqSteps = session.get("{}&product={}&from_ver={}&to_ver={}".format(urlSteps, model.replace("+", "%2B"), from_ver, to_ver), timeout=10, verify=False)
if reqSteps.status_code != 200: continue
reqSteps.encoding = "utf-8"
dataSteps = json.loads(reqSteps.text)
for S in dataSteps['upgrade_steps']:
if not 'full_patch' in S or S['full_patch'] is False: continue
if not 'build_ver' in S or not S['build_ver'].startswith(version): continue
V=__fullversion("{}-{}-{}".format(S['build_ver'], S['build_num'], S['nano']))
if not V in pats:
pats[V] = {}
pats[V]['url'] = S['files'][0]['url'].split('?')[0]
pats[V]['sum'] = S['files'][0]['checksum']
except:
pass
pats = {k: pats[k] for k in sorted(pats.keys(), reverse=True)}
print(json.dumps(pats, indent=4))
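# Output sketch (values illustrative): padded versions map to pat URL and
# checksum, newest first, e.g.
#   {"7.2.1-69057-0": {"url": "https://.../DSM_DS920%2B_69057.pat", "sum": "..."}}
# update.sh parses exactly this JSON with jq (see getDSM below)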
@cli.command()
@click.option("-p", "--models", type=str, help="The models of Syno.")
def getpats(models=None):
import json, requests, urllib3, re
from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if models is not None and models != "":
MS = models.lower().replace(",", " ").split()
else:
MS = []
pats = {}
try:
req = session.get('https://archive.synology.com/download/Os/DSM', timeout=10, verify=False)
req.encoding = 'utf-8'
bs=BeautifulSoup(req.text, 'html.parser')
p = re.compile(r"(.*?)-(.*?)", re.MULTILINE | re.DOTALL)
l = bs.find_all('a', string=p)
for i in l:
ver = i.attrs['href'].split('/')[-1]
if not ver.startswith('7'): continue
req = session.get('https://archive.synology.com{}'.format(i.attrs['href']), timeout=10, verify=False)
req.encoding = 'utf-8'
bs=BeautifulSoup(req.text, 'html.parser')
p = re.compile(r"^(.*?)_(.*?)_(.*?).pat$", re.MULTILINE | re.DOTALL)
data = bs.find_all('a', string=p)
for item in data:
p = re.compile(r"DSM_(.*?)_(.*?).pat", re.MULTILINE | re.DOTALL)
rels = p.search(item.attrs['href'])
if rels != None:
info = p.search(item.attrs['href']).groups()
model = info[0].replace('%2B', '+')
if len(MS) > 0 and model.lower() not in MS:
continue
if model not in pats.keys():
pats[model]={}
pats[model][__fullversion(ver)] = item.attrs['href']
except:
pass
print(json.dumps(pats, indent=4))
@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodelsoffline(platforms=None):
@@ -143,7 +351,7 @@ def getmodelsoffline(platforms=None):
PS = []
models = []
with open(os.path.join(WORK_PATH, "offline.json")) as user_file:
with open(os.path.join('/mnt/p3/configs', "offline.json")) as user_file:
data = json.load(user_file)
for I in data["channel"]["item"]:

include/requirements.txt (new file, 5 additions)

@@ -0,0 +1,5 @@
bs4
click
kmodule
requests
openpyxl

update.sh (239 changed lines)

@@ -7,116 +7,122 @@ function readConfigEntriesArray() {
function getDSM() {
MODEL="${1}"
PLATFORM="${2}"
VERSIONS="$(readConfigEntriesArray "platforms.${PLATFORM}.productvers" "${P_FILE}" | sort -r)"
echo "${VERSIONS}" >"${TMP_PATH}/versions"
PRODUCTVERS="$(readConfigEntriesArray "platforms.${PLATFORM}.productvers" "${P_FILE}" | sort -r)"
echo "${PRODUCTVERS}" >"${TMP_PATH}/productvers"
while IFS= read -r line; do
VERSION="${line}"
PAT_FILE="${MODEL}_${VERSION}.pat"
PAT_PATH="${CACHE_PATH}/dl/${PAT_FILE}"
UNTAR_PAT_PATH="${CACHE_PATH}/${MODEL}/${VERSION}"
DESTINATION="${DSMPATH}/${MODEL}/${VERSION}"
DESTINATIONFILES="${FILESPATH}/${MODEL}/${VERSION}"
# Make Destinations
mkdir -p "${DESTINATION}"
mkdir -p "${DESTINATIONFILES}"
echo "${MODEL} ${VERSION}"
echo "" >>"${TMP_PATH}/dsmdata.yml"
echo "${MODEL} ${VERSION}" >>"${TMP_PATH}/dsmdata.yml"
# Grep PAT_URL
PAT_URL="$(curl -skL "https://www.synology.com/api/support/findDownloadInfo?lang=en-us&product=${MODEL/+/%2B}&major=${VERSION%%.*}&minor=${VERSION##*.}" | jq -r '.info.system.detail[0].items[0].files[0].url')"
PAT_HASH="$(curl -skL "https://www.synology.com/api/support/findDownloadInfo?lang=en-us&product=${MODEL/+/%2B}&major=${VERSION%%.*}&minor=${VERSION##*.}" | jq -r '.info.system.detail[0].items[0].files[0].checksum')"
PAT_URL="${PAT_URL%%\?*}"
echo "${PAT_URL}"
echo "URL: ${PAT_URL}" >>"${TMP_PATH}/dsmdata.yml"
echo "${PAT_HASH}"
echo "HASH: ${PAT_HASH}" >>"${TMP_PATH}/dsmdata.yml"
if [ -f "${DESTINATION}/pat_url" ] && [ -f "${DESTINATION}/pat_hash" ]; then
OLDURL="$(cat "${DESTINATION}/pat_url")"
OLDHASH="$(cat "${DESTINATION}/pat_hash")"
else
OLDURL="0"
OLDHASH="0"
fi
# Check for Update
if [ "${PAT_HASH}" != "${OLDHASH}" ] || [ "${PAT_URL}" != "${OLDURL}" ]; then
mkdir -p "${CACHE_PATH}/dl"
echo "Downloading ${PAT_FILE}"
# Download the pat file and capture the HTTP status
STATUS=$(curl -k -w "%{http_code}" -L "${PAT_URL}" -o "${PAT_PATH}" --progress-bar)
if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
rm "${PAT_PATH}"
echo "Error downloading"
PRODUCTVER="${line}"
PJ="$(python scripts/functions.py getpats4mv -m "${MODEL}" -v "${PRODUCTVER}")"
PVS="$(echo "${PJ}" | jq -r 'keys | sort | reverse | .[]')"
echo "${PVS}" >"${TMP_PATH}/versions"
while IFS= read -r line; do
VERSION="${line}"
PAT_URL=$(echo "${PJ}" | jq -r ".\"${VERSION}\".url")
PAT_HASH=$(echo "${PJ}" | jq -r ".\"${VERSION}\".sum")
URLVER="$(echo "${VERSION}" | cut -d'.' -f1,2)"
PAT_FILE="${MODEL}_${VERSION}.pat"
PAT_PATH="${CACHE_PATH}/dl/${PAT_FILE}"
UNTAR_PAT_PATH="${CACHE_PATH}/${MODEL}/${VERSION}"
DESTINATION="${DSMPATH}/${MODEL}/${VERSION}"
DESTINATIONFILES="${FILESPATH}/${MODEL}/${URLVER}"
# Make Destinations
mkdir -p "${DESTINATION}"
mkdir -p "${DESTINATIONFILES}"
echo "${MODEL} ${URLVER} (${VERSION})"
echo "" >>"${TMP_PATH}/dsmdata.yml"
echo "${MODEL} ${URLVER} (${VERSION})" >>"${TMP_PATH}/dsmdata.yml"
echo "${PAT_URL}"
echo "URL: ${PAT_URL}" >>"${TMP_PATH}/dsmdata.yml"
echo "${PAT_HASH}"
echo "HASH: ${PAT_HASH}" >>"${TMP_PATH}/dsmdata.yml"
if [ -f "${DESTINATION}/pat_url" ] && [ -f "${DESTINATION}/pat_hash" ]; then
OLDURL="$(cat "${DESTINATION}/pat_url")"
OLDHASH="$(cat "${DESTINATION}/pat_hash")"
else
OLDURL="0"
OLDHASH="0"
fi
if [ -f "${PAT_PATH}" ]; then
# Export Values
echo "${PAT_HASH}" >"${DESTINATION}/pat_hash"
echo "${PAT_URL}" >"${DESTINATION}/pat_url"
# Extract Files
rm -rf "${UNTAR_PAT_PATH}"
mkdir -p "${UNTAR_PAT_PATH}"
echo -n "Disassembling ${PAT_FILE}: "
header=$(od -bcN2 ${PAT_PATH} | head -1 | awk '{print $3}')
case ${header} in
105)
echo "Uncompressed tar"
isencrypted="no"
;;
213)
echo "Compressed tar"
isencrypted="no"
;;
255)
echo "Encrypted"
isencrypted="yes"
;;
*)
echo -e "Could not determine if pat file is encrypted or not, maybe corrupted, try again!"
;;
esac
if [ "${isencrypted}" = "yes" ]; then
# Check existence of extractor
if [ -f "${EXTRACTOR_PATH}/${EXTRACTOR_BIN}" ]; then
echo "Extractor cached."
fi
# Use the extractor to untar the pat file
echo "Extracting..."
LD_LIBRARY_PATH="${EXTRACTOR_PATH}" "${EXTRACTOR_PATH}/${EXTRACTOR_BIN}" "${PAT_PATH}" "${UNTAR_PAT_PATH}"
else
echo "Extracting..."
tar -xf "${PAT_PATH}" -C "${UNTAR_PAT_PATH}"
if [ $? -ne 0 ]; then
echo "Error extracting"
fi
# Check for Update
if [ "${PAT_HASH}" != "${OLDHASH}" ] || [ "${PAT_URL}" != "${OLDURL}" ]; then
mkdir -p "${CACHE_PATH}/dl"
echo "Downloading ${PAT_FILE}"
# Download the pat file and capture the HTTP status
STATUS=$(curl -k -w "%{http_code}" -L "${PAT_URL}" -o "${PAT_PATH}" --progress-bar)
if [ $? -ne 0 -o ${STATUS} -ne 200 ]; then
rm "${PAT_PATH}"
echo "Error downloading"
fi
# Export Hash
echo -n "Checking hash of zImage: "
HASH=$(sha256sum ${UNTAR_PAT_PATH}/zImage | awk '{print$1}')
echo "OK"
echo "${HASH}" >"${DESTINATION}/zImage_hash"
echo -n "Checking hash of ramdisk: "
HASH=$(sha256sum ${UNTAR_PAT_PATH}/rd.gz | awk '{print$1}')
echo "OK"
echo "${HASH}" >"${DESTINATION}/ramdisk_hash"
# Copy Files to Destination
echo -n "Copying files: "
cp "${UNTAR_PAT_PATH}/grub_cksum.syno" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/GRUB_VER" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/grub_cksum.syno" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/GRUB_VER" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/zImage" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/rd.gz" "${DESTINATION}"
cd "${DESTINATION}"
tar -cf "${DESTINATIONFILES}/${PAT_HASH}.tar" .
rm -f "${PAT_PATH}"
rm -rf "${UNTAR_PAT_PATH}"
if [ -f "${PAT_PATH}" ]; then
# Export Values
echo "${PAT_HASH}" >"${DESTINATION}/pat_hash"
echo "${PAT_URL}" >"${DESTINATION}/pat_url"
# Extract Files
rm -rf "${UNTAR_PAT_PATH}"
mkdir -p "${UNTAR_PAT_PATH}"
echo -n "Disassembling ${PAT_FILE}: "
header=$(od -bcN2 ${PAT_PATH} | head -1 | awk '{print $3}')
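# od -b prints the first two bytes in octal; awk field $3 is the second byte.
# 213 (0x8B) is the second byte of the gzip magic 1F 8B; 105 and 255 are the
# values observed for plain-tar and encrypted pats respectively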
case ${header} in
105)
echo "Uncompressed tar"
isencrypted="no"
;;
213)
echo "Compressed tar"
isencrypted="no"
;;
255)
echo "Encrypted"
isencrypted="yes"
;;
*)
echo -e "Could not determine if pat file is encrypted or not, maybe corrupted, try again!"
;;
esac
if [ "${isencrypted}" = "yes" ]; then
# Check existence of extractor
if [ -f "${EXTRACTOR_PATH}/${EXTRACTOR_BIN}" ]; then
echo "Extractor cached."
fi
# Use the extractor to untar the pat file
echo "Extracting..."
LD_LIBRARY_PATH="${EXTRACTOR_PATH}" "${EXTRACTOR_PATH}/${EXTRACTOR_BIN}" "${PAT_PATH}" "${UNTAR_PAT_PATH}"
else
echo "Extracting..."
tar -xf "${PAT_PATH}" -C "${UNTAR_PAT_PATH}"
if [ $? -ne 0 ]; then
echo "Error extracting"
fi
fi
# Export Hash
echo -n "Checking hash of zImage: "
HASH=$(sha256sum ${UNTAR_PAT_PATH}/zImage | awk '{print$1}')
echo "OK"
echo "${HASH}" >"${DESTINATION}/zImage_hash"
echo -n "Checking hash of ramdisk: "
HASH=$(sha256sum ${UNTAR_PAT_PATH}/rd.gz | awk '{print$1}')
echo "OK"
echo "${HASH}" >"${DESTINATION}/ramdisk_hash"
# Copy Files to Destination
echo -n "Copying files: "
cp "${UNTAR_PAT_PATH}/grub_cksum.syno" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/GRUB_VER" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/grub_cksum.syno" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/GRUB_VER" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/zImage" "${DESTINATION}"
cp "${UNTAR_PAT_PATH}/rd.gz" "${DESTINATION}"
cd "${DESTINATION}"
tar -cf "${DESTINATIONFILES}/${PAT_HASH}.tar" .
rm -f "${PAT_PATH}"
rm -rf "${UNTAR_PAT_PATH}"
fi
echo "DSM Extraction complete: ${MODEL}_${VERSION}"
else
echo "No DSM Update found: ${MODEL}_${VERSION}"
fi
echo "DSM Extraction complete: ${MODEL}_${VERSION}"
else
echo "No DSM Update found: ${MODEL}_${VERSION}"
fi
cd ${HOME}
done <<<$(cat "${TMP_PATH}/versions")
rm -f "${TMP_PATH}/versions"
cd ${HOME}
done < <(cat "${TMP_PATH}/versions")
rm -f "${TMP_PATH}/versions"
done < <(cat "${TMP_PATH}/productvers")
rm -f "${TMP_PATH}/productvers"
}
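# Example invocation (model/platform pair is illustrative):
#   getDSM "DS920+" "geminilake"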
# Init DSM Files
@@ -132,28 +138,23 @@ curl --insecure -s -w "%{http_code}" -L "https://github.com/AuxXxilium/arc-confi
unzip -oq "./configs.zip" -d "${CONFIGS}" >/dev/null 2>&1
P_FILE="${CONFIGS}/platforms.yml"
PS="$(readConfigEntriesArray "platforms" "${P_FILE}" | sort)"
MJ="$(python include/functions.py getmodels -p "${PS[*]}")"
if [[ -z "${MJ}" || "${MJ}" = "[]" ]]; then
dialog --backtitle "$(backtitle)" --title "Model" \
--msgbox "Failed to get models, please try again!" 0 0
return 1
fi
MJ="$(python scripts/functions.py getmodels -p "${PS[*]}")"
echo -n "" >"${TMP_PATH}/modellist"
echo "${MJ}" | jq -c '.[]' | while read -r item; do
name=$(echo "$item" | jq -r '.name')
arch=$(echo "$item" | jq -r '.arch')
echo "${name} ${arch}" >>"${TMP_PATH}/modellist"
done
CACHE_PATH="${HOME}/cache"
RAMDISK_PATH="${CACHE_PATH}/ramdisk"
EXTRACTOR_PATH="${CACHE_PATH}/extractor"
EXTRACTOR_BIN="syno_extract_system_patch"
DSMPATH="${HOME}/dsm"
FILESPATH="${HOME}/files"
while read -r M A; do
CACHE_PATH="${HOME}/cache"
RAMDISK_PATH="${CACHE_PATH}/ramdisk"
EXTRACTOR_PATH="${CACHE_PATH}/extractor"
EXTRACTOR_BIN="syno_extract_system_patch"
DSMPATH="${HOME}/dsm"
FILESPATH="${HOME}/files"
MODEL=$(echo ${M} | sed 's/d$/D/; s/rp$/RP/; s/rp+/RP+/')
getDSM "${MODEL}" "${A}"
done <<<$(cat "${TMP_PATH}/modellist")
done < <(cat "${TMP_PATH}/modellist")
cp -f "${TMP_PATH}/dsmdata.yml" "${HOME}/dsmdata.yml"
# Cleanup DSM Files
rm -rf "${CACHE_PATH}/dl"