Mirror of https://github.com/AuxXxilium/arc.git (synced 2024-11-24 00:09:53 +07:00)
Tree: rewrite
Commit: d6e7da9af8 (parent: e085595bf6)
Signed-off-by: AuxXxilium <info@auxxxilium.tech>
@@ -37,7 +37,6 @@ function addonSelection() {
# read platform and kernel version to check if addon exists
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
ARCPATCH="$(readConfigKey "arc.patch" "${USER_CONFIG_FILE}")"
# read addons from user config
@@ -52,8 +51,6 @@ function addonSelection() {
arrayExistItem "${ADDON}" "${!ADDONS[@]}" && ACT="on" || ACT="off"
if [[ "${ADDON}" == "amepatch" || "${ADDON}" == "sspatch" || "${ADDON}" == "arcdns" ]] && [ "${ARCPATCH}" == "false" ]; then
continue
elif [[ "${ADDON}" == "codecpatch" || "${ADDON}" == "sspatch" ]] && [ "${NANOVER}" == "2" ]; then
continue
elif [ "${ADDON}" == "cpufreqscaling" ] && [ "${CPUFREQ}" == "false" ]; then
continue
else
@@ -84,11 +81,7 @@ function modulesMenu() {
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
# Modify KVER for Epyc7002
if [ "${PLATFORM}" == "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
# menu loop
while true; do
dialog --backtitle "$(backtitle)" --cancel-label "Exit" --menu "Choose an Option" 0 0 0 \
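Note on the refactor above: the removed if/else block and the added one-liner assign the same KVERP value. A minimal sketch with assumed example values (not taken from this commit):

# Illustrative sketch; the PLATFORM/PRODUCTVER/KVER values are assumptions.
PLATFORM="epyc7002"; PRODUCTVER="7.2"; KVER="5.10.55"
# Old form:
if [ "${PLATFORM}" == "epyc7002" ]; then
  KVERP="${PRODUCTVER}-${KVER}"
else
  KVERP="${KVER}"
fi
# New form; safe here because a plain assignment after && cannot fail,
# so the || branch only runs when the test itself is false:
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
echo "${KVERP}"   # -> 7.2-5.10.55 with the values assumed above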
@@ -30,7 +30,6 @@ LKM="$(readConfigKey "lkm" "${USER_CONFIG_FILE}")"
if [ -n "${MODEL}" ]; then
DT="$(readConfigKey "platforms.${PLATFORM}.dt" "${P_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
ARCCONF="$(readConfigKey "${MODEL}.serial" "${S_FILE}" 2>/dev/null)"
fi
@@ -76,7 +75,7 @@ function backtitle() {
fi
BACKTITLE="${ARC_TITLE}$([ -n "${NEWTAG}" ] && [ "${NEWTAG}" != "${ARC_VERSION}" ] && echo " > ${NEWTAG}") | "
BACKTITLE+="${MODEL:-(Model)} | "
BACKTITLE+="${PRODUCTVER:-(Version)}$([ -n "${NANOVER}" ] && echo ".${NANOVER}") | "
BACKTITLE+="${PRODUCTVER:-(Version)} | "
BACKTITLE+="${IPCON:-(IP)}${OFF} | "
BACKTITLE+="Patch: ${ARCPATCH} | "
BACKTITLE+="Config: ${CONFDONE} | "
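The changed backtitle line only appends ".${NANOVER}" when nanover is set; a small sketch of the expansion, with assumed values:

# Sketch only; the values are assumptions, not from the commit.
PRODUCTVER="7.2"; NANOVER="1"
echo "${PRODUCTVER:-(Version)}$([ -n "${NANOVER}" ] && echo ".${NANOVER}") | "   # -> "7.2.1 | "
NANOVER=""
echo "${PRODUCTVER:-(Version)}$([ -n "${NANOVER}" ] && echo ".${NANOVER}") | "   # -> "7.2 | "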
@@ -274,7 +273,6 @@ function arcVersion() {
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
AUTOMATED="$(readConfigKey "automated" "${USER_CONFIG_FILE}")"
# Check for Custom Build
if [ "${AUTOMATED}" == "false" ]; then
@@ -286,21 +284,9 @@ function arcVersion() {
[ $? -ne 0 ] && return 0
resp=$(cat ${TMP_PATH}/resp)
[ -z "${resp}" ] && return 1
if [ "${resp}" == "7.2" ]; then
dialog --backtitle "$(backtitle)" --title "DSM Version" \
--menu "Choose a DSM Version?\n* Recommended Option" 8 40 0 \
1 "DSM ${resp}.1 (Stable) *" \
2 "DSM ${resp}.2 (Experimental)" \
2>"${TMP_PATH}/opt"
[ $? -ne 0 ] && return 1
opt=$(cat ${TMP_PATH}/opt)
[ -z "${opt}" ] && return 1
fi
if [ "${PRODUCTVER}" != "${resp}" ] || [ "${NANOVER}" != "${opt}" ]; then
if [ "${PRODUCTVER}" != "${resp}" ]; then
PRODUCTVER="${resp}"
writeConfigKey "productver" "${PRODUCTVER}" "${USER_CONFIG_FILE}"
NANOVER="${opt}"
writeConfigKey "nanover" "${NANOVER}" "${USER_CONFIG_FILE}"
# Reset Config if changed
writeConfigKey "buildnum" "" "${USER_CONFIG_FILE}"
writeConfigKey "paturl" "" "${USER_CONFIG_FILE}"
@@ -334,11 +320,7 @@ function arcVersion() {
# Reset Modules
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
# Modify KVER for Epyc7002
if [ "${PLATFORM}" == "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
# Rewrite modules
writeConfigKey "modules" "{}" "${USER_CONFIG_FILE}"
while read -r ID DESC; do
@@ -562,8 +544,6 @@ function arcSettings() {
function arcSummary() {
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
[ -n "${NANOVER}" ] && DSMVER="${PRODUCTVER}.${NANOVER}" || DSMVER="${PRODUCTVER}"
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
DT="$(readConfigKey "platforms.${PLATFORM}.dt" "${P_FILE}")"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
@@ -604,7 +584,7 @@ function arcSummary() {
# Print Summary
SUMMARY="\Z4> DSM Information\Zn"
SUMMARY+="\n>> DSM Model: \Zb${MODEL}\Zn"
SUMMARY+="\n>> DSM Version: \Zb${DSMVER}\Zn"
SUMMARY+="\n>> DSM Version: \Zb${PRODUCTVER}\Zn"
SUMMARY+="\n>> DSM Platform: \Zb${PLATFORM}\Zn"
SUMMARY+="\n>> DeviceTree: \Zb${DT}\Zn"
[ "${MODEL}" == "SA6400" ] && SUMMARY+="\n>> Kernel: \Zb${KERNEL}\Zn"
@@ -661,98 +641,49 @@ function make() {
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
DT="$(readConfigKey "platforms.${PLATFORM}.dt" "${P_FILE}")"
AUTOMATED="$(readConfigKey "automated" "${USER_CONFIG_FILE}")"
PAT_URL=""
PAT_HASH=""
URLVER=""
VALID="false"
# Cleanup
[ -f "${MOD_ZIMAGE_FILE}" ] && rm -f "${MOD_ZIMAGE_FILE}"
[ -f "${MOD_RDGZ_FILE}" ] && rm -f "${MOD_RDGZ_FILE}"
[ -d "${UNTAR_PAT_PATH}" ] && rm -rf "${UNTAR_PAT_PATH}"
mkdir -p "${UNTAR_PAT_PATH}"
# Get PAT Data
dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
--infobox "Get PAT Data from Local File..." 3 40
[ -n "${NANOVER}" ] && DSMVER="${PRODUCTVER}.${NANOVER}" || DSMVER="${PRODUCTVER}"
PAT_URL="$(readConfigKey "${MODEL}.\"${DSMVER}\".url" "${D_FILE}")"
PAT_HASH="$(readConfigKey "${MODEL}.\"${DSMVER}\".hash" "${D_FILE}")"
if [ -n "${PAT_URL}" ] && [ -n "${PAT_HASH}" ]; then
if echo "${PAT_URL}" | grep -q "https://"; then
VALID=true
fi
fi
sleep 2
if [ "${OFFLINE}" == "false" ]; then
URLCHECK="$(curl --head -skL -m 10 "${PAT_URL}" | head -n 1)"
if echo "${URLCHECK}" | grep -q 404; then
VALID=false
fi
if [ "${VALID}" == "false" ] && [ ${NANOVER} -ne 1 ]; then
# Get PAT Data
dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
--infobox "Get PAT Data from Syno..." 3 40
idx=0
while [ ${idx} -le 3 ]; do # Loop 3 times, if successful, break
local URL="https://www.synology.com/api/support/findDownloadInfo?lang=en-us&product=${MODEL/+/%2B}&major=${PRODUCTVER%%.*}&minor=${PRODUCTVER##*.}"
if [ "${ARCNIC}" == "auto" ]; then
PAT_DATA="$(curl -skL -m 10 "${URL}")"
else
PAT_DATA="$(curl --interface ${ARCNIC} -skL -m 10 "${URL}")"
fi
if [ "$(echo ${PAT_DATA} | jq -r '.success' 2>/dev/null)" == "true" ]; then
if echo ${PAT_DATA} | jq -r '.info.system.detail[0].items[0].files[0].label_ext' 2>/dev/null | grep -q 'pat'; then
PAT_URL=$(echo ${PAT_DATA} | jq -r '.info.system.detail[0].items[0].files[0].url')
PAT_HASH=$(echo ${PAT_DATA} | jq -r '.info.system.detail[0].items[0].files[0].checksum')
PAT_URL=${PAT_URL%%\?*}
if [ -n "${PAT_URL}" ] && [ -n "${PAT_HASH}" ]; then
if echo "${PAT_URL}" | grep -q "https://"; then
VALID=true
URLCHECK="$(curl --head -skL -m 10 "${PAT_URL}" | head -n 1)"
if echo "${URLCHECK}" | grep -q 404; then
VALID=false
else
break
fi
fi
fi
fi
fi
sleep 3
idx=$((${idx} + 1))
done
fi
if [ "${VALID}" == "false" ] && [ ${NANOVER} -ne 1 ]; then
dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
--infobox "Get PAT Data from Github..." 3 40
idx=0
while [ ${idx} -le 3 ]; do # Loop 3 times, if successful, break
URL="https://raw.githubusercontent.com/AuxXxilium/arc-dsm/main/dsm/${MODEL/+/%2B}/${PRODUCTVER}/pat_url"
HASH="https://raw.githubusercontent.com/AuxXxilium/arc-dsm/main/dsm/${MODEL/+/%2B}/${PRODUCTVER}/pat_hash"
if [ "${ARCNIC}" == "auto" ]; then
PAT_URL="$(curl -skL -m 10 "${URL}")"
PAT_HASH="$(curl -skL -m 10 "${HASH}")"
else
PAT_URL="$(curl --interface ${ARCNIC} -m 10 -skL "${URL}")"
PAT_HASH="$(curl --interface ${ARCNIC} -m 10 -skL "$HASH")"
fi
PAT_URL=${PAT_URL%%\?*}
if [ -n "${PAT_URL}" ] && [ -n "${PAT_HASH}" ]; then
if echo "${PAT_URL}" | grep -q "https://"; then
VALID=true
URLCHECK="$(curl --head -skL -m 10 "${PAT_URL}" | head -n 1)"
if echo "${URLCHECK}" | grep -q 404; then
VALID=false
else
break
fi
fi
fi
sleep 3
idx=$((${idx} + 1))
done
fi
while true; do
PJ="$(python ${ARC_PATH}/include/functions.py getpats4mv -m "${MODEL}" -v "${PRODUCTVER}")"
if [ -z "${PJ}" || "${PJ}" = "{}" ]; then
|
||||
MSG="Unable to connect to Synology API, Please check the network and try again!"
|
||||
dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
|
||||
--yes-label "Retry" \
|
||||
--yesno "${MSG}" 0 0
|
||||
[ $? -eq 0 ] && continue # yes-button
|
||||
return 1
|
||||
else
|
||||
PVS="$(echo "${PJ}" | jq -r 'keys | sort | reverse | join(" ")')"
|
||||
dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
|
||||
--no-items --menu "Choose a Product Version" 0 0 0 ${PVS} \
|
||||
2>${TMP_PATH}/resp
|
||||
RET=$?
|
||||
[ ${RET} -ne 0 ] && return
|
||||
PV=$(cat ${TMP_PATH}/resp)
|
||||
PAT_URL=$(echo "${PJ}" | jq -r ".\"${PV}\".url")
|
||||
PAT_SUM=$(echo "${PJ}" | jq -r ".\"${PV}\".sum")
|
||||
URLVER="$(echo "${PV}" | cut -d'.' -f1,2)"
|
||||
[ "${PRODUCTVER}" != "${URLVER}" ] && PRODUCTVER="${URLVER}"
|
||||
fi
|
||||
URLCHECK="$(curl --head -skL -m 10 "${PAT_URL}" | head -n 1)"
|
||||
if echo "${URLCHECK}" | grep -q 404; then
|
||||
VALID=false
|
||||
continue
|
||||
else
|
||||
VALID=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${AUTOMATED}" == "false" ] && [ "${VALID}" == "false" ]; then
|
||||
MSG="Failed to get PAT Data.\n"
|
||||
MSG+="Please manually fill in the URL and Hash of PAT.\n"
|
||||
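The hunk above repeats one pattern several times: a PAT URL counts as valid when it is an https URL and a HEAD request does not come back 404. A minimal sketch of that check as a helper (the function name is hypothetical, not part of this commit):

# Hypothetical helper; mirrors the curl/grep pattern used in make() above.
checkPatUrl() {
  local url="$1"
  echo "${url}" | grep -q "https://" || return 1
  local status
  status="$(curl --head -skL -m 10 "${url}" | head -n 1)"
  echo "${status}" | grep -q 404 && return 1
  return 0
}
# Usage sketch: checkPatUrl "${PAT_URL}" && VALID="true" || VALID="false"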
@@ -1051,7 +982,7 @@ else
echo "t \"Change User Password \" " >>"${TMP_PATH}/menu"
echo "N \"Add new User\" " >>"${TMP_PATH}/menu"
echo "J \"Reset DSM Network Config \" " >>"${TMP_PATH}/menu"
if [ "${PLATFORM}" == "epyc7002" ] && [ "${NANOVER}" == "1"]; then
if [ "${PLATFORM}" == "epyc7002" ]; then
echo "K \"Kernel: \Z4${KERNEL}\Zn \" " >>"${TMP_PATH}/menu"
fi
if [ "${DT}" == "true" ]; then
@@ -1166,11 +1097,7 @@ else
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
if [ -n "${PLATFORM}" ] && [ -n "${KVER}" ]; then
if [ "${PLATFORM}" == "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
writeConfigKey "modules" "{}" "${USER_CONFIG_FILE}"
while read -r ID DESC; do
writeConfigKey "modules.\"${ID}\"" "" "${USER_CONFIG_FILE}"
@@ -51,7 +51,6 @@ PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
MODELID="$(readConfigKey "modelid" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
BUILDNUM="$(readConfigKey "buildnum" "${USER_CONFIG_FILE}")"
SMALLNUM="$(readConfigKey "smallnum" "${USER_CONFIG_FILE}")"
LKM="$(readConfigKey "lkm" "${USER_CONFIG_FILE}")"
@@ -62,11 +61,7 @@ VENDOR="$(dmesg 2>/dev/null | grep -i "DMI:" | sed 's/\[.*\] DMI: //i')"
echo -e "\033[1;37mDSM:\033[0m"
echo -e "Model: \033[1;37m${MODELID:-${MODEL}}\033[0m"
echo -e "Platform: \033[1;37m${PLATFORM}\033[0m"
if [ -n "${NANOVER}" ]; then
echo -e "Version: \033[1;37m${PRODUCTVER}.${NANOVER}(${BUILDNUM}$([ ${SMALLNUM:-0} -ne 0 ] && echo "u${SMALLNUM}"))\033[0m"
else
echo -e "Version: \033[1;37m${PRODUCTVER}(${BUILDNUM}$([ ${SMALLNUM:-0} -ne 0 ] && echo "u${SMALLNUM}"))\033[0m"
fi
echo -e "Version: \033[1;37m${PRODUCTVER}(${BUILDNUM}$([ ${SMALLNUM:-0} -ne 0 ] && echo "u${SMALLNUM}"))\033[0m"
echo -e "LKM: \033[1;37m${LKM}\033[0m"
echo
echo -e "\033[1;37mSystem:\033[0m"
files/initrd/opt/arc/extract-vmlinux (new executable file, 64 lines)
@@ -0,0 +1,64 @@
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0-only
# ----------------------------------------------------------------------
# extract-vmlinux - Extract uncompressed vmlinux from a kernel image
#
# Inspired from extract-ikconfig
# (c) 2009,2010 Dick Streefland <dick@streefland.net>
#
# (c) 2011 Corentin Chary <corentin.chary@gmail.com>
#
# ----------------------------------------------------------------------

check_vmlinux()
{
# Use readelf to check if it's a valid ELF
# TODO: find a better to way to check that it's really vmlinux
# and not just an elf
readelf -h $1 > /dev/null 2>&1 || return 1

cat $1
exit 0
}

try_decompress()
{
# The obscure use of the "tr" filter is to work around older versions of
# "grep" that report the byte offset of the line instead of the pattern.

# Try to find the header ($1) and decompress from here
for pos in `tr "$1\n$2" "\n$2=" < "$img" | grep -abo "^$2"`
do
pos=${pos%%:*}
tail -c+$pos "$img" | $3 > $tmp 2> /dev/null
check_vmlinux $tmp
done
}

# Check invocation:
me=${0##*/}
img=$1
if [ $# -ne 1 -o ! -s "$img" ]
then
echo "Usage: $me <kernel-image>" >&2
exit 2
fi

# Prepare temp files:
tmp=$(mktemp /tmp/vmlinux-XXX)
trap "rm -f $tmp" 0

# That didn't work, so retry after decompression.
try_decompress '\037\213\010' xy gunzip
try_decompress '\3757zXZ\000' abcde unxz
try_decompress 'BZh' xy bunzip2
try_decompress '\135\0\0\0' xxx unlzma
try_decompress '\211\114\132' xy 'lzop -d'
try_decompress '\002!L\030' xxx 'lz4 -d'
try_decompress '(\265/\375' xxx unzstd

# Finally check for uncompressed images or objects:
check_vmlinux $img

# Bail out:
echo "$me: Cannot find vmlinux." >&2
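A usage sketch for the new script (paths are assumed examples, not from this commit):

./extract-vmlinux /path/to/bzImage > /tmp/vmlinux   # writes the first payload that readelf accepts
readelf -h /tmp/vmlinux | head -n 5                 # confirm the result is a valid ELF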
@@ -2,5 +2,7 @@
# Compatibility boot

function compatboot () {
# Check for compatibility
deleteConfigKey "nanover" "${USER_CONFIG_FILE}"
return 0
}
@@ -31,6 +31,18 @@ def validate_required_param(ctx, param, value):
raise click.MissingParameter(param_decls=[param.name])
return value

def __fullversion(ver):
out = ver
arr = ver.split('-')
if len(arr) > 0:
a = arr[0].split('.')[0] if len(arr[0].split('.')) > 0 else '0'
b = arr[0].split('.')[1] if len(arr[0].split('.')) > 1 else '0'
c = arr[0].split('.')[2] if len(arr[0].split('.')) > 2 else '0'
d = arr[1] if len(arr) > 1 else '00000'
e = arr[2] if len(arr) > 2 else '0'
out = '{}.{}.{}-{}-{}'.format(a,b,c,d,e)
return out

@cli.command()
@click.option("-d", "--data", type=str, callback=mutually_exclusive_options, is_eager=True, help="The data of QRCode.")
@@ -96,6 +108,7 @@ def makeqr(data, file, location, output):
except:
pass

@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodels(platforms=None):
@@ -104,46 +117,225 @@ def getmodels(platforms=None):
"""
import json, requests, urllib3
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore
from requests.packages.urllib3.util.retry import Retry # type: ignore

adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

if platforms is not None and platforms != "":
PS = platforms.lower().replace(",", " ").split()
else:
PS = []

models = []
if len(models) == 0:
try:
req = session.get("https://autoupdate.synology.com/os/v2", timeout=10, verify=False)
req.encoding = "utf-8"
data = json.loads(req.text)
try:
req = session.get("https://autoupdate.synology.com/os/v2", timeout=10, verify=False)
req.encoding = "utf-8"
data = json.loads(req.text)

for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
for I in data["channel"]["item"]:
if not I["title"].startswith("DSM"):
continue
for J in I["model"]:
arch = J["mUnique"].split("_")[1]
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if len(PS) > 0 and arch.lower() not in PS:
continue
for J in I["model"]:
arch = J["mUnique"].split("_")[1]
name = J["mLink"].split("/")[-1].split("_")[1].replace("%2B", "+")
if len(PS) > 0 and arch.lower() not in PS:
continue
if any(name == B["name"] for B in models):
continue
models.append({"name": name, "arch": arch})
if any(name == B["name"] for B in models):
continue
models.append({"name": name, "arch": arch})

models = sorted(models, key=lambda k: (k["arch"], k["name"]))
models = sorted(models, key=lambda k: (k["arch"], k["name"]))

except:
pass
except:
pass

models.sort(key=lambda x: (x["arch"], x["name"]))
print(json.dumps(models, indent=4))

@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodelsbykb(platforms=None):
"""
Get Syno Models.
"""
import json, requests, urllib3
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore

adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

if platforms is not None and platforms != "":
PS = platforms.lower().replace(",", " ").split()
else:
PS = []

models = []
try:
import re
from bs4 import BeautifulSoup

url="https://kb.synology.com/en-us/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
#url = "https://kb.synology.cn/zh-cn/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have"
req = session.get(url, timeout=10, verify=False)
req.encoding = "utf-8"
bs = BeautifulSoup(req.text, "html.parser")
p = re.compile(r"data: (.*?),$", re.MULTILINE | re.DOTALL)
data = json.loads(p.search(bs.find("script", string=p).prettify()).group(1))
model = "(.*?)" # (.*?): all, FS6400: one
p = re.compile(r"<td>{}<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td><td>(.*?)<\/td>".format(model), re.MULTILINE | re.DOTALL,)
it = p.finditer(data["preload"]["content"].replace("\n", "").replace("\t", ""))
for i in it:
d = i.groups()
if len(d) == 6:
d = model + d
if len(PS) > 0 and d[5].lower() not in PS:
continue
models.append({"name": d[0].split("<br")[0], "arch": d[5].lower()})
except:
pass

models.sort(key=lambda x: (x["arch"], x["name"]))
print(json.dumps(models, indent=4))

@cli.command()
@click.option("-m", "--model", type=str, required=True, help="The model of Syno.")
@click.option("-v", "--version", type=str, required=True, help="The version of Syno.")
def getpats4mv(model, version):
import json, requests, urllib3, re
from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore

adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

pats = {}
try:
urlInfo = "https://www.synology.com/api/support/findDownloadInfo?lang=en-us"
urlSteps = "https://www.synology.com/api/support/findUpgradeSteps?"
#urlInfo = "https://www.synology.cn/api/support/findDownloadInfo?lang=zh-cn"
#urlSteps = "https://www.synology.cn/api/support/findUpgradeSteps?"

major = "&major={}".format(version.split('.')[0]) if len(version.split('.')) > 0 else ""
minor = "&minor={}".format(version.split('.')[1]) if len(version.split('.')) > 1 else ""
req = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), major, minor), timeout=10, verify=False)
req.encoding = "utf-8"
data = json.loads(req.text)

build_ver = data['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = data['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = data['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats:
pats[V]={}
pats[V]['url'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[V]['sum'] = data['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']

from_ver=0
for I in data['info']['pubVers']:
if from_ver == 0 or I['build'] < from_ver: from_ver = I['build']

for I in data['info']['productVers']:
if not I['version'].startswith(version): continue
if major == "" or minor == "":
majorTmp = "&major={}".format(I['version'].split('.')[0]) if len(I['version'].split('.')) > 0 else ""
minorTmp = "&minor={}".format(I['version'].split('.')[1]) if len(I['version'].split('.')) > 1 else ""
reqTmp = session.get("{}&product={}{}{}".format(urlInfo, model.replace("+", "%2B"), majorTmp, minorTmp), timeout=10, verify=False)
reqTmp.encoding = "utf-8"
dataTmp = json.loads(reqTmp.text)

build_ver = dataTmp['info']['system']['detail'][0]['items'][0]['build_ver']
build_num = dataTmp['info']['system']['detail'][0]['items'][0]['build_num']
buildnano = dataTmp['info']['system']['detail'][0]['items'][0]['nano']
V=__fullversion("{}-{}-{}".format(build_ver, build_num, buildnano))
if not V in pats:
pats[V]={}
pats[V]['url'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['url'].split('?')[0]
pats[V]['sum'] = dataTmp['info']['system']['detail'][0]['items'][0]['files'][0]['checksum']

for J in I['versions']:
to_ver=J['build']
reqSteps = session.get("{}&product={}&from_ver={}&to_ver={}".format(urlSteps, model.replace("+", "%2B"), from_ver, to_ver), timeout=10, verify=False)
if reqSteps.status_code != 200: continue
reqSteps.encoding = "utf-8"
dataSteps = json.loads(reqSteps.text)
for S in dataSteps['upgrade_steps']:
if not 'full_patch' in S or S['full_patch'] is False: continue
if not 'build_ver' in S or not S['build_ver'].startswith(version): continue
V=__fullversion("{}-{}-{}".format(S['build_ver'], S['build_num'], S['nano']))
if not V in pats:
pats[V] = {}
pats[V]['url'] = S['files'][0]['url'].split('?')[0]
pats[V]['sum'] = S['files'][0]['checksum']
except:
pass

pats = {k: pats[k] for k in sorted(pats.keys(), reverse=True)}
print(json.dumps(pats, indent=4))

@cli.command()
@click.option("-p", "--models", type=str, help="The models of Syno.")
def getpats(models=None):
import json, requests, urllib3, re
from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry # type: ignore

adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]))
session = requests.Session()
session.mount("http://", adapter)
session.mount("https://", adapter)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

if models is not None and models != "":
MS = models.lower().replace(",", " ").split()
else:
MS = []

pats = {}
try:
req = session.get('https://archive.synology.com/download/Os/DSM', timeout=10, verify=False)
req.encoding = 'utf-8'
bs=BeautifulSoup(req.text, 'html.parser')
p = re.compile(r"(.*?)-(.*?)", re.MULTILINE | re.DOTALL)
l = bs.find_all('a', string=p)
for i in l:
ver = i.attrs['href'].split('/')[-1]
if not ver.startswith('7'): continue
req = session.get('https://archive.synology.com{}'.format(i.attrs['href']), timeout=10, verify=False)
req.encoding = 'utf-8'
bs=BeautifulSoup(req.text, 'html.parser')
p = re.compile(r"^(.*?)_(.*?)_(.*?).pat$", re.MULTILINE | re.DOTALL)
data = bs.find_all('a', string=p)
for item in data:
p = re.compile(r"DSM_(.*?)_(.*?).pat", re.MULTILINE | re.DOTALL)
rels = p.search(item.attrs['href'])
if rels != None:
info = p.search(item.attrs['href']).groups()
model = info[0].replace('%2B', '+')
if len(MS) > 0 and model.lower() not in MS:
continue
if model not in pats.keys():
pats[model]={}
pats[model][__fullversion(ver)] = item.attrs['href']
except:
pass

print(json.dumps(pats, indent=4))

@cli.command()
@click.option("-p", "--platforms", type=str, help="The platforms of Syno.")
def getmodelsoffline(platforms=None):
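getpats4mv prints a JSON map of full DSM versions to url/sum pairs; the shell side (see make() above) consumes it with jq. A sketch of that consumption, picking the newest key non-interactively where make() shows a menu instead (the sample values in comments are assumptions):

PJ="$(python ${ARC_PATH}/include/functions.py getpats4mv -m "${MODEL}" -v "${PRODUCTVER}")"
PVS="$(echo "${PJ}" | jq -r 'keys | sort | reverse | join(" ")')"   # e.g. "7.2.1-69057-4 7.2.0-64570-3"
PV="$(echo "${PVS}" | awk '{print $1}')"                            # newest entry
PAT_URL="$(echo "${PJ}" | jq -r ".\"${PV}\".url")"
PAT_SUM="$(echo "${PJ}" | jq -r ".\"${PV}\".sum")"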
@@ -442,11 +442,7 @@ function updateModules() {
PLATFORM="$(readConfigKey "platform" "${USER_CONFIG_FILE}")"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
# Modify KVER for Epyc7002
if [ "${PLATFORM}" = "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
fi
if [ -n "${PLATFORM}" ] && [ -n "${KVERP}" ]; then
writeConfigKey "modules" "{}" "${USER_CONFIG_FILE}"
@@ -54,7 +54,6 @@ initConfigKey "hddsort" "false" "${USER_CONFIG_FILE}"
initConfigKey "kernel" "official" "${USER_CONFIG_FILE}"
initConfigKey "kernelload" "power" "${USER_CONFIG_FILE}"
initConfigKey "kernelpanic" "5" "${USER_CONFIG_FILE}"
initConfigKey "nanover" "" "${USER_CONFIG_FILE}"
initConfigKey "odp" "false" "${USER_CONFIG_FILE}"
initConfigKey "pathash" "" "${USER_CONFIG_FILE}"
initConfigKey "paturl" "" "${USER_CONFIG_FILE}"
@@ -67,11 +67,7 @@ writeConfigKey "smallnum" "${SMALLNUM}" "${USER_CONFIG_FILE}"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"

# Modify KVER for Epyc7002
if [ "${PLATFORM}" == "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"

# Sanity check
if [ -z "${PLATFORM}" ] || [ -z "${KVER}" ]; then
@@ -27,7 +27,6 @@ LKM="$(readConfigKey "lkm" "${USER_CONFIG_FILE}")"
if [ -n "${MODEL}" ]; then
DT="$(readConfigKey "platforms.${PLATFORM}.dt" "${P_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
NANOVER="$(readConfigKey "nanover" "${USER_CONFIG_FILE}")"
ARCCONF="$(readConfigKey "${MODEL}.serial" "${S_FILE}" 2>/dev/null)"
fi
@@ -42,7 +41,7 @@ ntpCheck
function backtitle() {
BACKTITLE="${ARC_TITLE}$([ -n "${NEWTAG}" ] && [ "${NEWTAG}" != "${ARC_VERSION}" ] && echo " > ${NEWTAG}") | "
BACKTITLE+="${MODEL:-(Model)} | "
BACKTITLE+="${PRODUCTVER:-(Version)}$([ -n "${NANOVER}" ] && echo ".${NANOVER}") | "
BACKTITLE+="${PRODUCTVER:-(Version)} | "
BACKTITLE+="${IPCON:-(IP)}${OFF} | "
BACKTITLE+="Patch: ${ARCPATCH} | "
BACKTITLE+="Config: ${CONFDONE} | "
@@ -18,11 +18,7 @@ if [ "${KERNEL}" == "custom" ]; then
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
KVER="$(readConfigKey "platforms.${PLATFORM}.productvers.\"${PRODUCTVER}\".kver" "${P_FILE}")"
# Modify KVER for Epyc7002
if [ "${PLATFORM}" == "epyc7002" ]; then
KVERP="${PRODUCTVER}-${KVER}"
else
KVERP="${KVER}"
fi
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
# Extract bzImage
gzip -dc "${CUSTOM_PATH}/bzImage-${PLATFORM}-${KVERP}.gz" >"${MOD_ZIMAGE_FILE}"
else
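With the one-liner in place, the gzip -dc line above resolves to a file named bzImage-<platform>-<kverp>.gz; a sketch with assumed values:

# Assumed example values; shows only the resulting file name, not part of this commit.
PLATFORM="epyc7002"; PRODUCTVER="7.2"; KVER="5.10.55"; CUSTOM_PATH="/mnt/p3/custom"
[ "${PLATFORM}" == "epyc7002" ] && KVERP="${PRODUCTVER}-${KVER}" || KVERP="${KVER}"
echo "${CUSTOM_PATH}/bzImage-${PLATFORM}-${KVERP}.gz"   # -> /mnt/p3/custom/bzImage-epyc7002-7.2-5.10.55.gz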