automated: first try

Signed-off-by: AuxXxilium <info@auxxxilium.tech>
This commit is contained in:
AuxXxilium 2024-04-06 20:40:42 +02:00
parent 0e60e6eb7d
commit b6efdd270b
9 changed files with 1171 additions and 4 deletions

View File

@ -0,0 +1,8 @@
---
# GitHub issue-template front matter: issues created from this template get
# the "custom" label, which triggers .github/workflows/custom.yml.
name: Custom Build
about: Create a Customized Build
title: ''
labels: custom
assignees: ''
---

261
.github/workflows/custom.yml vendored Normal file
View File

@ -0,0 +1,261 @@
#
# Copyright (C) 2023 AuxXxilium <https://github.com/AuxXxilium> and Ing <https://github.com/wjz304>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# Builds a customized loader image when an issue gets the "custom" label.
# The issue body is JSON produced by docs/custom.html; the finished image is
# uploaded to file.io and the link is mailed to the issue author (or posted
# as a comment when no public email is available).
name: Custom Build
on:
  issues:
    types:
      - labeled
jobs:
  build:
    if: github.event.label.name == 'custom'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@main
      - name: Init Env
        run: |
          git config --global user.email "info@auxxxilium.tech"
          git config --global user.name "AuxXxilium"
          sudo timedatectl set-timezone "Europe/Berlin"
          sudo apt update
          sudo apt install -y jq gawk cpio gettext libelf-dev qemu-utils busybox dialog curl sed
          sudo snap install yq
      # Parse the issue title/body and export iscustom, MODEL, PRODUCTVER,
      # ADDONS and FORMAT into $GITHUB_ENV for the following steps.
      - name: Check Custom
        shell: python
        run: |
          # -*- coding: utf-8 -*-
          import json, subprocess
          def set_output(name, value):
            subprocess.call(["echo '{}={}' >> $GITHUB_ENV".format(name, value)], shell=True)
          issuetitle = ${{ toJSON(github.event.issue.title) }};
          issuebody = ${{ toJSON(github.event.issue.body) }};
          iscustom = 'false'
          MODEL = ''
          PRODUCTVER = ''
          ADDONS = ''
          FORMAT = ''
          try:
            if issuetitle.lower().startswith('custom'):
              jsonbody = json.loads(issuebody)
              iscustom = 'true'
              MODEL = jsonbody.get('model', '')
              PRODUCTVER = jsonbody.get('version', '')
              ADDONS = jsonbody.get('addons', '')
              FORMAT = jsonbody.get('format', '')
          except ValueError as e:
            pass
          set_output("iscustom", iscustom)
          set_output("MODEL", MODEL)
          set_output("PRODUCTVER", PRODUCTVER)
          set_output("ADDONS", ADDONS)
          set_output("FORMAT", FORMAT)
      # calculates the version number and push
      - name: Calculate Version
        run: |
          # Calculate Version
          VERSION="`date +'%y.%-m.%-d'`-custom"
          echo "Version: ${VERSION}"
          echo "VERSION=${VERSION}" >> $GITHUB_ENV
          if [ -n "${VERSION}" ]; then
            # Modify Source File
            echo "${VERSION}" >VERSION
            echo "${VERSION}" >files/p1/ARC-VERSION
            sed 's/^ARC_VERSION=.*/ARC_VERSION="'${VERSION}'"/' -i files/initrd/opt/arc/include/consts.sh
          fi
      - name: Update Comment Building
        if: env.iscustom == 'true'
        id: comment
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'create-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          body: |
            Hi @${{ github.event.issue.user.login }}.
            arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }} is being build and package and will be sent to your email later..
            > ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
            ----
      # Get extractor, LKM, Addons, Modules and Configs
      - name: Get extractor, LKM, Addons, Extensions, Modules and Configs
        run: |
          . scripts/func.sh
          getExtractor "files/p3/extractor"
          getLKMs "files/p3/lkms"
          getAddons "files/p3/addons"
          getModules "files/p3/modules"
          getConfigs "files/p3/configs"
          getPatches "files/p3/patches"
          echo "OK"
      # Build incremental
      - name: Build image
        run: |
          . scripts/func.sh
          echo "Create Arc Image"
          IMAGE_FILE="arc.img"
          gzip -dc "files/initrd/opt/arc/grub.img.gz" >"${IMAGE_FILE}"
          fdisk -l "${IMAGE_FILE}"
          LOOPX=$(sudo losetup -f)
          sudo losetup -P "${LOOPX}" "${IMAGE_FILE}"
          echo "Mounting image file"
          mkdir -p "/tmp/p1"
          mkdir -p "/tmp/p3"
          sudo mount ${LOOPX}p1 "/tmp/p1"
          sudo mount ${LOOPX}p3 "/tmp/p3"
          echo "Get Buildroot"
          getBuildroot "2023.08.x" "br"
          [ ! -f "br/bzImage-arc" ] || [ ! -f "br/initrd-arc" ] && return 1
          echo "Repack initrd"
          cp -f "br/bzImage-arc" "files/p3/bzImage-arc"
          repackInitrd "br/initrd-arc" "files/initrd" "files/p3/initrd-arc"
          echo "Set Custom Config"
          sudo echo "model: ${{ env.MODEL }}" >"/tmp/p1/preset-config"
          sudo echo "productver: ${{ env.PRODUCTVER }}" >>"/tmp/p1/preset-config"
          sudo echo "addons:" >>"/tmp/p1/preset-config"
          for ADDON in ${{ env.ADDONS }}; do
            sudo echo "addons.${ADDON}" >>"/tmp/p1/preset-config"
          done
          sudo echo "${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}" >"/tmp/p3/.automated"
          echo "Copying files"
          sudo cp -Rf "files/p1/"* "/tmp/p1"
          sudo cp -Rf "files/p3/"* "/tmp/p3"
          sudo cp -f "files/p1/preset-config" "/tmp/p1/preset-config"
          sync
          echo "Unmount image file"
          sudo umount "/tmp/p1"
          sudo umount "/tmp/p3"
          rmdir "/tmp/p1"
          rmdir "/tmp/p3"
          sudo losetup --detach ${LOOPX}
          if [ "${{ env.FORMAT }}" = "dyn" ]; then
            echo "Image Converter -> dyn"
            qemu-img convert -O vmdk arc.img arc-dyn.vmdk
          elif [ "${{ env.FORMAT }}" = "flat" ]; then
            echo "Image Converter -> flat"
            qemu-img convert -O vmdk -o adapter_type=lsilogic arc.img -o subformat=monolithicFlat arc.vmdk
          fi
      # Zip image, upload it to file.io and export EMAIL/FIOURL for the
      # notification steps below
      - name: Pack
        run: |
          if [ "${{ env.FORMAT }}" = "img" ]; then
            zip -9 "arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.img.zip" arc.img
            UPLOAD="$(curl -k -F "file=@arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.img.zip" "https://file.io")"
            if ! echo "${UPLOAD}" | grep -oP '"link":\s*"\K[^"]+'; then
              echo "Upload failed"
              exit 1
            fi
          elif [ "${{ env.FORMAT }}" = "dyn" ]; then
            zip -9 "arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.vmdk-dyn.zip" arc-dyn.vmdk
            UPLOAD="$(curl -k -F "file=@arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.vmdk-dyn.zip" "https://file.io")"
            if ! echo "${UPLOAD}" | grep -oP '"link":\s*"\K[^"]+'; then
              echo "Upload failed"
              exit 1
            fi
          elif [ "${{ env.FORMAT }}" = "flat" ]; then
            zip -9 "arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.vmdk-flat.zip" arc.vmdk arc-flat.vmdk
            UPLOAD="$(curl -k -F "file=@arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}.vmdk-flat.zip" "https://file.io")"
            if ! echo "${UPLOAD}" | grep -oP '"link":\s*"\K[^"]+'; then
              echo "Upload failed"
              exit 1
            fi
          fi
          EMAIL=$(curl -s -H "Authorization: token ${{ secrets.ACTION }}" "https://api.github.com/users/${{ github.event.issue.user.login }}" | jq -r '.email')
          FIOURL="$(echo "${UPLOAD}" | grep -oP '"link":\s*"\K[^"]+')"
          echo "EMAIL=${EMAIL}" >> $GITHUB_ENV
          echo "FIOURL=${FIOURL}" >> $GITHUB_ENV
      - name: Send mail
        if: env.iscustom == 'true' && success() && env.EMAIL != 'null'
        uses: dawidd6/action-send-mail@v3
        with:
          server_address: smtp.gmail.com
          server_port: 587
          username: ${{ secrets.MAIL_USERNAME }}
          password: ${{ secrets.MAIL_PASSWORD }}
          # Fix: was "env.model" — the exported variable is MODEL
          subject: arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }}
          to: ${{ env.EMAIL }}
          from: ${{ secrets.MAIL_USERNAME }}
          # Fix: was "env.TTDLURL", which is never set anywhere — the Pack
          # step exports the download link as FIOURL
          body: |
            ${{ env.FIOURL }}
      - name: Update Comment Success
        if: env.iscustom == 'true' && success() && env.EMAIL != 'null'
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'update-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
          update-mode: replace
          body: |
            Hi @${{ github.event.issue.user.login }}.
            arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }} has been sent to your email.
            > ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
            ----
      # Fallback when the author has no public email: post the link instead.
      # (Renamed — the step name duplicated the previous step's.)
      - name: Update Comment Success (no Email)
        if: env.iscustom == 'true' && success() && env.EMAIL == 'null'
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'update-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
          update-mode: replace
          body: |
            Hi @${{ github.event.issue.user.login }}.
            ${{ env.FIOURL }}
            > ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
            ----
      - name: Close Issues
        if: env.iscustom == 'true' && success()
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'close-issue'
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
      - name: Update Comment Fail
        if: env.iscustom == 'true' && failure()
        uses: actions-cool/issues-helper@v3
        with:
          actions: 'update-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
          update-mode: replace
          body: |
            Hi @${{ github.event.issue.user.login }}.
            arc-${{ env.MODEL }}-${{ env.PRODUCTVER }}-${{ env.VERSION }} failed to build and package, please try again.
            > ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
            ----

60
.github/workflows/data.yml vendored Normal file
View File

@ -0,0 +1,60 @@
#
# Copyright (C) 2023 AuxXxilium <https://github.com/AuxXxilium> and Ing <https://github.com/wjz304>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# Regenerates docs/models.json (model -> supported product versions) from the
# downloaded model configs; optionally commits the result when "push" is set.
name: Data
on:
  workflow_dispatch:
    inputs:
      push:
        description: "push"
        default: false
        type: boolean
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@main
      - name: Init Env
        run: |
          git config --global user.email "info@auxxxilium.tech"
          git config --global user.name "AuxXxilium"
          sudo timedatectl set-timezone "Europe/Berlin"
      # Build a JSON map {"model": ["productver", ...], ...} by reading the
      # .productvers keys out of every downloaded <model>.yml config
      - name: Get models
        run: |
          . scripts/func.sh
          models="{"
          WORK_PATH="files/p3"
          mkdir -p "${WORK_PATH}/autoconfigs"
          getConfigs "${WORK_PATH}/autoconfigs"
          for M in $(find "${WORK_PATH}/autoconfigs" -maxdepth 1 -name \*.yml 2>/dev/null | sed 's/.*\///; s/\.yml//'); do
            models+="\"${M}\":["
            for V in $(yq eval '.productvers | explode(.) | to_entries | map([.key])[] | .[]' "${WORK_PATH}/autoconfigs/${M}.yml" 2>/dev/null | sort -r); do
              models+="\"${V}\","
            done
            models="${models%,}],"
          done
          models="${models%,}}"
          echo "${models}" >docs/models.json
          rm -rf "${WORK_PATH}/autoconfigs"
      # Commit docs/models.json only when it actually changed
      - name: Check and Push
        if: success() && inputs.push == true
        run: |
          echo "Git push ..."
          git pull
          status=$(git status -s | grep -E 'docs/models.json' | awk '{printf " %s", $2}')
          if [ -n "${status}" ]; then
            git add ${status}
            git commit -m "data: update $(date +%Y-%m-%d" "%H:%M:%S)"
            git push -f
          fi

227
docs/custom.html Normal file
View File

@ -0,0 +1,227 @@
<!DOCTYPE html>
<html lang="en-US" data-color-mode="auto" data-light-theme="light" data-dark-theme="dark"
data-a11y-animated-images="system">
<head>
<meta charset="utf-8" />
<link rel="dns-prefetch" href="https://github.githubassets.com" />
<link rel="dns-prefetch" href="https://avatars.githubusercontent.com" />
<link rel="dns-prefetch" href="https://github-cloud.s3.amazonaws.com" />
<link rel="dns-prefetch" href="https://user-images.githubusercontent.com/" />
<link rel="preconnect" href="https://github.githubassets.com" crossorigin />
<link rel="preconnect" href="https://avatars.githubusercontent.com" />
<link crossorigin="anonymous" media="all" rel="stylesheet"
href="https://github.githubassets.com/assets/light-0eace2597ca3.css" />
<link crossorigin="anonymous" media="all" rel="stylesheet"
href="https://github.githubassets.com/assets/dark-a167e256da9c.css" />
<link crossorigin="anonymous" media="all" rel="stylesheet"
href="https://github.githubassets.com/assets/primer-711f412bb361.css" />
<link crossorigin="anonymous" media="all" rel="stylesheet"
href="https://github.githubassets.com/assets/global-78704364aaba.css" />
<style>
select,
input {
width: 400px;
}
</style>
<script src="https://code.jquery.com/jquery-3.3.1.min.js"></script>
<script src="https://polyfill.io/v3/polyfill.min.js"></script>
<!-- <script src="https://fastly.jsdelivr.net/npm/marked/marked.min.js"></script> -->
<script type="application/javascript">
var repo = "AuxXxilium/arc"
// Fire an asynchronous GET request and hand the response body to `callback`.
// The callback is only invoked on HTTP 200; errors and other statuses are
// silently ignored.
function httpGetAsync(theUrl, callback) {
  let xmlHttpReq = new XMLHttpRequest();
  xmlHttpReq.onreadystatechange = function () {
    if (xmlHttpReq.readyState == 4 && xmlHttpReq.status == 200)
      callback(xmlHttpReq.responseText);
  };
  xmlHttpReq.open("GET", theUrl, true); // true for asynchronous
  xmlHttpReq.send(null);
}
// Bootstrap the page once it has finished loading.
window.onload = function () {
  init();
}
// Fetch the model/version catalogue (models.json) and populate the selects.
function init() {
  httpGetAsync("https://raw.githubusercontent.com/AuxXxilium/arc/main/docs/models.json", function (result) {
    _modules = JSON.parse(result); // implicit global, shared with setModels/changeModel
    $("#model").on("change", changeModel);
    setModels();
  });
}
// Rebuild the #model <select> from the fetched catalogue (sorted by name),
// then refresh the dependent version list via changeModel().
function setModels() {
  var models = Object.keys(_modules).sort();
  $("#model")[0].options.length = 0; // clear any existing options
  for (var i = 0; i < models.length; i++) {
    var model = models[i];
    $("#model").append(`<option value="${model}">${model}</option>`);
  }
  changeModel();
}
// Refresh the #version <select> for the currently selected model, then
// update the product image. Bound as the "change" handler of #model.
function changeModel() {
  // Fix: `model` was assigned without `var`, leaking an implicit global
  // (a ReferenceError in strict mode).
  var model = $("#model").val();
  // Guard against a model that is missing from the catalogue.
  var versions = _modules[model] || [];
  $("#version")[0].options.length = 0;
  for (var i = 0; i < versions.length; i++) {
    var version = versions[i];
    $("#version").append(`<option value="${version}">${version}</option>`);
  }
  changeproductsimage();
}
// Swap the product photo to the currently selected model.
// "+" must be percent-encoded for the Synology photo API query string.
function changeproductsimage() {
  var model = $("#model").val().replace("#", "").replace("+", "%2B");
  $('#products').html(`<img src="https://www.synology.com/api/products/getPhoto?product=${model}&type=img_s&sort=0" width="40%">`);
}
// Collect the form values and open a prefilled GitHub issue whose JSON body
// is consumed by the custom-build workflow (.github/workflows/custom.yml).
function createIssues() {
  var form = document.getElementById("inputs");
  let formData = new FormData(form);
  var title = "custom";
  var body = {};
  var _parameters = ["title", "model", "version", "format", "addons"];
  for (var key in _parameters) {
    var name = _parameters[key];
    if (name == "title") {
      // Append the user-supplied title, or today's date when none was given
      if ($("#" + name).val()) {
        title += " " + $("#" + name).val();
      } else {
        title += " " + new Date().toISOString().substr(0, 10);
      }
      continue;
    }
    if ($("#" + name).val()) {
      body[name] = $("#" + name).val();
    }
  }
  // "+" must be pre-encoded so model names like "DS918+" survive the URL
  body = JSON.stringify(body).replace(/\+/g, "%2b");
  // Fix: the query string used "?&abels=custom", so the "custom" label that
  // triggers the build workflow was never applied. The title is now also
  // URL-encoded so spaces and special characters survive.
  window.location.href = `https://github.com/${repo}/issues/new?labels=custom&template=custom_build.md&title=${encodeURIComponent(title)}&body=${body}`;
}
</script>
<title>Arc Loader - Custom</title>
</head>
<body class="logged-in env-production page-responsive" style="word-wrap: break-word">
<div class="application-main left-0 text-left p-3 mx-auto container-xl px-3 px-md-4 px-lg-5 mt-4">
<pre id="logo" style="
display: block;
font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
Liberation Mono, monospace !important;
font-size: 12px !important;
line-height: 12px !important;
margin: 15px 10px;
color: #0000FF;
">
_ _ _
/ \ _ __ ___ | | ___ __ _ __| | ___ _ __
/ _ \ | '__/ __| | | / _ \ / _` |/ _` |/ _ \ '__|
/ ___ \| | | |__ | |__| (_) | |_| | |_| | __/ |
/_/ \_\_| \___| |_____\___/ \__,_|\__,_|\___|_|
</pre>
<div class="image" id="products" style="height: 50px; margin-left: 100px;">
<img src="https://www.synology.cn/img/products/detail/SA6400/heading.png" width="20%">
</div>
<div class="form-group mt-1 mb-2 ">
<div class="form-group-header">
<label class="color-fg-default text-mono f6">Title:</label>
</div>
<div class="form-group-body">
<input class="form-control input-contrast input-sm" type="text" id="title" name="inputs[title]" value="" />
</div>
</div>
<div class="form-group mt-1 mb-2 ">
<div class="form-group-header">
<label class="color-fg-default text-mono f6">Model (Choose DSM Model):</label>
</div>
<div class="form-group-body">
<select class="form-select form-control select-sm input-contrast" id="model" name="inputs[model]" value="">
<option selected="selected" value="SA6400">SA6400</option>
</select>
</div>
</div>
<div class="form-group mt-1 mb-2 ">
<div class="form-group-header">
<label class="color-fg-default text-mono f6">Version (Choose DSM Version):</label>
</div>
<div class="form-group-body">
<select class="form-select form-control select-sm input-contrast" id="version" name="inputs[version]" value="">
<option selected="selected" value="7.2">7.2</option>
</select>
</div>
</div>
<div class="form-group mt-1 mb-2 ">
<div class="form-group-header">
<label class="color-fg-default text-mono f6">Format (Choose Format for Output File):</label>
</div>
<div class="form-group-body">
<select class="form-select form-control select-sm input-contrast" id="format" name="inputs[format]" value="">
<option selected="selected" value="img">img</option>
<option value="dyn">vmdk-dyn</option>
<option value="flat">vmdk-flat</option>
</select>
</div>
</div>
<div class="form-group mt-1 mb-2 ">
<div class="form-group-header">
<label class="color-fg-default text-mono f6">Addons (Select only needed Addons, not all Addons work with all Models):</label>
</div>
<div class="form-group-body">
<select class="form-select form-control select-sm input-contrast" id="addons" name="inputs[addons]" value="" multiple>
<option value="acpid">acpid</option>
<option value="addincards">addincards</option>
<option value="amepatch">amepatch</option>
<option value="bootwait">bootwait</option>
<option value="codecpatch">codecpatch</option>
<option value="cpuinfo">cpuinfo</option>
<option value="dbgutils">dbgutils</option>
<option value="deduplication">deduplication</option>
<option value="deduplicationhdd">deduplicationhdd</option>
<option value="dsmconfigbackup">dsmconfigbackup</option>
<option value="expands">expands</option>
<option value="hdddb">hdddb</option>
<option value="i915">i915</option>
<option value="lsiutil">lsiutil</option>
<option value="multismb3">multismb3</option>
<option value="nvmecache">nvmecache</option>
<option value="nvmesystem">nvmesystem</option>
<option value="nvmevolume">nvmevolume</option>
<option value="photosfacepatch">photosfacepatch</option>
<option value="powersched">powersched</option>
<option value="reboottoloader">reboottoloader</option>
<option value="rndis">rndis</option>
<option value="sanmanager-repair">sanmanager-repair</option>
<option value="setrootpw">setrootpw</option>
<option value="sortnetif">sortnetif</option>
<option value="storagepanel">storagepanel</option>
<option value="surveillancepatch">surveillancepatch</option>
</select>
</div>
</div>
<div data-replace-remote-form-target="" class="workflow-dispatch">
<form id="inputs">
<button type="button" class="btn State--merged" onclick="return createIssues()" autofocus="">
Start Build
</button>
</form>
</div>
</div>
<footer class="footer width-full container-xl mt-3 text-center color-fg-muted">
<a aria-label="AuxXxilium" title="GitHub" class="footer-octicon mr-2" href="https://github.com/AuxXxilium">
<img class="avatar rounded-2 avatar-user" src="https://avatars.githubusercontent.com/u/67025065?v=4"
width="40" height="40" alt="AuxXxilium" />
</a>
<span> © 2024 by AuxXxilium</span>
</footer>
</body>
</html>

6
docs/index.html Normal file
View File

@ -0,0 +1,6 @@
<!DOCTYPE html>
<html lang="en-US">
<head>
  <!-- Redirect stub: the real page is custom.html. -->
  <!-- Fix: `language="javascript"` is a long-deprecated attribute; a plain
       <script> element is sufficient and behaves identically. -->
  <script>location.replace("./custom.html")</script>
</head>
</html>

View File

@ -295,7 +295,7 @@ function cmdlineMenu() {
dialog --clear --backtitle "$(backtitle)" \
--title "CPU Fix" --menu "Fix?" 0 0 0 \
1 "Install" \
2 "Uninnstall" \
2 "Uninstall" \
2>"${TMP_PATH}/resp"
resp=$(cat ${TMP_PATH}/resp)
[ -z "${resp}" ] && return 1
@ -317,7 +317,7 @@ function cmdlineMenu() {
dialog --clear --backtitle "$(backtitle)" \
--title "RAM Fix" --menu "Fix?" 0 0 0 \
1 "Install" \
2 "Uninnstall" \
2 "Uninstall" \
2>"${TMP_PATH}/resp"
resp=$(cat ${TMP_PATH}/resp)
[ -z "${resp}" ] && return 1
@ -339,7 +339,7 @@ function cmdlineMenu() {
dialog --clear --backtitle "$(backtitle)" \
--title "PCI/IRQ Fix" --menu "Fix?" 0 0 0 \
1 "Install" \
2 "Uninnstall" \
2 "Uninstall" \
2>"${TMP_PATH}/resp"
resp=$(cat ${TMP_PATH}/resp)
[ -z "${resp}" ] && return 1
@ -359,7 +359,7 @@ function cmdlineMenu() {
dialog --clear --backtitle "$(backtitle)" \
--title "C-State Fix" --menu "Fix?" 0 0 0 \
1 "Install" \
2 "Uninnstall" \
2 "Uninstall" \
2>"${TMP_PATH}/resp"
resp=$(cat ${TMP_PATH}/resp)
[ -z "${resp}" ] && return 1

377
files/initrd/opt/arc/automated.sh Executable file
View File

@ -0,0 +1,377 @@
#!/usr/bin/env bash
###############################################################################
# Overlay Init Section
[[ -z "${ARC_PATH}" || ! -d "${ARC_PATH}/include" ]] && ARC_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
. ${ARC_PATH}/include/functions.sh
. ${ARC_PATH}/include/addons.sh
. ${ARC_PATH}/include/modules.sh
. ${ARC_PATH}/include/storage_automated.sh
. ${ARC_PATH}/include/network_automated.sh
. ${ARC_PATH}/arc-functions.sh
# Abort early when the loader disk could not be determined
[ -z "${LOADER_DISK}" ] && die "Loader Disk not found!"
# Memory: Check Memory installed
# Round installed RAM up to whole GiB, then express it in MiB again
RAMFREE=$(($(free -m | grep -i mem | awk '{print$2}') / 1024 + 1))
RAMTOTAL=$((${RAMFREE} * 1024))
# Fall back to 8 GiB when detection failed or produced nonsense
[ -z "${RAMTOTAL}" ] || [ ${RAMTOTAL} -le 0 ] && RAMTOTAL=8192
RAMMAX=$((${RAMTOTAL} * 2))
RAMMIN=$((${RAMTOTAL} / 2))
# Check for Hypervisor
if grep -q "^flags.*hypervisor.*" /proc/cpuinfo; then
  # Check for Hypervisor
  MACHINE="$(lscpu | grep Hypervisor | awk '{print $3}')"
else
  MACHINE="NATIVE"
fi
# Get Loader Disk Bus
BUS=$(getBus "${LOADER_DISK}")
# Get DSM Data from Config
MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
LAYOUT="$(readConfigKey "layout" "${USER_CONFIG_FILE}")"
KEYMAP="$(readConfigKey "keymap" "${USER_CONFIG_FILE}")"
LKM="$(readConfigKey "lkm" "${USER_CONFIG_FILE}")"
# Model-derived values are only available once a model has been chosen
if [ -n "${MODEL}" ]; then
  PLATFORM="$(readModelKey "${MODEL}" "platform")"
  DT="$(readModelKey "${MODEL}" "dt")"
fi
# Get Arc Data from Config
DIRECTBOOT="$(readConfigKey "arc.directboot" "${USER_CONFIG_FILE}")"
CONFDONE="$(readConfigKey "arc.confdone" "${USER_CONFIG_FILE}")"
BUILDDONE="$(readConfigKey "arc.builddone" "${USER_CONFIG_FILE}")"
ARCPATCH="$(readConfigKey "arc.patch" "${USER_CONFIG_FILE}")"
BOOTIPWAIT="$(readConfigKey "arc.bootipwait" "${USER_CONFIG_FILE}")"
KERNELLOAD="$(readConfigKey "arc.kernelload" "${USER_CONFIG_FILE}")"
KERNELPANIC="$(readConfigKey "arc.kernelpanic" "${USER_CONFIG_FILE}")"
KVMSUPPORT="$(readConfigKey "arc.kvm" "${USER_CONFIG_FILE}")"
MACSYS="$(readConfigKey "arc.macsys" "${USER_CONFIG_FILE}")"
ODP="$(readConfigKey "arc.odp" "${USER_CONFIG_FILE}")"
MODULESCOPY="$(readConfigKey "arc.modulescopy" "${USER_CONFIG_FILE}")"
HDDSORT="$(readConfigKey "arc.hddsort" "${USER_CONFIG_FILE}")"
KERNEL="$(readConfigKey "arc.kernel" "${USER_CONFIG_FILE}")"
USBMOUNT="$(readConfigKey "arc.usbmount" "${USER_CONFIG_FILE}")"
ARCIPV6="$(readConfigKey "arc.ipv6" "${USER_CONFIG_FILE}")"
EMMCBOOT="$(readConfigKey "arc.emmcboot" "${USER_CONFIG_FILE}")"
OFFLINE="$(readConfigKey "arc.offline" "${USER_CONFIG_FILE}")"
EXTERNALCONTROLLER="$(readConfigKey "device.externalcontroller" "${USER_CONFIG_FILE}")"
###############################################################################
# Mounts backtitle dynamically
function backtitle() {
if [ ! -n "${MODEL}" ]; then
MODEL="(Model)"
fi
if [ ! -n "${PRODUCTVER}" ]; then
PRODUCTVER="(Version)"
fi
if [ ! -n "${IPCON}" ]; then
IPCON="(IP)"
fi
BACKTITLE="${ARC_TITLE} | "
BACKTITLE+="${MODEL} | "
BACKTITLE+="${PRODUCTVER} | "
BACKTITLE+="${IPCON} | "
BACKTITLE+="Patch: ${ARCPATCH} | "
BACKTITLE+="Config: ${CONFDONE} | "
BACKTITLE+="Build: ${BUILDDONE} | "
BACKTITLE+="${MACHINE}(${BUS})"
echo "${BACKTITLE}"
}
###############################################################################
# Make Model Config
# Derive the model configuration for the automated build: decide whether the
# arc patch (predefined serial) applies, reset synoinfo/module maps for the
# chosen product version, then continue with arcSettings.
function arcAutomated() {
  # read model config for dt and aes
  MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
  DT="$(readModelKey "${MODEL}" "dt")"
  # Fix: the key was read with the unset variable ${M}, and the result was
  # assigned to the misspelled ARCPATH/ARC, so arc.patch never reflected
  # whether the model ships a predefined serial.
  ARCCONF="$(readModelKey "${MODEL}" "arc.serial")"
  [ -n "${ARCCONF}" ] && ARCPATCH="true" || ARCPATCH="false"
  if [ "${ARCPATCH}" = "true" ]; then
    # Model provides a serial -> use it and enable the arc patch
    SN="$(readModelKey "${MODEL}" "arc.serial")"
    writeConfigKey "arc.patch" "true" "${USER_CONFIG_FILE}"
  elif [ "${ARCPATCH}" = "false" ]; then
    # No predefined serial -> generate one
    SN="$(generateSerial "${MODEL}")"
    writeConfigKey "arc.patch" "false" "${USER_CONFIG_FILE}"
  fi
  ARCPATCH="$(readConfigKey "arc.patch" "${USER_CONFIG_FILE}")"
  PLATFORM="$(readModelKey "${MODEL}" "platform")"
  PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
  KVER="$(readModelKey "${MODEL}" "productvers.[${PRODUCTVER}].kver")"
  # epyc7002 encodes the product version into its kernel version string
  if [ "${PLATFORM}" = "epyc7002" ]; then
    KVER="${PRODUCTVER}-${KVER}"
  fi
  # Invalidate previous configuration/build state
  writeConfigKey "arc.confdone" "false" "${USER_CONFIG_FILE}"
  writeConfigKey "arc.builddone" "false" "${USER_CONFIG_FILE}"
  CONFDONE="$(readConfigKey "arc.confdone" "${USER_CONFIG_FILE}")"
  BUILDDONE="$(readConfigKey "arc.builddone" "${USER_CONFIG_FILE}")"
  if [[ -f "${ORI_ZIMAGE_FILE}" || -f "${ORI_RDGZ_FILE}" || -f "${MOD_ZIMAGE_FILE}" || -f "${MOD_RDGZ_FILE}" ]]; then
    # Delete old files
    rm -f "${ORI_ZIMAGE_FILE}" "${ORI_RDGZ_FILE}" "${MOD_ZIMAGE_FILE}" "${MOD_RDGZ_FILE}"
  fi
  dialog --backtitle "$(backtitle)" --title "Arc Config" \
    --infobox "Reconfiguring Synoinfo and Modules" 3 40
  # Reset synoinfo
  writeConfigKey "synoinfo" "{}" "${USER_CONFIG_FILE}"
  while IFS=': ' read -r KEY VALUE; do
    writeConfigKey "synoinfo.\"${KEY}\"" "${VALUE}" "${USER_CONFIG_FILE}"
  done <<<$(readModelMap "${MODEL}" "productvers.[${PRODUCTVER}].synoinfo")
  # Reset modules
  writeConfigKey "modules" "{}" "${USER_CONFIG_FILE}"
  while read -r ID DESC; do
    writeConfigKey "modules.\"${ID}\"" "" "${USER_CONFIG_FILE}"
  done <<<$(getAllModules "${PLATFORM}" "${KVER}")
  # Check for ACPI Support
  if ! grep -q "^flags.*acpi.*" /proc/cpuinfo; then
    deleteConfigKey "addons.acpid" "${USER_CONFIG_FILE}"
  fi
  arcSettings
}
###############################################################################
# Arc Settings Section
function arcSettings() {
  # Collect network and storage settings, mark the config done, then hand
  # over to premake to build the loader.
  # Get Network Config for Loader
  dialog --backtitle "$(backtitle)" --colors --title "Network Config" \
    --infobox "Network Config..." 3 30
  getnet
  # Select Portmap for Loader (nonDT)
  getmap
  # Only non-DT models with SATA controllers (PCI class 0106) need a storage map
  if [[ "${DT}" = "false" && $(lspci -d ::106 | wc -l) -gt 0 ]]; then
    dialog --backtitle "$(backtitle)" --colors --title "Storage Map" \
      --infobox "Storage Map..." 3 30
    getmapSelection
  fi
  # Config is done
  writeConfigKey "arc.confdone" "true" "${USER_CONFIG_FILE}"
  CONFDONE="$(readConfigKey "arc.confdone" "${USER_CONFIG_FILE}")"
  premake
}
###############################################################################
# Building Loader Online
function premake() {
  # Apply model/arc settings (memory limits, optional KVM and eMMC module
  # lists, SA6400 specifics) to the user config, then start the build.
  # Read Model Config
  MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
  PLATFORM="$(readModelKey "${MODEL}" "platform")"
  PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
  KVER="$(readModelKey "${MODEL}" "productvers.[${PRODUCTVER}].kver")"
  DT="$(readModelKey "${MODEL}" "dt")"
  # Read Config for Arc Settings
  USBMOUNT="$(readConfigKey "arc.usbmount" "${USER_CONFIG_FILE}")"
  KVMSUPPORT="$(readConfigKey "arc.kvm" "${USER_CONFIG_FILE}")"
  EMMCBOOT="$(readConfigKey "arc.emmcboot" "${USER_CONFIG_FILE}")"
  # Memory: Set mem_max_mb to the amount of installed memory to bypass Limitation
  writeConfigKey "synoinfo.mem_max_mb" "${RAMMAX}" "${USER_CONFIG_FILE}"
  writeConfigKey "synoinfo.mem_min_mb" "${RAMMIN}" "${USER_CONFIG_FILE}"
  # KVM Support
  if [ "${KVMSUPPORT}" = "true" ]; then
    writeConfigKey "modules.kvm_intel" "" "${USER_CONFIG_FILE}"
    writeConfigKey "modules.kvm_amd" "" "${USER_CONFIG_FILE}"
    writeConfigKey "modules.kvm" "" "${USER_CONFIG_FILE}"
    # NOTE(review): "irgbypass" looks like a typo for the kernel module
    # "irqbypass" (used by KVM/vfio) — confirm against the modules repo.
    writeConfigKey "modules.irgbypass" "" "${USER_CONFIG_FILE}"
  else
    deleteConfigKey "modules.kvm_intel" "${USER_CONFIG_FILE}"
    deleteConfigKey "modules.kvm_amd" "${USER_CONFIG_FILE}"
    deleteConfigKey "modules.kvm" "${USER_CONFIG_FILE}"
    deleteConfigKey "modules.irgbypass" "${USER_CONFIG_FILE}"
  fi
  # eMMC Boot Support
  if [ "${EMMCBOOT}" = "true" ]; then
    writeConfigKey "modules.mmc_block" "" "${USER_CONFIG_FILE}"
    writeConfigKey "modules.mmc_core" "" "${USER_CONFIG_FILE}"
  else
    deleteConfigKey "modules.mmc_block" "${USER_CONFIG_FILE}"
    deleteConfigKey "modules.mmc_core" "${USER_CONFIG_FILE}"
  fi
  # Fixes for SA6400
  if [ "${PLATFORM}" = "epyc7002" ]; then
    KVER="${PRODUCTVER}-${KVER}"
    MODULESCOPY="false"
    writeConfigKey "arc.modulescopy" "${MODULESCOPY}" "${USER_CONFIG_FILE}"
  fi
  # Build Loader
  make
}
###############################################################################
# Building Loader Online
# Resolve the DSM PAT download (Synology API first, GitHub mirror as a
# fallback), fetch and unpack the image when needed, then run livepatch and
# boot on success.
function make() {
  # Read Model Config
  MODEL="$(readConfigKey "model" "${USER_CONFIG_FILE}")"
  PLATFORM="$(readModelKey "${MODEL}" "platform")"
  PRODUCTVER="$(readConfigKey "productver" "${USER_CONFIG_FILE}")"
  KVER="$(readModelKey "${MODEL}" "productvers.[${PRODUCTVER}].kver")"
  DT="$(readModelKey "${MODEL}" "dt")"
  OFFLINE="$(readConfigKey "arc.offline" "${USER_CONFIG_FILE}")"
  # Fix: PAT_HASH_CONF was compared below but never read, so the cached
  # zImage/rd.gz were unconditionally re-downloaded on every build.
  PAT_HASH_CONF="$(readConfigKey "arc.pathash" "${USER_CONFIG_FILE}")"
  # epyc7002 encodes the product version into its kernel version string
  if [ "${PLATFORM}" = "epyc7002" ]; then
    KVER="${PRODUCTVER}-${KVER}"
  fi
  # Cleanup
  if [ -d "${UNTAR_PAT_PATH}" ]; then
    rm -rf "${UNTAR_PAT_PATH}"
  fi
  mkdir -p "${UNTAR_PAT_PATH}"
  # Check if all addon exists; drop addons not available for this platform/kernel
  while IFS=': ' read -r ADDON PARAM; do
    [ -z "${ADDON}" ] && continue
    if ! checkAddonExist "${ADDON}" "${PLATFORM}" "${KVER}"; then
      deleteConfigKey "addons.${ADDON}" "${USER_CONFIG_FILE}"
      continue
    fi
  done <<<$(readConfigMap "addons" "${USER_CONFIG_FILE}")
  dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
    --infobox "Get PAT Data from Syno..." 3 30
  # Get PAT Data from Syno (up to 4 attempts). The download info is fetched
  # once per attempt instead of querying the same endpoint twice.
  idx=0
  while [ ${idx} -le 3 ]; do
    DOWNLOADINFO="$(curl -m 5 -skL "https://www.synology.com/api/support/findDownloadInfo?lang=en-us&product=${MODEL/+/%2B}&major=${PRODUCTVER%%.*}&minor=${PRODUCTVER##*.}")"
    PAT_URL="$(echo "${DOWNLOADINFO}" | jq -r '.info.system.detail[0].items[0].files[0].url')"
    PAT_HASH="$(echo "${DOWNLOADINFO}" | jq -r '.info.system.detail[0].items[0].files[0].checksum')"
    # jq prints the literal string "null" for missing fields — treat as empty
    [ "${PAT_URL}" = "null" ] && PAT_URL=""
    [ "${PAT_HASH}" = "null" ] && PAT_HASH=""
    PAT_URL=${PAT_URL%%\?*}
    if [[ -n "${PAT_URL}" && -n "${PAT_HASH}" ]]; then
      break
    fi
    sleep 3
    idx=$((${idx} + 1))
  done
  if [[ -z "${PAT_URL}" || -z "${PAT_HASH}" ]]; then
    dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
      --infobox "Syno Connection failed,\ntry to get from Github..." 4 30
    idx=0
    while [ ${idx} -le 3 ]; do # Loop 3 times, if successful, break
      PAT_URL="$(curl -m 5 -skL "https://raw.githubusercontent.com/AuxXxilium/arc-dsm/main/dsm/${MODEL/+/%2B}/${PRODUCTVER%%.*}.${PRODUCTVER##*.}/pat_url")"
      PAT_HASH="$(curl -m 5 -skL "https://raw.githubusercontent.com/AuxXxilium/arc-dsm/main/dsm/${MODEL/+/%2B}/${PRODUCTVER%%.*}.${PRODUCTVER##*.}/pat_hash")"
      PAT_URL=${PAT_URL%%\?*}
      if [[ -n "${PAT_URL}" && -n "${PAT_HASH}" ]]; then
        break
      fi
      sleep 3
      idx=$((${idx} + 1))
    done
  fi
  if [[ -z "${PAT_URL}" || -z "${PAT_HASH}" ]]; then
    dialog --backtitle "$(backtitle)" --title "DSM Data" --aspect 18 \
      --infobox "No DSM Data found!\nExit." 0 0
    sleep 5
    return 1
  else
    dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
      --infobox "Get PAT Data sucessfull..." 3 30
  fi
  # Re-download/unpack only when the hash changed or the cached files are gone
  if [[ "${PAT_HASH}" != "${PAT_HASH_CONF}" || ! -f "${ORI_ZIMAGE_FILE}" || ! -f "${ORI_RDGZ_FILE}" ]]; then
    writeConfigKey "arc.paturl" "${PAT_URL}" "${USER_CONFIG_FILE}"
    writeConfigKey "arc.pathash" "${PAT_HASH}" "${USER_CONFIG_FILE}"
    # Check for existing Files
    DSM_FILE="${UNTAR_PAT_PATH}/${PAT_HASH}.tar"
    # Get new Files
    DSM_URL="https://raw.githubusercontent.com/AuxXxilium/arc-dsm/main/files/${MODEL/+/%2B}/${PRODUCTVER}/${PAT_HASH}.tar"
    STATUS=$(curl --insecure -s -w "%{http_code}" -L "${DSM_URL}" -o "${DSM_FILE}")
    if [[ $? -ne 0 || ${STATUS} -ne 200 ]]; then
      dialog --backtitle "$(backtitle)" --title "DSM Download" --aspect 18 \
        --infobox "No DSM Image found!\nTry Syno Link." 0 0
      # Grep PAT_URL
      PAT_FILE="${TMP_PATH}/${PAT_HASH}.pat"
      STATUS=$(curl -k -w "%{http_code}" -L "${PAT_URL}" -o "${PAT_FILE}" --progress-bar)
      if [[ $? -ne 0 || ${STATUS} -ne 200 ]]; then
        # Fix: message contained the broken escape "\ " instead of "\n"
        dialog --backtitle "$(backtitle)" --title "DSM Download" --aspect 18 \
          --infobox "No DSM Image found!\nExit." 0 0
        sleep 5
        return 1
      fi
      # Decide from the leading bytes whether the PAT archive is encrypted
      header=$(od -bcN2 ${PAT_FILE} | head -1 | awk '{print $3}')
      case ${header} in
        105)
          isencrypted="no"
          ;;
        213)
          isencrypted="no"
          ;;
        255)
          isencrypted="yes"
          ;;
        *)
          echo -e "Could not determine if pat file is encrypted or not, maybe corrupted, try again!"
          ;;
      esac
      if [ "${isencrypted}" = "yes" ]; then
        # Uses the extractor to untar PAT file
        LD_LIBRARY_PATH="${EXTRACTOR_PATH}" "${EXTRACTOR_PATH}/${EXTRACTOR_BIN}" "${PAT_FILE}" "${UNTAR_PAT_PATH}"
      else
        # Untar PAT file
        tar -xf "${PAT_FILE}" -C "${UNTAR_PAT_PATH}" >"${LOG_FILE}" 2>&1
      fi
      # Cleanup PAT Download
      rm -f "${PAT_FILE}"
    elif [ -f "${DSM_FILE}" ]; then
      tar -xf "${DSM_FILE}" -C "${UNTAR_PAT_PATH}" >"${LOG_FILE}" 2>&1
    elif [ ! -f "${UNTAR_PAT_PATH}/zImage" ]; then
      dialog --backtitle "$(backtitle)" --title "DSM Download" --aspect 18 \
        --infobox "ERROR: No DSM Image found!" 0 0
      sleep 5
      return 1
    fi
    dialog --backtitle "$(backtitle)" --colors --title "Arc Build" \
      --infobox "Image unpack sucessfull..." 3 30
    # Copy DSM Files to Locations if DSM Files not found
    cp -f "${UNTAR_PAT_PATH}/grub_cksum.syno" "${PART1_PATH}"
    cp -f "${UNTAR_PAT_PATH}/GRUB_VER" "${PART1_PATH}"
    cp -f "${UNTAR_PAT_PATH}/grub_cksum.syno" "${PART2_PATH}"
    cp -f "${UNTAR_PAT_PATH}/GRUB_VER" "${PART2_PATH}"
    cp -f "${UNTAR_PAT_PATH}/zImage" "${ORI_ZIMAGE_FILE}"
    cp -f "${UNTAR_PAT_PATH}/rd.gz" "${ORI_RDGZ_FILE}"
    rm -rf "${UNTAR_PAT_PATH}"
  fi
  # Reset Bootcount if User rebuild DSM
  if [[ -z "${BOOTCOUNT}" || ${BOOTCOUNT} -gt 0 ]]; then
    writeConfigKey "arc.bootcount" "0" "${USER_CONFIG_FILE}"
  fi
  (
    livepatch
    sleep 3
  ) 2>&1 | dialog --backtitle "$(backtitle)" --colors --title "Build Loader" \
    --progressbox "Doing the Magic..." 20 70
  if [[ -f "${ORI_ZIMAGE_FILE}" && -f "${ORI_RDGZ_FILE}" && -f "${MOD_ZIMAGE_FILE}" && -f "${MOD_RDGZ_FILE}" ]]; then
    # Build is done
    writeConfigKey "arc.builddone" "true" "${USER_CONFIG_FILE}"
    BUILDDONE="$(readConfigKey "arc.builddone" "${USER_CONFIG_FILE}")"
    boot && exit 0
  fi
}
###############################################################################
# Calls boot.sh to boot into DSM kernel/ramdisk
function boot() {
  # Re-read the build flag; "false" means the config changed since the last build
  BUILDDONE="$(readConfigKey "arc.builddone" "${USER_CONFIG_FILE}")"
  if [ "${BUILDDONE}" = "false" ]; then
    # Offer a rebuild before booting a stale loader; run premake only on "Yes"
    if dialog --backtitle "$(backtitle)" --title "Alert" \
      --yesno "Config changed, you need to rebuild the Loader?" 0 0; then
      premake
    fi
  fi
  dialog --backtitle "$(backtitle)" --title "Arc Boot" \
    --infobox "Booting DSM...\nPlease stay patient!" 4 25
  sleep 2
  exec reboot
}
###############################################################################
###############################################################################
# Main loop: run the automated build flow, then print access details.
arcAutomated
# Inform user
# \033[1;34m ... \033[0m renders the wrapped value in bold blue on the console.
# NOTE(review): ${IPCON} is assumed to be set earlier in this script - confirm.
echo -e "Call \033[1;34marc.sh\033[0m to configure Loader"
echo
echo -e "SSH Access:"
echo -e "IP: \033[1;34m${IPCON}\033[0m"
echo -e "User: \033[1;34mroot\033[0m"
echo -e "Password: \033[1;34marc\033[0m"
echo
echo -e "Web Terminal:"
echo -e "Address: \033[1;34mhttp://${IPCON}:7681\033[0m"

View File

@ -0,0 +1,30 @@
# Get Network Config for Loader
# Writes a mac.<eth> key to the user config for every detected eth* NIC.
# With arc.patch enabled the model's predefined MACs (arc.mac1, arc.mac2, ...)
# are used; otherwise one MAC is generated per NIC. Always sets arc.macsys
# to "hardware" afterwards.
function getnet() {
  ETHX=$(ls /sys/class/net/ 2>/dev/null | grep eth) || true
  ARCPATCH="$(readConfigKey "arc.patch" "${USER_CONFIG_FILE}")"
  if [ "${ARCPATCH}" = "true" ]; then
    # Assign the model's baked-in MACs in order, one per NIC
    ARCMACNUM=1
    for ETH in ${ETHX}; do
      ARCMAC="$(readModelKey "${MODEL}" "arc.mac${ARCMACNUM}")"
      [ -n "${ARCMAC}" ] && writeConfigKey "mac.${ETH}" "${ARCMAC}" "${USER_CONFIG_FILE}"
      # Stop once the model provides no further MAC for this index
      [ -z "${ARCMAC}" ] && break
      ARCMACNUM=$((${ARCMACNUM} + 1))
      ARCMAC=""
    done
  elif [ "${ARCPATCH}" = "false" ]; then
    for ETH in ${ETHX}; do
      # BUGFIX: the generated value was stored in MACS but ${MAC} (unset or
      # stale) was written to the config; use the same variable for both steps.
      MAC="$(generateMacAddress "${MODEL}" 1)"
      writeConfigKey "mac.${ETH}" "${MAC}" "${USER_CONFIG_FILE}"
    done
  fi
  writeConfigKey "arc.macsys" "hardware" "${USER_CONFIG_FILE}"
  MACSYS="$(readConfigKey "arc.macsys" "${USER_CONFIG_FILE}")"
}
# Build the list of eth* NICs present on this system
ETHX=$(ls /sys/class/net/ 2>/dev/null | grep eth) || true
# Probe each NIC in order; keep the first one that reports an IP address
for ETH in ${ETHX}; do
  IPCON="$(getIP ${ETH})"
  if [ -n "${IPCON}" ]; then
    break
  fi
done

View File

@ -0,0 +1,198 @@
# Get PortMap for Loader
# Counts attached drives per storage-controller class (SATA/SAS/SCSI/RAID/
# USB/MMC/NVMe), builds the drivesmax/drivescon/ports scratch files under
# ${TMP_PATH} plus the DISKIDXMAP/DISKIDXMAPMAX hex strings (consumed by
# getmapSelection), and persists all device.* drive counts to the user config.
function getmap() {
  # Sata Disks
  SATADRIVES=0
  if [ $(lspci -d ::106 | wc -l) -gt 0 ]; then
    # Clean old files
    [ -f "${TMP_PATH}/drivesmax" ] && rm -f "${TMP_PATH}/drivesmax"
    touch "${TMP_PATH}/drivesmax"
    [ -f "${TMP_PATH}/drivescon" ] && rm -f "${TMP_PATH}/drivescon"
    touch "${TMP_PATH}/drivescon"
    [ -f "${TMP_PATH}/ports" ] && rm -f "${TMP_PATH}/ports"
    # BUGFIX: was touch "${TMP_PATH}ports" (missing /) - the ports file was
    # never pre-created (later "cat ${TMP_PATH}/ports" can hit a missing file
    # when no port has a disk attached) and a stray "<tmp>ports" was left over.
    touch "${TMP_PATH}/ports"
    let DISKIDXMAPIDX=0
    DISKIDXMAP=""
    let DISKIDXMAPIDXMAX=0
    DISKIDXMAPMAX=""
    for PCI in $(lspci -d ::106 | awk '{print $1}'); do
      NUMPORTS=0
      CONPORTS=0
      unset HOSTPORTS
      declare -A HOSTPORTS
      # Map each ata<N> port of this controller to its scsi_host entry
      while read -r LINE; do
        ATAPORT="$(echo ${LINE} | grep -o 'ata[0-9]*')"
        PORT=$(echo ${ATAPORT} | sed 's/ata//')
        HOSTPORTS[${PORT}]=$(echo ${LINE} | grep -o 'host[0-9]*$')
      done <<<$(ls -l /sys/class/scsi_host | grep -F "${PCI}")
      # Walk the ports in numeric order: count total ports and connected ports,
      # recording each connected port's zero-based index in the ports file
      while read -r PORT; do
        ls -l /sys/block | grep -F -q "${PCI}/ata${PORT}" && ATTACH=1 || ATTACH=0
        PCMD=$(cat /sys/class/scsi_host/${HOSTPORTS[${PORT}]}/ahci_port_cmd)
        [ ${PCMD} = 0 ] && DUMMY=1 || DUMMY=0
        [ ${ATTACH} = 1 ] && CONPORTS="$((${CONPORTS} + 1))" && echo "$((${PORT} - 1))" >>"${TMP_PATH}/ports"
        [ ${DUMMY} = 1 ] # Do nothing for now
        NUMPORTS=$((${NUMPORTS} + 1))
      done <<<$(echo ${!HOSTPORTS[@]} | tr ' ' '\n' | sort -n)
      # Cap at 8 - SataPortMap uses one digit per controller
      [ ${NUMPORTS} -gt 8 ] && NUMPORTS=8
      [ ${CONPORTS} -gt 8 ] && CONPORTS=8
      echo -n "${NUMPORTS}" >>"${TMP_PATH}/drivesmax"
      echo -n "${CONPORTS}" >>"${TMP_PATH}/drivescon"
      # Append this controller's first-disk index as 2 hex digits to each map
      DISKIDXMAP=$DISKIDXMAP$(printf "%02x" $DISKIDXMAPIDX)
      let DISKIDXMAPIDX=$DISKIDXMAPIDX+$CONPORTS
      DISKIDXMAPMAX=$DISKIDXMAPMAX$(printf "%02x" $DISKIDXMAPIDXMAX)
      let DISKIDXMAPIDXMAX=$DISKIDXMAPIDXMAX+$NUMPORTS
      SATADRIVES=$((${SATADRIVES} + ${CONPORTS}))
    done
  fi
  # SAS Disks
  SASDRIVES=0
  if [ $(lspci -d ::107 | wc -l) -gt 0 ]; then
    for PCI in $(lspci -d ::107 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORT=$(ls -l /sys/class/scsi_host | grep "${PCI}" | awk -F'/' '{print $NF}' | sed 's/host//' | sort -n)
      PORTNUM=$(lsscsi -b | grep -v - | grep "\[${PORT}:" | wc -l)
      SASDRIVES=$((${SASDRIVES} + ${PORTNUM}))
    done
  fi
  # SCSI Disks
  SCSIDRIVES=0
  if [ $(lspci -d ::100 | wc -l) -gt 0 ]; then
    for PCI in $(lspci -d ::100 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORT=$(ls -l /sys/class/scsi_host | grep "${PCI}" | awk -F'/' '{print $NF}' | sed 's/host//' | sort -n)
      PORTNUM=$(lsscsi -b | grep -v - | grep "\[${PORT}:" | wc -l)
      SCSIDRIVES=$((${SCSIDRIVES} + ${PORTNUM}))
    done
  fi
  # Raid Disks
  RAIDDRIVES=0
  if [ $(lspci -d ::104 | wc -l) -gt 0 ]; then
    for PCI in $(lspci -d ::104 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORT=$(ls -l /sys/class/scsi_host | grep "${PCI}" | awk -F'/' '{print $NF}' | sed 's/host//' | sort -n)
      PORTNUM=$(lsscsi -b | grep -v - | grep "\[${PORT}:" | wc -l)
      RAIDDRIVES=$((${RAIDDRIVES} + ${PORTNUM}))
    done
  fi
  # USB Disks
  USBDRIVES=0
  if [[ -d "/sys/class/scsi_host" && $(ls -l /sys/class/scsi_host | grep usb | wc -l) -gt 0 ]]; then
    for PCI in $(lspci -d ::c03 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORT=$(ls -l /sys/class/scsi_host | grep "${PCI}" | awk -F'/' '{print $NF}' | sed 's/host//' | sort -n)
      PORTNUM=$(lsscsi -b | grep -v - | grep "\[${PORT}:" | wc -l)
      [ ${PORTNUM} -eq 0 ] && continue
      USBDRIVES=$((${USBDRIVES} + ${PORTNUM}))
    done
  fi
  # MMC Disks
  MMCDRIVES=0
  if [[ -d "/sys/class/mmc_host" && $(ls -l /sys/class/mmc_host | grep mmc_host | wc -l) -gt 0 ]]; then
    for PCI in $(lspci -d ::805 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORTNUM=$(ls -l /sys/block/mmc* | grep "${PCI}" | wc -l)
      [ ${PORTNUM} -eq 0 ] && continue
      MMCDRIVES=$((${MMCDRIVES} + ${PORTNUM}))
    done
  fi
  # NVMe Disks
  NVMEDRIVES=0
  if [ $(lspci -d ::108 | wc -l) -gt 0 ]; then
    for PCI in $(lspci -d ::108 | awk '{print $1}'); do
      NAME=$(lspci -s "${PCI}" | sed "s/\ .*://")
      PORT=$(ls -l /sys/class/nvme | grep "${PCI}" | awk -F'/' '{print $NF}' | sed 's/nvme//' | sort -n)
      PORTNUM=$(lsscsi -b | grep -v - | grep "\[N:${PORT}:" | wc -l)
      NVMEDRIVES=$((${NVMEDRIVES} + ${PORTNUM}))
    done
  fi
  # Disk Count for MaxDisks
  # DRIVES includes USB and MMC; HARDDRIVES counts only internal disk types.
  DRIVES=$((${SATADRIVES} + ${SASDRIVES} + ${SCSIDRIVES} + ${RAIDDRIVES} + ${USBDRIVES} + ${MMCDRIVES} + ${NVMEDRIVES}))
  HARDDRIVES=$((${SATADRIVES} + ${SASDRIVES} + ${SCSIDRIVES} + ${RAIDDRIVES} + ${NVMEDRIVES}))
  writeConfigKey "device.satadrives" "${SATADRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.sasdrives" "${SASDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.scsidrives" "${SCSIDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.raiddrives" "${RAIDDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.usbdrives" "${USBDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.mmcdrives" "${MMCDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.nvmedrives" "${NVMEDRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.drives" "${DRIVES}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.harddrives" "${HARDDRIVES}" "${USER_CONFIG_FILE}"
}
function getmapSelection() {
  # Decide how DSM should address the SATA ports: pick a remap mode and write
  # either cmdline.SataPortMap/DiskIdxMap or cmdline.sata_remap accordingly.
  # NOTE(review): relies on globals produced by getmap (DRIVES, DISKIDXMAP,
  # DISKIDXMAPMAX and the ports/drivesmax/drivescon scratch files) and on
  # BUS / LOADER_DISK / MACHINE being set by the caller - confirm.
  # Check for Sata Boot
  LASTDRIVE=0
  while read -r LINE; do
    # LINE is a zero-based connected-port index read from "${TMP_PATH}/ports"
    if [[ "${BUS}" != "usb" && ${LINE} -eq 0 && "${LOADER_DISK}" = "/dev/sda" ]]; then
      # Loader sits on the first SATA port: remap it past all data disks
      MAXDISKS="$(readModelKey "${MODEL}" "disks")"
      if [ ${MAXDISKS} -lt ${DRIVES} ]; then
        MAXDISKS=${DRIVES}
      fi
      echo -n "${LINE}>${MAXDISKS}:">>"${TMP_PATH}/remap"
    elif [ ! ${LINE} = ${LASTDRIVE} ]; then
      # Gap in port numbering: pull this port down to the next free slot
      echo -n "${LINE}>${LASTDRIVE}:">>"${TMP_PATH}/remap"
      LASTDRIVE=$((${LASTDRIVE} + 1))
    elif [ ${LINE} = ${LASTDRIVE} ]; then
      # Port already contiguous: just advance the expected index
      LASTDRIVE=$((${LINE} + 1))
    fi
  done <<<$(cat "${TMP_PATH}/ports")
  # Compute PortMap Options
  SATAPORTMAPMAX="$(awk '{print $1}' "${TMP_PATH}/drivesmax")"
  SATAPORTMAP="$(awk '{print $1}' "${TMP_PATH}/drivescon")"
  # sed drops the trailing ":" left by the accumulated "from>to:" pairs
  SATAREMAP="$(awk '{print $1}' "${TMP_PATH}/remap" | sed 's/.$//')"
  EXTERNALCONTROLLER="$(readConfigKey "device.externalcontroller" "${USER_CONFIG_FILE}")"
  # Show recommended Option to user
  if [[ -n "${SATAREMAP}" && "${EXTERNALCONTROLLER}" = "true" && "${MACHINE}" = "NATIVE" ]]; then
    writeConfigKey "arc.remap" "maxports" "${USER_CONFIG_FILE}"
  elif [[ -n "${SATAREMAP}" && "${EXTERNALCONTROLLER}" = "false" ]]; then
    writeConfigKey "arc.remap" "remap" "${USER_CONFIG_FILE}"
  else
    writeConfigKey "arc.remap" "acports" "${USER_CONFIG_FILE}"
  fi
  # Check Remap for correct config
  REMAP="$(readConfigKey "arc.remap" "${USER_CONFIG_FILE}")"
  # Write Map to config and show Map to User
  if [ "${REMAP}" = "acports" ]; then
    # Map by connected ports
    writeConfigKey "cmdline.SataPortMap" "${SATAPORTMAP}" "${USER_CONFIG_FILE}"
    writeConfigKey "cmdline.DiskIdxMap" "${DISKIDXMAP}" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.sata_remap" "${USER_CONFIG_FILE}"
  elif [ "${REMAP}" = "maxports" ]; then
    # Map by maximum ports per controller
    writeConfigKey "cmdline.SataPortMap" "${SATAPORTMAPMAX}" "${USER_CONFIG_FILE}"
    writeConfigKey "cmdline.DiskIdxMap" "${DISKIDXMAPMAX}" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.sata_remap" "${USER_CONFIG_FILE}"
  elif [ "${REMAP}" = "remap" ]; then
    # Use the explicit sata_remap string instead of port maps
    writeConfigKey "cmdline.sata_remap" "${SATAREMAP}" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.DiskIdxMap" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.SataPortMap" "${USER_CONFIG_FILE}"
  elif [ "${REMAP}" = "ahci" ]; then
    writeConfigKey "cmdline.ahci_remap" "${SATAREMAP}" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.DiskIdxMap" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.SataPortMap" "${USER_CONFIG_FILE}"
  elif [ "${REMAP}" = "user" ]; then
    # "user" keeps manual settings: clear every generated mapping key
    deleteConfigKey "cmdline.SataPortMap" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.DiskIdxMap" "${USER_CONFIG_FILE}"
    deleteConfigKey "cmdline.sata_remap" "${USER_CONFIG_FILE}"
  fi
}
# Check for Controller // 104=RAID // 106=SATA // 107=SAS // 100=SCSI // c03=USB
# Count each storage-controller class once; persist only non-zero counts.
SATACOUNT="$(lspci -d ::106 | wc -l)"
if [ "${SATACOUNT}" -gt 0 ]; then
  SATACONTROLLER=${SATACOUNT}
  writeConfigKey "device.satacontroller" "${SATACONTROLLER}" "${USER_CONFIG_FILE}"
fi
SASCOUNT="$(lspci -d ::107 | wc -l)"
if [ "${SASCOUNT}" -gt 0 ]; then
  SASCONTROLLER=${SASCOUNT}
  writeConfigKey "device.sascontroller" "${SASCONTROLLER}" "${USER_CONFIG_FILE}"
  # Any SAS/SCSI/RAID HBA marks the system as having an external controller
  writeConfigKey "device.externalcontroller" "true" "${USER_CONFIG_FILE}"
fi
SCSICOUNT="$(lspci -d ::100 | wc -l)"
if [ "${SCSICOUNT}" -gt 0 ]; then
  SCSICONTROLLER=${SCSICOUNT}
  writeConfigKey "device.scsicontroller" "${SCSICONTROLLER}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.externalcontroller" "true" "${USER_CONFIG_FILE}"
fi
RAIDCOUNT="$(lspci -d ::104 | wc -l)"
if [ "${RAIDCOUNT}" -gt 0 ]; then
  RAIDCONTROLLER=${RAIDCOUNT}
  writeConfigKey "device.raidcontroller" "${RAIDCONTROLLER}" "${USER_CONFIG_FILE}"
  writeConfigKey "device.externalcontroller" "true" "${USER_CONFIG_FILE}"
fi