Mirror of https://github.com/AuxXxilium/synology-wireguard.git
Synced 2025-02-21 17:50:00 +07:00
Commit 379aa2e583

.github/dependabot.yml (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: weekly
      day: monday
      time: "09:00"
    commit-message:
      prefix: fix
      prefix-development: chore
      include: scope
    labels:
      - gha
      - dependencies
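With this configuration, Dependabot opens weekly pull requests for GitHub Actions updates, labels them gha and dependencies, and prefixes commit messages with fix (chore for development dependencies). An illustrative, not verbatim, example of a resulting commit title:

    fix(deps): bump actions/checkout from 3 to 4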

.github/stale.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
# Configuration for probot-stale - https://github.com/probot/stale

# Number of days of inactivity before an issue becomes stale
daysUntilStale: 120

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 14

# Issues with these labels will never be considered stale
exemptLabels:
  - enhancement
  - pinned
  - security

# Label to use when marking an issue as stale
staleLabel: wontfix

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed in 14 days if no further activity occurs.
  If the issue is still valid, please add a respective comment to prevent this
  issue from being closed automatically. Thank you for your contributions.

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

.github/workflows/build.yml (vendored, new file, 228 lines)
@@ -0,0 +1,228 @@
# SPDX-FileCopyrightText: © Vegard IT GmbH (https://vegardit.com) and contributors
# SPDX-FileContributor: Sebastian Thomschke, Vegard IT GmbH
# SPDX-License-Identifier: Apache-2.0
#
# https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions
name: Build

on:
  push:
    branches:    # build all branches
      - '**'
    tags-ignore: # but don't build tags
      - '**'
    paths-ignore:
      - '**/*.adoc'
      - '**/*.md'
      - '.editorconfig'
      - '.git*'
      - '**/*.rst'
      - '.github/*.yml'
  pull_request:
    paths-ignore:
      - '**/*.adoc'
      - '**/*.md'
      - '.editorconfig'
      - '.git*'
      - '**/*.rst'
      - '.github/*.yml'
  workflow_dispatch:
    # https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/
    inputs:
      DSM_VERSION:
        description: 'DSM Version'
        required: true
        default: '7.1'

defaults:
  run:
    shell: bash

env:
  DSM_VERSION: '7.1'

jobs:

  ###########################################################
  build:
  ###########################################################
    strategy:
      fail-fast: false
      matrix:
        # see https://kb.synology.com/en-uk/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have
        PACKAGE_ARCH:
          - apollolake # e.g. DS918+
          - armada37xx
          - armada38x
          - avoton
          - braswell   # e.g. DS716+II
          - broadwell
          - broadwellnk
          - bromolow
          - cedarview
          - denverton  # e.g. RS2418+
          - geminilake
          - grantley
          - kvmx64
          - monaco
          - purley
          - r1000
          - rtd1296
          - rtd1619b
          - v1000

    runs-on: ubuntu-latest

    steps:
      - name: Git checkout
        uses: actions/checkout@v3 # https://github.com/actions/checkout

      - name: Set effective DSM version
        run: |
          set -eux

          if [[ -n "${{ github.event.inputs.DSM_VERSION }}" ]]; then
            echo "DSM_VERSION=${{ github.event.inputs.DSM_VERSION }}" >> $GITHUB_ENV
          fi

      - name: Build synobuild Docker image
        run: |
          set -eux

          docker build -t synobuild .

      - uses: actions/cache@v3
        if: github.ref_name == 'master'
        with:
          path: toolkit_tarballs/ds.*
          key: synology_toolkit_tarballs_DSM${{ env.DSM_VERSION }}_${{ matrix.PACKAGE_ARCH }}

      - uses: actions/cache@v3
        with:
          path: toolkit_tarballs/base_env-*
          key: synology_toolkit_tarballs_DSM${{ env.DSM_VERSION }}_base_env

      - name: Download Synology Toolkit tarballs
        run: |
          set -eux

          mkdir -p toolkit_tarballs
          pushd toolkit_tarballs/

          archive_base_url="https://global.download.synology.com/download/ToolChain/toolkit/$DSM_VERSION"

          archive=base_env-${DSM_VERSION}.txz
          [[ -f $archive ]] || curl -sSf "$archive_base_url/base/$archive" -o $archive

          archive=ds.${{ matrix.PACKAGE_ARCH }}-$DSM_VERSION.dev.txz
          [[ -f $archive ]] || curl -sSf "$archive_base_url/${{ matrix.PACKAGE_ARCH }}/$archive" -o $archive

          archive=ds.${{ matrix.PACKAGE_ARCH }}-$DSM_VERSION.env.txz
          [[ -f $archive ]] || curl -sSf "$archive_base_url/${{ matrix.PACKAGE_ARCH }}/$archive" -o $archive
          popd

      - name: Build SPK files
        continue-on-error: true
        run: |
          set -eux

          mkdir artifacts
          docker run --rm --privileged \
            --env PACKAGE_ARCH=${{ matrix.PACKAGE_ARCH }} \
            --env DSM_VER=${DSM_VERSION} \
            -v $(pwd)/artifacts:/result_spk \
            -v $(pwd)/toolkit_tarballs:/toolkit_tarballs \
            synobuild
          ls -l artifacts
          ls -l artifacts/*

          for spk in artifacts/*/*.spk; do
            sudo mv "$spk" "${spk%.spk}_DSM${DSM_VERSION}.spk"
          done

      - name: Upload SPKs
        if: github.ref == 'refs/heads/master'
        uses: actions/upload-artifact@v3
        with:
          name: artifacts-${{ matrix.PACKAGE_ARCH }}
          path: artifacts/**


  ###########################################################
  publish-release:
  ###########################################################
    runs-on: ubuntu-latest
    needs:
      - build
    if: github.ref == 'refs/heads/master'
    concurrency: publish-latest-release # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idconcurrency

    steps:
      - name: Git checkout
        # only required by "hub release create" to prevent "fatal: Not a git repository"
        uses: actions/checkout@v3 # https://github.com/actions/checkout

      - name: Download build artifacts
        uses: actions/download-artifact@v3

      - name: Set effective DSM version
        run: |
          set -eux

          if [[ -n "${{ github.event.inputs.DSM_VERSION }}" ]]; then
            echo "DSM_VERSION=${{ github.event.inputs.DSM_VERSION }}" >> $GITHUB_ENV
          fi

      - name: Set effective WireGuard version
        run: |
          set -eux

          spks=($(ls artifacts-*/WireGuard-*/*.spk))
          [[ ${spks[0]} =~ ([0-9.]+) ]] && echo "${BASH_REMATCH[1]}"
          echo "WG_VERSION=${BASH_REMATCH[1]}" >> $GITHUB_ENV

      - name: "Delete previous release"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -eux

          api_base_url="$GITHUB_API_URL/repos/$GITHUB_REPOSITORY"

          release_name=WireGuard-${WG_VERSION}-DSM${DSM_VERSION}

          # https://hub.github.com/hub-release.1.html
          hub release delete "${release_name}" || true

          # delete git tag
          tag_url="$api_base_url/git/refs/tags/${release_name}"
          if curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -fsLo /dev/null --head "$tag_url"; then
            echo "Deleting tag [$tag_url]..."
            curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -fsSL -X DELETE "$tag_url"
          fi

      - name: "Create release"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -eux

          release_name=WireGuard-${WG_VERSION}-DSM${DSM_VERSION}

          spks=($(ls artifacts-*/WireGuard-*/*.spk))

          # https://hub.github.com/hub-release.1.html
          hub release create "${release_name}" \
            --message "${release_name}" \
            --message "**Use at your own risk. We are not responsible if this breaks your NAS. Realistically it should not result in data loss, but it could render your NAS inaccessible if something goes wrong.**" \
            --message "" \
            --message "**Especially if you are not comfortable with removing the hard drives from your NAS and manually recovering the data, this is not for you.**" \
            --message "" \
            --message "See https://kb.synology.com/en-uk/DSM/tutorial/What_kind_of_CPU_does_my_NAS_have to find the right architecture of your NAS" \
            ${spks[@]/#/--attach }

      - name: "Delete intermediate build artifacts"
        uses: geekyeggo/delete-artifact@v2 # https://github.com/GeekyEggo/delete-artifact/
        with:
          name: "*"
          failOnError: false
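Since the workflow declares a workflow_dispatch trigger with a DSM_VERSION input, a build against another DSM release can also be started by hand. A minimal sketch using the GitHub CLI (assumes gh is installed and authenticated for this repository):

    gh workflow run build.yml -f DSM_VERSION=7.1

The supplied input then overrides the workflow-level DSM_VERSION default via the "Set effective DSM version" step.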

Makefile (1 line changed)
@@ -49,6 +49,7 @@ $(WIREGUARD_DIR)/src/Makefile: $(WIREGUARD_TAR)
 	tar -xf $(WIREGUARD_TAR)
 	patch $(WIREGUARD_DIR)/src/netlink.c $(ROOT_DIR)/patch/netlink.patch
 	patch $(WIREGUARD_DIR)/src/peerlookup.c $(ROOT_DIR)/patch/peerlookup.patch
+	patch $(WIREGUARD_DIR)/src/compat/siphash/siphash.c $(ROOT_DIR)/patch/siphash.patch
 ifeq ($(APPLY_MEMNEQ_PATCH), 1)
 	patch $(WIREGUARD_DIR)/src/compat/Kbuild.include $(ROOT_DIR)/patch/memneq.patch
 endif

README.rst
@@ -16,7 +16,6 @@ recover the data, this might not be for you.
 
 FAQ/Known issues
 ----------------
-* The releases in the releases page are for DSM 6 only. For DSM7, you'll need to follow the instructions in this readme and compile it yourself.
 * The ``Dns = x.x.x.x`` setting is unsupported. If you try it you will get the
   following message:
   ``/usr/local/bin/wg-quick: line 31: resolvconf: command not found``
@@ -41,7 +40,6 @@ Compatibility list
 All models marked *Is working* have been confirmed by users to work. If your
 model has the same platform as one of the working ones, chances are it will
 work for you too.
-**Note**: the releases in the releases page are for DSM 6 only. For DSM7, you'll need to follow the instructions in this readme and compile it yourself.
 
 =========== ========== =========== ===========================
 Model       Platform   DSM Version Is working?
@@ -68,13 +66,15 @@ DS220+      geminilake 6.2/7.0     Yes
 DS3617xs    broadwell  6.2         Yes
 DS414slim   armada370  *N/A*       No (Kernel version too old)
 DS415+      avoton     6.2         Yes
 DS418j      rtd1296    6.2/7.0     Yes
 DS418play   apollolake 6.2         Yes
 DS713+      cedarview  6.2         Yes
 DS716+II    braswell   6.2         Yes
 DS718+      apollolake 6.2         Yes
+DS720+      geminilake 7.0         Yes
-DS916+      braswell   6.2         Yes
+DS916+      braswell   6.2/7.0     Yes
 DS918+      apollolake 6.2         Yes
+RS1221+     v1000      7.0         Yes
 RS214       armada370  *N/A*       No (Kernel version too old)
 RS816       armada38x  6.2         Yes
 Virtual DSM kvmx64     6.2/7.0     Yes
@@ -185,6 +185,7 @@ For the DS218j that I have, the complete command looks like this:
 
 .. code-block:: bash
 
+   mkdir $(pwd)/artifacts
    sudo docker run --rm --privileged --env PACKAGE_ARCH=armada38x --env DSM_VER=6.2 -v $(pwd)/artifacts:/result_spk synobuild
 
 If everything worked you should have a directory called ``artifacts`` that
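Pre-creating the artifacts directory (the added mkdir line) likely avoids Docker creating the bind-mount target on the host as a root-owned directory.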

build.sh (30 lines changed)
@@ -87,11 +87,31 @@ if [ ! -d "$build_env" ]; then
     # Ensure the installed toolchain has support for CA signed certificates.
     # Without this wget on https:// will fail
     cp /etc/ssl/certs/ca-certificates.crt "$build_env/etc/ssl/certs/"
 
-    # Add patched version of DST Root CA X3 certificate
-    wget -O DSTRootCAX3_Extended.crt "https://crt.sh/?d=8395"
-    sed -i "s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/g" DSTRootCAX3_Extended.crt
-    cat DSTRootCAX3_Extended.crt >> "$build_env/etc/ssl/certs/ca-certificates.crt"
+    # workaround for https://github.com/runfalk/synology-wireguard/issues/109
+    # Add patched version of DST Root CA X3 certificate https://crt.sh/?d=8395
+    cat <<EOF >> "$build_env/etc/ssl/certs/ca-certificates.crt"
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTI0MDkzMDE4MTQwM1ow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+EOF
 fi
 
 # Patch WireGuard to use its own included memneq implementation if architecture
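The embedded PEM block is the IdenTrust DST Root CA X3 certificate with its notAfter date rewritten from the original 2021-09-30 expiry to 2024-09-30, the same edit the removed sed performed on the crt.sh download. To double-check the expiry of the embedded certificate, a quick sketch with stock openssl (the file name is illustrative; save the PEM block to it first):

    openssl x509 -in DSTRootCAX3_Extended.crt -noout -enddate
    # prints: notAfter=Sep 30 18:14:03 2024 GMT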

patch/siphash.patch (new file, 84 lines)
@@ -0,0 +1,84 @@
--- a/src/compat/siphash/siphash.c
+++ b/src/compat/siphash/siphash.c
@@ -77,11 +77,11 @@ u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
 				  bytemask_from_count(left)));
 #else
 	switch (left) {
-	case 7: b |= ((u64)end[6]) << 48; fallthrough;
-	case 6: b |= ((u64)end[5]) << 40; fallthrough;
-	case 5: b |= ((u64)end[4]) << 32; fallthrough;
+	case 7: b |= ((u64)end[6]) << 48;
+	case 6: b |= ((u64)end[5]) << 40;
+	case 5: b |= ((u64)end[4]) << 32;
 	case 4: b |= le32_to_cpup(data); break;
-	case 3: b |= ((u64)end[2]) << 16; fallthrough;
+	case 3: b |= ((u64)end[2]) << 16;
 	case 2: b |= le16_to_cpup(data); break;
 	case 1: b |= end[0];
 	}
@@ -109,11 +109,11 @@ u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
 				  bytemask_from_count(left)));
 #else
 	switch (left) {
-	case 7: b |= ((u64)end[6]) << 48; fallthrough;
-	case 6: b |= ((u64)end[5]) << 40; fallthrough;
-	case 5: b |= ((u64)end[4]) << 32; fallthrough;
+	case 7: b |= ((u64)end[6]) << 48;
+	case 6: b |= ((u64)end[5]) << 40;
+	case 5: b |= ((u64)end[4]) << 32;
 	case 4: b |= get_unaligned_le32(end); break;
-	case 3: b |= ((u64)end[2]) << 16; fallthrough;
+	case 3: b |= ((u64)end[2]) << 16;
 	case 2: b |= get_unaligned_le16(end); break;
 	case 1: b |= end[0];
 	}
@@ -269,11 +269,11 @@ u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
 				  bytemask_from_count(left)));
 #else
 	switch (left) {
-	case 7: b |= ((u64)end[6]) << 48; fallthrough;
-	case 6: b |= ((u64)end[5]) << 40; fallthrough;
-	case 5: b |= ((u64)end[4]) << 32; fallthrough;
+	case 7: b |= ((u64)end[6]) << 48;
+	case 6: b |= ((u64)end[5]) << 40;
+	case 5: b |= ((u64)end[4]) << 32;
 	case 4: b |= le32_to_cpup(data); break;
-	case 3: b |= ((u64)end[2]) << 16; fallthrough;
+	case 3: b |= ((u64)end[2]) << 16;
 	case 2: b |= le16_to_cpup(data); break;
 	case 1: b |= end[0];
 	}
@@ -301,11 +301,11 @@ u32 __hsiphash_unaligned(const void *data, size_t len,
 				  bytemask_from_count(left)));
 #else
 	switch (left) {
-	case 7: b |= ((u64)end[6]) << 48; fallthrough;
-	case 6: b |= ((u64)end[5]) << 40; fallthrough;
-	case 5: b |= ((u64)end[4]) << 32; fallthrough;
+	case 7: b |= ((u64)end[6]) << 48;
+	case 6: b |= ((u64)end[5]) << 40;
+	case 5: b |= ((u64)end[4]) << 32;
 	case 4: b |= get_unaligned_le32(end); break;
-	case 3: b |= ((u64)end[2]) << 16; fallthrough;
+	case 3: b |= ((u64)end[2]) << 16;
 	case 2: b |= get_unaligned_le16(end); break;
 	case 1: b |= end[0];
 	}
@@ -426,7 +426,7 @@ u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
 		v0 ^= m;
 	}
 	switch (left) {
-	case 3: b |= ((u32)end[2]) << 16; fallthrough;
+	case 3: b |= ((u32)end[2]) << 16;
 	case 2: b |= le16_to_cpup(data); break;
 	case 1: b |= end[0];
 	}
@@ -448,7 +448,7 @@ u32 __hsiphash_unaligned(const void *data, size_t len,
 		v0 ^= m;
 	}
 	switch (left) {
-	case 3: b |= ((u32)end[2]) << 16; fallthrough;
+	case 3: b |= ((u32)end[2]) << 16;
 	case 2: b |= get_unaligned_le16(end); break;
 	case 1: b |= end[0];
 	}
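This patch strips the fallthrough; annotations from the byte-tail switch statements. The fallthrough pseudo-keyword is a macro that only exists in newer kernel headers (introduced around Linux 5.4), so the older kernels in Synology's toolchains fail to compile it; removing the annotation keeps the intended case fall-through behavior while restoring compatibility. To sanity-check the patch against an extracted WireGuard source tree before a full build, a sketch mirroring the Makefile's patch invocation above (the directory name is illustrative):

    patch --dry-run wireguard-linux-compat-*/src/compat/siphash/siphash.c patch/siphash.patch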

@@ -13,7 +13,9 @@ ARCHS=(
     "geminilake"
     "kvmx64"
     "monaco"
+    "r1000"
     "rtd1296"
+    "rtd1619b"
     "x64"
 )
 
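r1000 and rtd1619b are the same two architectures added to the PACKAGE_ARCH matrix in build.yml above; this list evidently has to be kept in sync with the CI matrix.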