mirror of
https://github.com/community-scripts/ProxmoxVE.git
synced 2026-05-02 06:55:58 +00:00
Compare commits
17 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a3e4980344 | |||
| cf391086e5 | |||
| bc72ce83ce | |||
| 9eee1a7f95 | |||
| ecd1e29df5 | |||
| b556b5f8c6 | |||
| 0dbee93410 | |||
| 1a7d1da029 | |||
| f6ccf8be5e | |||
| ec81640d67 | |||
| 03a301d736 | |||
| be81d6255e | |||
| c9da2daec2 | |||
| 9015023e8c | |||
| e2a51d4941 | |||
| b4e0bb0686 | |||
| 1109fe4b0d |
+28
-1
@@ -448,11 +448,38 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
|||||||
|
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
|
## 2026-05-01
|
||||||
|
|
||||||
|
### 🆕 New Scripts
|
||||||
|
|
||||||
|
- SoulSync ([#14124](https://github.com/community-scripts/ProxmoxVE/pull/14124))
|
||||||
|
- Teable ([#14125](https://github.com/community-scripts/ProxmoxVE/pull/14125))
|
||||||
|
|
||||||
|
### 🚀 Updated Scripts
|
||||||
|
|
||||||
|
- #### 🐞 Bug Fixes
|
||||||
|
|
||||||
|
- Step ca update [@heinemannj](https://github.com/heinemannj) ([#14058](https://github.com/community-scripts/ProxmoxVE/pull/14058))
|
||||||
|
- paperless-ngx: refresh NLTK data on update [@kurtislanderson](https://github.com/kurtislanderson) ([#14144](https://github.com/community-scripts/ProxmoxVE/pull/14144))
|
||||||
|
- [Pelican Panel] stop deleting the public storage [@LetterN](https://github.com/LetterN) ([#14145](https://github.com/community-scripts/ProxmoxVE/pull/14145))
|
||||||
|
|
||||||
|
- #### 🔧 Refactor
|
||||||
|
|
||||||
|
- Mail-Archiver: update dependencies [@tremor021](https://github.com/tremor021) ([#14152](https://github.com/community-scripts/ProxmoxVE/pull/14152))
|
||||||
|
|
||||||
## 2026-04-30
|
## 2026-04-30
|
||||||
|
|
||||||
### 🆕 New Scripts
|
### 🆕 New Scripts
|
||||||
|
|
||||||
- Neko ([#14121](https://github.com/community-scripts/ProxmoxVE/pull/14121))
|
- Nagios ([#14126](https://github.com/community-scripts/ProxmoxVE/pull/14126))
|
||||||
|
- Neko ([#14121](https://github.com/community-scripts/ProxmoxVE/pull/14121))
|
||||||
|
|
||||||
|
### 🚀 Updated Scripts
|
||||||
|
|
||||||
|
- #### 🐞 Bug Fixes
|
||||||
|
|
||||||
|
- alpine-docker: install openssl as core dependency | alpine-komodo: check & install openssl if missing [@MickLesk](https://github.com/MickLesk) ([#14134](https://github.com/community-scripts/ProxmoxVE/pull/14134))
|
||||||
|
- endurain: update source references to Codeberg [@MickLesk](https://github.com/MickLesk) ([#14128](https://github.com/community-scripts/ProxmoxVE/pull/14128))
|
||||||
|
|
||||||
### 💾 Core
|
### 💾 Core
|
||||||
|
|
||||||
|
|||||||
+3
-3
@@ -3,7 +3,7 @@ source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxV
|
|||||||
# Copyright (c) 2021-2026 community-scripts ORG
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
# Author: johanngrobe
|
# Author: johanngrobe
|
||||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
# Source: https://github.com/joaovitoriasilva/endurain
|
# Source: https://codeberg.org/endurain-project/endurain
|
||||||
|
|
||||||
APP="Endurain"
|
APP="Endurain"
|
||||||
var_tags="${var_tags:-sport;social-media}"
|
var_tags="${var_tags:-sport;social-media}"
|
||||||
@@ -28,7 +28,7 @@ function update_script() {
|
|||||||
msg_error "No ${APP} installation found!"
|
msg_error "No ${APP} installation found!"
|
||||||
exit 233
|
exit 233
|
||||||
fi
|
fi
|
||||||
if check_for_gh_release "endurain" "endurain-project/endurain"; then
|
if check_for_codeberg_release "endurain" "endurain-project/endurain"; then
|
||||||
msg_info "Stopping Service"
|
msg_info "Stopping Service"
|
||||||
systemctl stop endurain
|
systemctl stop endurain
|
||||||
msg_ok "Stopped Service"
|
msg_ok "Stopped Service"
|
||||||
@@ -38,7 +38,7 @@ function update_script() {
|
|||||||
cp /opt/endurain/frontend/app/dist/env.js /opt/endurain.env.js
|
cp /opt/endurain/frontend/app/dist/env.js /opt/endurain.env.js
|
||||||
msg_ok "Created Backup"
|
msg_ok "Created Backup"
|
||||||
|
|
||||||
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
|
CLEAN_INSTALL=1 fetch_and_deploy_codeberg_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
|
||||||
|
|
||||||
msg_info "Preparing Update"
|
msg_info "Preparing Update"
|
||||||
cd /opt/endurain
|
cd /opt/endurain
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
_ __ _
|
||||||
|
/ | / /___ _____ _(_)___ _____
|
||||||
|
/ |/ / __ `/ __ `/ / __ \/ ___/
|
||||||
|
/ /| / /_/ / /_/ / / /_/ (__ )
|
||||||
|
/_/ |_/\__,_/\__, /_/\____/____/
|
||||||
|
/____/
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
_____ _______
|
||||||
|
/ ___/____ __ __/ / ___/__ ______ _____
|
||||||
|
\__ \/ __ \/ / / / /\__ \/ / / / __ \/ ___/
|
||||||
|
___/ / /_/ / /_/ / /___/ / /_/ / / / / /__
|
||||||
|
/____/\____/\__,_/_//____/\__, /_/ /_/\___/
|
||||||
|
/____/
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
______ __ __
|
||||||
|
/_ __/__ ____ _/ /_ / /__
|
||||||
|
/ / / _ \/ __ `/ __ \/ / _ \
|
||||||
|
/ / / __/ /_/ / /_/ / / __/
|
||||||
|
/_/ \___/\__,_/_.___/_/\___/
|
||||||
|
|
||||||
@@ -28,6 +28,8 @@ function update_script() {
|
|||||||
exit
|
exit
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
ensure_dependencies libgssapi-krb5-2
|
||||||
|
|
||||||
if check_for_gh_release "mail-archiver" "s1t5/mail-archiver"; then
|
if check_for_gh_release "mail-archiver" "s1t5/mail-archiver"; then
|
||||||
msg_info "Stopping Mail-Archiver"
|
msg_info "Stopping Mail-Archiver"
|
||||||
systemctl stop mail-archiver
|
systemctl stop mail-archiver
|
||||||
|
|||||||
@@ -0,0 +1,90 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: CanbiZ (MickLesk)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/NagiosEnterprises/nagioscore
|
||||||
|
|
||||||
|
APP="Nagios"
|
||||||
|
var_tags="${var_tags:-monitoring;alerts;infrastructure}"
|
||||||
|
var_cpu="${var_cpu:-2}"
|
||||||
|
var_ram="${var_ram:-2048}"
|
||||||
|
var_disk="${var_disk:-20}"
|
||||||
|
var_os="${var_os:-debian}"
|
||||||
|
var_version="${var_version:-13}"
|
||||||
|
var_unprivileged="${var_unprivileged:-1}"
|
||||||
|
|
||||||
|
header_info "$APP"
|
||||||
|
variables
|
||||||
|
color
|
||||||
|
catch_errors
|
||||||
|
|
||||||
|
function update_script() {
|
||||||
|
header_info
|
||||||
|
check_container_storage
|
||||||
|
check_container_resources
|
||||||
|
|
||||||
|
if [[ ! -f /usr/local/nagios/etc/nagios.cfg ]]; then
|
||||||
|
msg_error "No ${APP} Installation Found!"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
msg_info "Backing up Configuration"
|
||||||
|
cp -a /usr/local/nagios/etc /opt/nagios-etc-backup
|
||||||
|
msg_ok "Backed up Configuration"
|
||||||
|
|
||||||
|
if check_for_gh_release "nagios" "NagiosEnterprises/nagioscore"; then
|
||||||
|
msg_info "Stopping Nagios"
|
||||||
|
systemctl stop nagios
|
||||||
|
msg_ok "Stopped Nagios"
|
||||||
|
|
||||||
|
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "nagios" "NagiosEnterprises/nagioscore" "tarball"
|
||||||
|
|
||||||
|
msg_info "Building Nagios Core"
|
||||||
|
cd /opt/nagios
|
||||||
|
$STD ./configure --with-httpd-conf=/etc/apache2/sites-enabled
|
||||||
|
$STD make all
|
||||||
|
$STD make install-groups-users
|
||||||
|
usermod -a -G nagios www-data
|
||||||
|
$STD make install
|
||||||
|
$STD make install-daemoninit
|
||||||
|
$STD make install-commandmode
|
||||||
|
$STD make install-webconf
|
||||||
|
$STD a2enmod rewrite
|
||||||
|
$STD a2enmod cgi
|
||||||
|
msg_ok "Built Nagios Core"
|
||||||
|
|
||||||
|
msg_info "Starting Nagios"
|
||||||
|
systemctl restart apache2
|
||||||
|
systemctl start nagios
|
||||||
|
msg_ok "Started Nagios"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if check_for_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins"; then
|
||||||
|
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins" "tarball"
|
||||||
|
msg_info "Building Nagios Plugins"
|
||||||
|
cd /opt/nagios-plugins
|
||||||
|
$STD ./tools/setup
|
||||||
|
$STD ./configure
|
||||||
|
$STD make
|
||||||
|
$STD make install
|
||||||
|
msg_ok "Built Nagios Plugins"
|
||||||
|
fi
|
||||||
|
|
||||||
|
msg_info "Restoring Configuration"
|
||||||
|
rm -rf /usr/local/nagios/etc
|
||||||
|
cp -a /opt/nagios-etc-backup /usr/local/nagios/etc
|
||||||
|
rm -rf /opt/nagios-etc-backup
|
||||||
|
msg_ok "Restored Configuration"
|
||||||
|
msg_ok "Updated successfully!"
|
||||||
|
exit
|
||||||
|
}
|
||||||
|
|
||||||
|
start
|
||||||
|
build_container
|
||||||
|
description
|
||||||
|
|
||||||
|
msg_ok "Completed Successfully!\n"
|
||||||
|
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||||
|
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||||
|
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}/nagios${CL}"
|
||||||
@@ -164,6 +164,14 @@ function update_script() {
|
|||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
msg_info "Updating NLTK Data"
|
||||||
|
cd /opt/paperless
|
||||||
|
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data snowball_data
|
||||||
|
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data stopwords
|
||||||
|
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt_tab ||
|
||||||
|
$STD uv run python -m nltk.downloader -d /usr/share/nltk_data punkt
|
||||||
|
msg_ok "Updated NLTK Data"
|
||||||
|
|
||||||
msg_info "Starting all Paperless-ngx Services"
|
msg_info "Starting all Paperless-ngx Services"
|
||||||
systemctl start paperless-consumer paperless-webserver paperless-scheduler paperless-task-queue
|
systemctl start paperless-consumer paperless-webserver paperless-scheduler paperless-task-queue
|
||||||
sleep 1
|
sleep 1
|
||||||
|
|||||||
+11
-5
@@ -45,15 +45,21 @@ function update_script() {
|
|||||||
$STD php artisan down
|
$STD php artisan down
|
||||||
msg_ok "Stopped Service"
|
msg_ok "Stopped Service"
|
||||||
|
|
||||||
cp -r /opt/pelican-panel/.env /opt/
|
cp -a /opt/pelican-panel/.env /opt/backup
|
||||||
|
cp -a /opt/pelican-panel/storage/app/public /opt/backup/storage/app/
|
||||||
|
|
||||||
SQLITE_INSTALL=$(ls /opt/pelican-panel/database/*.sqlite 1>/dev/null 2>&1 && echo "true" || echo "false")
|
SQLITE_INSTALL=$(ls /opt/pelican-panel/database/*.sqlite 1>/dev/null 2>&1 && echo "true" || echo "false")
|
||||||
$SQLITE_INSTALL && cp -r /opt/pelican-panel/database/*.sqlite /opt/
|
$SQLITE_INSTALL && cp -r /opt/pelican-panel/database/*.sqlite /opt/backup
|
||||||
rm -rf * .*
|
|
||||||
|
find /opt/pelican-panel -mindepth 1 -maxdepth 1 ! -name 'backup' ! -name 'plugins' -exec rm -rf {} +
|
||||||
|
|
||||||
fetch_and_deploy_gh_release "pelican-panel" "pelican-dev/panel" "prebuild" "latest" "/opt/pelican-panel" "panel.tar.gz"
|
fetch_and_deploy_gh_release "pelican-panel" "pelican-dev/panel" "prebuild" "latest" "/opt/pelican-panel" "panel.tar.gz"
|
||||||
|
|
||||||
msg_info "Updating Pelican Panel"
|
msg_info "Updating Pelican Panel"
|
||||||
mv /opt/.env /opt/pelican-panel/
|
cp -a /opt/backup/.env /opt/pelican-panel/
|
||||||
$SQLITE_INSTALL && mv /opt/*.sqlite /opt/pelican-panel/database/
|
$SQLITE_INSTALL && mv /opt/backup/*.sqlite /opt/pelican-panel/database/
|
||||||
|
cp -a /opt/backup/storage/app/public /opt/pelican-panel/storage/app/
|
||||||
|
|
||||||
$STD composer install --no-dev --optimize-autoloader --no-interaction
|
$STD composer install --no-dev --optimize-autoloader --no-interaction
|
||||||
$STD php artisan p:environment:setup
|
$STD php artisan p:environment:setup
|
||||||
$STD php artisan view:clear
|
$STD php artisan view:clear
|
||||||
|
|||||||
@@ -0,0 +1,68 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: MickLesk (CanbiZ)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/Nezreka/SoulSync
|
||||||
|
|
||||||
|
APP="SoulSync"
|
||||||
|
var_tags="${var_tags:-music;automation;media}"
|
||||||
|
var_cpu="${var_cpu:-2}"
|
||||||
|
var_ram="${var_ram:-2048}"
|
||||||
|
var_disk="${var_disk:-8}"
|
||||||
|
var_os="${var_os:-debian}"
|
||||||
|
var_version="${var_version:-13}"
|
||||||
|
var_unprivileged="${var_unprivileged:-1}"
|
||||||
|
|
||||||
|
header_info "$APP"
|
||||||
|
variables
|
||||||
|
color
|
||||||
|
catch_errors
|
||||||
|
|
||||||
|
function update_script() {
|
||||||
|
header_info
|
||||||
|
check_container_storage
|
||||||
|
check_container_resources
|
||||||
|
|
||||||
|
if [[ ! -f ~/.soulsync ]]; then
|
||||||
|
msg_error "No ${APP} Installation Found!"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
if check_for_gh_release "soulsync" "Nezreka/SoulSync"; then
|
||||||
|
msg_info "Stopping Service"
|
||||||
|
systemctl stop soulsync
|
||||||
|
msg_ok "Stopped Service"
|
||||||
|
|
||||||
|
msg_info "Backing up Data"
|
||||||
|
mv /opt/soulsync/config /opt/soulsync-config.bak
|
||||||
|
mv /opt/soulsync/data /opt/soulsync-data.bak
|
||||||
|
msg_ok "Backed up Data"
|
||||||
|
|
||||||
|
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "soulsync" "Nezreka/SoulSync" "tarball"
|
||||||
|
|
||||||
|
msg_info "Updating Python Dependencies"
|
||||||
|
cd /opt/soulsync
|
||||||
|
$STD uv venv --clear /opt/soulsync/.venv --python 3.11
|
||||||
|
$STD /opt/soulsync/.venv/bin/pip install -r requirements.txt
|
||||||
|
msg_ok "Updated Python Dependencies"
|
||||||
|
|
||||||
|
mv /opt/soulsync-config.bak /opt/soulsync/config
|
||||||
|
mv /opt/soulsync-data.bak /opt/soulsync/data
|
||||||
|
|
||||||
|
msg_info "Starting Service"
|
||||||
|
systemctl start soulsync
|
||||||
|
msg_ok "Started Service"
|
||||||
|
msg_ok "Updated ${APP}"
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
}
|
||||||
|
|
||||||
|
start
|
||||||
|
build_container
|
||||||
|
description
|
||||||
|
|
||||||
|
msg_ok "Completed Successfully!\n"
|
||||||
|
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||||
|
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||||
|
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:8008${CL}"
|
||||||
@@ -30,6 +30,12 @@ function update_script() {
|
|||||||
msg_info "Updating step-ca and step-cli"
|
msg_info "Updating step-ca and step-cli"
|
||||||
$STD apt update
|
$STD apt update
|
||||||
$STD apt upgrade -y step-ca step-cli
|
$STD apt upgrade -y step-ca step-cli
|
||||||
|
|
||||||
|
# Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)
|
||||||
|
STEPBIN="$(which step)"
|
||||||
|
rm -f "$STEPBIN"
|
||||||
|
cp -f "$(which step-cli)" "$STEPBIN"
|
||||||
|
|
||||||
$STD systemctl restart step-ca
|
$STD systemctl restart step-ca
|
||||||
msg_ok "Updated step-ca and step-cli"
|
msg_ok "Updated step-ca and step-cli"
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,82 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/build.func)
|
||||||
|
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: MickLesk (CanbiZ)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/teableio/teable
|
||||||
|
|
||||||
|
APP="Teable"
|
||||||
|
var_tags="${var_tags:-database;no-code;spreadsheet}"
|
||||||
|
var_cpu="${var_cpu:-4}"
|
||||||
|
var_ram="${var_ram:-10240}"
|
||||||
|
var_disk="${var_disk:-25}"
|
||||||
|
var_os="${var_os:-debian}"
|
||||||
|
var_version="${var_version:-13}"
|
||||||
|
var_unprivileged="${var_unprivileged:-1}"
|
||||||
|
|
||||||
|
header_info "$APP"
|
||||||
|
variables
|
||||||
|
color
|
||||||
|
catch_errors
|
||||||
|
|
||||||
|
function update_script() {
|
||||||
|
header_info
|
||||||
|
check_container_storage
|
||||||
|
check_container_resources
|
||||||
|
|
||||||
|
if [[ ! -d /opt/teable ]]; then
|
||||||
|
msg_error "No ${APP} Installation Found!"
|
||||||
|
exit
|
||||||
|
fi
|
||||||
|
|
||||||
|
if check_for_gh_release "teable" "teableio/teable"; then
|
||||||
|
msg_info "Stopping Service"
|
||||||
|
systemctl stop teable
|
||||||
|
msg_ok "Stopped Service"
|
||||||
|
|
||||||
|
msg_info "Backing up Configuration"
|
||||||
|
cp /opt/teable/.env /opt/teable.env.bak
|
||||||
|
msg_ok "Backed up Configuration"
|
||||||
|
|
||||||
|
CLEAN_INSTALL=1 fetch_and_deploy_gh_release "teable" "teableio/teable" "tarball"
|
||||||
|
|
||||||
|
msg_info "Restoring Configuration"
|
||||||
|
mv /opt/teable.env.bak /opt/teable/.env
|
||||||
|
msg_ok "Restored Configuration"
|
||||||
|
|
||||||
|
msg_info "Rebuilding Teable"
|
||||||
|
cd /opt/teable
|
||||||
|
TEABLE_VERSION=$(cat ~/.teable)
|
||||||
|
echo "NEXT_PUBLIC_BUILD_VERSION=\"${TEABLE_VERSION}\"" >>apps/nextjs-app/.env
|
||||||
|
export HUSKY=0
|
||||||
|
export NODE_OPTIONS="--max-old-space-size=8192"
|
||||||
|
$STD pnpm install --frozen-lockfile
|
||||||
|
$STD pnpm -F @teable/db-main-prisma prisma-generate --schema ./prisma/postgres/schema.prisma
|
||||||
|
NODE_ENV=production NEXT_BUILD_ENV_TYPECHECK=false \
|
||||||
|
$STD pnpm -r --filter '!playground' run build
|
||||||
|
msg_ok "Rebuilt Teable"
|
||||||
|
|
||||||
|
msg_info "Running Database Migrations"
|
||||||
|
source /opt/teable/.env
|
||||||
|
$STD pnpm -F @teable/db-main-prisma prisma-migrate deploy --schema ./prisma/postgres/schema.prisma
|
||||||
|
msg_ok "Ran Database Migrations"
|
||||||
|
|
||||||
|
msg_info "Starting Service"
|
||||||
|
systemctl start teable
|
||||||
|
msg_ok "Started Service"
|
||||||
|
msg_ok "Updated successfully!"
|
||||||
|
else
|
||||||
|
msg_ok "No update available."
|
||||||
|
fi
|
||||||
|
exit
|
||||||
|
}
|
||||||
|
|
||||||
|
start
|
||||||
|
build_container
|
||||||
|
description
|
||||||
|
|
||||||
|
msg_ok "Completed Successfully!\n"
|
||||||
|
echo -e "${CREATING}${GN}${APP} setup has been successfully initialized!${CL}"
|
||||||
|
echo -e "${INFO}${YW} Access it using the following URL:${CL}"
|
||||||
|
echo -e "${TAB}${GATEWAY}${BGN}http://${IP}:3000${CL}"
|
||||||
@@ -14,7 +14,7 @@ network_check
|
|||||||
update_os
|
update_os
|
||||||
|
|
||||||
msg_info "Installing Dependencies"
|
msg_info "Installing Dependencies"
|
||||||
$STD apk add tzdata
|
$STD apk add tzdata openssl
|
||||||
msg_ok "Installed Dependencies"
|
msg_ok "Installed Dependencies"
|
||||||
|
|
||||||
msg_info "Installing Docker"
|
msg_info "Installing Docker"
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
# Copyright (c) 2021-2026 community-scripts ORG
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
# Author: johanngrobe
|
# Author: johanngrobe
|
||||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
# Source: https://github.com/joaovitoriasilva/endurain
|
# Source: https://codeberg.org/endurain-project/endurain
|
||||||
|
|
||||||
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||||
color
|
color
|
||||||
@@ -21,7 +21,7 @@ PYTHON_VERSION="3.13" setup_uv
|
|||||||
NODE_VERSION="24" setup_nodejs
|
NODE_VERSION="24" setup_nodejs
|
||||||
PG_VERSION="17" PG_MODULES="postgis" setup_postgresql
|
PG_VERSION="17" PG_MODULES="postgis" setup_postgresql
|
||||||
PG_DB_NAME="enduraindb" PG_DB_USER="endurain" setup_postgresql_db
|
PG_DB_NAME="enduraindb" PG_DB_USER="endurain" setup_postgresql_db
|
||||||
fetch_and_deploy_gh_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
|
fetch_and_deploy_codeberg_release "endurain" "endurain-project/endurain" "tarball" "latest" "/opt/endurain"
|
||||||
|
|
||||||
msg_info "Setting up Endurain"
|
msg_info "Setting up Endurain"
|
||||||
cd /opt/endurain
|
cd /opt/endurain
|
||||||
|
|||||||
@@ -22,7 +22,8 @@ setup_deb822_repo \
|
|||||||
"main"
|
"main"
|
||||||
$STD apt install -y \
|
$STD apt install -y \
|
||||||
dotnet-sdk-10.0 \
|
dotnet-sdk-10.0 \
|
||||||
aspnetcore-runtime-8.0
|
aspnetcore-runtime-8.0 \
|
||||||
|
libgssapi-krb5-2
|
||||||
msg_ok "Installed Dependencies"
|
msg_ok "Installed Dependencies"
|
||||||
|
|
||||||
PG_VERSION="17" setup_postgresql
|
PG_VERSION="17" setup_postgresql
|
||||||
|
|||||||
@@ -0,0 +1,78 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: CanbiZ (MickLesk)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/NagiosEnterprises/nagioscore
|
||||||
|
|
||||||
|
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||||
|
color
|
||||||
|
verb_ip6
|
||||||
|
catch_errors
|
||||||
|
setting_up_container
|
||||||
|
network_check
|
||||||
|
update_os
|
||||||
|
|
||||||
|
msg_info "Installing Dependencies"
|
||||||
|
$STD apt install -y \
|
||||||
|
autoconf \
|
||||||
|
automake \
|
||||||
|
build-essential \
|
||||||
|
bc \
|
||||||
|
dc \
|
||||||
|
gawk \
|
||||||
|
gettext \
|
||||||
|
gperf \
|
||||||
|
libgd-dev \
|
||||||
|
libmcrypt-dev \
|
||||||
|
libnet-snmp-perl \
|
||||||
|
libssl-dev \
|
||||||
|
snmp \
|
||||||
|
apache2 \
|
||||||
|
apache2-utils
|
||||||
|
msg_ok "Installed Dependencies"
|
||||||
|
|
||||||
|
PHP_APACHE="YES" setup_php
|
||||||
|
|
||||||
|
fetch_and_deploy_gh_release "nagios" "NagiosEnterprises/nagioscore" "tarball"
|
||||||
|
|
||||||
|
msg_info "Building Nagios Core"
|
||||||
|
cd /opt/nagios
|
||||||
|
$STD ./configure --with-httpd-conf=/etc/apache2/sites-enabled
|
||||||
|
$STD make all
|
||||||
|
$STD make install-groups-users
|
||||||
|
usermod -a -G nagios www-data
|
||||||
|
$STD make install
|
||||||
|
$STD make install-daemoninit
|
||||||
|
$STD make install-commandmode
|
||||||
|
$STD make install-config
|
||||||
|
$STD make install-webconf
|
||||||
|
$STD a2enmod rewrite
|
||||||
|
$STD a2enmod cgi
|
||||||
|
msg_ok "Built Nagios Core"
|
||||||
|
|
||||||
|
fetch_and_deploy_gh_release "nagios-plugins" "nagios-plugins/nagios-plugins" "tarball"
|
||||||
|
|
||||||
|
msg_info "Building Nagios Plugins"
|
||||||
|
cd /opt/nagios-plugins
|
||||||
|
$STD ./tools/setup
|
||||||
|
$STD ./configure
|
||||||
|
$STD make
|
||||||
|
$STD make install
|
||||||
|
msg_ok "Built Nagios Plugins"
|
||||||
|
|
||||||
|
msg_info "Configuring Web Authentication"
|
||||||
|
$STD htpasswd -bc /usr/local/nagios/etc/htpasswd.users nagiosadmin nagiosadmin
|
||||||
|
chown root:www-data /usr/local/nagios/etc/htpasswd.users
|
||||||
|
chmod 640 /usr/local/nagios/etc/htpasswd.users
|
||||||
|
msg_ok "Configured Web Authentication"
|
||||||
|
|
||||||
|
msg_info "Starting Services"
|
||||||
|
systemctl enable -q apache2
|
||||||
|
systemctl restart apache2
|
||||||
|
systemctl enable -q --now nagios
|
||||||
|
msg_ok "Started Services"
|
||||||
|
|
||||||
|
motd_ssh
|
||||||
|
customize
|
||||||
|
cleanup_lxc
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: MickLesk (CanbiZ)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/Nezreka/SoulSync
|
||||||
|
|
||||||
|
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||||
|
color
|
||||||
|
verb_ip6
|
||||||
|
catch_errors
|
||||||
|
setting_up_container
|
||||||
|
network_check
|
||||||
|
update_os
|
||||||
|
|
||||||
|
msg_info "Installing Dependencies"
|
||||||
|
$STD apt install -y \
|
||||||
|
gcc \
|
||||||
|
libffi-dev \
|
||||||
|
libssl-dev \
|
||||||
|
libchromaprint-tools \
|
||||||
|
ffmpeg
|
||||||
|
msg_ok "Installed Dependencies"
|
||||||
|
|
||||||
|
UV_PYTHON="3.11" setup_uv
|
||||||
|
|
||||||
|
fetch_and_deploy_gh_release "soulsync" "Nezreka/SoulSync" "tarball"
|
||||||
|
|
||||||
|
msg_info "Setting up Application"
|
||||||
|
cd /opt/soulsync
|
||||||
|
$STD uv venv /opt/soulsync/.venv --python 3.11
|
||||||
|
$STD uv pip install -r requirements.txt --python /opt/soulsync/.venv/bin/python
|
||||||
|
mkdir -p /opt/soulsync/{config,data,logs}
|
||||||
|
msg_ok "Set up Application"
|
||||||
|
|
||||||
|
msg_info "Creating Service"
|
||||||
|
cat <<EOF >/etc/systemd/system/soulsync.service
|
||||||
|
[Unit]
|
||||||
|
Description=SoulSync Music Discovery
|
||||||
|
After=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=root
|
||||||
|
WorkingDirectory=/opt/soulsync
|
||||||
|
ExecStart=/opt/soulsync/.venv/bin/python web_server.py
|
||||||
|
Environment=PYTHONPATH=/opt/soulsync PYTHONUNBUFFERED=1 DATABASE_PATH=/opt/soulsync/data/music_library.db
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF
|
||||||
|
systemctl enable -q --now soulsync
|
||||||
|
msg_ok "Created Service"
|
||||||
|
|
||||||
|
motd_ssh
|
||||||
|
customize
|
||||||
|
cleanup_lxc
|
||||||
+220
-279
@@ -23,21 +23,34 @@ setup_deb822_repo \
|
|||||||
msg_info "Installing step-ca and step-cli"
|
msg_info "Installing step-ca and step-cli"
|
||||||
$STD apt install -y step-ca step-cli
|
$STD apt install -y step-ca step-cli
|
||||||
|
|
||||||
STEPHOME="/root/.step"
|
STEPPATH="/etc/step-ca"
|
||||||
export STEPPATH=/etc/step-ca
|
STEPHOME="/etc/step"
|
||||||
|
|
||||||
|
export STEPPATH=$STEPPATH
|
||||||
|
echo "export STEPPATH=${STEPPATH}" >> /etc/profile
|
||||||
export STEPHOME=$STEPHOME
|
export STEPHOME=$STEPHOME
|
||||||
|
echo "export STEPHOME=${STEPHOME}" >> /etc/profile
|
||||||
|
|
||||||
sed -i '1i export STEPPATH=/etc/step-ca' /etc/profile
|
mkdir -p "$STEPHOME"
|
||||||
sed -i '1i export STEPHOME=/root/.step' /etc/profile
|
|
||||||
|
|
||||||
setcap CAP_NET_BIND_SERVICE=+eip $(which step-ca)
|
# Patch for making $STD happy (/usr/bin/step is a symlink to /usr/bin/step-cli)
|
||||||
|
STEPBIN="$(which step)"
|
||||||
|
rm -f "$STEPBIN"
|
||||||
|
cp -f "$(which step-cli)" "$STEPBIN"
|
||||||
|
|
||||||
$STD useradd --user-group --system --home $(step path) --shell /bin/false step
|
# Low port-binding capabilities (ports < 1024)
|
||||||
|
# - Default step-ca listener port: 443
|
||||||
|
setcap CAP_NET_BIND_SERVICE=+eip "$(which step-ca)"
|
||||||
|
|
||||||
|
# Service User used by systemd step-ca.service
|
||||||
|
$STD useradd --user-group --system --home "$(step path)" --shell /bin/false step
|
||||||
msg_ok "Installed step-ca and step-cli"
|
msg_ok "Installed step-ca and step-cli"
|
||||||
|
|
||||||
DomainName="$(hostname -d)"
|
DomainName="$(hostname -d)"
|
||||||
|
|
||||||
PKIName="$(prompt_input "Enter PKIName" "MyHomePKI" 30)"
|
PKIName="$(prompt_input "Enter PKIName" "MyHomePKI" 30)"
|
||||||
|
PKICountry="$(prompt_input "Enter PKICountry" "DE" 30)"
|
||||||
|
PKIOrganizationalUnit="$(prompt_input "Enter PKIOrganizationalUnit" "MyHomeLab" 30)"
|
||||||
PKIProvisioner="$(prompt_input "Enter PKIProvisioner" "pki@$DomainName" 30)"
|
PKIProvisioner="$(prompt_input "Enter PKIProvisioner" "pki@$DomainName" 30)"
|
||||||
AcmeProvisioner="$(prompt_input "Enter AcmeProvisioner" "acme@$DomainName" 30)"
|
AcmeProvisioner="$(prompt_input "Enter AcmeProvisioner" "acme@$DomainName" 30)"
|
||||||
X509MinDur="$(prompt_input "Enter X509MinDur" "48h" 30)"
|
X509MinDur="$(prompt_input "Enter X509MinDur" "48h" 30)"
|
||||||
@@ -45,11 +58,15 @@ X509MaxDur="$(prompt_input "Enter X509MaxDur" "87600h" 30)"
|
|||||||
X509DefaultDur="$(prompt_input "Enter X509DefaultDur" "168h" 30)"
|
X509DefaultDur="$(prompt_input "Enter X509DefaultDur" "168h" 30)"
|
||||||
|
|
||||||
msg_info "Initializing step-ca"
|
msg_info "Initializing step-ca"
|
||||||
|
|
||||||
|
# Initialize step-ca
|
||||||
DeploymentType="standalone"
|
DeploymentType="standalone"
|
||||||
FQDN="$(hostname -f)"
|
FQDN="$(hostname -f)"
|
||||||
IP="${LOCAL_IP}"
|
IP="${LOCAL_IP}"
|
||||||
LISTENER=":443"
|
LISTENER=":443"
|
||||||
|
LISTENER_INSECURE=":80"
|
||||||
|
|
||||||
|
# Set different signing CA and Provisioner Passwords
|
||||||
EncryptionPwdDir="$(step path)/encryption"
|
EncryptionPwdDir="$(step path)/encryption"
|
||||||
PwdFile="$EncryptionPwdDir/ca.pwd"
|
PwdFile="$EncryptionPwdDir/ca.pwd"
|
||||||
ProvisionerPwdFile="$EncryptionPwdDir/provisioner.pwd"
|
ProvisionerPwdFile="$EncryptionPwdDir/provisioner.pwd"
|
||||||
@@ -57,19 +74,208 @@ mkdir -p "$EncryptionPwdDir"
|
|||||||
gpg -q --gen-random --armor 2 32 >"$PwdFile"
|
gpg -q --gen-random --armor 2 32 >"$PwdFile"
|
||||||
gpg -q --gen-random --armor 2 32 >"$ProvisionerPwdFile"
|
gpg -q --gen-random --armor 2 32 >"$ProvisionerPwdFile"
|
||||||
|
|
||||||
$STD step ca init --deployment-type="$DeploymentType" --ssh --name="$PKIName" --dns="$FQDN" --dns="$IP" --address="$LISTENER" --provisioner="$PKIProvisioner" --password-file="$PwdFile" --provisioner-password-file="$ProvisionerPwdFile"
|
# Used by systemd step-ca.service
|
||||||
|
|
||||||
ln -s "$PwdFile" "$(step path)/password.txt"
|
ln -s "$PwdFile" "$(step path)/password.txt"
|
||||||
chown -R step:step $(step path)
|
|
||||||
chmod -R 700 $(step path)
|
# Usage of:
|
||||||
$STD step ca provisioner add "$AcmeProvisioner" --type ACME --admin-name "$AcmeProvisioner"
|
# - SSH feature of step-ca
|
||||||
$STD step ca provisioner update "$PKIProvisioner" --x509-min-dur="$X509MinDur" --x509-max-dur="$X509MaxDur" --x509-default-dur="$X509DefaultDur" --allow-renewal-after-expiry
|
# - BadgerDB (badgerv2) => Default DB backend of step-ca
|
||||||
$STD step ca provisioner update "$AcmeProvisioner" --x509-min-dur="$X509MinDur" --x509-max-dur="$X509MaxDur" --x509-default-dur="$X509DefaultDur" --allow-renewal-after-expiry
|
# - badgerFileLoadingMode: FileIO (instead of MemoryMap) for LXC with low RAM
|
||||||
$STD step certificate install --all $(step path)/certs/root_ca.crt
|
$STD step ca init \
|
||||||
|
--deployment-type="$DeploymentType" \
|
||||||
|
--ssh \
|
||||||
|
--name="$PKIName" \
|
||||||
|
--dns="$FQDN" \
|
||||||
|
--dns="$IP" \
|
||||||
|
--address="$LISTENER" \
|
||||||
|
--provisioner="$PKIProvisioner" \
|
||||||
|
--password-file="$PwdFile" \
|
||||||
|
--provisioner-password-file="$ProvisionerPwdFile"
|
||||||
|
|
||||||
|
# Define enhanced x509 CA and Certificate Templates
|
||||||
|
mkdir -p "$(step path)/templates/ca"
|
||||||
|
mkdir -p "$(step path)/templates/x509"
|
||||||
|
|
||||||
|
CARootTemplate="$(step path)/templates/ca/root.tpl"
|
||||||
|
CAIntermediateTemplate="$(step path)/templates/ca/intermediate.tpl"
|
||||||
|
X509LeafTemplate="$(step path)/templates/x509/leaf.tpl"
|
||||||
|
X509LeafTemplateData="$(step path)/templates/x509/leaf_data.tpl"
|
||||||
|
|
||||||
|
cat <<'EOF' >"$CARootTemplate"
|
||||||
|
{
|
||||||
|
"subject": {
|
||||||
|
"country": {{ toJson .Insecure.User.country }},
|
||||||
|
"organization": {{ toJson .Insecure.User.organization }},
|
||||||
|
"organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
|
||||||
|
"commonName": {{ toJson .Subject.CommonName }}
|
||||||
|
},
|
||||||
|
"issuer": {{ toJson .Subject }},
|
||||||
|
"keyUsage": ["certSign", "crlSign"],
|
||||||
|
"basicConstraints": {
|
||||||
|
"isCA": true,
|
||||||
|
"maxPathLen": 1
|
||||||
|
},
|
||||||
|
"issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
|
||||||
|
"crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<'EOF' >"$CAIntermediateTemplate"
|
||||||
|
{
|
||||||
|
"subject": {
|
||||||
|
"country": {{ toJson .Insecure.User.country }},
|
||||||
|
"organization": {{ toJson .Insecure.User.organization }},
|
||||||
|
"organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
|
||||||
|
"commonName": {{ toJson .Subject.CommonName }}
|
||||||
|
},
|
||||||
|
"keyUsage": ["certSign", "crlSign"],
|
||||||
|
"basicConstraints": {
|
||||||
|
"isCA": true,
|
||||||
|
"maxPathLen": 0
|
||||||
|
},
|
||||||
|
"issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
|
||||||
|
"crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<'EOF' >"$X509LeafTemplate"
|
||||||
|
{
|
||||||
|
"subject": {
|
||||||
|
{{- if .Insecure.User.Country }}
|
||||||
|
"country": {{ toJson .Insecure.User.country }},
|
||||||
|
{{- else }}
|
||||||
|
"country": {{ toJson .country }},
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Insecure.User.organization }}
|
||||||
|
"organization": {{ toJson .Insecure.User.organization }},
|
||||||
|
{{- else }}
|
||||||
|
"organization": {{ toJson .organization }},
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Insecure.User.organizationalUnit }}
|
||||||
|
"organizationalUnit": {{ toJson .Insecure.User.organizationalUnit }},
|
||||||
|
{{- else }}
|
||||||
|
"organizationalUnit": {{ toJson .organizationalUnit }},
|
||||||
|
{{- end }}
|
||||||
|
"commonName": {{ toJson .Subject.CommonName }}
|
||||||
|
},
|
||||||
|
"sans": {{ toJson .SANs }},
|
||||||
|
{{- if typeIs "*rsa.PublicKey" .Insecure.CR.PublicKey }}
|
||||||
|
"keyUsage": ["keyEncipherment", "digitalSignature"],
|
||||||
|
{{- else }}
|
||||||
|
"keyUsage": ["digitalSignature"],
|
||||||
|
{{- end }}
|
||||||
|
"extKeyUsage": ["serverAuth", "clientAuth"],
|
||||||
|
{{- if .Insecure.User.issuingCertificateURL }}
|
||||||
|
"issuingCertificateURL": [{{ toJson .Insecure.User.issuingCertificateURL }}],
|
||||||
|
{{- else }}
|
||||||
|
"issuingCertificateURL": [{{ toJson .issuingCertificateURL }}],
|
||||||
|
{{- end }}
|
||||||
|
{{- if .Insecure.User.crlDistributionPoints }}
|
||||||
|
"crlDistributionPoints": [{{ toJson .Insecure.User.crlDistributionPoints }}]
|
||||||
|
{{- else }}
|
||||||
|
"crlDistributionPoints": [{{ toJson .crlDistributionPoints }}]
|
||||||
|
{{- end }}
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<EOF >"$X509LeafTemplateData"
|
||||||
|
{
|
||||||
|
"country": "${PKICountry}",
|
||||||
|
"organization": "${PKIName}",
|
||||||
|
"organizationalUnit": "${PKIOrganizationalUnit}",
|
||||||
|
"issuingCertificateURL": ["https://${FQDN}${LISTENER}/intermediates.pem"],
|
||||||
|
"crlDistributionPoints": ["https://${FQDN}${LISTENER}/crl"]
|
||||||
|
}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Configure CA Provisioners, DB and CRL settings
|
||||||
|
$STD step ca provisioner add "$AcmeProvisioner" \
|
||||||
|
--type ACME \
|
||||||
|
--admin-name "$AcmeProvisioner"
|
||||||
|
|
||||||
|
$STD step ca provisioner update "$PKIProvisioner" \
|
||||||
|
--x509-min-dur="$X509MinDur" \
|
||||||
|
--x509-max-dur="$X509MaxDur" \
|
||||||
|
--x509-default-dur="$X509DefaultDur" \
|
||||||
|
--x509-template="$X509LeafTemplate" \
|
||||||
|
--x509-template-data="$X509LeafTemplateData" \
|
||||||
|
--allow-renewal-after-expiry
|
||||||
|
|
||||||
|
$STD step ca provisioner update "$AcmeProvisioner" \
|
||||||
|
--x509-min-dur="$X509MinDur" \
|
||||||
|
--x509-max-dur="$X509MaxDur" \
|
||||||
|
--x509-default-dur="$X509DefaultDur" \
|
||||||
|
--x509-template="$X509LeafTemplate" \
|
||||||
|
--x509-template-data="$X509LeafTemplateData" \
|
||||||
|
--allow-renewal-after-expiry
|
||||||
|
|
||||||
|
CAConfig="$(step path)/config/ca.json"
|
||||||
|
jq --arg a "${PKICountry}" '.country = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq --arg a "${PKIName}" '.organization = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq --arg a "${PKIOrganizationalUnit}" '.organizationalUnit = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq --arg a "${PKIName} Online CA" '.commonName = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq '.db.badgerFileLoadingMode = "FileIO"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq '.crl.enabled = true' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq '.crl.generateOnRevoke = true' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq '.crl.cacheDuration = "24h0m0s"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq '.crl.renewPeriod = "16h0m0s"' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq --arg a "https://${FQDN}${LISTENER}/crl" '.crl.idpURL = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
jq --arg a "$LISTENER_INSECURE" '.insecureAddress = $a' "${CAConfig}" > "${CAConfig}_tmp" && mv "${CAConfig}_tmp" "${CAConfig}"
|
||||||
|
|
||||||
|
# Generate Root CA Certificate and Key
|
||||||
|
# - Validity: 219168h (~25 Years)
|
||||||
|
# - maxPathLen: 1 (Root -> Intermediate -> Leaf) => Only one Intermediate CA allowed below Root CA
|
||||||
|
# - Active revocation on Intermediate CA and Leaf Certificates by the usage of build-in Certificate Revocation List (CRL)
|
||||||
|
FLAGS=(--force
|
||||||
|
--template="${CARootTemplate}"
|
||||||
|
--not-after="219168h"
|
||||||
|
--password-file="${PwdFile}"
|
||||||
|
--set country="${PKICountry}"
|
||||||
|
--set organization="${PKIName}"
|
||||||
|
--set organizationalUnit="${PKIOrganizationalUnit}"
|
||||||
|
--set issuingCertificateURL="https://${FQDN}${LISTENER}/roots.pem"
|
||||||
|
--set crlDistributionPoints="https://${FQDN}${LISTENER}/crl")
|
||||||
|
|
||||||
|
$STD step certificate create "${PKIName} Root CA" \
|
||||||
|
"$(step path)/certs/root_ca.crt" \
|
||||||
|
"$(step path)/secrets/root_ca_key" \
|
||||||
|
"${FLAGS[@]}"
|
||||||
|
|
||||||
|
# Generate Intermediate CA Certificate Bundle and Key
|
||||||
|
# - Validity: 175368h (~20 Years)
|
||||||
|
# - maxPathLen: 0 (Root -> Intermediate -> Leaf) => Intermediate CA is only allowed to issue Leaf Certificates
|
||||||
|
# - Active revocation on Leaf Certificates by the usage of build-in Certificate Revocation List (CRL)
|
||||||
|
# - Bundle: Certificate Chain (including Root CA Certificate)
|
||||||
|
FLAGS=(--force
|
||||||
|
--template="${CAIntermediateTemplate}"
|
||||||
|
--ca="$(step path)/certs/root_ca.crt"
|
||||||
|
--ca-key="$(step path)/secrets/root_ca_key"
|
||||||
|
--not-after="175368h"
|
||||||
|
--ca-password-file="${PwdFile}"
|
||||||
|
--password-file="${PwdFile}"
|
||||||
|
--bundle
|
||||||
|
--set country="${PKICountry}"
|
||||||
|
--set organization="${PKIName}"
|
||||||
|
--set organizationalUnit="${PKIOrganizationalUnit}"
|
||||||
|
--set issuingCertificateURL="https://${FQDN}${LISTENER}/roots.pem"
|
||||||
|
--set crlDistributionPoints="https://${FQDN}${LISTENER}/crl")
|
||||||
|
|
||||||
|
$STD step certificate create "${PKIName} Intermediate CA" \
|
||||||
|
"$(step path)/certs/intermediate_ca.crt" \
|
||||||
|
"$(step path)/secrets/intermediate_ca_key" \
|
||||||
|
"${FLAGS[@]}"
|
||||||
|
|
||||||
|
# Install Root CA Certificate to System Trust Store
|
||||||
|
$STD step certificate install --all "$(step path)/certs/root_ca.crt"
|
||||||
$STD update-ca-certificates
|
$STD update-ca-certificates
|
||||||
|
|
||||||
|
chown -R step:step "$(step path)"
|
||||||
|
chmod -R 700 "$(step path)"
|
||||||
msg_ok "Initialized step-ca"
|
msg_ok "Initialized step-ca"
|
||||||
|
|
||||||
msg_info "Start step-ca as a Daemon"
|
msg_info "Start step-ca as a Daemon"
|
||||||
|
|
||||||
|
# https://smallstep.com/docs/step-ca/certificate-authority-server-production/#running-step-ca-as-a-daemon
|
||||||
cat <<'EOF' >/etc/systemd/system/step-ca.service
|
cat <<'EOF' >/etc/systemd/system/step-ca.service
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=step-ca service
|
Description=step-ca service
|
||||||
@@ -130,271 +336,6 @@ msg_ok "Started step-ca as a Daemon"
|
|||||||
fetch_and_deploy_gh_release "step-badger" "lukasz-lobocki/step-badger" "prebuild" "latest" "/opt/step-badger" "step-badger_Linux_x86_64.tar.gz"
|
fetch_and_deploy_gh_release "step-badger" "lukasz-lobocki/step-badger" "prebuild" "latest" "/opt/step-badger" "step-badger_Linux_x86_64.tar.gz"
|
||||||
ln -s /opt/step-badger/step-badger /usr/local/bin/step-badger
|
ln -s /opt/step-badger/step-badger /usr/local/bin/step-badger
|
||||||
|
|
||||||
msg_info "Install step-ca Admin script"
|
|
||||||
mkdir -p "$STEPHOME"
|
|
||||||
cat <<'ADDON_EOF' >"$STEPHOME/step-ca-admin.sh"
|
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
# Copyright (c) 2021-2026 community-scripts ORG
|
|
||||||
# Author: Joerg Heinemann (heinemannj)
|
|
||||||
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
|
||||||
|
|
||||||
function header_info() {
|
|
||||||
clear
|
|
||||||
cat <<"EOF"
|
|
||||||
__ ___ __ _
|
|
||||||
_____/ /____ ____ _________ _ / | ____/ /___ ___ (_)___
|
|
||||||
/ ___/ __/ _ \/ __ \______/ ___/ __ `/ / /| |/ __ / __ `__ \/ / __ \
|
|
||||||
(__ ) /_/ __/ /_/ /_____/ /__/ /_/ / / ___ / /_/ / / / / / / / / / /
|
|
||||||
/____/\__/\___/ .___/ \___/\__,_/ /_/ |_\__,_/_/ /_/ /_/_/_/ /_/
|
|
||||||
/_/
|
|
||||||
|
|
||||||
EOF
|
|
||||||
}
|
|
||||||
|
|
||||||
function die() {
|
|
||||||
echo -e "\n${BL}[ERROR]${GN} ${RD}${1}${CL}\n"
|
|
||||||
exit
|
|
||||||
}
|
|
||||||
|
|
||||||
function success() {
|
|
||||||
echo -e "${BL}[SUCCESS]${GN} ${1}${CL}\n"
|
|
||||||
exit
|
|
||||||
}
|
|
||||||
|
|
||||||
function whiptail_menu() {
|
|
||||||
MENU_ARRAY=()
|
|
||||||
MSG_MAX_LENGTH=0
|
|
||||||
while read -r TAG ITEM; do
|
|
||||||
OFFSET=2
|
|
||||||
((${#ITEM} + OFFSET > MSG_MAX_LENGTH)) && MSG_MAX_LENGTH=${#ITEM}+OFFSET
|
|
||||||
MENU_ARRAY+=("$TAG" "$ITEM " "OFF")
|
|
||||||
done < <(echo "$1")
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_list() {
|
|
||||||
CERT_LIST=""
|
|
||||||
cp --recursive --force "$(step path)/db/"* "$STEPHOME/db-copy/"
|
|
||||||
cp --recursive --force "$(step path)/certs/"* "$STEPHOME/certs/ca/"
|
|
||||||
if [[ $(step-badger x509Certs "${STEPHOME}/db-copy" 2>/dev/null) ]]; then
|
|
||||||
CERT_LIST=$(step-badger x509Certs ${STEPHOME}/db-copy 2>/dev/null)
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
function ssh_list() {
|
|
||||||
CERT_LIST=""
|
|
||||||
cp --recursive --force "$(step path)/db/"* "$STEPHOME/db-copy/"
|
|
||||||
cp --recursive --force "$(step path)/certs/"* "$STEPHOME/certs/ca/"
|
|
||||||
if [[ $(step-badger sshCerts "${STEPHOME}/db-copy" 2>/dev/null) ]]; then
|
|
||||||
CERT_LIST=$(step-badgersshCerts ${STEPHOME}/db-copy 2>/dev/null)
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_serial_to_cn() {
|
|
||||||
x509_list
|
|
||||||
CN="$(echo "${CERT_LIST}" | grep "${SERIAL_NUMBER}" | awk '{print $2}' | sed 's/CN=//g')"
|
|
||||||
CRT="$STEPHOME/certs/x509/$CN.crt"
|
|
||||||
KEY="$STEPHOME/certs/x509/$CN.key"
|
|
||||||
if ! [[ -f ${CRT} ]]; then
|
|
||||||
die "Certificate ${CRT} not found!"
|
|
||||||
elif ! [[ -f ${KEY} ]]; then
|
|
||||||
die "Private Key ${KEY} not found!"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_revoke() {
|
|
||||||
# shellcheck disable=SC2206
|
|
||||||
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
|
|
||||||
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
|
|
||||||
echo -e "${BL}[Info]${GN} Revoke x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}"
|
|
||||||
echo
|
|
||||||
TOKEN=$(step ca token --provisioner="$PROVISIONER" --provisioner-password-file="$PROVISIONER_PASSWORD" --revoke "${SERIAL_NUMBER}")
|
|
||||||
step ca revoke --token "$TOKEN" "${SERIAL_NUMBER}" || die "Failed to revoke certificate!"
|
|
||||||
echo
|
|
||||||
done
|
|
||||||
success "Finished."
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_renew() {
|
|
||||||
# shellcheck disable=SC2206
|
|
||||||
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
|
|
||||||
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
|
|
||||||
echo -e "${BL}[Info]${GN} Renew x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}"
|
|
||||||
echo
|
|
||||||
x509_serial_to_cn
|
|
||||||
step ca renew "${CRT}" "${KEY}" --force || die "Failed to renew certificate!"
|
|
||||||
echo
|
|
||||||
done
|
|
||||||
success "Finished."
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_inspect() {
|
|
||||||
# shellcheck disable=SC2206
|
|
||||||
SERIAL_NUMBER_ARRAY=(${CERT_SERIAL_NUMBERS})
|
|
||||||
for SERIAL_NUMBER in "${SERIAL_NUMBER_ARRAY[@]}"; do
|
|
||||||
echo -e "${BL}[Info]${GN} Inspect x509 Certificate with Serial Number ${BL}${SERIAL_NUMBER}${GN}:${CL}\n"
|
|
||||||
x509_serial_to_cn
|
|
||||||
step certificate inspect "${CRT}" || die "Failed to inspect certificate!"
|
|
||||||
if ! [[ $(step certificate inspect "${CRT}" | grep "${SERIAL_NUMBER}") ]]; then
|
|
||||||
die "Serial Number ${SERIAL_NUMBER} mismatch!"
|
|
||||||
fi
|
|
||||||
echo -e "\n${BL}[Info]${GN} Public Key:${CL}\n"
|
|
||||||
cat "${CRT}"
|
|
||||||
echo -e "\n${BL}[Info]${GN} Private Key:${CL}\n"
|
|
||||||
cat "${KEY}"
|
|
||||||
echo
|
|
||||||
done
|
|
||||||
success "Finished."
|
|
||||||
}
|
|
||||||
|
|
||||||
function x509_request() {
|
|
||||||
FQDN=""
|
|
||||||
SAN=""
|
|
||||||
|
|
||||||
while true; do
|
|
||||||
FQDN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nFQDN (e.g. MyLXC.example.com)' 10 50 "$FQDN" 3>&1 1>&2 2>&3)
|
|
||||||
IP=$(dig +short "$FQDN")
|
|
||||||
if [[ -z "$IP" ]]; then
|
|
||||||
die "Resolution failed for $FQDN!"
|
|
||||||
fi
|
|
||||||
HOST=$(echo "$FQDN" | awk -F'.' '{print $1}')
|
|
||||||
IP=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nIP Address (e.g. x.x.x.x)' 10 50 "$IP" 3>&1 1>&2 2>&3)
|
|
||||||
HOST=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nHostname (e.g. MyHostName)' 10 50 "$HOST" 3>&1 1>&2 2>&3)
|
|
||||||
SAN=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nSubject Alternative Name(s) (SAN) (e.g. myapp-1.example.com, myapp-2.example.com)' 10 50 "$SAN" 3>&1 1>&2 2>&3)
|
|
||||||
VALID_TO=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --inputbox '\nValidity (e.g. 2034-01-31T00:00:00Z)' 10 50 "2034-01-31T00:00:00Z" 3>&1 1>&2 2>&3)
|
|
||||||
|
|
||||||
# shellcheck disable=SC2034
|
|
||||||
if whiptail_yesno=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificate Signing Request (CSR)" --yesno "Continue with below?\n
|
|
||||||
FQDN: $FQDN
|
|
||||||
Hostname: $HOST
|
|
||||||
IP Address: $IP
|
|
||||||
Subject Alternative Name(s) (SAN): $SAN
|
|
||||||
Validity: $VALID_TO" --no-button "Change" --yes-button "Continue" 15 70 3>&1 1>&2 2>&3); then
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
echo -e "${BL}[Info]${GN} Request x509 Certificate with subject ${BL}${FQDN}${GN}:${CL}"
|
|
||||||
echo
|
|
||||||
CRT="$STEPHOME/certs/x509/$FQDN.crt"
|
|
||||||
KEY="$STEPHOME/certs/x509/$FQDN.key"
|
|
||||||
|
|
||||||
SAN="$FQDN, $HOST, $IP, $SAN"
|
|
||||||
|
|
||||||
IFS=', ' read -r -a array <<< "$SAN"
|
|
||||||
for element in "${array[@]}"
|
|
||||||
do
|
|
||||||
SAN_ARRAY+=(--san "$element")
|
|
||||||
done
|
|
||||||
|
|
||||||
step ca certificate "$FQDN" "$CRT" "$KEY" \
|
|
||||||
--provisioner="$PROVISIONER" \
|
|
||||||
--provisioner-password-file="$PROVISIONER_PASSWORD" \
|
|
||||||
--not-after="$VALID_TO" \
|
|
||||||
"${SAN_ARRAY[@]}" \
|
|
||||||
|| die "Failed to request certificate!"
|
|
||||||
|
|
||||||
echo -e "\n${BL}[Info]${GN} Inspect Certificate:${CL}\n"
|
|
||||||
step certificate inspect "${CRT}" || die "Failed to inspect certificate!"
|
|
||||||
echo -e "\n${BL}[Info]${GN} Public Key:${CL}\n"
|
|
||||||
cat "${CRT}"
|
|
||||||
echo -e "\n${BL}[Info]${GN} Private Key:${CL}\n"
|
|
||||||
cat "${KEY}"
|
|
||||||
echo
|
|
||||||
success "Finished."
|
|
||||||
}
|
|
||||||
|
|
||||||
set -eEuo pipefail
|
|
||||||
# shellcheck disable=SC2034
|
|
||||||
# shellcheck disable=SC2116
|
|
||||||
# shellcheck disable=SC2028
|
|
||||||
YW=$(echo "\033[33m")
|
|
||||||
# shellcheck disable=SC2116
|
|
||||||
# shellcheck disable=SC2028
|
|
||||||
BL=$(echo "\033[36m")
|
|
||||||
# shellcheck disable=SC2116
|
|
||||||
# shellcheck disable=SC2028
|
|
||||||
RD=$(echo "\033[01;31m")
|
|
||||||
# shellcheck disable=SC2034
|
|
||||||
CM='\xE2\x9C\x94\033'
|
|
||||||
# shellcheck disable=SC2116
|
|
||||||
# shellcheck disable=SC2028
|
|
||||||
GN=$(echo "\033[1;92m")
|
|
||||||
# shellcheck disable=SC2116
|
|
||||||
# shellcheck disable=SC2028
|
|
||||||
CL=$(echo "\033[m")
|
|
||||||
|
|
||||||
# Telemetry
|
|
||||||
# shellcheck disable=SC1090
|
|
||||||
source <(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVE/main/misc/api.func) 2>/dev/null || true
|
|
||||||
declare -f init_tool_telemetry &>/dev/null && init_tool_telemetry "step-ca-admin" "step-ca"
|
|
||||||
|
|
||||||
header_info
|
|
||||||
|
|
||||||
mkdir --parents "$STEPHOME/db-copy/"
|
|
||||||
mkdir --parents "$STEPHOME/certs/ca/_archive/"
|
|
||||||
mkdir --parents "$STEPHOME/certs/ssh/_archive/"
|
|
||||||
mkdir --parents "$STEPHOME/certs/x509/_archive/"
|
|
||||||
|
|
||||||
PROVISIONER=$(jq '.authority.provisioners.[] | select(.type=="JWK") | .name' "$(step path)"/config/ca.json)
|
|
||||||
PROVISIONER="${PROVISIONER#\"}"
|
|
||||||
PROVISIONER="${PROVISIONER%\"}"
|
|
||||||
PROVISIONER_PASSWORD=$(step path)/encryption/provisioner.pwd
|
|
||||||
|
|
||||||
whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --yesno "This will maintain step-ca issued x509 and ssh Certificates. Proceed?" 10 58
|
|
||||||
|
|
||||||
MENU_ARRAY=("x509" "Maintain x509 Certificates." "ON")
|
|
||||||
MENU_ARRAY+=("ssh" "Maintain ssh Certificates." "OFF")
|
|
||||||
CERT_TYPE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --radiolist "\nSelect Certificate Type:" 16 48 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
|
|
||||||
|
|
||||||
[[ -z ${CERT_TYPE} ]] && die "No Certificate Type selected!"
|
|
||||||
|
|
||||||
case ${CERT_TYPE} in
|
|
||||||
("x509")
|
|
||||||
x509_list
|
|
||||||
CERT_LIST=$(echo "$CERT_LIST" | awk 'NR>1 {print $1 " " $2 "|" $3 "|" $4 "|" $5}')
|
|
||||||
if [[ $CERT_LIST ]]; then
|
|
||||||
whiptail_menu "$CERT_LIST"
|
|
||||||
else
|
|
||||||
MENU_ARRAY=()
|
|
||||||
MSG_MAX_LENGTH=2
|
|
||||||
fi
|
|
||||||
MENU_ARRAY+=("" "Create a new Certificate" "OFF")
|
|
||||||
CERT_SERIAL_NUMBERS=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "Certificates on $(hostname)" --checklist "\nSelect Certificate(s) to maintain:\n" 16 $((MSG_MAX_LENGTH + 55)) 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
|
|
||||||
|
|
||||||
[[ -z ${CERT_SERIAL_NUMBERS} ]] && x509_request
|
|
||||||
|
|
||||||
MENU_ARRAY=("Renew" "Renew x509 Certificates." "ON")
|
|
||||||
MENU_ARRAY+=("Revoke" "Revoke x509 Certificates." "OFF")
|
|
||||||
MENU_ARRAY+=("Inspect" "Inspect x509 Certificates." "OFF")
|
|
||||||
CERT_MAINTENANCE=$(whiptail --backtitle "Proxmox VE Helper Scripts" --title "step-ca Admin" --radiolist "\nSelect Maintenance Type:" 16 48 6 "${MENU_ARRAY[@]}" 3>&1 1>&2 2>&3 | tr -d '"')
|
|
||||||
|
|
||||||
case ${CERT_MAINTENANCE} in
|
|
||||||
("Renew")
|
|
||||||
x509_renew "${CERT_SERIAL_NUMBERS[@]}"
|
|
||||||
;;
|
|
||||||
("Revoke")
|
|
||||||
x509_revoke "${CERT_SERIAL_NUMBERS[@]}"
|
|
||||||
;;
|
|
||||||
("Inspect")
|
|
||||||
x509_inspect "${CERT_SERIAL_NUMBERS[@]}"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
die "Unsupported CERT_MAINTENANCE Option!"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
;;
|
|
||||||
("ssh")
|
|
||||||
die "Maintain ssh Certificates - To be implemented in future"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
die "Unsupported CERT_TYPE Option!"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
ADDON_EOF
|
|
||||||
chmod 700 "$STEPHOME/step-ca-admin.sh"
|
|
||||||
msg_ok "Installed step-ca Admin script"
|
|
||||||
|
|
||||||
motd_ssh
|
motd_ssh
|
||||||
customize
|
customize
|
||||||
cleanup_lxc
|
cleanup_lxc
|
||||||
|
|||||||
@@ -0,0 +1,94 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Copyright (c) 2021-2026 community-scripts ORG
|
||||||
|
# Author: MickLesk (CanbiZ)
|
||||||
|
# License: MIT | https://github.com/community-scripts/ProxmoxVE/raw/main/LICENSE
|
||||||
|
# Source: https://github.com/teableio/teable
|
||||||
|
|
||||||
|
source /dev/stdin <<<"$FUNCTIONS_FILE_PATH"
|
||||||
|
color
|
||||||
|
verb_ip6
|
||||||
|
catch_errors
|
||||||
|
setting_up_container
|
||||||
|
network_check
|
||||||
|
update_os
|
||||||
|
|
||||||
|
msg_info "Installing Dependencies"
|
||||||
|
$STD apt install -y \
|
||||||
|
build-essential \
|
||||||
|
python3 \
|
||||||
|
git
|
||||||
|
msg_ok "Installed Dependencies"
|
||||||
|
|
||||||
|
NODE_VERSION="24" NODE_MODULE="pnpm" setup_nodejs
|
||||||
|
PG_VERSION="16" setup_postgresql
|
||||||
|
PG_DB_NAME="teable" PG_DB_USER="teable" setup_postgresql_db
|
||||||
|
|
||||||
|
fetch_and_deploy_gh_release "teable" "teableio/teable" "tarball"
|
||||||
|
|
||||||
|
msg_info "Setting up Teable"
|
||||||
|
cd /opt/teable
|
||||||
|
TEABLE_VERSION=$(cat ~/.teable)
|
||||||
|
echo "NEXT_PUBLIC_BUILD_VERSION=\"${TEABLE_VERSION}\"" >>apps/nextjs-app/.env
|
||||||
|
export HUSKY=0
|
||||||
|
export NODE_OPTIONS="--max-old-space-size=8192"
|
||||||
|
$STD pnpm install --frozen-lockfile
|
||||||
|
$STD pnpm -F @teable/db-main-prisma prisma-generate --schema ./prisma/postgres/schema.prisma
|
||||||
|
msg_ok "Set up Teable"
|
||||||
|
|
||||||
|
msg_info "Building Teable"
|
||||||
|
NODE_ENV=production NEXT_BUILD_ENV_TYPECHECK=false \
|
||||||
|
$STD pnpm -r --filter '!playground' run build
|
||||||
|
msg_ok "Built Teable"
|
||||||
|
|
||||||
|
msg_info "Running Database Migrations"
|
||||||
|
PRISMA_DATABASE_URL="postgresql://teable:${PG_DB_PASS}@localhost:5432/teable?schema=public" \
|
||||||
|
$STD pnpm -F @teable/db-main-prisma prisma-migrate deploy --schema ./prisma/postgres/schema.prisma
|
||||||
|
msg_ok "Ran Database Migrations"
|
||||||
|
|
||||||
|
msg_info "Configuring Teable"
|
||||||
|
mkdir -p /opt/teable/.assets /opt/teable/.temporary
|
||||||
|
SECRET_KEY=$(openssl rand -base64 32)
|
||||||
|
cat <<EOF >/opt/teable/.env
|
||||||
|
PRISMA_DATABASE_URL=postgresql://teable:${PG_DB_PASS}@localhost:5432/teable?schema=public&statement_cache_size=1
|
||||||
|
PUBLIC_ORIGIN=http://${LOCAL_IP}:3000
|
||||||
|
SECRET_KEY=${SECRET_KEY}
|
||||||
|
PORT=3000
|
||||||
|
NODE_ENV=production
|
||||||
|
NEXT_TELEMETRY_DISABLED=1
|
||||||
|
BACKEND_CACHE_PROVIDER=sqlite
|
||||||
|
BACKEND_CACHE_SQLITE_URI=sqlite:///opt/teable/.assets/.cache.db
|
||||||
|
NEXTJS_DIR=apps/nextjs-app
|
||||||
|
EOF
|
||||||
|
ln -sf /opt/teable /app
|
||||||
|
rm -rf /opt/teable/static
|
||||||
|
if [ -d "/opt/teable/apps/nestjs-backend/static/static" ]; then
|
||||||
|
ln -sf /opt/teable/apps/nestjs-backend/static/static /opt/teable/static
|
||||||
|
else
|
||||||
|
ln -sf /opt/teable/apps/nestjs-backend/static /opt/teable/static
|
||||||
|
fi
|
||||||
|
msg_ok "Configured Teable"
|
||||||
|
|
||||||
|
msg_info "Creating Service"
|
||||||
|
cat <<EOF >/etc/systemd/system/teable.service
|
||||||
|
[Unit]
|
||||||
|
Description=Teable
|
||||||
|
After=network.target postgresql.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
WorkingDirectory=/opt/teable
|
||||||
|
EnvironmentFile=/opt/teable/.env
|
||||||
|
ExecStart=/usr/bin/node apps/nestjs-backend/dist/index.js
|
||||||
|
Restart=on-failure
|
||||||
|
RestartSec=5
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF
|
||||||
|
systemctl enable -q --now teable
|
||||||
|
msg_ok "Created Service"
|
||||||
|
|
||||||
|
motd_ssh
|
||||||
|
customize
|
||||||
|
cleanup_lxc
|
||||||
-651
@@ -8665,654 +8665,3 @@ EOF
|
|||||||
$STD apt update
|
$STD apt update
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
|
|
||||||
# Get latest GitLab release version.
|
|
||||||
# Usage: get_latest_gitlab_release "owner/repo" [strip_v]
|
|
||||||
# ------------------------------------------------------------------------------
|
|
||||||
get_latest_gitlab_release() {
  # Print the newest release tag of a GitLab project to stdout.
  #   $1 = project path ("owner/repo")
  #   $2 = "true" (default) to strip a leading "v" from tags like v1.2.3
  # Return codes: 22 on API failure, 250 when no tag could be parsed.
  local project="$1"
  local drop_v="${2:-true}"

  # URL-encode the path separator for the /projects/:id API form
  local project_id="${project//\//%2F}"

  # Optional authentication for private or rate-limited projects
  local auth=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && auth=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local body status
  body=$(mktemp)

  # Ask for just the single most recently released entry
  status=$(curl -sSL --connect-timeout 10 --max-time 30 \
    -o "$body" -w "%{http_code}" \
    "${auth[@]}" \
    "https://gitlab.com/api/v4/projects/$project_id/releases?per_page=1&order_by=released_at&sort=desc" 2>/dev/null) || true

  if [[ "$status" != "200" ]]; then
    rm -f "$body"
    msg_warn "GitLab API call failed for ${project} (HTTP ${status})"
    return 22
  fi

  local tag
  tag=$(jq -r '.[0].tag_name // empty' "$body")
  rm -f "$body"

  if [[ -z "$tag" ]]; then
    msg_error "Could not determine latest version for ${project}"
    return 250
  fi

  # Strip the "v" prefix only when followed by a digit (e.g. v1.2.3)
  if [[ "$drop_v" == "true" && "$tag" =~ ^v[0-9] ]]; then
    tag="${tag:1}"
  fi

  echo "$tag"
}
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
# Checks for new GitLab release (latest tag).
#
# Description:
#   - Queries the GitLab API for the latest release tag
#   - Compares it to a local cached version (~/.<app>)
#   - If newer, sets global CHECK_UPDATE_RELEASE and returns 0
#
# Usage:
#   if check_for_gl_release "myapp" "owner/repo" [optional] "v1.2.3"; then
#     # trigger update...
#   fi
#   exit 0
# } (end of update_script, not part of this function)
#
# Notes:
#   - Requires `jq` (auto-installed if missing)
#   - Supports GITLAB_TOKEN env var for private/rate-limited repos
#   - Does not modify anything, only checks version state
# ------------------------------------------------------------------------------
check_for_gl_release() {
  local app="$1"
  local source="$2"
  local pinned_version_in="${3:-}" # optional: hold the app at this exact tag
  local pin_reason="${4:-}"        # optional reason shown to user
  local app_lc="${app,,}"
  # Cached "currently installed" version, stored without a leading "v"
  local current_file="$HOME/.${app_lc}"

  msg_info "Checking for update: ${app}"

  # DNS check — fail fast with a clear message instead of a curl timeout
  if ! getent hosts gitlab.com >/dev/null 2>&1; then
    msg_error "Network error: cannot resolve gitlab.com"
    return 6
  fi

  ensure_dependencies jq

  # URL-encode the project path; python3 handles all reserved characters,
  # the sed fallback only encodes "/" (sufficient for owner/repo paths)
  local repo_encoded
  repo_encoded=$(python3 -c "import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], safe=''))" "$source" 2>/dev/null ||
    echo "$source" | sed 's|/|%2F|g')

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  local releases_json="" http_code=""

  # For pinned versions, try to fetch the specific release tag first.
  # NOTE(review): /tmp/gl_check.json is a fixed path — concurrent runs of this
  # function would race on it; confirm single-invocation use is guaranteed.
  if [[ -n "$pinned_version_in" ]]; then
    local pinned_encoded="${pinned_version_in//\//%2F}"
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gl_check.json \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases/$pinned_encoded" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]] && [[ -s /tmp/gl_check.json ]]; then
      # Wrap the single release object in an array so the parsing below
      # works the same for both code paths
      releases_json="[$(</tmp/gl_check.json)]"
    fi
    rm -f /tmp/gl_check.json
  fi

  # Fetch full releases list if needed (no pin, or pinned tag lookup failed)
  if [[ -z "$releases_json" ]]; then
    http_code=$(curl -sSL --max-time 20 -w "%{http_code}" -o /tmp/gl_check.json \
      "${header[@]}" \
      "https://gitlab.com/api/v4/projects/$repo_encoded/releases?per_page=100&order_by=released_at&sort=desc" 2>/dev/null) || true

    if [[ "$http_code" == "200" ]] && [[ -s /tmp/gl_check.json ]]; then
      releases_json=$(</tmp/gl_check.json)
    elif [[ "$http_code" == "401" ]]; then
      # Authentication problem — message depends on whether a token was set
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project not found (HTTP 404). Ensure '${source}' is correct and publicly accessible."
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token: export GITLAB_TOKEN=\"glpat-your_token_here\""
      rm -f /tmp/gl_check.json
      return 22
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      # curl never got an HTTP response at all (network/DNS/TLS failure)
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
      rm -f /tmp/gl_check.json
      return 7
    else
      msg_error "Unable to fetch releases for ${app} (HTTP ${http_code})"
      rm -f /tmp/gl_check.json
      return 22
    fi
    rm -f /tmp/gl_check.json
  fi

  # Tag list as returned by the API, newest first
  mapfile -t raw_tags < <(jq -r '.[] | .tag_name' <<<"$releases_json")
  if ((${#raw_tags[@]} == 0)); then
    msg_error "No releases found for ${app} on GitLab"
    return 250
  fi

  # Parallel list of tags with the "v" prefix stripped, used for comparisons
  local clean_tags=()
  for t in "${raw_tags[@]}"; do
    # Only strip leading 'v' when followed by a digit (e.g. v1.2.3)
    if [[ "$t" =~ ^v[0-9] ]]; then
      clean_tags+=("${t:1}")
    else
      clean_tags+=("$t")
    fi
  done

  local latest_raw="${raw_tags[0]}"
  local latest_clean="${clean_tags[0]}"

  # current installed (stored without v)
  local current=""
  if [[ -f "$current_file" ]]; then
    current="$(<"$current_file")"
  else
    # Migration: older scripts stored the version in /opt/*_version.txt;
    # adopt it (only when exactly one candidate exists) and delete the legacy file
    local legacy_files
    mapfile -t legacy_files < <(find /opt -maxdepth 1 -type f -name "*_version.txt" 2>/dev/null)
    if ((${#legacy_files[@]} == 1)); then
      current="$(<"${legacy_files[0]}")"
      echo "${current#v}" >"$current_file"
      rm -f "${legacy_files[0]}"
    fi
  fi
  if [[ "$current" =~ ^v[0-9] ]]; then
    current="${current:1}"
  fi

  # Pinned version handling: only ever move toward the pinned tag
  if [[ -n "$pinned_version_in" ]]; then
    local pin_clean
    if [[ "$pinned_version_in" =~ ^v[0-9] ]]; then
      pin_clean="${pinned_version_in:1}"
    else
      pin_clean="$pinned_version_in"
    fi
    # Find the raw tag corresponding to the pinned (cleaned) version
    local match_raw=""
    for i in "${!clean_tags[@]}"; do
      if [[ "${clean_tags[$i]}" == "$pin_clean" ]]; then
        match_raw="${raw_tags[$i]}"
        break
      fi
    done

    if [[ -z "$match_raw" ]]; then
      msg_error "Pinned version ${pinned_version_in} not found upstream"
      return 250
    fi

    if [[ "$current" != "$pin_clean" ]]; then
      CHECK_UPDATE_RELEASE="$match_raw"
      msg_ok "Update available: ${app} ${current:-not installed} → ${pin_clean}"
      return 0
    fi

    # Already at the pinned version — explain why newer releases are skipped
    if [[ -n "$pin_reason" ]]; then
      msg_ok "No update available: ${app} (${current}) - update held back: ${pin_reason}"
    else
      msg_ok "No update available: ${app} (${current}) - update temporarily held back due to issues with newer releases"
    fi
    return 1
  fi

  # No pinning → use latest
  if [[ -z "$current" || "$current" != "$latest_clean" ]]; then
    CHECK_UPDATE_RELEASE="$latest_raw"
    msg_ok "Update available: ${app} ${current:-not installed} → ${latest_clean}"
    return 0
  fi

  msg_ok "No update available: ${app} (${latest_clean})"
  return 1
}
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------------------
# Fetch and deploy a GitLab release.
#
# Usage: fetch_and_deploy_gl_release APP REPO [MODE] [VERSION] [TARGET] [ASSET_PATTERN]
#   MODE:    tarball|source (default) — project source archive
#            binary    — a .deb asset, installed via apt/dpkg
#            prebuild  — an archive asset (zip/tar.*) unpacked into TARGET
#            singlefile — a raw asset copied into TARGET
#   VERSION: release tag, default "latest"; var_appversion overrides when set
#   TARGET:  default /opt/$APP
# Records the deployed version in ~/.<app_lc>.
# NOTE(review): metadata is written to the fixed path /tmp/gl_rel.json — not
# unique per run; concurrent invocations would race. Confirm acceptable.
# ------------------------------------------------------------------------------
function fetch_and_deploy_gl_release() {
  local app="$1"
  local repo="$2"
  local mode="${3:-tarball}"
  # var_appversion (environment) takes precedence over the positional version
  local version="${var_appversion:-${4:-latest}}"
  local target="${5:-/opt/$app}"
  local asset_pattern="${6:-}"

  # Derive the app name from the repo when not given
  if [[ -z "$app" ]]; then
    app="${repo##*/}"
    if [[ -z "$app" ]]; then
      msg_error "fetch_and_deploy_gl_release requires app name or valid repo"
      return 1
    fi
  fi

  # Lowercased, space-free app name; also names the version cache file
  local app_lc=$(echo "${app,,}" | tr -d ' ')
  local version_file="$HOME/.${app_lc}"

  # Intentionally unquoted at use sites so the options word-split
  local api_timeout="--connect-timeout 10 --max-time 60"
  local download_timeout="--connect-timeout 15 --max-time 900"

  local current_version=""
  [[ -f "$version_file" ]] && current_version=$(<"$version_file")

  ensure_dependencies jq

  # URL-encode the project path (python3 preferred; sed fallback encodes "/")
  local repo_encoded
  repo_encoded=$(python3 -c "import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], safe=''))" "$repo" 2>/dev/null ||
    echo "$repo" | sed 's|/|%2F|g')

  local api_base="https://gitlab.com/api/v4/projects/$repo_encoded/releases"
  local api_url
  if [[ "$version" != "latest" ]]; then
    # Specific tag → single-release endpoint
    api_url="$api_base/$version"
  else
    # Latest → list endpoint limited to the newest entry
    api_url="$api_base?per_page=1&order_by=released_at&sort=desc"
  fi

  local header=()
  [[ -n "${GITLAB_TOKEN:-}" ]] && header=(-H "PRIVATE-TOKEN: $GITLAB_TOKEN")

  # Retry loop with exponential backoff on 429, flat delay on other failures
  local max_retries=3 retry_delay=2 attempt=1 success=false http_code

  while ((attempt <= max_retries)); do
    http_code=$(curl $api_timeout -sSL -w "%{http_code}" -o /tmp/gl_rel.json "${header[@]}" "$api_url" 2>/dev/null) || true
    if [[ "$http_code" == "200" ]]; then
      success=true
      break
    elif [[ "$http_code" == "429" ]]; then
      if ((attempt < max_retries)); then
        msg_warn "GitLab API rate limit hit, retrying in ${retry_delay}s... (attempt $attempt/$max_retries)"
        sleep "$retry_delay"
        retry_delay=$((retry_delay * 2))
      fi
    else
      sleep "$retry_delay"
    fi
    ((attempt++))
  done

  # All retries exhausted — report the most useful diagnosis per status code
  if ! $success; then
    if [[ "$http_code" == "401" ]]; then
      msg_error "GitLab API authentication failed (HTTP 401)."
      if [[ -n "${GITLAB_TOKEN:-}" ]]; then
        msg_error "Your GITLAB_TOKEN appears to be invalid or expired."
      else
        msg_error "The repository may require authentication. Try: export GITLAB_TOKEN=\"glpat-your_token\""
      fi
    elif [[ "$http_code" == "404" ]]; then
      msg_error "GitLab project or release not found (HTTP 404)."
      msg_error "Ensure '$repo' is correct and the project is accessible."
    elif [[ "$http_code" == "429" ]]; then
      msg_error "GitLab API rate limit exceeded (HTTP 429)."
      msg_error "To increase the limit, export a GitLab token before running the script:"
      msg_error " export GITLAB_TOKEN=\"glpat-your_token_here\""
    elif [[ "$http_code" == "000" || -z "$http_code" ]]; then
      msg_error "GitLab API connection failed (no response)."
      msg_error "Check your network/DNS: curl -sSL https://gitlab.com/api/v4/version"
    else
      msg_error "Failed to fetch release metadata (HTTP $http_code)"
    fi
    return 1
  fi

  local json tag_name
  json=$(</tmp/gl_rel.json)

  # "latest" used the list endpoint → unwrap the first (newest) element
  if [[ "$version" == "latest" ]]; then
    json=$(echo "$json" | jq '.[0] // empty')
    if [[ -z "$json" || "$json" == "null" ]]; then
      msg_error "No releases found for $repo on GitLab"
      return 1
    fi
  fi

  tag_name=$(echo "$json" | jq -r '.tag_name // empty')
  if [[ -z "$tag_name" ]]; then
    msg_error "Could not determine tag name from release metadata"
    return 1
  fi
  # Normalize: version drops a leading "v<digit>"; version_safe is path-safe
  [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
  local version_safe="${version//\//-}"

  # Short-circuit when the cached version already matches
  if [[ "$current_version" == "$version" ]]; then
    $STD msg_ok "$app is already up-to-date (v$version)"
    return 0
  fi

  local tmpdir
  tmpdir=$(mktemp -d) || return 1
  local filename=""

  msg_info "Fetching GitLab release: $app ($version)"

  # Helper: print all downloadable asset URLs of a release, one per line
  _gl_asset_urls() {
    local release_json="$1"
    echo "$release_json" | jq -r '
      (.assets.links // [])[] | .direct_asset_url // .url
    '
  }

  ### Tarball Mode ###
  if [[ "$mode" == "tarball" || "$mode" == "source" ]]; then
    # GitLab serves the source archive for any filename under /-/archive/<tag>/
    local direct_tarball_url="https://gitlab.com/$repo/-/archive/$tag_name/${app_lc}-${version_safe}.tar.gz"
    filename="${app_lc}-${version_safe}.tar.gz"

    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$direct_tarball_url" || {
      msg_error "Download failed: $direct_tarball_url"
      rm -rf "$tmpdir"
      return 1
    }

    mkdir -p "$target"
    # CLEAN_INSTALL=1 wipes the target before deploying; :? guards against
    # an empty $target expanding to "rm -rf /*"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    tar --no-same-owner -xzf "$tmpdir/$filename" -C "$tmpdir" || {
      msg_error "Failed to extract tarball"
      rm -rf "$tmpdir"
      return 1
    }
    # Source archives unpack into a single top-level directory
    local unpack_dir
    unpack_dir=$(find "$tmpdir" -mindepth 1 -maxdepth 1 -type d | head -n1)

    # dotglob so hidden files are copied too
    shopt -s dotglob nullglob
    cp -r "$unpack_dir"/* "$target/"
    shopt -u dotglob nullglob

  ### Binary Mode ###
  elif [[ "$mode" == "binary" ]]; then
    # Normalize architecture naming to Debian-style
    local arch
    arch=$(dpkg --print-architecture 2>/dev/null || uname -m)
    [[ "$arch" == "x86_64" ]] && arch="amd64"
    [[ "$arch" == "aarch64" ]] && arch="arm64"

    local assets url_match=""
    assets=$(_gl_asset_urls "$json")

    # 1) explicit pattern match on the asset filename (glob via case)
    if [[ -n "$asset_pattern" ]]; then
      for u in $assets; do
        case "${u##*/}" in
        $asset_pattern)
          url_match="$u"
          break
          ;;
        esac
      done
    fi

    # 2) arch-specific .deb
    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        if [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]]; then
          url_match="$u"
          break
        fi
      done
    fi

    # 3) any .deb at all
    if [[ -z "$url_match" ]]; then
      for u in $assets; do
        [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
      done
    fi

    # 4) fall back to scanning older releases for a usable asset
    if [[ -z "$url_match" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "binary" "$asset_pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        assets=$(_gl_asset_urls "$json")
        # Re-run the same selection ladder on the older release
        if [[ -n "$asset_pattern" ]]; then
          for u in $assets; do
            case "${u##*/}" in $asset_pattern)
              url_match="$u"
              break
              ;;
            esac
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ ($arch|amd64|x86_64|aarch64|arm64).*\.deb$ ]] && url_match="$u" && break
          done
        fi
        if [[ -z "$url_match" ]]; then
          for u in $assets; do
            [[ "$u" =~ \.deb$ ]] && url_match="$u" && break
          done
        fi
      fi
    fi

    if [[ -z "$url_match" ]]; then
      msg_error "No suitable .deb asset found for $app"
      rm -rf "$tmpdir"
      return 1
    fi

    filename="${url_match##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$url_match" || {
      msg_error "Download failed: $url_match"
      rm -rf "$tmpdir"
      return 1
    }

    # apt refuses unreadable files owned by root-only perms
    chmod 644 "$tmpdir/$filename"
    # Optional conffile handling policy, controlled via environment
    local dpkg_opts=""
    [[ "${DPKG_FORCE_CONFOLD:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confold"
    [[ "${DPKG_FORCE_CONFNEW:-}" == "1" ]] && dpkg_opts="-o Dpkg::Options::=--force-confnew"
    # Prefer apt (resolves dependencies); fall back to raw dpkg
    DEBIAN_FRONTEND=noninteractive SYSTEMD_OFFLINE=1 $STD apt install -y $dpkg_opts "$tmpdir/$filename" || {
      SYSTEMD_OFFLINE=1 $STD dpkg -i "$tmpdir/$filename" || {
        msg_error "Both apt and dpkg installation failed"
        rm -rf "$tmpdir"
        return 1
      }
    }

  ### Prebuild Mode ###
  elif [[ "$mode" == "prebuild" ]]; then
    # Strip accidental surrounding double quotes from the pattern argument
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'prebuild' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    # Find the first asset whose filename matches the glob pattern
    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # Scan older releases when the newest has no matching asset
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "prebuild" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    curl $download_timeout -fsSL "${header[@]}" -o "$tmpdir/$filename" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    # Unpack into a staging dir first, then copy into the target
    # NOTE(review): $unpack_tmp is not removed on the success path — minor
    # temp-dir leak; error paths do clean it up.
    local unpack_tmp
    unpack_tmp=$(mktemp -d)
    mkdir -p "$target"
    if [[ "${CLEAN_INSTALL:-0}" == "1" ]]; then
      rm -rf "${target:?}/"*
    fi

    if [[ "$filename" == *.zip ]]; then
      ensure_dependencies unzip
      unzip -q "$tmpdir/$filename" -d "$unpack_tmp" || {
        msg_error "Failed to extract ZIP archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    elif [[ "$filename" == *.tar.* || "$filename" == *.tgz || "$filename" == *.txz ]]; then
      tar --no-same-owner -xf "$tmpdir/$filename" -C "$unpack_tmp" || {
        msg_error "Failed to extract TAR archive"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      }
    else
      msg_error "Unsupported archive format: $filename"
      rm -rf "$tmpdir" "$unpack_tmp"
      return 1
    fi

    # If the archive wraps everything in a single top-level directory,
    # copy that directory's contents; otherwise copy the staging root
    local top_entries inner_dir
    top_entries=$(find "$unpack_tmp" -mindepth 1 -maxdepth 1)
    if [[ "$(echo "$top_entries" | wc -l)" -eq 1 && -d "$top_entries" ]]; then
      inner_dir="$top_entries"
      shopt -s dotglob nullglob
      if compgen -G "$inner_dir/*" >/dev/null; then
        cp -r "$inner_dir"/* "$target/" || {
          msg_error "Failed to copy contents from $inner_dir to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Inner directory is empty: $inner_dir"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    else
      shopt -s dotglob nullglob
      if compgen -G "$unpack_tmp/*" >/dev/null; then
        cp -r "$unpack_tmp"/* "$target/" || {
          msg_error "Failed to copy contents to $target"
          rm -rf "$tmpdir" "$unpack_tmp"
          return 1
        }
      else
        msg_error "Unpacked archive is empty"
        rm -rf "$tmpdir" "$unpack_tmp"
        return 1
      fi
      shopt -u dotglob nullglob
    fi

  ### Singlefile Mode ###
  elif [[ "$mode" == "singlefile" ]]; then
    # Strip accidental surrounding double quotes from the pattern argument
    local pattern="${6%\"}"
    pattern="${pattern#\"}"
    [[ -z "$pattern" ]] && {
      msg_error "Mode 'singlefile' requires 6th parameter (asset filename pattern)"
      rm -rf "$tmpdir"
      return 1
    }

    local asset_url=""
    for u in $(_gl_asset_urls "$json"); do
      filename_candidate="${u##*/}"
      case "$filename_candidate" in
      $pattern)
        asset_url="$u"
        break
        ;;
      esac
    done

    # Scan older releases when the newest has no matching asset
    if [[ -z "$asset_url" ]]; then
      local fallback_json
      if fallback_json=$(_gl_scan_older_releases "$repo" "$repo_encoded" "https://gitlab.com" "singlefile" "$pattern" "$tag_name"); then
        json="$fallback_json"
        tag_name=$(echo "$json" | jq -r '.tag_name // empty')
        [[ "$tag_name" =~ ^v[0-9] ]] && version="${tag_name:1}" || version="$tag_name"
        msg_info "Fetching GitLab release: $app ($version)"
        for u in $(_gl_asset_urls "$json"); do
          filename_candidate="${u##*/}"
          case "$filename_candidate" in $pattern)
            asset_url="$u"
            break
            ;;
          esac
        done
      fi
    fi

    [[ -z "$asset_url" ]] && {
      msg_error "No asset matching '$pattern' found"
      rm -rf "$tmpdir"
      return 1
    }

    filename="${asset_url##*/}"
    mkdir -p "$target"

    # By default the downloaded file is renamed to the app name;
    # USE_ORIGINAL_FILENAME=true keeps the upstream filename
    local use_filename="${USE_ORIGINAL_FILENAME:-false}"
    local target_file="$app"
    [[ "$use_filename" == "true" ]] && target_file="$filename"

    curl $download_timeout -fsSL "${header[@]}" -o "$target/$target_file" "$asset_url" || {
      msg_error "Download failed: $asset_url"
      rm -rf "$tmpdir"
      return 1
    }

    # Make executable unless it is a .jar (run via java -jar instead)
    if [[ "$target_file" != *.jar && -f "$target/$target_file" ]]; then
      chmod +x "$target/$target_file"
    fi

  else
    msg_error "Unknown mode: $mode"
    rm -rf "$tmpdir"
    return 1
  fi

  # Record the deployed version for future update checks
  echo "$version" >"$version_file"
  msg_ok "Deployed: $app ($version)"
  rm -rf "$tmpdir"
}
|
|
||||||
|
|||||||
@@ -151,6 +151,23 @@ function check_proxmox_host() {
|
|||||||
# ==============================================================================
|
# ==============================================================================
|
||||||
# CHECK / INSTALL DOCKER
|
# CHECK / INSTALL DOCKER
|
||||||
# ==============================================================================
|
# ==============================================================================
|
||||||
|
function ensure_openssl() {
  # Ensure the openssl CLI is available, installing it via the platform's
  # package manager when missing. Exits 10 if no supported installer exists.
  command -v openssl &>/dev/null && return

  msg_info "Installing openssl"
  if [[ -f /etc/alpine-release ]]; then
    # Alpine-based environment
    $STD apk add openssl
  elif command -v apt-get &>/dev/null; then
    # Debian/Ubuntu-based environment
    $STD apt-get update
    $STD apt-get install -y openssl
  else
    msg_error "openssl is required but could not be installed automatically."
    exit 10
  fi
  msg_ok "Installed openssl"
}
|
||||||
|
|
||||||
function check_or_install_docker() {
|
function check_or_install_docker() {
|
||||||
if command -v docker &>/dev/null; then
|
if command -v docker &>/dev/null; then
|
||||||
msg_ok "Docker $(docker --version | cut -d' ' -f3 | tr -d ',') is available"
|
msg_ok "Docker $(docker --version | cut -d' ' -f3 | tr -d ',') is available"
|
||||||
@@ -160,6 +177,7 @@ function check_or_install_docker() {
|
|||||||
msg_error "Docker Compose plugin is not available. Please install it."
|
msg_error "Docker Compose plugin is not available. Please install it."
|
||||||
exit 10
|
exit 10
|
||||||
fi
|
fi
|
||||||
|
ensure_openssl
|
||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -183,6 +201,8 @@ function check_or_install_docker() {
|
|||||||
$STD sh <(curl -fsSL https://get.docker.com)
|
$STD sh <(curl -fsSL https://get.docker.com)
|
||||||
fi
|
fi
|
||||||
msg_ok "Installed Docker"
|
msg_ok "Installed Docker"
|
||||||
|
|
||||||
|
ensure_openssl
|
||||||
}
|
}
|
||||||
|
|
||||||
# ==============================================================================
|
# ==============================================================================
|
||||||
|
|||||||
Reference in New Issue
Block a user