|
|
|
|
@@ -1,6 +1,6 @@
|
|
|
|
|
#!/usr/bin/env bash
# dockcheck - checks running docker containers for available image updates.
# NOTE(review): the previous VERSION="v0.5.1" assignment was dead code
# (immediately overwritten) and has been removed.
VERSION="v0.5.3.0"
# ChangeNotes: DEPENDENCY WARNING: now requires jq. And upstreaming changes from sudo-kraken/podcheck
### ChangeNotes: Bugfixes - local image check changed, Gotify-template fixed
Github="https://github.com/mag37/dockcheck"
RawUrl="https://raw.githubusercontent.com/mag37/dockcheck/main/dockcheck.sh"
|
|
|
|
|
|
|
|
|
|
@@ -173,27 +173,64 @@ SearchName="$1"
|
|
|
|
|
# Create array of excludes
# Split the comma-separated $Exclude string into the Excludes array.
# The IFS=',' prefix is scoped to the 'read' command only and does not persist.
# NOTE(review): 'unset IFS' removes IFS entirely (bash then falls back to the
# default $' \t\n' splitting) - harmless here since the prefix assignment never
# changed the shell's IFS, but confirm no earlier code relies on a custom IFS.
IFS=',' read -r -a Excludes <<< "$Exclude" ; unset IFS
|
|
|
|
|
|
|
|
|
|
# Check if required binary exists in PATH or directory
|
|
|
|
|
# Static binary downloader for dependencies
|
|
|
|
|
# Download a static binary dependency into the script's working directory
# and mark it executable.
# Arguments:
#   $1 - filename to save the binary as (e.g. "jq", "regctl")
#   $2 - download URL containing the literal "TEMP" as an architecture placeholder
# Globals read: ScriptWorkDir, c_red, c_reset
# Exits: on unsupported architecture, or when neither curl nor wget exists.
binary_downloader() {
  BinaryName="$1"
  BinaryUrl="$2"
  # 'uname -m' is POSIX; '--machine' is a GNU-only long option.
  case "$(uname -m)" in
    x86_64|amd64)  architecture="amd64" ;;
    arm64|aarch64) architecture="arm64" ;;
    *) printf "\n%bArchitecture not supported, exiting.%b\n" "$c_red" "$c_reset" ; exit 1 ;;
  esac
  # Substitute the detected architecture into the URL placeholder.
  GetUrl="${BinaryUrl/TEMP/"$architecture"}"
  # Quote the URL (SC2086); curl -f fails on HTTP errors instead of saving
  # the server's error page as an "executable".
  if command -v curl >/dev/null 2>&1 ; then curl -fL "$GetUrl" > "$ScriptWorkDir/$BinaryName" ;
  elif command -v wget >/dev/null 2>&1 ; then wget "$GetUrl" -O "$ScriptWorkDir/$BinaryName" ;
  else printf "%s\n" "curl/wget not available - get $BinaryName manually from the repo link, exiting."; exit 1;
  fi
  # Only chmod if the download actually produced a file.
  [[ -f "$ScriptWorkDir/$BinaryName" ]] && chmod +x "$ScriptWorkDir/$BinaryName"
}
|
|
|
|
|
|
|
|
|
|
# Determine the host distribution family and set the global PkgInstaller to
# the matching package-manager install command. When no known marker file is
# found, PkgInstaller is set to "ERROR" and a warning is printed so callers
# fall back to the static-binary route.
distro_checker() {
  local entry
  PkgInstaller="ERROR"
  # Checked in the same order as before: Arch, RedHat, SuSE, Debian.
  for entry in \
    "/etc/arch-release|pacman -S" \
    "/etc/redhat-release|dnf install" \
    "/etc/SuSE-release|zypper install" \
    "/etc/debian_version|apt-get install" ; do
    if [[ -f "${entry%%|*}" ]] ; then
      PkgInstaller="${entry#*|}"
      break
    fi
  done
  if [[ "$PkgInstaller" == "ERROR" ]] ; then
    printf "\n%bNo distribution could be determined%b, falling back to static binary.\n" "$c_yellow" "$c_reset"
  fi
}
|
|
|
|
|
|
|
|
|
|
# Dependency check for jq in PATH or directory
# Resolution order: jq from PATH, then a previously downloaded static binary
# in the script directory, otherwise prompt the user to install or download.
if [[ $(command -v jq) ]]; then jqbin="jq" ;
elif [[ -f "$ScriptWorkDir/jq" ]]; then jqbin="$ScriptWorkDir/jq" ;
else
  printf "%s\n" "Required dependency 'jq' missing, do you want to install it?"
  # y = install via the distro's package manager (requires sudo)
  # s = download a static binary into $ScriptWorkDir
  # anything else (default: no) = abort
  read -r -p "y: With packagemanager (sudo). / s: Download static binary. y/s/[n] " GetJq
  GetJq=${GetJq:-no} # set default to no if nothing is given
  if [[ "$GetJq" =~ [yYsS] ]] ; then
    # Package-manager route only: detect the distro to pick the install command.
    [[ "$GetJq" =~ [yY] ]] && distro_checker
    if [[ -n "$PkgInstaller" && "$PkgInstaller" != "ERROR" ]] ; then
      # Subshell isolates the intentionally word-split $PkgInstaller command.
      (sudo $PkgInstaller jq) ; PkgExitcode="$?"
      [[ "$PkgExitcode" == 0 ]] && jqbin="jq" || printf "\n%bPackagemanager install failed%b, falling back to static binary.\n" "$c_yellow" "$c_reset"
    fi
    # Static-binary fallback. When the user chose 's', PkgExitcode is unset,
    # so the '!= 0' test is true; it also triggers when distro detection or
    # the package install failed.
    if [[ "$GetJq" =~ [nN] || "$PkgInstaller" == "ERROR" || "$PkgExitcode" != 0 ]] ; then
      binary_downloader "jq" "https://github.com/jqlang/jq/releases/latest/download/jq-linux-TEMP"
      [[ -f "$ScriptWorkDir/jq" ]] && jqbin="$ScriptWorkDir/jq"
    fi
  else printf "\n%bDependency missing, exiting.%b\n" "$c_red" "$c_reset" ; exit 1 ;
  fi
fi
# Final check if binary is correct
$jqbin --version &> /dev/null || { printf "%s\n" "jq is not working - try to remove it and re-download it, exiting."; exit 1; }
|
|
|
|
|
|
|
|
|
|
# Dependency check for regctl in PATH or directory
# Resolution order mirrors the jq check: PATH first, then a previously
# downloaded static binary in the script directory, otherwise offer to
# download one via binary_downloader.
# NOTE(review): this span contained two interleaved generations of the same
# check (an unreachable second prompt after 'exit 1', plus architecture/curl
# logic duplicating binary_downloader); collapsed to the single current form.
if [[ $(command -v regctl) ]]; then regbin="regctl" ;
elif [[ -f "$ScriptWorkDir/regctl" ]]; then regbin="$ScriptWorkDir/regctl" ;
else
  read -r -p "Required dependency 'regctl' missing, do you want it downloaded? y/[n] " GetRegctl
  if [[ "$GetRegctl" =~ [yY] ]] ; then
    # Delegate arch detection, curl/wget fallback and chmod to the helper.
    binary_downloader "regctl" "https://github.com/regclient/regclient/releases/latest/download/regctl-linux-TEMP"
    [[ -f "$ScriptWorkDir/regctl" ]] && regbin="$ScriptWorkDir/regctl"
  else printf "\n%bDependency missing, exiting.%b\n" "$c_red" "$c_reset" ; exit 1 ;
  fi
fi
# Final check if binary is correct
|
|
|
|
|
@@ -210,12 +247,6 @@ else
|
|
|
|
|
exit 1
|
|
|
|
|
fi
|
|
|
|
|
|
|
|
|
|
# Check for jq binary
# Accept either jq in PATH or the static fallback binary in the script
# directory; requiring PATH only would defeat the earlier static-binary
# download fallback.
if [[ ! $(command -v jq) && ! -f "$ScriptWorkDir/jq" ]] ; then
  printf "%s\n" "No jq binary, please install jq and try again, exiting."
  exit 1
fi
|
|
|
|
|
|
|
|
|
|
# Numbered List function
|
|
|
|
|
options() {
|
|
|
|
|
num=1
|
|
|
|
|
@@ -253,8 +284,9 @@ for i in $(docker ps $Stopped --filter "name=$SearchName" --format '{{.Names}}')
|
|
|
|
|
progress_bar "$RegCheckQue" "$ContCount"
|
|
|
|
|
# Looping every item over the list of excluded names and skipping
|
|
|
|
|
for e in "${Excludes[@]}" ; do [[ "$i" == "$e" ]] && continue 2 ; done
|
|
|
|
|
ImageId=$(docker inspect "$i" --format='{{.Image}}')
|
|
|
|
|
RepoUrl=$(docker inspect "$i" --format='{{.Config.Image}}')
|
|
|
|
|
LocalHash=$(docker image inspect "$RepoUrl" --format '{{.RepoDigests}}')
|
|
|
|
|
LocalHash=$(docker image inspect "$ImageId" --format '{{.RepoDigests}}')
|
|
|
|
|
# Checking for errors while setting the variable
|
|
|
|
|
if RegHash=$(${t_out} $regbin -v error image digest --list "$RepoUrl" 2>&1) ; then
|
|
|
|
|
if [[ "$LocalHash" = *"$RegHash"* ]] ; then
|
|
|
|
|
@@ -315,17 +347,17 @@ if [ -n "$GotUpdates" ] ; then
|
|
|
|
|
# Extract labels and metadata
|
|
|
|
|
ContLabels=$(docker inspect "$i" --format '{{json .Config.Labels}}')
|
|
|
|
|
ContImage=$(docker inspect "$i" --format='{{.Config.Image}}')
|
|
|
|
|
ContPath=$(jq -r '."com.docker.compose.project.working_dir"' <<< "$ContLabels")
|
|
|
|
|
ContPath=$($jqbin -r '."com.docker.compose.project.working_dir"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContPath" == "null" ] && ContPath=""
|
|
|
|
|
ContConfigFile=$(jq -r '."com.docker.compose.project.config_files"' <<< "$ContLabels")
|
|
|
|
|
ContConfigFile=$($jqbin -r '."com.docker.compose.project.config_files"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContConfigFile" == "null" ] && ContConfigFile=""
|
|
|
|
|
ContName=$(jq -r '."com.docker.compose.service"' <<< "$ContLabels")
|
|
|
|
|
ContName=$($jqbin -r '."com.docker.compose.service"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContName" == "null" ] && ContName=""
|
|
|
|
|
ContEnv=$(jq -r '."com.docker.compose.project.environment_file"' <<< "$ContLabels")
|
|
|
|
|
ContEnv=$($jqbin -r '."com.docker.compose.project.environment_file"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContEnv" == "null" ] && ContEnv=""
|
|
|
|
|
ContUpdateLabel=$(jq -r '."mag37.dockcheck.update"' <<< "$ContLabels")
|
|
|
|
|
ContUpdateLabel=$($jqbin -r '."mag37.dockcheck.update"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContUpdateLabel" == "null" ] && ContUpdateLabel=""
|
|
|
|
|
ContRestartStack=$(jq -r '."mag37.dockcheck.restart-stack"' <<< "$ContLabels")
|
|
|
|
|
ContRestartStack=$($jqbin -r '."mag37.dockcheck.restart-stack"' <<< "$ContLabels")
|
|
|
|
|
[ "$ContRestartStack" == "null" ] && ContRestartStack=""
|
|
|
|
|
|
|
|
|
|
# Checking if compose-values are empty - hence started with docker run
|
|
|
|
|
|