Mirror of https://github.com/pixelfed/pixelfed.git
implement automatic shellcheck linting
commit ed0f9d64c8 (parent 901d11df60)
15 changed files with 171 additions and 81 deletions
.github/workflows/docker.yml (vendored): 12 additions
@@ -39,6 +39,18 @@ jobs:
           dockerfile: Dockerfile
           failure-threshold: error
 
+  shellcheck:
+    name: Shellcheck
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Run ShellCheck
+        uses: ludeeus/action-shellcheck@master
+        env:
+          SHELLCHECK_OPTS: --shell=bash --external-sources
+        with:
+          scandir: docker/
+
   build:
     runs-on: ubuntu-latest
 
docker/.shellcheckrc (new file): 12 additions
@@ -0,0 +1,12 @@
+# See: https://github.com/koalaman/shellcheck/blob/master/shellcheck.1.md#rc-files
+
+source-path=SCRIPTDIR
+
+# Allow opening any 'source'd file, even if not specified as input
+external-sources=true
+
+# Turn on warnings for unquoted variables with safe values
+enable=quote-safe-variables
+
+# Turn on warnings for unassigned uppercase variables
+enable=check-unassigned-uppercase
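A rough local equivalent of the new CI check, handy before pushing (assumes shellcheck is installed locally; the find pattern is an assumption about which files the action ends up scanning):

find docker/ -type f -name '*.sh' -print0 \
    | xargs -0 shellcheck --shell=bash --external-sources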
@@ -1,5 +1,8 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
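For context, the '# shellcheck source=SCRIPTDIR/../helpers.sh' directive added above only affects static analysis: it points shellcheck at a concrete file to read, since the runtime value of ${ENTRYPOINT_ROOT} cannot be resolved and would otherwise trigger SC1090 ("can't follow non-constant source"). A minimal sketch with an illustrative variable and path:

#!/bin/bash
: "${LIB_DIR:="/opt/lib"}"

# SCRIPTDIR below is resolved relative to the directory of the script being checked
# shellcheck source=SCRIPTDIR/../helpers.sh
source "${LIB_DIR}/helpers.sh"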
@@ -1,19 +1,22 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
 # Ensure the two Docker volumes and dot-env files are owned by the runtime user as other scripts
 # will be writing to these
-run-as-current-user chown --verbose ${RUNTIME_UID}:${RUNTIME_GID} "./.env"
-run-as-current-user chown --verbose ${RUNTIME_UID}:${RUNTIME_GID} "./bootstrap/cache"
-run-as-current-user chown --verbose ${RUNTIME_UID}:${RUNTIME_GID} "./storage"
+run-as-current-user chown --verbose "${RUNTIME_UID}:${RUNTIME_GID}" "./.env"
+run-as-current-user chown --verbose "${RUNTIME_UID}:${RUNTIME_GID}" "./bootstrap/cache"
+run-as-current-user chown --verbose "${RUNTIME_UID}:${RUNTIME_GID}" "./storage"
 
 # Optionally fix ownership of configured paths
-: ${ENTRYPOINT_ENSURE_OWNERSHIP_PATHS:=""}
+: "${ENTRYPOINT_ENSURE_OWNERSHIP_PATHS:=""}"
 
 declare -a ensure_ownership_paths=()
-IFS=' ' read -a ensure_ownership_paths <<<"${ENTRYPOINT_ENSURE_OWNERSHIP_PATHS}"
+IFS=' ' read -ar ensure_ownership_paths <<<"${ENTRYPOINT_ENSURE_OWNERSHIP_PATHS}"
 
 if [[ ${#ensure_ownership_paths[@]} == 0 ]]; then
     log-info "No paths has been configured for ownership fixes via [\$ENTRYPOINT_ENSURE_OWNERSHIP_PATHS]."
@@ -23,5 +26,5 @@ fi
 
 for path in "${ensure_ownership_paths[@]}"; do
     log-info "Ensure ownership of [${path}] is correct"
-    stream-prefix-command-output run-as-current-user chown --recursive ${RUNTIME_UID}:${RUNTIME_GID} "${path}"
+    stream-prefix-command-output run-as-current-user chown --recursive "${RUNTIME_UID}:${RUNTIME_GID}" "${path}"
 done
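A minimal sketch of the word-splitting problem (SC2086) that quoting ${RUNTIME_UID}:${RUNTIME_GID} above guards against; the values are invented for the demo:

#!/bin/bash
RUNTIME_UID="33 extra"   # deliberately malformed value, for demonstration only
RUNTIME_GID="33"

set -- chown --verbose ${RUNTIME_UID}:${RUNTIME_GID} "./.env"
echo "unquoted: $# words"   # 5 words: chown --verbose 33 extra:33 ./.env

set -- chown --verbose "${RUNTIME_UID}:${RUNTIME_GID}" "./.env"
echo "quoted:   $# words"   # 4 words: chown --verbose '33 extra:33' ./.env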
@@ -13,7 +13,7 @@ entrypoint-set-script-name "${BASH_SOURCE[0]}"
 load-config-files
 
 # We assign a 1MB buffer to the just-in-time calculated max post size to allow for fields and overhead
-: ${POST_MAX_SIZE_BUFFER:=1M}
+: "${POST_MAX_SIZE_BUFFER:=1M}"
 log-info "POST_MAX_SIZE_BUFFER is set to [${POST_MAX_SIZE_BUFFER}]"
 buffer=$(numfmt --invalid=fail --from=auto --to=none --to-unit=K "${POST_MAX_SIZE_BUFFER}")
 log-info "POST_MAX_SIZE_BUFFER converted to KB is [${buffer}]"
@@ -23,7 +23,7 @@ log-info "POST_MAX_SIZE will be calculated by [({MAX_PHOTO_SIZE} * {MAX_ALBUM_LE
 log-info " MAX_PHOTO_SIZE=${MAX_PHOTO_SIZE}"
 log-info " MAX_ALBUM_LENGTH=${MAX_ALBUM_LENGTH}"
 log-info " POST_MAX_SIZE_BUFFER=${buffer}"
-: ${POST_MAX_SIZE:=$(numfmt --invalid=fail --from=auto --from-unit=K --to=si $(((${MAX_PHOTO_SIZE} * ${MAX_ALBUM_LENGTH}) + ${buffer})))}
+: "${POST_MAX_SIZE:=$(numfmt --invalid=fail --from=auto --from-unit=K --to=si $(((MAX_PHOTO_SIZE * MAX_ALBUM_LENGTH) + buffer)))}"
 log-info "POST_MAX_SIZE was calculated to [${POST_MAX_SIZE}]"
 
 # NOTE: must export the value so it's available in other scripts!
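For readers unfamiliar with the ': "${VAR:=default}"' idiom used throughout these scripts: ':' is the no-op builtin, and the ${VAR:=default} expansion assigns the default only when VAR is unset or empty; the surrounding quotes are what this commit adds to satisfy shellcheck. A small sketch:

#!/bin/bash
unset POST_MAX_SIZE_BUFFER
: "${POST_MAX_SIZE_BUFFER:=1M}"
echo "$POST_MAX_SIZE_BUFFER"     # -> 1M (default applied)

POST_MAX_SIZE_BUFFER=8M
: "${POST_MAX_SIZE_BUFFER:=1M}"
echo "$POST_MAX_SIZE_BUFFER"     # -> 8M (existing value wins)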
@@ -1,14 +1,17 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
 # Show [git diff] of templates being rendered (will help verify output)
-: ${ENTRYPOINT_SHOW_TEMPLATE_DIFF:=1}
+: "${ENTRYPOINT_SHOW_TEMPLATE_DIFF:=1}"
 # Directory where templates can be found
-: ${ENTRYPOINT_TEMPLATE_DIR:=/docker/templates/}
+: "${ENTRYPOINT_TEMPLATE_DIR:=/docker/templates/}"
 # Root path to write template template_files to (default is '', meaning it will be written to /<path>)
-: ${ENTRYPOINT_TEMPLATE_OUTPUT_PREFIX:=}
+: "${ENTRYPOINT_TEMPLATE_OUTPUT_PREFIX:=}"
 
 declare template_file relative_template_file_path output_file_dir
 
@@ -16,6 +19,8 @@ declare template_file relative_template_file_path output_file_dir
 load-config-files
 
 # export all dot-env variables so they are available in templating
+#
+# shellcheck disable=SC2068
 export ${seen_dot_env_variables[@]}
 
 find "${ENTRYPOINT_TEMPLATE_DIR}" -follow -type f -print | while read -r template_file; do
@@ -46,7 +51,7 @@ find "${ENTRYPOINT_TEMPLATE_DIR}" -follow -type f -print | while read -r templat
 
     # Render the template
     log-info "Running [gomplate] on [${template_file}] --> [${output_file_path}]"
-    cat "${template_file}" | gomplate >"${output_file_path}"
+    gomplate <"${template_file}" >"${output_file_path}"
 
     # Show the diff from the envsubst command
     if is-true "${ENTRYPOINT_SHOW_TEMPLATE_DIFF}"; then
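The SC2068 disable above is intentional: the array holds variable names, and the unquoted expansion hands each name to export as its own argument. A self-contained sketch (the variable names are illustrative, not taken from the real dot-env files):

#!/bin/bash
APP_NAME="pixelfed"
APP_URL="http://localhost"
seen_dot_env_variables=(APP_NAME APP_URL)

# Word splitting into separate names is wanted here:
# shellcheck disable=SC2068
export ${seen_dot_env_variables[@]}

bash -c 'echo "$APP_NAME at $APP_URL"'   # child process sees the exported values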
@@ -1,5 +1,8 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
@@ -1,10 +1,13 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
 # Allow automatic applying of outstanding/new migrations on startup
-: ${DOCKER_RUN_ONE_TIME_SETUP_TASKS:=1}
+: "${DOCKER_RUN_ONE_TIME_SETUP_TASKS:=1}"
 
 if is-false "${DOCKER_RUN_ONE_TIME_SETUP_TASKS}"; then
     log-warning "Automatic run of the 'One-time setup tasks' is disabled."
@@ -1,10 +1,13 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
 # Allow automatic applying of outstanding/new migrations on startup
-: ${DOCKER_APPLY_NEW_MIGRATIONS_AUTOMATICALLY:=0}
+: "${DOCKER_APPLY_NEW_MIGRATIONS_AUTOMATICALLY:=0}"
 
 # Wait for the database to be ready
 await-database-ready
@@ -1,5 +1,8 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
@@ -1,5 +1,8 @@
 #!/bin/bash
-source /docker/helpers.sh
+: "${ENTRYPOINT_ROOT:="/docker"}"
+
+# shellcheck source=SCRIPTDIR/../helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 entrypoint-set-script-name "$0"
 
@@ -4,42 +4,48 @@ if [[ ${ENTRYPOINT_SKIP:=0} != 0 ]]; then
     exec "$@"
 fi
 
 # Directory where entrypoint scripts lives
-: ${ENTRYPOINT_ROOT:="/docker/entrypoint.d/"}
+: "${ENTRYPOINT_ROOT:="/docker"}"
+export ENTRYPOINT_ROOT
+
+# Directory where entrypoint scripts lives
+: "${ENTRYPOINT_D_ROOT:="${ENTRYPOINT_ROOT}/entrypoint.d/"}"
+export ENTRYPOINT_D_ROOT
 
 # Space separated list of scripts the entrypoint runner should skip
-: ${ENTRYPOINT_SKIP_SCRIPTS:=""}
+: "${ENTRYPOINT_SKIP_SCRIPTS:=""}"
 
 # Load helper scripts
-source /docker/helpers.sh
+#
+# shellcheck source=SCRIPTDIR/helpers.sh
+source "${ENTRYPOINT_ROOT}/helpers.sh"
 
 # Set the entrypoint name for logging
 entrypoint-set-script-name "entrypoint.sh"
 
 # Convert ENTRYPOINT_SKIP_SCRIPTS into a native bash array for easier lookup
 declare -a skip_scripts
-IFS=' ' read -a skip_scripts <<<"$ENTRYPOINT_SKIP_SCRIPTS"
+# shellcheck disable=SC2034
+IFS=' ' read -ar skip_scripts <<<"$ENTRYPOINT_SKIP_SCRIPTS"
 
 # Ensure the entrypoint root folder exists
-mkdir -p "${ENTRYPOINT_ROOT}"
+mkdir -p "${ENTRYPOINT_D_ROOT}"
 
-# If ENTRYPOINT_ROOT directory is empty, warn and run the regular command
-if is-directory-empty "${ENTRYPOINT_ROOT}"; then
-    log-warning "No files found in ${ENTRYPOINT_ROOT}, skipping configuration"
+# If ENTRYPOINT_D_ROOT directory is empty, warn and run the regular command
+if is-directory-empty "${ENTRYPOINT_D_ROOT}"; then
+    log-warning "No files found in ${ENTRYPOINT_D_ROOT}, skipping configuration"
 
     exec "$@"
 fi
 
-acquire-lock
+acquire-lock "entrypoint.sh"
 
 # Start scanning for entrypoint.d files to source or run
-log-info "looking for shell scripts in [${ENTRYPOINT_ROOT}]"
+log-info "looking for shell scripts in [${ENTRYPOINT_D_ROOT}]"
 
-find "${ENTRYPOINT_ROOT}" -follow -type f -print | sort -V | while read -r file; do
+find "${ENTRYPOINT_D_ROOT}" -follow -type f -print | sort -V | while read -r file; do
     # Skip the script if it's in the skip-script list
-    if in-array $(get-entrypoint-script-name "${file}") skip_scripts; then
-        log-warning "Skipping script [${script_name}] since it's in the skip list (\$ENTRYPOINT_SKIP_SCRIPTS)"
+    if in-array "$(get-entrypoint-script-name "${file}")" skip_scripts; then
+        log-warning "Skipping script [${file}] since it's in the skip list (\$ENTRYPOINT_SKIP_SCRIPTS)"
 
         continue
     fi
@@ -56,6 +62,7 @@ find "${ENTRYPOINT_ROOT}" -follow -type f -print | sort -V | while read -r file;
     log-info "${notice_message_color}Sourcing [${file}]${color_clear}"
     log-info ""
 
+    # shellcheck disable=SC1090
     source "${file}"
 
     # the sourced file will (should) than the log prefix, so this restores our own
@@ -82,7 +89,7 @@ find "${ENTRYPOINT_ROOT}" -follow -type f -print | sort -V | while read -r file;
     esac
 done
 
-release-lock
+release-lock "entrypoint.sh"
 
 log-info "Configuration complete; ready for start up"
 
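A sketch of how the skip list becomes an array and is matched (script names are invented; the conventional shellcheck-friendly spelling of the read flags is '-r -a' or '-ra'):

#!/bin/bash
ENTRYPOINT_SKIP_SCRIPTS="01-permissions.sh 05-templating.sh"

declare -a skip_scripts=()
IFS=' ' read -r -a skip_scripts <<<"${ENTRYPOINT_SKIP_SCRIPTS}"

for script in "${skip_scripts[@]}"; do
    echo "would skip: ${script}"
done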
@@ -3,6 +3,9 @@ set -e -o errexit -o nounset -o pipefail
 
 [[ ${ENTRYPOINT_DEBUG:=0} == 1 ]] && set -x
 
+: "${RUNTIME_UID:="33"}"
+: "${RUNTIME_GID:="33"}"
+
 # Some splash of color for important messages
 declare -g error_message_color="\033[1;31m"
 declare -g warn_message_color="\033[1;34m"
@@ -23,11 +26,14 @@ declare -a dot_env_files=(
 # environment keys seen when source dot files (so we can [export] them)
 declare -ga seen_dot_env_variables=()
 
-declare -g docker_state_path="$(readlink -f ./storage/docker)"
+declare -g docker_state_path
+docker_state_path="$(readlink -f ./storage/docker)"
 
 declare -g docker_locks_path="${docker_state_path}/lock"
 declare -g docker_once_path="${docker_state_path}/once"
 
-declare -g runtime_username=$(id -un ${RUNTIME_UID})
+declare -g runtime_username
+runtime_username=$(id -un "${RUNTIME_UID}")
 
 # We should already be in /var/www, but just to be explicit
 cd /var/www || log-error-and-exit "could not change to /var/www"
@@ -38,7 +44,7 @@ function entrypoint-set-script-name() {
     script_name_previous="${script_name}"
     script_name="${1}"
 
-    log_prefix="[entrypoint / $(get-entrypoint-script-name $1)] - "
+    log_prefix="[entrypoint / $(get-entrypoint-script-name "$1")] - "
 }
 
 # @description Restore the log prefix to the previous value that was captured in [entrypoint-set-script-name ]
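Background on splitting 'declare -g var="$(command)"' into two statements (SC2155): when the declaration and the command substitution share a line, the command's exit status is masked by declare's own, almost always successful, status. A small sketch:

#!/bin/bash
declare -g docker_state_path
if ! docker_state_path="$(readlink -f ./storage/docker)"; then
    echo "readlink failed" >&2
fi
echo "state path: ${docker_state_path:-<unset>}"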
@@ -86,20 +92,18 @@ function run-command-as() {
 
     if [[ $exit_code != 0 ]]; then
         log-error "❌ Error!"
-        return $exit_code
+        return "$exit_code"
     fi
 
     log-info-stderr "✅ OK!"
-    return $exit_code
+    return "$exit_code"
 }
 
 # @description Streams stdout from the command and echo it
 # with log prefixing.
 # @see stream-prefix-command-output
 function stream-stdout-handler() {
-    local prefix="${1:-}"
-
-    while read line; do
+    while read -r line; do
         log-info "(stdout) ${line}"
     done
 }
@@ -108,7 +112,7 @@ function stream-stdout-handler() {
 # with a bit of color and log prefixing.
 # @see stream-prefix-command-output
 function stream-stderr-handler() {
-    while read line; do
+    while read -r line; do
         log-info-stderr "(${error_message_color}stderr${color_clear}) ${line}"
     done
 }
@@ -123,11 +127,13 @@ function stream-prefix-command-output() {
 
     # if stdout is being piped, print it like normal with echo
     if [ ! -t 1 ]; then
+        # shellcheck disable=SC1007
         stdout= echo >&1 -ne
     fi
 
     # if stderr is being piped, print it like normal with echo
     if [ ! -t 2 ]; then
+        # shellcheck disable=SC1007
         stderr= echo >&2 -ne
     fi
 
@@ -141,11 +147,11 @@ function log-error() {
     local msg
 
     if [[ $# -gt 0 ]]; then
-        msg="$@"
+        msg="$*"
     elif [[ ! -t 0 ]]; then
-        read msg || log-error-and-exit "[${FUNCNAME}] could not read from stdin"
+        read -r msg || log-error-and-exit "[${FUNCNAME[0]}] could not read from stdin"
     else
-        log-error-and-exit "[${FUNCNAME}] did not receive any input arguments and STDIN is empty"
+        log-error-and-exit "[${FUNCNAME[0]}] did not receive any input arguments and STDIN is empty"
     fi
 
     echo -e "${error_message_color}${log_prefix}ERROR - ${msg}${color_clear}" >/dev/stderr
@@ -170,11 +176,11 @@ function log-warning() {
     local msg
 
     if [[ $# -gt 0 ]]; then
-        msg="$@"
+        msg="$*"
     elif [[ ! -t 0 ]]; then
-        read msg || log-error-and-exit "[${FUNCNAME}] could not read from stdin"
+        read -r msg || log-error-and-exit "[${FUNCNAME[0]}] could not read from stdin"
     else
-        log-error-and-exit "[${FUNCNAME}] did not receive any input arguments and STDIN is empty"
+        log-error-and-exit "[${FUNCNAME[0]}] did not receive any input arguments and STDIN is empty"
     fi
 
     echo -e "${warn_message_color}${log_prefix}WARNING - ${msg}${color_clear}" >/dev/stderr
@@ -187,15 +193,15 @@ function log-info() {
     local msg
 
     if [[ $# -gt 0 ]]; then
-        msg="$@"
+        msg="$*"
     elif [[ ! -t 0 ]]; then
-        read msg || log-error-and-exit "[${FUNCNAME}] could not read from stdin"
+        read -r msg || log-error-and-exit "[${FUNCNAME[0]}] could not read from stdin"
     else
-        log-error-and-exit "[${FUNCNAME}] did not receive any input arguments and STDIN is empty"
+        log-error-and-exit "[${FUNCNAME[0]}] did not receive any input arguments and STDIN is empty"
     fi
 
     if [ -z "${ENTRYPOINT_QUIET_LOGS:-}" ]; then
-        echo -e "${log_prefix}${msg}"
+        echo -e "${notice_message_color}${log_prefix}${msg}${color_clear}"
     fi
 }
 
@@ -206,11 +212,11 @@ function log-info-stderr() {
     local msg
 
     if [[ $# -gt 0 ]]; then
-        msg="$@"
+        msg="$*"
     elif [[ ! -t 0 ]]; then
-        read msg || log-error-and-exit "[${FUNCNAME}] could not read from stdin"
+        read -r msg || log-error-and-exit "[${FUNCNAME[0]}] could not read from stdin"
     else
-        log-error-and-exit "[${FUNCNAME}] did not receive any input arguments and STDIN is empty"
+        log-error-and-exit "[${FUNCNAME[0]}] did not receive any input arguments and STDIN is empty"
     fi
 
     if [ -z "${ENTRYPOINT_QUIET_LOGS:-}" ]; then
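Why msg="$@" became msg="$*" in the log helpers: when collapsing all arguments into a single string, "$*" joins them with one space, which is what a log line wants, and SC2124 flags assigning "$@" to a plain variable. A tiny sketch:

#!/bin/bash
demo() {
    local joined="$*"
    printf 'joined: <%s>\n' "${joined}"
}
demo first "second word" third   # -> joined: <first second word third>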
@@ -231,15 +237,19 @@ function load-config-files() {
         fi
 
         log-info "Sourcing ${file}"
+        # shellcheck disable=SC1090
         source "${file}"
 
         # find all keys in the dot-env file and store them in our temp associative array
-        for k in "$(grep -v '^#' "${file}" | cut -d"=" -f1 | xargs)"; do
+        for k in $(grep -v '^#' "${file}" | cut -d"=" -f1 | xargs); do
             _tmp_dot_env_keys[$k]=1
         done
     done
 
-    seen_dot_env_variables=(${!_tmp_dot_env_keys[@]})
+    # Used in other scripts (like templating) for [export]-ing the values
+    #
+    # shellcheck disable=SC2034
+    seen_dot_env_variables=("${!_tmp_dot_env_keys[@]}")
 }
 
 # @description Checks if $needle exists in $haystack
@@ -290,7 +300,7 @@ function file-exists() {
 # @exitcode 0 If $1 contains files
 # @exitcode 1 If $1 does *NOT* contain files
 function is-directory-empty() {
-    ! find "${1}" -mindepth 1 -maxdepth 1 -type f -print -quit 2>/dev/null | read v
+    ! find "${1}" -mindepth 1 -maxdepth 1 -type f -print -quit 2>/dev/null | read -r
 }
 
 # @description Ensures a directory exists (via mkdir)
@@ -301,11 +311,11 @@ function ensure-directory-exists() {
     stream-prefix-command-output mkdir -pv "$@"
 }
 
-# @description Find the relative path for a entrypoint script by removing the ENTRYPOINT_ROOT prefix
+# @description Find the relative path for a entrypoint script by removing the ENTRYPOINT_D_ROOT prefix
 # @arg $1 string The path to manipulate
 # @stdout The relative path to the entrypoint script
 function get-entrypoint-script-name() {
-    echo "${1#"$ENTRYPOINT_ROOT"}"
+    echo "${1#"$ENTRYPOINT_D_ROOT"}"
 }
 
 # @description Ensure a command is only run once (via a 'lock' file) in the storage directory.
@@ -373,12 +383,14 @@ function release-lock() {
 # @arg $@ string The list of trap signals to register
 function on-trap() {
     local trap_add_cmd=$1
-    shift || log-error-and-exit "${FUNCNAME} usage error"
+    shift || log-error-and-exit "${FUNCNAME[0]} usage error"
 
     for trap_add_name in "$@"; do
         trap -- "$(
             # helper fn to get existing trap command from output
             # of trap -p
+            #
+            # shellcheck disable=SC2317
             extract_trap_cmd() { printf '%s\n' "${3:-}"; }
             # print existing trap command with newline
             eval "extract_trap_cmd $(trap -p "${trap_add_name}")"
@@ -403,12 +415,14 @@ function await-database-ready() {
 
     case "${DB_CONNECTION:-}" in
     mysql)
+        # shellcheck disable=SC2154
         while ! echo "SELECT 1" | mysql --user="${DB_USERNAME}" --password="${DB_PASSWORD}" --host="${DB_HOST}" "${DB_DATABASE}" --silent >/dev/null; do
            staggered-sleep
         done
         ;;
 
     pgsql)
+        # shellcheck disable=SC2154
         while ! echo "SELECT 1" | PGPASSWORD="${DB_PASSWORD}" psql --user="${DB_USERNAME}" --host="${DB_HOST}" "${DB_DATABASE}" >/dev/null; do
            staggered-sleep
         done
@@ -417,6 +431,7 @@ function await-database-ready() {
     sqlsrv)
         log-warning "Don't know how to check if SQLServer is *truely* ready or not - so will just check if we're able to connect to it"
 
+        # shellcheck disable=SC2154
         while ! timeout 1 bash -c "cat < /dev/null > /dev/tcp/${DB_HOST}/${DB_PORT}"; do
            staggered-sleep
         done
@@ -437,7 +452,7 @@ function await-database-ready() {
 # @description sleeps between 1 and 3 seconds to ensure a bit of randomness
 # in multiple scripts/containers doing work almost at the same time.
 function staggered-sleep() {
-    sleep $(get-random-number-between 1 3)
+    sleep "$(get-random-number-between 1 3)"
 }
 
 # @description Helper function to get a random number between $1 and $2
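The sqlsrv branch leans on bash's /dev/tcp pseudo-device for a raw TCP reachability probe. A standalone sketch, with host and port chosen purely as examples:

#!/bin/bash
DB_HOST="127.0.0.1"
DB_PORT="1433"

if timeout 1 bash -c "cat < /dev/null > /dev/tcp/${DB_HOST}/${DB_PORT}"; then
    echo "port ${DB_PORT} on ${DB_HOST} is reachable"
else
    echo "connection failed or timed out"
fi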
@@ -459,13 +474,13 @@ function show-call-stack() {
     local src
 
     # to avoid noise we start with 1 to skip the get_stack function
-    for ((i = 1; i < $stack_size; i++)); do
+    for ((i = 1; i < stack_size; i++)); do
         func="${FUNCNAME[$i]}"
-        [ x$func = x ] && func=MAIN
+        [ -z "$func" ] && func="MAIN"
 
         lineno="${BASH_LINENO[$((i - 1))]}"
         src="${BASH_SOURCE[$i]}"
-        [ x"$src" = x ] && src=non_file_source
+        [ -z "$src" ] && src="non_file_source"
 
         log-error " at: ${func} ${src}:${lineno}"
     done
@@ -1,6 +1,9 @@
 #!/bin/bash
 set -ex -o errexit -o nounset -o pipefail
 
+: "${APT_PACKAGES_EXTRA:=""}"
+: "${DOTENV_LINTER_VERSION:=""}"
+
 # Ensure we keep apt cache around in a Docker environment
 rm -f /etc/apt/apt.conf.d/docker-clean
 echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' >/etc/apt/apt.conf.d/keep-cache
@@ -73,16 +76,16 @@ apt-get update
 apt-get upgrade -y
 
 apt-get install -y \
-    ${standardPackages[*]} \
-    ${imageOptimization[*]} \
-    ${imageProcessing[*]} \
-    ${gdDependencies[*]} \
-    ${videoProcessing[*]} \
-    ${databaseDependencies[*]} \
-    ${APT_PACKAGES_EXTRA}
+    "${standardPackages[@]}" \
+    "${imageOptimization[@]}" \
+    "${imageProcessing[@]}" \
+    "${gdDependencies[@]}" \
+    "${videoProcessing[@]}" \
+    "${databaseDependencies[@]}" \
+    "${APT_PACKAGES_EXTRA}"
 
 locale-gen
 update-locale
 
 # Install dotenv linter (https://github.com/dotenv-linter/dotenv-linter)
-curl -sSfL https://raw.githubusercontent.com/dotenv-linter/dotenv-linter/master/install.sh | sh -s -- -b /usr/local/bin ${DOTENV_LINTER_VERSION}
+curl -sSfL https://raw.githubusercontent.com/dotenv-linter/dotenv-linter/master/install.sh | sh -s -- -b /usr/local/bin "${DOTENV_LINTER_VERSION}"
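The apt-get change swaps unquoted ${array[*]} expansions for quoted "${array[@]}": the latter yields one word per element, while the former relies on word splitting and breaks on elements containing spaces or glob characters. A quick sketch with made-up package names:

#!/bin/bash
pkgs=(curl "libpng 1.6")            # second element contains a space, on purpose
printf '<%s> ' "${pkgs[@]}"; echo   # -> <curl> <libpng 1.6>
# Unquoted on purpose, to show the splitting:
# shellcheck disable=SC2048,SC2086
printf '<%s> ' ${pkgs[*]}; echo     # -> <curl> <libpng> <1.6>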
@@ -1,6 +1,12 @@
 #!/bin/bash
 set -ex -o errexit -o nounset -o pipefail
 
+: "${PHP_PECL_EXTENSIONS:=""}"
+: "${PHP_PECL_EXTENSIONS_EXTRA:=""}"
+: "${PHP_EXTENSIONS:=""}"
+: "${PHP_EXTENSIONS_EXTRA:=""}"
+: "${PHP_EXTENSIONS_DATABASE:=""}"
+
 # Grab the PHP source code so we can compile against it
 docker-php-source extract
 
@@ -14,7 +20,7 @@ docker-php-ext-configure gd \
 # Optional script folks can copy into their image to do any [docker-php-ext-configure] work before the [docker-php-ext-install]
 # this can also overwirte the [gd] configure above by simply running it again
 if [[ -f /install/php-extension-configure.sh ]]; then
-    if [ !-x "$f" ]; then
+    if [ ! -x "/install/php-extension-configure.sh" ]; then
         echo >&2 "ERROR: found /install/php-extension-configure.sh but its not executable - please [chmod +x] the file!"
         exit 1
     fi
@@ -23,10 +29,19 @@ if [[ -f /install/php-extension-configure.sh ]]; then
 fi
 
 # Install pecl extensions
-pecl install ${PHP_PECL_EXTENSIONS} ${PHP_PECL_EXTENSIONS_EXTRA}
+pecl install "${PHP_PECL_EXTENSIONS}" "${PHP_PECL_EXTENSIONS_EXTRA}"
 
 # PHP extensions (dependencies)
-docker-php-ext-install -j$(nproc) ${PHP_EXTENSIONS} ${PHP_EXTENSIONS_EXTRA} ${PHP_EXTENSIONS_DATABASE}
+docker-php-ext-install \
+    -j "$(nproc)" \
+    "${PHP_EXTENSIONS}" \
+    "${PHP_EXTENSIONS_EXTRA}" \
+    "${PHP_EXTENSIONS_DATABASE}"
 
 # Enable all extensions
-docker-php-ext-enable ${PHP_PECL_EXTENSIONS} ${PHP_PECL_EXTENSIONS_EXTRA} ${PHP_EXTENSIONS} ${PHP_EXTENSIONS_EXTRA} ${PHP_EXTENSIONS_DATABASE}
+docker-php-ext-enable \
+    "${PHP_PECL_EXTENSIONS}" \
+    "${PHP_PECL_EXTENSIONS_EXTRA}" \
+    "${PHP_EXTENSIONS}" \
+    "${PHP_EXTENSIONS_EXTRA}" \
+    "${PHP_EXTENSIONS_DATABASE}"