Compare commits

...

39 Commits

Author SHA1 Message Date
Geoff Bourne
d919881092 Auto-merging via docker-versions-create 2020-12-19 08:23:58 -06:00
Geoff Bourne
f613228619 Restored support for PAPER_DOWNLOAD_URL
Fixes #687
2020-12-15 20:40:00 -06:00
Geoff Bourne
e406fee8fa Auto-merging via docker-versions-create 2020-12-15 13:59:46 -06:00
Silthus
ca9f883352 feat: add COPY_CONFIG_DEST option (#689) 2020-12-15 13:31:51 -06:00
Silthus
9d68fa3b88 feat: improved REMOVE_OLD_MODS option (#688) 2020-12-13 20:00:06 -06:00
Geoff Bourne
d3a5885d95 Fixed handling of query parameters in MODS url
For #684
2020-12-12 15:22:07 -06:00
Geoff Bourne
c1db13c1f6 Fixed dirname handling in find for SPIGOT WORLD handling
For #685
2020-12-12 11:10:13 -06:00
Mike Wilson
31b0f711b8 Fixing some spigot world import issues (#683)
* Fixing adding an existing spigot world and removing incorrectly identifying spigot worlds as multiple worlds

* Update start-finalSetupWorld

Co-authored-by: Geoff Bourne <itzgeoff@gmail.com>

Co-authored-by: Geoff Bourne <itzgeoff@gmail.com>
2020-12-12 08:26:32 -06:00
Geoff Bourne
59ca1ce3a6 Improved URL handling for GENERIC_PACK
For #684
2020-12-11 21:12:44 -06:00
Geoff Bourne
0f7bd5f4fd ci: Added adopt15 branch releases 2020-12-07 10:54:36 -06:00
Geoff Bourne
3497b06391 Auto-merging via docker-versions-create 2020-11-25 15:53:49 -06:00
Geoff Bourne
65b0e0d8bb Auto-merging via docker-versions-create 2020-08-09 13:06:24 -05:00
Geoff Bourne
c5b9c199d6 Auto-merging via docker-versions-create 2020-07-26 08:29:13 -05:00
Geoff Bourne
5a61465c09 ci: Migrated main build and test back to Hub 2020-07-26 08:28:40 -05:00
Geoff Bourne
8a324c30de Auto-merging via docker-versions-create 2020-07-18 18:39:40 -05:00
Geoff Bourne
e0cdf9e2ce Auto-merging via docker-versions-create 2020-07-11 13:12:41 -05:00
Geoff Bourne
57740cb749 Auto-merging via docker-versions-create 2020-07-10 17:10:43 -05:00
Geoff Bourne
966c74cd08 Auto-merging via docker-versions-create 2020-07-04 14:57:07 -05:00
Geoff Bourne
d84b58dfd0 Auto-merging via docker-versions-create 2020-06-20 15:44:09 -05:00
Geoff Bourne
7aaf106ffe Auto-merging via docker-versions-create
# Conflicts:
#	.circleci/config.yml
2020-06-19 13:25:41 -05:00
Geoff Bourne
5f77902441 Auto-merging via docker-versions-create 2020-05-20 08:14:14 -05:00
Geoff Bourne
c200efc9c9 Auto-merging via docker-versions-create 2020-05-02 09:33:49 -05:00
Geoff Bourne
e924126a56 Auto-merging via docker-versions-create 2020-04-25 12:10:22 -05:00
Geoff Bourne
bbd3d3cfc1 Auto-merging via docker-versions-create 2020-04-17 21:28:25 -05:00
Geoff Bourne
d77c19c69b Auto-merging via docker-versions-create 2020-04-11 08:51:36 -05:00
Geoff Bourne
7ee77e4f47 Auto-merging via docker-versions-create 2020-04-10 11:08:41 -05:00
Geoff Bourne
84d0cff4c8 Auto-merging via docker-versions-create 2020-04-03 13:31:30 -05:00
Geoff Bourne
70519b9764 Auto-merging via docker-versions-create
# Conflicts:
#	README.md
#	start-minecraftFinalSetup
2020-04-03 13:28:27 -05:00
Geoff Bourne
4683ea496d Auto-merging via docker-versions-create 2020-04-02 17:47:30 -05:00
Geoff Bourne
0e3a82f9d3 Auto-merging via docker-versions-create 2020-03-30 08:31:53 -05:00
Geoff Bourne
d2554f2271 Auto-merging via docker-versions-create 2020-03-26 20:54:00 -05:00
Geoff Bourne
55e62371ac Auto-merging via docker-versions-create 2020-02-01 08:52:51 -06:00
Geoff Bourne
c9a5fcfac8 Auto-merging via docker-versions-create 2020-01-17 08:29:05 -06:00
Geoff Bourne
a1f8154d05 Auto-merging via docker-versions-create 2019-11-16 09:30:54 -06:00
Geoff Bourne
e5d0a9362a Auto-merging via docker-versions-create 2019-11-16 09:04:17 -06:00
Geoff Bourne
ca9c280b0b Merge branch 'master' into openj9 2019-11-16 09:01:17 -06:00
Geoff Bourne
6ef4e984c7 ci: disable cross-building arm-v7 image 2019-08-13 20:06:57 -05:00
Geoff Bourne
ea4f78346a ci: try cross-building arm-v7 image 2019-08-13 20:04:34 -05:00
Geoff Bourne
007f9426bf Switch to OpenJ9 base image (#360)
(cherry picked from commit 93197ffb77)
2019-08-10 18:56:36 -05:00
11 changed files with 265 additions and 138 deletions

.circleci/config.yml (new file)

@@ -0,0 +1,25 @@
version: 2
jobs:
minecraft_server:
docker:
- image: circleci/buildpack-deps:stable
steps:
- checkout
- setup_remote_docker
- run:
name: Build image
command: docker build -t itzg/minecraft-server:${CIRCLE_BRANCH} .
# - run:
# name: Build arm v7 image
# command: docker build -t itzg/minecraft-server:${CIRCLE_BRANCH}-arm-v7 --platform linux/arm/v7 --build-arg ARCH=armv7 .
workflows:
version: 2
build:
jobs:
- minecraft_server:
filters:
branches:
ignore:
- armv7
- multiarch

.github/workflows/build-multiarch.yml (new file)

@@ -0,0 +1,25 @@
name: Build and publish multiarch
on:
push:
branches:
- multiarch
tags:
- "[0-9]+.[0-9]+.[0-9]+-multiarch"
jobs:
docker-buildx:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.2.0
- name: Get branch name
uses: nelonoel/branch-name@v1
- name: Docker Buildx
uses: ilteoood/docker_buildx@1.0.4
with:
publish: true
imageName: itzg/minecraft-server
tag: ${{ env.BRANCH_NAME }}
dockerHubUser: ${{ secrets.DOCKER_USER }}
dockerHubPassword: ${{ secrets.DOCKER_PASSWORD }}

(file name not shown — GitHub Actions build/test workflow)

@@ -9,6 +9,7 @@ on:
- adopt11
- adopt13
- adopt14
- adopt15
tags:
- "[0-9]+.[0-9]+.[0-9]+"
- "[0-9]+.[0-9]+.[0-9]+-openj9"
@@ -16,6 +17,7 @@ on:
- "[0-9]+.[0-9]+.[0-9]+-adopt11"
- "[0-9]+.[0-9]+.[0-9]+-adopt13"
- "[0-9]+.[0-9]+.[0-9]+-adopt14"
- "[0-9]+.[0-9]+.[0-9]+-adopt15"
jobs:
test:
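For instance, pushing a tag that matches the new pattern runs this workflow for the adopt15 variant (the version number here is illustrative):

    git tag 2021.1.0-adopt15
    git push origin 2021.1.0-adopt15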

(file name not shown — Dockerfile)

@@ -1,4 +1,4 @@
FROM openjdk:8u212-jre-alpine
FROM adoptopenjdk/openjdk8-openj9:alpine
LABEL org.opencontainers.image.authors="Geoff Bourne <itzgeoff@gmail.com>"
@@ -70,7 +70,7 @@ COPY log4j2.xml /tmp/log4j2.xml
WORKDIR /data
ENV UID=1000 GID=1000 \
JVM_XX_OPTS="-XX:+UseG1GC" MEMORY="1G" \
MEMORY="1G" \
TYPE=VANILLA VERSION=LATEST FORGEVERSION=RECOMMENDED SPONGEBRANCH=STABLE SPONGEVERSION= FABRICVERSION=LATEST LEVEL=world \
PVP=true DIFFICULTY=easy ENABLE_RCON=true RCON_PORT=25575 RCON_PASSWORD=minecraft \
LEVEL_TYPE=DEFAULT SERVER_PORT=25565 ONLINE_MODE=TRUE SERVER_NAME="Dedicated Server" \

(file name not shown — README.md)

@@ -301,13 +301,17 @@ or downloading a world with the `WORLD` option.
There are two additional volumes that can be mounted; `/mods` and `/config`.
Any files in either of these filesystems will be copied over to the main
`/data` filesystem before starting Minecraft. If you want old mods to be removed as the `/mods` content is updated, then add `-e REMOVE_OLD_MODS=TRUE`.
`/data` filesystem before starting Minecraft. If you want old mods to be removed as the `/mods` content is updated, then add `-e REMOVE_OLD_MODS=TRUE`. If you are running a `BUKKIT` distribution, this will affect all files inside the `plugins/` directory. You can fine-tune the removal process by specifying the `REMOVE_OLD_MODS_INCLUDE` and `REMOVE_OLD_MODS_EXCLUDE` variables. By default, everything will be removed. You can also specify the `REMOVE_OLD_MODS_DEPTH` (default 16) variable to only delete files up to a certain level.
> For example: `-e REMOVE_OLD_MODS=TRUE -e REMOVE_OLD_MODS_INCLUDE="*.jar" -e REMOVE_OLD_MODS_DEPTH=1` will remove all old jar files that are directly inside the `plugins/` or `mods/` directory.
This works well if you want to have a common set of modules in a separate
location, but still have multiple worlds with different server requirements
in either persistent volumes or a downloadable archive.
You can specify the destination of the configs that are located inside the `/config` mount by setting the `COPY_CONFIG_DEST` variable. The configs are copied recursively to the `/data/config` directory by default. If a file was updated directly inside the `/data/*` directory and is newer than the file in the `/config/*` mount, it will not be overridden.
> For example: `-v ./config:/config -e COPY_CONFIG_DEST=/data` will allow you to copy over your `bukkit.yml` and so on directly into the server directory.
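> Putting the pieces together, a full run might look like this (a sketch only; paths, server type, and tag are illustrative): `docker run -d -v ./mods:/mods -v ./config:/config -e TYPE=PAPER -e REMOVE_OLD_MODS=TRUE -e REMOVE_OLD_MODS_INCLUDE="*.jar" -e REMOVE_OLD_MODS_DEPTH=1 -e COPY_CONFIG_DEST=/data itzg/minecraft-server`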
### Replacing variables inside configs
@@ -1076,6 +1080,14 @@ via a `JVM_XX_OPTS` environment variable.
In some cases, e.g. after removing mods, it can be necessary to start Minecraft with an additional `-D` parameter like `-Dfml.queryResult=confirm`. To address this you can use the environment variable `JVM_DD_OPTS`, which builds the parameters from a given list of values separated by spaces, but without the `-D` prefix. For systems (e.g. Plesk) that don't allow `=` inside values, a `:` (colon) can be used instead. The example above would then look like this:
`JVM_DD_OPTS=fml.queryResult:confirm`, and will be converted to `-Dfml.queryResult=confirm`.
The container uses [OpenJ9](https://www.eclipse.org/openj9/docs) and a couple of J9 options are
simplified by environment variables:
- `-e TUNE_VIRTUALIZED=TRUE` : enables the option to
[optimize for virtualized environments](https://www.eclipse.org/openj9/docs/xtunevirtualized/)
- `-e TUNE_NURSERY_SIZES=TRUE` : configures nursery sizes where the initial size is 50%
of the `MAX_MEMORY` and the max size is 80%.
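As a worked example (assuming `MAX_MEMORY=2G`; the computation is the one shown in the final-setup script below):

    MAX_MEMORY=2G    ->  MAX_MEMORY_MB=2048
    initial nursery: 2048 / 2   = 1024  ->  -Xmns1024M
    max nursery:     2048 * 4/5 = 1638  ->  -Xmnx1638M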
### Enable Remote JMX for Profiling
To enable remote JMX, such as for profiling with VisualVM or JMC, add the environment variable `ENABLE_JMX=true` and add a port forwarding of TCP port 7091, such as:
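A minimal sketch of such a run (ports and image tag are illustrative; depending on your network you may also need to point `JMX_HOST` at the Docker host's address, as handled in the final-setup script below):

    docker run -d -p 25565:25565 -p 7091:7091 -e ENABLE_JMX=true itzg/minecraft-server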

(file name not shown — docker-versions-create script)

@@ -1,7 +1,7 @@
#!/bin/bash
#set -x
# Use this variable to indicate a list of branches that docker hub is watching
branches_list=('openj9' 'openj9-nightly' 'adopt11' 'adopt13' 'adopt14' 'multiarch' 'multiarch-latest')
branches_list=('openj9' 'openj9-nightly' 'adopt11' 'adopt13' 'adopt14' 'adopt15' 'multiarch' 'multiarch-latest')
function TrapExit {
echo "Checking out back in master"

(file name not shown — PaperMC deploy script)

@@ -4,54 +4,66 @@
set -o pipefail
isDebugging && set -x
# PaperMC API v2 docs : https://papermc.io/api/docs/swagger-ui/index.html?configUrl=/api/openapi/swagger-config
if [[ $PAPER_DOWNLOAD_URL ]]; then
export SERVER=$(getFilenameFromUrl "${PAPER_DOWNLOAD_URL}")
build=$(curl -fsSL "https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}" -H "accept: application/json" \
| jq '.builds[-1]')
case $? in
0)
;;
22)
versions=$(curl -fsSL "https://papermc.io/api/v2/projects/paper" -H "accept: application/json")
if [[ $VERSION = LATEST ]]; then
VANILLA_VERSION=$(echo "$versions" | jq -r '.versions[-1]')
log "WARN: using ${VANILLA_VERSION} since that's the latest provided by PaperMC"
# re-execute the current script with the newly computed version
exec $0 "$@"
fi
log "ERROR: ${VANILLA_VERSION} is not published by PaperMC"
log " Set VERSION to one of the following: "
log " $(echo "$versions" | jq -r '.versions | join(", ")')"
if [ -f "$SERVER" ]; then
zarg=(-z "$SERVER")
fi
echo "Preparing custom PaperMC jar from $PAPER_DOWNLOAD_URL"
curl -fsSL -o "$SERVER" "${zarg[@]}" "${PAPER_DOWNLOAD_URL}"
else
# PaperMC API v2 docs : https://papermc.io/api/docs/swagger-ui/index.html?configUrl=/api/openapi/swagger-config
build=$(curl -fsSL "https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}" -H "accept: application/json" \
| jq '.builds[-1]')
case $? in
0)
;;
22)
versions=$(curl -fsSL "https://papermc.io/api/v2/projects/paper" -H "accept: application/json")
if [[ $VERSION = LATEST ]]; then
VANILLA_VERSION=$(echo "$versions" | jq -r '.versions[-1]')
log "WARN: using ${VANILLA_VERSION} since that's the latest provided by PaperMC"
# re-execute the current script with the newly computed version
exec $0 "$@"
fi
log "ERROR: ${VANILLA_VERSION} is not published by PaperMC"
log " Set VERSION to one of the following: "
log " $(echo "$versions" | jq -r '.versions | join(", ")')"
exit 1
;;
*)
echo "ERROR: unknown error while looking up PaperMC version=${VANILLA_VERSION}"
exit 1
;;
esac
if [ $? != 0 ]; then
echo "ERROR: failed to lookup PaperMC build from version ${VANILLA_VERSION}"
exit 1
;;
*)
echo "ERROR: unknown error while looking up PaperMC version=${VANILLA_VERSION}"
fi
export SERVER=$(curl -fsSL "https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}/builds/${build}" -H "accept: application/json" \
| jq -r '.downloads.application.name')
if [ $? != 0 ]; then
echo "ERROR: failed to lookup PaperMC download file from version=${VANILLA_VERSION} build=${build}"
exit 1
;;
esac
if [ $? != 0 ]; then
echo "ERROR: failed to lookup PaperMC build from version ${VANILLA_VERSION}"
exit 1
fi
fi
export SERVER=$(curl -fsSL "https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}/builds/${build}" -H "accept: application/json" \
| jq -r '.downloads.application.name')
if [ $? != 0 ]; then
echo "ERROR: failed to lookup PaperMC download file from version=${VANILLA_VERSION} build=${build}"
exit 1
fi
if [ -f "$SERVER" ]; then
zarg=(-z "$SERVER")
fi
if [ -f "$SERVER" ]; then
zarg=(-z "$SERVER")
fi
log "Downloading PaperMC $VANILLA_VERSION (build $build) ..."
curl -fsSL -o "$SERVER" "${zarg[@]}" \
"https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}/builds/${build}/downloads/${SERVER}" \
-H "accept: application/java-archive"
if [ $? != 0 ]; then
echo "ERROR: failed to download PaperMC from version=${VANILLA_VERSION} build=${build} download=${SERVER}"
exit 1
log "Downloading PaperMC $VANILLA_VERSION (build $build) ..."
curl -fsSL -o "$SERVER" "${zarg[@]}" \
"https://papermc.io/api/v2/projects/paper/versions/${VANILLA_VERSION}/builds/${build}/downloads/${SERVER}" \
-H "accept: application/java-archive"
if [ $? != 0 ]; then
echo "ERROR: failed to download PaperMC from version=${VANILLA_VERSION} build=${build} download=${SERVER}"
exit 1
fi
fi
# Normalize on Spigot for downstream operations
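Disentangled, the restored behavior amounts to the following sketch (helper names are the ones added to start-utils further below; the else branch is the pre-existing PaperMC API lookup, abbreviated here):

    if [[ $PAPER_DOWNLOAD_URL ]]; then
      # derive the local jar name from the URL, with any query string stripped
      export SERVER=$(getFilenameFromUrl "${PAPER_DOWNLOAD_URL}")
      # curl -z skips the download when the existing local copy is still current
      if [ -f "$SERVER" ]; then
        zarg=(-z "$SERVER")
      fi
      echo "Preparing custom PaperMC jar from $PAPER_DOWNLOAD_URL"
      curl -fsSL -o "$SERVER" "${zarg[@]}" "${PAPER_DOWNLOAD_URL}"
    else
      # unchanged: resolve the latest build for VANILLA_VERSION via the PaperMC v2 API,
      # look up its download file name, and download it as before
      ...
    fi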

(file name not shown — mods/modpack setup script)

@@ -1,19 +1,26 @@
#!/bin/bash
set -e
set -e -o pipefail
. ${SCRIPTS:-/}start-utils
if isDebugging; then
set -x
fi
# CURSE_URL_BASE used in manifest downloads below
CURSE_URL_BASE=${CURSE_URL_BASE:-https://minecraft.curseforge.com/projects}
# Remove old mods/plugins
if [ "$REMOVE_OLD_MODS" = "TRUE" ]; then
if [ "$TYPE" = "SPIGOT" ]; then
rm -rf /data/plugins/*
else
rm -rf /data/mods/*
fi
if isTrue ${REMOVE_OLD_MODS}; then
remove_mods_dest="/data/mods"
case ${TYPE} in
SPIGOT|BUKKIT|PAPER)
remove_mods_dest="/data/plugins"
;;
esac
log "Removing old mods in $remove_mods_dest..."
find $remove_mods_dest -mindepth 1 -maxdepth ${REMOVE_OLD_MODS_DEPTH:-16} -wholename "${REMOVE_OLD_MODS_INCLUDE:-*}" -not -wholename "${REMOVE_OLD_MODS_EXCLUDE}" -delete
fi
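With the README example above (`REMOVE_OLD_MODS_INCLUDE="*.jar"`, `REMOVE_OLD_MODS_DEPTH=1`) on a SPIGOT/BUKKIT/PAPER server, that `find` call expands to roughly:

    find /data/plugins -mindepth 1 -maxdepth 1 -wholename "*.jar" -not -wholename "" -delete

i.e. only jar files directly inside `plugins/` are deleted, and an empty `REMOVE_OLD_MODS_EXCLUDE` excludes nothing.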
# If supplied with a URL for a modpack (simple zip of jars), download it and unpack
@@ -22,7 +29,7 @@ if [[ "$MODPACK" ]]; then
if [[ "${MODPACK}" == *.zip ]]; then
downloadUrl="${MODPACK}"
else
downloadUrl=$(curl -Ls -o /dev/null -w %{url_effective} $MODPACK)
downloadUrl=$(curl -Ls -o /dev/null -w %{effective_url} $MODPACK)
if ! [[ $downloadUrl == *.zip ]]; then
log "ERROR Invalid URL given for MODPACK: $downloadUrl resolved from $MODPACK"
log " Must be HTTP or HTTPS and a ZIP file"
@@ -58,39 +65,31 @@ fi
# If supplied with a URL for a plugin download it.
if [[ "$MODS" ]]; then
if [ "$TYPE" = "SPIGOT" ]; then
out_dir=/data/plugins
else
out_dir=/data/mods
fi
mkdir -p "$out_dir"
for i in ${MODS//,/ }
do
if isURL $i; then
if [[ $i == *.jar ]]; then
EFFECTIVE_MOD_URL=$i
else
EFFECTIVE_MOD_URL=$(curl -Ls -o /dev/null -w %{url_effective} $i)
if ! [[ $EFFECTIVE_MOD_URL == *.jar ]]; then
log "ERROR Invalid URL given in MODS: $EFFECTIVE_MOD_URL resolved from $i"
log " Must be HTTP or HTTPS and a JAR file"
exit 1
log "Downloading mod/plugin $i ..."
effective_url=$(resolveEffectiveUrl "$i")
if isValidFileURL jar "${effective_url}"; then
out_file=$(getFilenameFromUrl "${effective_url}")
if ! curl -fsSL -o "${out_dir}/$out_file" "${effective_url}"; then
log "ERROR: failed to download from $i into $out_dir"
exit 2
fi
fi
log "Downloading mod/plugin via HTTP"
log " from $EFFECTIVE_MOD_URL ..."
if ! curl -sSL -o /tmp/${EFFECTIVE_MOD_URL##*/} $EFFECTIVE_MOD_URL; then
log "ERROR: failed to download from $EFFECTIVE_MOD_URL to /tmp/${EFFECTIVE_MOD_URL##*/}"
else
log "ERROR: $effective_url resolved from $i is not a valid jar URL"
exit 2
fi
if [ "$TYPE" = "SPIGOT" ]; then
mkdir -p /data/plugins
mv /tmp/${EFFECTIVE_MOD_URL##*/} /data/plugins/${EFFECTIVE_MOD_URL##*/}
else
mkdir -p /data/mods
mv /tmp/${EFFECTIVE_MOD_URL##*/} /data/mods/${EFFECTIVE_MOD_URL##*/}
fi
rm -f /tmp/${EFFECTIVE_MOD_URL##*/}
else
log "ERROR Invalid URL given in MODS: $i"
exit 1
exit 2
fi
done
fi
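Disentangled, the reworked per-mod download loop is roughly (a sketch using the helpers added to start-utils below):

    for i in ${MODS//,/ }; do
      isURL "$i" || { log "ERROR Invalid URL given in MODS: $i"; exit 2; }
      log "Downloading mod/plugin $i ..."
      effective_url=$(resolveEffectiveUrl "$i")
      isValidFileURL jar "${effective_url}" || { log "ERROR: $effective_url resolved from $i is not a valid jar URL"; exit 2; }
      out_file=$(getFilenameFromUrl "${effective_url}")
      # download straight into /data/mods or /data/plugins instead of staging in /tmp
      curl -fsSL -o "${out_dir}/$out_file" "${effective_url}" || { log "ERROR: failed to download from $i into $out_dir"; exit 2; }
    done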
@@ -100,7 +99,7 @@ if [[ "$MANIFEST" ]]; then
EFFECTIVE_MANIFEST_FILE=$MANIFEST
elif isURL "$MANIFEST"; then
EFFECTIVE_MANIFEST_FILE=/tmp/manifest.json
EFFECTIVE_MANIFEST_URL=$(curl -Ls -o /dev/null -w %{url_effective} $MANIFEST)
EFFECTIVE_MANIFEST_URL=$(curl -Ls -o /dev/null -w %{effective_url} $MANIFEST)
curl -Ls -o $EFFECTIVE_MANIFEST_FILE "$EFFECTIVE_MANIFEST_URL"
else
log "MANIFEST='$MANIFEST' is not a valid manifest url or location"
@@ -121,7 +120,7 @@ case "X$EFFECTIVE_MANIFEST_FILE" in
do
if [ ! -f $MOD_DIR/${p}_${f}.jar ]
then
redirect_url="$(curl -Ls -o /dev/null -w %{url_effective} ${CURSE_URL_BASE}/${p})"
redirect_url="$(curl -Ls -o /dev/null -w %{effective_url} ${CURSE_URL_BASE}/${p})"
url="$redirect_url/download/${f}/file"
log Downloading curseforge mod $url
# Manifest usually doesn't have mod names. Using id should be fine, tho
@@ -140,10 +139,9 @@ fi
if [[ "${GENERIC_PACK}" ]]; then
if isURL "${GENERIC_PACK}"; then
generic_pack_url=${GENERIC_PACK}
GENERIC_PACK=/tmp/$(basename ${generic_pack_url})
log "Downloading generic pack from ${generic_pack_url} ..."
curl -fsSL -o ${GENERIC_PACK} ${generic_pack_url}
log "Downloading generic pack ..."
curl -fsSL -o /tmp/generic_pack.zip "${GENERIC_PACK}"
GENERIC_PACK=/tmp/generic_pack.zip
fi
sum_file=/data/.generic_pack.sum

(file name not shown — world import/setup script)

@@ -25,7 +25,12 @@ if [[ "$WORLD" ]] && ( isTrue "${FORCE_WORLD_COPY}" || [ ! -d "$worldDest" ] );
mkdir -p /tmp/world-data
(cd /tmp/world-data && unzip -o -q "$zipSrc")
baseDirs=$(find /tmp/world-data -name "level.dat" -exec dirname "{}" \;)
if [ "$TYPE" = "SPIGOT" ]; then
baseDirs=$(find /tmp/world-data -name "level.dat" -not -path "*_nether*" -not -path "*_the_end*" -exec dirname "{}" \;)
else
baseDirs=$(find /tmp/world-data -name "level.dat" -exec dirname "{}" \;)
fi
count=$(echo "$baseDirs" | wc -l)
if [[ $count -gt 1 ]]; then
baseDir="$(echo "$baseDirs" | sed -n ${WORLD_INDEX:-1}p)"
@@ -38,6 +43,11 @@ if [[ "$WORLD" ]] && ( isTrue "${FORCE_WORLD_COPY}" || [ ! -d "$worldDest" ] );
exit 1
fi
rsync --remove-source-files --recursive --delete "$baseDir/" "$worldDest"
if [ "$TYPE" = "SPIGOT" ]; then
log "Copying end and nether ..."
[ -d "${baseDir}_nether" ] && rsync --remove-source-files --recursive --delete "${baseDir}_nether/" "${worldDest}_nether"
[ -d "${baseDir}_the_end" ] && rsync --remove-source-files --recursive --delete "${baseDir}_the_end/" "${worldDest}_the_end"
fi
else
log "Cloning world directory from $WORLD ..."
rsync --recursive --delete "${WORLD%/}"/ "$worldDest"
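So for a SPIGOT-type server whose archive contains `myworld`, `myworld_nether`, and `myworld_the_end`, and assuming `$worldDest` resolves to `/data/world`, the import now ends up as (illustrative names):

    /data/world           <- myworld (level.dat found by the filtered find)
    /data/world_nether    <- myworld_nether (copied by the added rsync)
    /data/world_the_end   <- myworld_the_end (copied by the added rsync)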

(file name not shown — final server setup script)

@@ -2,6 +2,8 @@
. ${SCRIPTS:-/}start-utils
: ${COPY_CONFIG_DEST:="/data/config"}
if [ -n "$OPS" ]; then
log "Setting/adding ops"
rm -rf /data/ops.txt.converted
@@ -54,20 +56,14 @@ done
if [ -d /mods ]; then
log "Copying any mods over..."
mkdir -p /data/mods
if isTrue "${REMOVE_OLD_MODS}"; then
rsyncArgs=(--delete)
fi
rsync -a --out-format="update:%f:Last Modified %M" "${rsyncArgs[@]}" --prune-empty-dirs --update /mods /data
fi
[ -d /data/config ] || mkdir /data/config
for c in /config/*
do
if [ -f "$c" ]; then
log Copying configuration $(basename "$c")
cp -rf "$c" /data/config
fi
done
if [ -d /config ]; then
log "Copying any configs from /config to $COPY_CONFIG_DEST"
mkdir -p $COPY_CONFIG_DEST
rsync -a --out-format="update:%f:Last Modified %M" "${rsyncArgs[@]}" --prune-empty-dirs --update /config/ $COPY_CONFIG_DEST
fi
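With the README example above (`-v ./config:/config -e COPY_CONFIG_DEST=/data`), that rsync amounts to roughly:

    rsync -a --prune-empty-dirs --update /config/ /data

where `--update` is what keeps a file already modified under `/data` from being overwritten by an older copy from `/config`.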
EXTRA_ARGS=""
# Optional disable console
@@ -91,6 +87,23 @@ if [ -n "$JVM_DD_OPTS" ]; then
done
fi
if isTrue ${TUNE_VIRTUALIZED}; then
JVM_XX_OPTS="${JVM_XX_OPTS} -Xtune:virtualized"
fi
if isTrue ${TUNE_NURSERY_SIZES}; then
case ${MAX_MEMORY^^} in
*G)
MAX_MEMORY_MB=$(( ${MAX_MEMORY%?} * 1024 )) ;;
*M)
MAX_MEMORY_MB=${MAX_MEMORY%?} ;;
esac
NURSERY_MINIMUM=$(( ${MAX_MEMORY_MB} / 2 ))
NURSERY_MAXIMUM=$(( ${MAX_MEMORY_MB} * 4/5 ))
JVM_XX_OPTS="${JVM_XX_OPTS} -Xmns${NURSERY_MINIMUM}M -Xmnx${NURSERY_MAXIMUM}M"
fi
if isTrue ${ENABLE_JMX}; then
: ${JMX_HOST:=0.0.0.0}
: ${JMX_PORT:=7091}

(file name not shown — start-utils helper functions)

@@ -1,8 +1,14 @@
#!/bin/bash
function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
function join_by() {
local d=$1
shift
echo -n "$1"
shift
printf "%s" "${@/#/$d}"
}
function isURL {
function isURL() {
local value=$1
if [[ ${value:0:8} == "https://" || ${value:0:7} == "http://" ]]; then
@@ -12,90 +18,114 @@ function isURL {
fi
}
function isTrue {
function isValidFileURL() {
suffix=${1:?Missing required suffix arg}
url=${2:?Missing required url arg}
[[ "$url" == http*://*.${suffix} || "$url" == http*://*.${suffix}\?* ]]
}
function resolveEffectiveUrl() {
url="${1:?Missing required url argument}"
if ! curl -Ls -o /dev/null -w %{url_effective} "$url"; then
log "ERROR failed to resolve effective URL from $url"
exit 2
fi
}
function getFilenameFromUrl() {
url="${1:?Missing required url argument}"
strippedOfQuery="${url%\?*}"
basename "$strippedOfQuery"
}
function isTrue() {
local value=${1,,}
result=
case ${value} in
true|on)
result=0
;;
*)
result=1
;;
true | on)
result=0
;;
*)
result=1
;;
esac
return ${result}
}
function isDebugging {
if [[ -v DEBUG ]] && [[ ${DEBUG^^} = TRUE ]]; then
function isDebugging() {
if [[ -v DEBUG ]] && [[ ${DEBUG^^} == TRUE ]]; then
return 0
else
return 1
fi
}
function debug {
function debug() {
if isDebugging; then
log "DEBUG: $*"
fi
}
function logn {
function logn() {
echo -n "[init] $*"
}
function log {
function log() {
echo "[init] $*"
}
function logAutopause {
function logAutopause() {
echo "[Autopause loop] $*"
}
function logAutopauseAction {
function logAutopauseAction() {
echo "[$(date -Iseconds)] [Autopause] $*"
}
function normalizeMemSize {
function normalizeMemSize() {
local scale=1
case ${1,,} in
*k)
scale=1024;;
*m)
scale=1048576;;
*g)
scale=1073741824;;
*k)
scale=1024
;;
*m)
scale=1048576
;;
*g)
scale=1073741824
;;
esac
val=${1:0: -1}
echo $(( val * scale ))
val=${1:0:-1}
echo $((val * scale))
}
function versionLessThan {
function versionLessThan() {
local activeParts
IFS=. read -ra activeParts <<< "${VANILLA_VERSION}"
IFS=. read -ra activeParts <<<"${VANILLA_VERSION}"
local givenParts
IFS=. read -ra givenParts <<< "$1"
IFS=. read -ra givenParts <<<"$1"
if (( ${#activeParts[@]} < 2 )); then
if ((${#activeParts[@]} < 2)); then
return 1
fi
if (( ${#activeParts[@]} == 2 )); then
if (( activeParts[0] < givenParts[0] )) || \
(( activeParts[0] == givenParts[0] && activeParts[1] < givenParts[1] )); then
if ((${#activeParts[@]} == 2)); then
if ((activeParts[0] < givenParts[0])) ||
((activeParts[0] == givenParts[0] && activeParts[1] < givenParts[1])); then
return 0
else
return 1
fi
else
if (( activeParts[0] < givenParts[0] )) || \
(( activeParts[0] == givenParts[0] && activeParts[1] < givenParts[1] )) || \
(( activeParts[0] == givenParts[0] && activeParts[1] == givenParts[1] && activeParts[2] < givenParts[2] )); then
if ((activeParts[0] < givenParts[0])) ||
((activeParts[0] == givenParts[0] && activeParts[1] < givenParts[1])) ||
((activeParts[0] == givenParts[0] && activeParts[1] == givenParts[1] && activeParts[2] < givenParts[2])); then
return 0
else
return 1
@@ -115,10 +145,10 @@ requireVar() {
}
function writeEula() {
if ! echo "# Generated via Docker on $(date)
if ! echo "# Generated via Docker on $(date)
eula=${EULA,,}
" > /data/eula.txt; then
log "ERROR: unable to write eula to /data. Please make sure attached directory is writable by uid=${UID}"
exit 2
fi
" >/data/eula.txt; then
log "ERROR: unable to write eula to /data. Please make sure attached directory is writable by uid=${UID}"
exit 2
fi
}
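As a quick check of the new URL helpers above (the URL is hypothetical):

    $ getFilenameFromUrl "https://example.com/builds/paper-1.16.4.jar?token=abc"
    paper-1.16.4.jar

which is how `PAPER_DOWNLOAD_URL` and the `MODS` handling derive a local file name from URLs that carry query parameters.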