Initial commit
This commit is contained in:
70
apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh
Normal file
70
apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/bin/bash
set -euo pipefail

# DatabaseExporter.sh — dump the auth/characters/world databases table-by-table
# into data/sql/base/<folder>, one .sql file per table, for a database squash.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# On MSYS/Git-Bash the root comes back as /c/..., convert it to C:/...
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

BASE_OUTPUT_DIR="$PROJECT_ROOT/data/sql/base"

# Connection settings. The password is read with -s so it is not echoed.
read -r -p "Enter MySQL username: " DB_USER
read -r -s -p "Enter MySQL password: " DB_PASS
echo
read -r -p "Enter MySQL host (default: localhost): " DB_HOST
DB_HOST=${DB_HOST:-localhost}
read -r -p "Enter MySQL port (default: 3306): " DB_PORT
DB_PORT=${DB_PORT:-3306}

# Prompt for database names
read -r -p "Enter name of Auth database [default: acore_auth]: " DB_AUTH
DB_AUTH=${DB_AUTH:-acore_auth}
read -r -p "Enter name of Characters database [default: acore_characters]: " DB_CHARACTERS
DB_CHARACTERS=${DB_CHARACTERS:-acore_characters}
read -r -p "Enter name of World database [default: acore_world]: " DB_WORLD
DB_WORLD=${DB_WORLD:-acore_world}

# Mapping for folder names
declare -A DB_MAP=(
    ["$DB_AUTH"]="db_auth"
    ["$DB_CHARACTERS"]="db_characters"
    ["$DB_WORLD"]="db_world"
)

# Dump each database
for DB_NAME in "${!DB_MAP[@]}"; do
    FOLDER_NAME="${DB_MAP[$DB_NAME]}"
    echo "📦 Dumping database '$DB_NAME' into folder '$FOLDER_NAME'"
    echo "$BASE_OUTPUT_DIR/$FOLDER_NAME"
    mkdir -p "$BASE_OUTPUT_DIR/$FOLDER_NAME"

    # '|| true' keeps a connection failure from aborting the whole run under
    # 'set -e'; the empty-result check below reports and skips it instead.
    TABLES=$(mysql -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" -N -e "SHOW TABLES FROM \`$DB_NAME\`;" || true)

    if [[ -z "$TABLES" ]]; then
        echo "⚠️ No tables found or failed to connect to '$DB_NAME'. Skipping."
        continue
    fi

    while IFS= read -r raw_table; do
        # Strip CR / quote characters and surrounding whitespace.
        TABLE=$(echo "$raw_table" | tr -d '\r"' | xargs)
        if [[ -n "$TABLE" ]]; then
            echo " ➤ Dumping table: $TABLE"
            # --skip-tz-utc needed to keep TIMESTAMP values as-is
            mysqldump -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" --skip-tz-utc --extended-insert "$DB_NAME" "$TABLE" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"

            # cleanup files: put VALUES and every row tuple on its own line
            sed -E '
s/VALUES[[:space:]]*/VALUES\n/;
:a
s/\),\(/\),\n\(/g;
ta
' "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql"
            mv "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql" "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"
        fi
    done <<< "$TABLES"
done

echo "✅ Done dumping all specified databases."
|
||||
16
apps/DatabaseSquash/DatabaseExporter/databaseexporter.md
Normal file
16
apps/DatabaseSquash/DatabaseExporter/databaseexporter.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# The AzerothCore Database Exporter for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Requirements
|
||||
|
||||
1. MySQL
|
||||
2. mysqldump
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run DatabaseExporter.sh from the current directory.
|
||||
2. Fill in required data within the CLI.
|
||||
3. The tool will auto-populate the base file directories.
|
||||
4. Done.
|
||||
52
apps/DatabaseSquash/DatabaseSquash.sh
Normal file
52
apps/DatabaseSquash/DatabaseSquash.sh
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# DatabaseSquash.sh — guided wrapper that runs the VersionUpdater and the
# DatabaseExporter tools to produce a database squash. Maintainers only.

echo "❗CAUTION"
echo "This tool is only supposed to be used by AzerothCore Maintainers."
echo "The tool is used to prepare for, and generate a database squash."
echo
echo "Before you continue make sure you have read"
echo "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
echo
read -r -p "Are you sure you want to continue (Y/N)?" choice
case "$choice" in
    y|Y ) echo "Starting...";;
    * ) echo "Aborted"; exit 0 ;;
esac

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# On MSYS/Git-Bash the root comes back as /c/..., convert it to C:/...
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

# FIX: the scripts on disk are VersionUpdater.sh / DatabaseExporter.sh;
# the previous all-lowercase names only worked on case-insensitive
# filesystems (Windows/macOS) and failed on Linux.
VERSION_UPDATER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh"

"$VERSION_UPDATER_PATH"

echo "✅ VersionUpdater Completed..."
echo
echo "❗IMPORTANT!"
echo "1. Before you continue you need to drop all your databases."
echo "2. Run WorldServer to populate the database."
echo
echo "❗DO NOT continue before you have completed the steps above!"
echo
echo "The next step will export your database and overwrite the base files."
echo
read -r -p "Are you sure you want to export your database (Y/N)?" choice
case "$choice" in
    y|Y ) echo "Starting...";;
    * ) echo "Aborted"; exit 0 ;;
esac

DATABASE_EXPORTER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh"

"$DATABASE_EXPORTER_PATH"

echo "✅ DatabaseExporter Completed..."
echo "✅ DatabaseSquash Completed... "
echo
read -r -p "Press Enter to exit..."
|
||||
84
apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh
Normal file
84
apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/bin/bash
set -euo pipefail

# VersionUpdater.sh — bump the major version in acore.json and create a
# db_world update SQL file that records the new DB version / cache id.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# On MSYS/Git-Bash the root comes back as /c/..., convert it to C:/...
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

ACORE_JSON_PATH="$PROJECT_ROOT/acore.json"
DB_WORLD_UPDATE_DIR="$PROJECT_ROOT/data/sql/updates/db_world"

# Extract the current version string from acore.json; fail with a clear
# message instead of a silent 'set -e' abort when the key is missing.
VERSION_LINE=$(grep '"version"' "$ACORE_JSON_PATH") || {
    echo "Error: no \"version\" entry found in $ACORE_JSON_PATH"
    exit 1
}
VERSION=$(echo "$VERSION_LINE" | sed -E 's/.*"version": *"([^"]+)".*/\1/')

# Parse version into parts and bump the major number to X.0.0<suffix>
if [[ "$VERSION" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)(.*)$ ]]; then
    MAJOR="${BASH_REMATCH[1]}"
    SUFFIX="${BASH_REMATCH[4]}"
    NEW_VERSION="$((MAJOR + 1)).0.0$SUFFIX"

    # Replace version in file (-i.bak keeps sed portable between GNU/BSD)
    sed -i.bak -E "s/(\"version\": *\")[^\"]+(\" *)/\1$NEW_VERSION\2/" "$ACORE_JSON_PATH"
    rm -f "$ACORE_JSON_PATH.bak"

    echo "✅ Version updated to $NEW_VERSION"
else
    echo "Error: Could not parse version string: $VERSION"
    exit 1
fi

# Extract the new major version from NEW_VERSION
if [[ "$NEW_VERSION" =~ ^([0-9]+)\. ]]; then
    NEW_MAJOR="${BASH_REMATCH[1]}"
else
    echo "Error: Unable to extract major version from $NEW_VERSION"
    exit 1
fi

# Prepare SQL content
DB_VERSION_CONTENT="'ACDB 335.${NEW_MAJOR}-dev'"
SQL_QUERY="UPDATE \`version\` SET \`db_version\`=${DB_VERSION_CONTENT}, \`cache_id\`=${NEW_MAJOR} LIMIT 1;"

# Format date as yyyy_mm_dd
TODAY=$(date +%Y_%m_%d)

# Ensure directory exists
mkdir -p "$DB_WORLD_UPDATE_DIR"

# List today's update files. mapfile avoids the word-splitting/globbing
# bugs of the previous existing_files=($(find ...)) form.
mapfile -t existing_files < <(find "$DB_WORLD_UPDATE_DIR" -maxdepth 1 -type f -name "${TODAY}_*.sql" 2>/dev/null)

# Determine the next _xx counter for today's file name
COUNTER="00"
if [ ${#existing_files[@]} -gt 0 ]; then
    max=0
    for file in "${existing_files[@]}"; do
        basename=$(basename "$file")
        if [[ "$basename" =~ ^${TODAY}_([0-9]{2})\.sql$ ]]; then
            num=${BASH_REMATCH[1]}
            # 10# forces base-10 so '08'/'09' are not parsed as octal
            if [[ "$num" =~ ^[0-9]+$ ]] && (( 10#$num > max )); then
                max=$((10#$num))
            fi
        fi
    done
    COUNTER=$(printf "%02d" $((max + 1)))
fi

# Compose final file path
SQL_FILENAME="${TODAY}_${COUNTER}.sql"
SQL_FILE_PATH="$DB_WORLD_UPDATE_DIR/$SQL_FILENAME"

# Write to file
{
    echo "-- Auto-generated by VersionUpdater.sh on $(date)"
    echo "$SQL_QUERY"
} > "$SQL_FILE_PATH"

echo "✅ SQL file created at $SQL_FILE_PATH"
|
||||
10
apps/DatabaseSquash/VersionUpdater/versionupdater.md
Normal file
10
apps/DatabaseSquash/VersionUpdater/versionupdater.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# The AzerothCore Version Updater for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run VersionUpdater.sh from the current directory.
|
||||
2. The tool will update acore.json and create a new update sql file.
|
||||
3. Done.
|
||||
11
apps/DatabaseSquash/databasesquash.md
Normal file
11
apps/DatabaseSquash/databasesquash.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# The AzerothCore DatabaseSquash tool for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run DatabaseSquash.sh from the current directory.
|
||||
2. The tool will run VersionUpdater.sh and DatabaseExporter.sh.
|
||||
3. Follow the instructions in the CLI.
|
||||
4. Done.
|
||||
163
apps/EnumUtils/enumutils_describe.py
Normal file
163
apps/EnumUtils/enumutils_describe.py
Normal file
@@ -0,0 +1,163 @@
|
||||
from re import compile, MULTILINE
|
||||
from os import walk, getcwd
|
||||
|
||||
notice = ('''/*
|
||||
* This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 2 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along
|
||||
* with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
''')
|
||||
|
||||
if not getcwd().endswith('src'):
|
||||
print('Run this from the src directory!')
|
||||
print('(Invoke as \'python ../apps/EnumUtils/enumutils_describe.py\')')
|
||||
exit(1)
|
||||
|
||||
EnumPattern = compile(r'//\s*EnumUtils: DESCRIBE THIS(?:\s*\(in ([^\)]+)\))?\s+enum\s+([0-9A-Za-z]+)[^\n]*\s*{([^}]+)};')
|
||||
EnumValuesPattern = compile(r'\s+\S.+?(,|$)[^\n]*')
|
||||
EnumValueNamePattern = compile(r'^\s*([a-zA-Z0-9_]+)', flags=MULTILINE)
|
||||
EnumValueSkipLinePattern = compile(r'^\s*//')
|
||||
EnumValueCommentPattern = compile(r'//,?[ \t]*([^\n]+)$')
|
||||
CommentMatchFormat = compile(r'^(((TITLE +(.+?))|(DESCRIPTION +(.+?))) *){1,2}$')
|
||||
CommentSkipFormat = compile(r'^SKIP *$')
|
||||
|
||||
def strescape(str):
    """Return *str* as a double-quoted C string literal.

    Backslash, double quote and any character outside printable ASCII
    (32..126) are emitted as three-digit octal escapes; every other
    character is copied verbatim.
    """
    escaped = ''.join(
        ('\\%03o' % ord(ch)) if ch in ('\\', '"') or not (32 <= ord(ch) < 127) else ch
        for ch in str
    )
    return '"' + escaped + '"'
|
||||
|
||||
def processFile(path, filename):
    """Generate <path>/enuminfo_<filename>.cpp from enums in
    <path>/<filename>.h tagged with '// EnumUtils: DESCRIBE THIS'.

    For every tagged enum, emits EnumUtils<T> template specializations:
    ToString, Count, FromIndex and ToIndex.
    """
    # NOTE: open() raises on failure instead of returning None, so the
    # previous 'is None' checks were dead code. 'with' also guarantees
    # the handles are closed (they were leaked before).
    with open('%s/%s.h' % (path, filename), 'r') as input:
        file = input.read()

    enums = []
    for enum in EnumPattern.finditer(file):
        prefix = enum.group(1) or ''
        name = enum.group(2)
        values = []
        for value in EnumValuesPattern.finditer(enum.group(3)):
            valueData = value.group(0)

            valueNameMatch = EnumValueNamePattern.search(valueData)
            if valueNameMatch is None:
                # Pure comment lines are expected; anything else is noise
                # worth reporting.
                if EnumValueSkipLinePattern.search(valueData) is None:
                    print('Name of value not found: %s' % repr(valueData))
                continue
            valueName = valueNameMatch.group(1)

            valueCommentMatch = EnumValueCommentPattern.search(valueData)
            valueComment = None
            if valueCommentMatch:
                valueComment = valueCommentMatch.group(1)

            valueTitle = None
            valueDescription = None

            if valueComment is not None:
                # A 'SKIP' comment excludes the value entirely.
                if CommentSkipFormat.match(valueComment) is not None:
                    continue
                commentMatch = CommentMatchFormat.match(valueComment)
                if commentMatch is not None:
                    valueTitle = commentMatch.group(4)
                    valueDescription = commentMatch.group(6)
                else:
                    valueDescription = valueComment

            # Fallbacks: title defaults to the identifier itself.
            if valueTitle is None:
                valueTitle = valueName
            if valueDescription is None:
                valueDescription = ''

            values.append((valueName, valueTitle, valueDescription))

        enums.append((prefix + name, prefix, values))
        print('%s.h: Enum %s parsed with %d values' % (filename, name, len(values)))

    if not enums:
        return

    print('Done parsing %s.h (in %s)\n' % (filename, path))
    with open('%s/enuminfo_%s.cpp' % (path, filename), 'w') as output:
        # write output file
        output.write(notice)
        output.write('#include "%s.h"\n' % filename)
        output.write('#include "Define.h"\n')
        output.write('#include "SmartEnum.h"\n')
        output.write('#include <stdexcept>\n')
        output.write('\n')
        output.write('namespace Acore::Impl::EnumUtilsImpl\n')
        output.write('{\n')
        for name, prefix, values in enums:
            tag = ('data for enum \'%s\' in \'%s.h\' auto-generated' % (name, filename))
            output.write('\n')
            output.write('/*' + ('*'*(len(tag)+2)) + '*\\\n')
            output.write('|* ' + tag + ' *|\n')
            output.write('\\*' + ('*'*(len(tag)+2)) + '*/\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT EnumText EnumUtils<%s>::ToString(%s value)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (value)\n')
            output.write(' {\n')
            for label, title, description in values:
                output.write(' case %s: return { %s, %s, %s };\n' % (prefix + label, strescape(label), strescape(title), strescape(description)))
            output.write(' default: throw std::out_of_range("value");\n')
            output.write(' }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::Count() { return %d; }\n' % (name, len(values)))
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT %s EnumUtils<%s>::FromIndex(size_t index)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (index)\n')
            output.write(' {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write(' case %d: return %s;\n' % (i, prefix + label))
            output.write(' default: throw std::out_of_range("index");\n')
            output.write(' }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::ToIndex(%s value)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (value)\n')
            output.write(' {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write(' case %s: return %d;\n' % (prefix + label, i))
            output.write(' default: throw std::out_of_range("value");\n')
            output.write(' }\n')
            output.write('}\n')

        output.write('}\n')
|
||||
|
||||
# Walk the tree and process every C++ header (*.h) found.
FilenamePattern = compile(r'^(.+)\.h$')
for root, dirs, files in walk('.'):
    for candidate in files:
        headerMatch = FilenamePattern.match(candidate)
        if headerMatch is not None:
            processFile(root, headerMatch.group(1))
|
||||
238
apps/Fmt/FormatReplace.py
Normal file
238
apps/Fmt/FormatReplace.py
Normal file
@@ -0,0 +1,238 @@
|
||||
import pathlib
|
||||
from os import getcwd
|
||||
|
||||
if not getcwd().endswith('src') and not getcwd().endswith('modules'):
|
||||
print('Run this from the src or modules directory!')
|
||||
print('(Invoke as \'python ../apps/Fmt/FormatReplace.py\')')
|
||||
exit(1)
|
||||
|
||||
# Predicates over a single source line. Each returns the truthiness of a
# substring test; the verbose if/return-True/else/return-False chains were
# replaced with the idiomatic direct boolean expression.

def isASSERT(line):
    """True if the line contains an ASSERT macro."""
    return 'ASSERT' in line

def isABORTMSG(line):
    """True if the line contains an ABORT_MSG macro."""
    return 'ABORT_MSG' in line

def islog(line):
    """True if the line contains a LOG_* macro."""
    return 'LOG_' in line

# def isSendSysMessage(line):
#     return 'SendSysMessage' in line

# def isPSendSysMessage(line):
#     return 'PSendSysMessage' in line

def isPQuery(line):
    """True if the line contains a PQuery call."""
    return 'PQuery' in line

def isPExecute(line):
    """True if the line contains a PExecute call."""
    return 'PExecute' in line

def isPAppend(line):
    """True if the line contains a PAppend call."""
    return 'PAppend' in line

def isStringFormat(line):
    """True if the line contains a StringFormat call."""
    return 'StringFormat' in line

def haveDelimeter(line):
    """True if the line contains a ';' statement terminator."""
    return ';' in line
|
||||
|
||||
def checkSoloLine(line):
    """Convert a single self-contained statement line to fmt-style
    placeholders. Returns (new_line, False): a solo line never leaves a
    multi-line statement open.

    The predicate order matches the original dispatch chain exactly.
    """
    dispatch = (
        (isABORTMSG, ('ABORT_MSG', 'ABORT')),
        (isASSERT, None),
        (islog, None),
        (isPExecute, ('PExecute', 'Execute')),
        (isPQuery, ('PQuery', 'Query')),
        (isPAppend, ('PAppend', 'Append')),
        # (isSendSysMessage, None),
        # (isPSendSysMessage, None),
        (isStringFormat, None),
    )
    for predicate, rename in dispatch:
        if predicate(line):
            if rename is not None:
                line = line.replace(*rename)
            return handleCleanup(line), False
    return line, False
|
||||
|
||||
def startMultiLine(line):
    """Handle the first line of a statement that continues onto later
    lines. Returns (new_line, in_multiline): True when the line matched a
    known call and conversion must continue on following lines.

    Note: the PQuery/PExecute order here differs from checkSoloLine and is
    preserved from the original.
    """
    dispatch = (
        (isABORTMSG, ('ABORT_MSG', 'ABORT')),
        (isASSERT, None),
        (islog, None),
        # (isSendSysMessage, None),
        # (isPSendSysMessage, None),
        (isPQuery, ('PQuery', 'Query')),
        (isPExecute, ('PExecute', 'Execute')),
        (isPAppend, ('PAppend', 'Append')),
        (isStringFormat, None),
    )
    for predicate, rename in dispatch:
        if predicate(line):
            if rename is not None:
                line = line.replace(*rename)
            return handleCleanup(line), True
    return line, False
|
||||
|
||||
def continueMultiLine(line, existPrevLine):
    """Convert a continuation line of an open multi-line statement.

    The statement stays open until a ';' terminator is seen.
    Returns (converted_line, still_open).
    """
    still_open = existPrevLine and not haveDelimeter(line)
    return handleCleanup(line), still_open
|
||||
|
||||
def checkTextLine(line, existPrevLine):
    """Dispatch one source line to the right converter.

    - continuation of an open multi-line statement -> continueMultiLine
    - complete statement (contains ';')            -> checkSoloLine
    - otherwise                                    -> startMultiLine
    """
    if existPrevLine:
        return continueMultiLine(line, existPrevLine)
    if haveDelimeter(line):
        return checkSoloLine(line)
    return startMultiLine(line)
|
||||
|
||||
def handleCleanup(line):
    """Rewrite printf-style placeholders in *line* to fmt '{}' style and
    drop .c_str() calls.

    The replacement pairs are applied strictly in order — the sequence is
    preserved verbatim from the original chain, since earlier replacements
    can affect later matches.
    """
    replacements = (
        ('%s', '{}'), ('%u', '{}'), ('%hu', '{}'), ('%lu', '{}'),
        ('%llu', '{}'), ('%zu', '{}'),
        ('%02u', '{:02}'), ('%03u', '{:03}'), ('%04u', '{:04}'), ('%05u', '{:05}'),
        ('%02i', '{:02}'), ('%03i', '{:03}'), ('%04i', '{:04}'), ('%05i', '{:05}'),
        ('%02d', '{:02}'), ('%03d', '{:03}'), ('%04d', '{:04}'), ('%05d', '{:05}'),
        ('%d', '{}'), ('%i', '{}'),
        ('%x', '{:x}'), ('%X', '{:X}'), ('%lx', '{:x}'), ('%lX', '{:X}'),
        ('%02X', '{:02X}'), ('%08X', '{:08X}'),
        ('%f', '{}'),
        ('%.1f', '{0:.1f}'), ('%.2f', '{0:.2f}'), ('%.3f', '{0:.3f}'),
        ('%.4f', '{0:.4f}'), ('%.5f', '{0:.5f}'),
        ('%3.1f', '{:3.1f}'),
        ('%%', '%'),
        ('.c_str()', ''),
        ('" SZFMTD "', '{}'),
        ('" UI64FMTD "', '{}'),
        # ('" STRING_VIEW_FMT "', '{}'),
        # ('STRING_VIEW_FMT_ARG', ''),
    )
    for old, new in replacements:
        line = line.replace(old, new)
    return line
|
||||
|
||||
def getDefaultfile(name):
    """Return the unmodified contents of *name*.

    FIX: the original called ``file1.close`` without parentheses (a no-op
    attribute access), leaking the handle; a context manager now closes
    it. Mode/encoding/error settings are unchanged.
    """
    with open(name, "r+", encoding="utf8", errors='replace') as file1:
        return file1.read()
|
||||
|
||||
def getModifiedfile(name):
    """Return the contents of *name* with each line passed through
    checkTextLine, tracking multi-line statement state across lines.

    FIX: the original called ``file1.close`` without parentheses (a no-op),
    leaking the handle; a context manager now closes it.
    """
    prevLines = False
    result = ''
    with open(name, "r+", encoding="utf8", errors='replace') as file1:
        for line in file1:
            line, prevLines = checkTextLine(line, prevLines)
            result += line
    return result
|
||||
|
||||
def updModifiedfile(name, text):
    """Overwrite *name* with *text*, using the same encoding/error
    settings the readers use."""
    with open(name, "w", encoding="utf8", errors='replace') as handle:
        handle.write(text)
|
||||
|
||||
def handlefile(name):
    """Rewrite *name* in place, but only when the conversion actually
    changed something (avoids touching mtimes of unchanged files)."""
    original = getDefaultfile(name)
    converted = getModifiedfile(name)
    if original != converted:
        updModifiedfile(name, converted)
|
||||
|
||||
# Process every C++ source/header under the current directory.
# FIX: the original matched directories too (open() would raise
# IsADirectoryError) and processed names containing both '.cpp' and '.h'
# (e.g. 'x.cpp.h') twice.
p = pathlib.Path('.')
for i in p.glob('**/*'):
    fname = i.absolute()
    if i.is_file() and ('.cpp' in i.name or '.h' in i.name):
        handlefile(fname)
|
||||
29
apps/bash_shared/common.sh
Normal file
29
apps/bash_shared/common.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
# Thin aliases around the acore event system.
function registerHooks() { acore_event_registerHooks "$@"; }
function runHooks() { acore_event_runHooks "$@"; }

# Load the dist configuration first, then layer the user configuration on
# top of it so user values overwrite the shipped defaults.
function acore_common_loadConfig() {
    #shellcheck source=../../conf/dist/config.sh
    source "$AC_PATH_CONF/dist/config.sh" # include dist to avoid missing conf variables

    # first check if it's defined in env, otherwise use the default
    USER_CONF_PATH=${USER_CONF_PATH:-"$AC_PATH_CONF/config.sh"}

    if [ -f "$USER_CONF_PATH" ]; then
        source "$USER_CONF_PATH" # should overwrite previous
    else
        echo "NOTICE: file <$USER_CONF_PATH> not found, we use default configuration only."
    fi
}

#
# Load modules
#
for entry in "$AC_PATH_MODULES/"*/include.sh; do
    if [ -e "$entry" ]; then
        source "$entry"
    fi
done

ACORE_VERSION=$("$AC_PATH_DEPS/jsonpath/JSONPath.sh" -f "$AC_PATH_ROOT/acore.json" -b '$.version')
|
||||
30
apps/bash_shared/defines.sh
Normal file
30
apps/bash_shared/defines.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
# Resolve AC_PATH_ROOT and export the shared AzerothCore path variables.
unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
    # macOS: ensure Homebrew, bash >= 4 and GNU coreutils (greadlink).
    if ! command -v brew &>/dev/null ; then
        ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    fi
    if ! [ "${BASH_VERSINFO}" -ge 4 ]; then
        brew install bash
    fi
    if ! command -v greadlink &>/dev/null ; then
        brew install coreutils
    fi
    AC_PATH_ROOT=$(greadlink -f "$AC_PATH_APPS/../")
else
    AC_PATH_ROOT=$(readlink -f "$AC_PATH_APPS/../")
fi

# Make the root path absolute when it is still relative.
case $AC_PATH_ROOT in
    /*) AC_PATH_ROOT=$AC_PATH_ROOT;;
    *) AC_PATH_ROOT=$PWD/$AC_PATH_ROOT;;
esac

export AC_PATH_CONF="$AC_PATH_ROOT/conf"
export AC_PATH_MODULES="$AC_PATH_ROOT/modules"
export AC_PATH_DEPS="$AC_PATH_ROOT/deps"
export AC_BASH_LIB_PATH="$AC_PATH_DEPS/acore/bash-lib/src"
export AC_PATH_VAR="$AC_PATH_ROOT/var"
|
||||
25
apps/bash_shared/includes.sh
Normal file
25
apps/bash_shared/includes.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
[[ ${GUARDYVAR:-} -eq 1 ]] && return || readonly GUARDYVAR=1 # include it once

# force default language for applications
LC_ALL=C

AC_PATH_APPS="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"
AC_PATH_SHARED="$AC_PATH_APPS/bash_shared"

# shellcheck source=./defines.sh
source "$AC_PATH_SHARED/defines.sh"

# shellcheck source=../../deps/acore/bash-lib/src/event/hooks.sh
source "$AC_PATH_DEPS/acore/bash-lib/src/event/hooks.sh"

# shellcheck source=./common.sh
source "$AC_PATH_SHARED/common.sh"

acore_common_loadConfig

# On MSYS the binaries live directly in BINPATH; elsewhere under bin/.
if [[ "$OSTYPE" = "msys" ]]; then
    AC_BINPATH_FULL="$BINPATH"
else
    export AC_BINPATH_FULL="$BINPATH/bin"
fi
|
||||
267
apps/bash_shared/menu_system.sh
Normal file
267
apps/bash_shared/menu_system.sh
Normal file
@@ -0,0 +1,267 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# =============================================================================
|
||||
# AzerothCore Menu System Library
|
||||
# =============================================================================
|
||||
# This library provides a unified menu system for AzerothCore scripts.
|
||||
# It supports ordered menu definitions, short commands, numeric selection,
|
||||
# and proper argument handling.
|
||||
#
|
||||
# Features:
|
||||
# - Single source of truth for menu definitions
|
||||
# - Automatic ID assignment (1, 2, 3...)
|
||||
# - Short command aliases (c, i, q, etc.)
|
||||
# - Interactive mode: numbers + long/short commands
|
||||
# - Direct mode: only long/short commands (no numbers)
|
||||
# - Proper argument forwarding
|
||||
#
|
||||
# Usage:
|
||||
# source "path/to/menu_system.sh"
|
||||
# menu_items=("command|short|description" ...)
|
||||
# menu_run "Menu Title" callback_function "${menu_items[@]}" "$@"
|
||||
# =============================================================================
|
||||
|
||||
# Global arrays for menu state (will be populated by menu_define)
|
||||
declare -a _MENU_KEYS=()
|
||||
declare -a _MENU_SHORTS=()
|
||||
declare -a _MENU_OPTIONS=()
|
||||
|
||||
# Parse "key|short|description" items into the global menu arrays.
# Usage: menu_define array_elements...
function menu_define() {
    # Reset any previous menu state.
    _MENU_KEYS=()
    _MENU_SHORTS=()
    _MENU_OPTIONS=()

    local entry cmd alias description
    for entry in "$@"; do
        IFS='|' read -r cmd alias description <<< "$entry"
        _MENU_KEYS+=("$cmd")
        _MENU_SHORTS+=("$alias")
        _MENU_OPTIONS+=("$cmd ($alias): $description")
    done
}
|
||||
|
||||
# Print the menu title followed by the numbered option list.
# Usage: menu_display "Menu Title"
function menu_display() {
    local title="$1"
    local pos

    echo "==== $title ===="
    for pos in "${!_MENU_OPTIONS[@]}"; do
        printf "%2d) %s\n" "$((pos + 1))" "${_MENU_OPTIONS[$pos]}"
    done
    echo ""
}
|
||||
|
||||
# Resolve user input (1-based number, long command, or short alias) to a
# 0-based menu index printed on stdout; prints -1 and returns 1 on a miss.
# Long names are checked before short aliases, as in the original.
# Usage: index=$(menu_find_index "user_input")
function menu_find_index() {
    local needle="$1"
    local pos

    # 1-based numeric selection.
    if [[ "$needle" =~ ^[0-9]+$ ]]; then
        pos=$((needle - 1))
        if [[ $pos -ge 0 && $pos -lt ${#_MENU_KEYS[@]} ]]; then
            echo "$pos"
            return 0
        fi
    fi

    # Long command names first...
    for pos in "${!_MENU_KEYS[@]}"; do
        if [[ "$needle" == "${_MENU_KEYS[$pos]}" ]]; then
            echo "$pos"
            return 0
        fi
    done

    # ...then short aliases.
    for pos in "${!_MENU_SHORTS[@]}"; do
        if [[ "$needle" == "${_MENU_SHORTS[$pos]}" ]]; then
            echo "$pos"
            return 0
        fi
    done

    echo "-1"
    return 1
}
|
||||
|
||||
# Execute a menu entry chosen via command-line arguments.
# Numeric selection is rejected here so a number is never confused with a
# command argument.
# Usage: menu_direct_execute callback_function "$@"
function menu_direct_execute() {
    local callback="$1"; shift
    local user_input="$1"; shift

    # Disable numeric selection in direct mode.
    if [[ "$user_input" =~ ^[0-9]+$ ]]; then
        echo "Invalid option. Numeric selection is not allowed when passing arguments."
        echo "Use command name or short alias instead."
        return 1
    fi

    # menu_find_index returns non-zero on a miss; fall back to -1.
    local idx
    idx=$(menu_find_index "$user_input") || idx=-1

    if [[ $idx -ge 0 ]]; then
        "$callback" "${_MENU_KEYS[$idx]}" "$@"
        return $?
    fi

    # Built-in help handling.
    if [[ "$user_input" == "--help" || "$user_input" == "help" || "$user_input" == "-h" ]]; then
        echo "Available commands:"
        printf '%s\n' "${_MENU_OPTIONS[@]}"
        return 0
    fi

    echo "Invalid option. Use --help to see available commands." >&2
    return 1
}
|
||||
|
||||
# Interactive menu loop: display, read a choice, dispatch to the callback.
# Usage: menu_interactive callback_function "Menu Title"
function menu_interactive() {
    local callback="$1"
    local title="$2"

    while true; do
        menu_display "$title"
        read -r -p "Please enter your choice: " REPLY

        # Split the reply into the command word plus trailing arguments.
        local tokens=()
        read -r -a tokens <<< "$REPLY"
        local cmd="${tokens[0]}"
        local cmd_args=("${tokens[@]:1}")

        local idx
        idx=$(menu_find_index "$cmd")
        if [[ $idx -ge 0 ]]; then
            # Forward the command key plus any extra arguments.
            "$callback" "${_MENU_KEYS[$idx]}" "${cmd_args[@]}"
            local exit_code=$?
            # A successful 'quit' ends the loop.
            if [[ $exit_code -eq 0 && "${_MENU_KEYS[$idx]}" == "quit" ]]; then
                break
            fi
            continue
        fi

        # Built-in help handling.
        if [[ "$REPLY" == "--help" || "$REPLY" == "help" || "$REPLY" == "h" ]]; then
            echo "Available commands:"
            printf '%s\n' "${_MENU_OPTIONS[@]}"
            echo ""
            continue
        fi

        echo "Invalid option. Please try again or use 'help' for available commands." >&2
        echo ""
    done
}
|
||||
|
||||
# Main menu runner.
# Usage: menu_run "Menu Title" callback_function "$@"
# The menu items must already be registered (globally) before calling this.
# With extra arguments, dispatches directly via menu_direct_execute;
# otherwise starts the interactive prompt loop.
function menu_run() {
    local menu_title="$1"
    local handler="$2"
    shift 2

    # No arguments left -> interactive mode
    if [[ $# -eq 0 ]]; then
        menu_interactive "$handler" "$menu_title"
        return $?
    fi

    # Arguments supplied -> direct (non-interactive) dispatch
    menu_direct_execute "$handler" "$@"
}
|
||||
|
||||
# Alternative menu runner that accepts menu items directly.
# Usage: menu_run_with_items "Menu Title" callback -- "${menu_items[@]}" -- "$@"
# Menu items sit between the first and second "--"; anything after the
# second "--" is forwarded to the callback as script arguments.
# Returns: 1 on a malformed call, otherwise the dispatched command's status.
function menu_run_with_items() {
    local title="$1"
    local callback="$2"
    shift 2

    local menu_items=()
    local script_args=()

    # Require the first "--" separator. The $# guard avoids dereferencing
    # an unbound $1 (fatal under 'set -u') when nothing else was passed.
    if [[ $# -eq 0 || "$1" != "--" ]]; then
        echo "Error: menu_run_with_items requires -- separator before menu items" >&2
        return 1
    fi
    shift

    # Collect menu items until the second "--"
    while [[ $# -gt 0 && "$1" != "--" ]]; do
        menu_items+=("$1")
        shift
    done

    # Skip the second "--" if present ($# guard for the same 'set -u' reason)
    if [[ $# -gt 0 && "$1" == "--" ]]; then
        shift
    fi

    # Remaining args are script arguments
    script_args=("$@")

    # Register the provided menu entries
    menu_define "${menu_items[@]}"

    # Handle direct execution if script arguments were provided
    if [[ ${#script_args[@]} -gt 0 ]]; then
        menu_direct_execute "$callback" "${script_args[@]}"
        return $?
    fi

    # Run interactive menu
    menu_interactive "$callback" "$title"
}
|
||||
|
||||
# Print the list of available commands to stdout (used for --help).
# Reads the global _MENU_OPTIONS array registered by the menu setup.
# Usage: menu_show_help
function menu_show_help() {
    local option
    echo "Available commands:"
    for option in "${_MENU_OPTIONS[@]}"; do
        printf '%s\n' "$option"
    done
}
|
||||
|
||||
# Print the command key stored at the given index, or nothing when the
# index is out of range. Reads the global _MENU_KEYS array.
# Usage: key=$(menu_get_key index)
function menu_get_key() {
    local requested=$1
    local total=${#_MENU_KEYS[@]}
    # Out-of-range indices produce no output and still return 0,
    # matching the original contract.
    (( requested >= 0 && requested < total )) || return 0
    printf '%s\n' "${_MENU_KEYS[$requested]}"
}
|
||||
|
||||
# Utility function to get all command keys.
# Prints every entry of the global _MENU_KEYS array, one per line, to stdout.
# Usage: keys=($(menu_get_all_keys))
function menu_get_all_keys() {
    printf '%s\n' "${_MENU_KEYS[@]}"
}
|
||||
8
apps/ci/ci-compile.sh
Executable file
8
apps/ci/ci-compile.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
# CI helper: compile the whole core through the project's acore.sh
# dispatcher, with ccache enabled to speed up repeated CI runs.

set -e

echo "compile core"
# AC_CCACHE is read by the acore.sh compiler tooling
export AC_CCACHE=true
./acore.sh "compiler" "all"
|
||||
|
||||
69
apps/ci/ci-conf-core-pch.sh
Normal file
69
apps/ci/ci-conf-core-pch.sh
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
# CI helper: write the build configuration for a full core build WITH
# precompiled headers (CSCRIPTPCH/CCOREPCH=ON), then install and record
# the compiler requested via the $COMPILER environment variable.

set -e

# Append the CI build settings to the acore config. The heredoc delimiter
# is unquoted, so $(grep ...) expands now: use all cores plus two threads.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CTOOLS_BUILD=none
CSCRIPTS=static
CMODULES=static
CBUILD_TESTING=ON
CSCRIPTPCH=ON
CCOREPCH=ON
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install the requested compiler and append its name to the config.
case $COMPILER in

    # this is in order to use the "default" gcc version of the OS, without forcing a specific version
    "gcc" )
        time sudo apt-get install -y gcc g++
        echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
        ;;

    "gcc8" )
        time sudo apt-get install -y gcc-8 g++-8
        echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
        ;;

    "gcc10" )
        time sudo apt-get install -y gcc-10 g++-10
        echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
        ;;

    # this is in order to use the "default" clang version of the OS, without forcing a specific version
    "clang" )
        time sudo apt-get install -y clang
        echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
        ;;

    "clang10" )
        time sudo apt-get install -y clang-10
        echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
        ;;

    "clang11" )
        time sudo apt-get install -y clang-11
        echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
        ;;

    "clang12" )
        time sudo apt-get install -y clang-12
        echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
        ;;

    * )
        echo "Unknown compiler $COMPILER"
        exit 1
        ;;
esac
|
||||
69
apps/ci/ci-conf-core.sh
Normal file
69
apps/ci/ci-conf-core.sh
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
# CI helper: write the build configuration for a full core build WITHOUT
# precompiled headers (CSCRIPTPCH/CCOREPCH=OFF), then install and record
# the compiler requested via the $COMPILER environment variable.

set -e

# Append the CI build settings to the acore config. The heredoc delimiter
# is unquoted, so $(grep ...) expands now: use all cores plus two threads.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CTOOLS_BUILD=none
CSCRIPTS=static
CMODULES=static
CBUILD_TESTING=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install the requested compiler and append its name to the config.
case $COMPILER in

    # this is in order to use the "default" gcc version of the OS, without forcing a specific version
    "gcc" )
        time sudo apt-get install -y gcc g++
        echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
        ;;

    "gcc8" )
        time sudo apt-get install -y gcc-8 g++-8
        echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
        ;;

    "gcc10" )
        time sudo apt-get install -y gcc-10 g++-10
        echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
        ;;

    # this is in order to use the "default" clang version of the OS, without forcing a specific version
    "clang" )
        time sudo apt-get install -y clang
        echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
        ;;

    "clang10" )
        time sudo apt-get install -y clang-10
        echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
        ;;

    "clang11" )
        time sudo apt-get install -y clang-11
        echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
        ;;

    "clang12" )
        time sudo apt-get install -y clang-12
        echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
        ;;

    * )
        echo "Unknown compiler $COMPILER"
        exit 1
        ;;
esac
|
||||
36
apps/ci/ci-conf-db.sh
Normal file
36
apps/ci/ci-conf-db.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
# CI helper: configure a tools-only (db-only) build, then install and
# record the clang compiler requested via the $COMPILER environment
# variable.

set -e

# Append the CI build settings; the unquoted heredoc delimiter lets the
# thread count expand now (all cores plus two).
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=db-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

case "$COMPILER" in
    # the distribution's default clang, no pinned version
    clang)
        time sudo apt-get install -y clang
        {
            echo 'CCOMPILERC="clang"'
            echo 'CCOMPILERCXX="clang++"'
        } >> ./conf/config.sh
        ;;

    clang12)
        time sudo apt-get install -y clang-12
        {
            echo 'CCOMPILERC="clang-12"'
            echo 'CCOMPILERCXX="clang++-12"'
        } >> ./conf/config.sh
        ;;

    *)
        echo "Unknown compiler $COMPILER"
        exit 1
        ;;
esac
|
||||
67
apps/ci/ci-conf-tools.sh
Normal file
67
apps/ci/ci-conf-tools.sh
Normal file
@@ -0,0 +1,67 @@
|
||||
#!/bin/bash
# CI helper: configure a tools-only (maps-only) build, then install and
# record the compiler requested via the $COMPILER environment variable.

set -e

# Append the CI build settings to the acore config. The heredoc delimiter
# is unquoted, so $(grep ...) expands now: use all cores plus two threads.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=maps-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install the requested compiler and append its name to the config.
case $COMPILER in

    # this is in order to use the "default" gcc version of the OS, without forcing a specific version
    "gcc" )
        time sudo apt-get install -y gcc g++
        echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
        ;;

    "gcc8" )
        time sudo apt-get install -y gcc-8 g++-8
        echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
        ;;

    "gcc10" )
        time sudo apt-get install -y gcc-10 g++-10
        echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
        ;;

    # this is in order to use the "default" clang version of the OS, without forcing a specific version
    "clang" )
        time sudo apt-get install -y clang
        echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
        ;;

    "clang10" )
        time sudo apt-get install -y clang-10
        echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
        ;;

    "clang11" )
        time sudo apt-get install -y clang-11
        echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
        ;;

    "clang12" )
        time sudo apt-get install -y clang-12
        echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
        ;;

    * )
        echo "Unknown compiler $COMPILER"
        exit 1
        ;;
esac
|
||||
15
apps/ci/ci-dry-run.sh
Normal file
15
apps/ci/ci-dry-run.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
# CI helper: boot a server binary in dry-run mode against a live MySQL
# instance to verify that it starts cleanly.
# Arguments:
#   $1 - application name (e.g. "worldserver"); forwarded to the config
#        generator, which also exports APP_NAME for us.

set -e

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Start mysql
sudo systemctl start mysql

# Generate the app's .conf; "$1" is quoted so an empty/space-containing
# argument is still passed as a single word
source "$CURRENT_PATH/ci-gen-server-conf-files.sh" "$1" "etc" "bin" "root"

# Fail the run if the server does not finish its dry-run within 5 minutes
(cd ./env/dist/bin/ && timeout 5m "./$APP_NAME" -dry-run)

# Stop mysql
sudo systemctl stop mysql
|
||||
18
apps/ci/ci-error-check.sh
Executable file
18
apps/ci/ci-error-check.sh
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash
# CI helper: fail the build if the server wrote anything to Errors.log
# during startup.

ERRORS_FILE="./env/dist/bin/Errors.log"

echo "Checking Startup Errors"
echo

# -s: true when the file exists and is non-empty
if [[ -s "${ERRORS_FILE}" ]]; then
    printf "The Errors.log file contains startup errors:\n\n"
    cat "${ERRORS_FILE}"
    printf "\nPlease solve the startup errors listed above!\n"
    exit 1
else
    echo "> No startup errors found in Errors.log"
fi

echo
echo "Done"
|
||||
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
# CI helper: create a runnable .conf from the shipped .conf.dist and point
# it at the CI MySQL instance. Sourced (no shebang) so APP_NAME becomes
# visible to the caller.
# Arguments:
#   $1 - application name (worldserver, authserver, ...)
#   $2 - config folder inside env/dist (default: etc)
#   $3 - binary folder inside env/dist (default: bin)
#   $4 - MySQL root password (default: empty)

APP_NAME=$1
CONFIG_FOLDER=${2:-"etc"}
# was ${3-"bin"}: ':-' also substitutes when the argument is passed but
# empty, matching the other defaults in this file
BIN_FOLDER=${3:-"bin"}
MYSQL_ROOT_PASSWORD=${4:-""}

# copy dist files to conf files
cp "./env/dist/$CONFIG_FOLDER/$APP_NAME.conf.dist" "./env/dist/$CONFIG_FOLDER/$APP_NAME.conf"

# replace login info
sed -i "s/127.0.0.1;3306;acore;acore/localhost;3306;root;$MYSQL_ROOT_PASSWORD/" "./env/dist/$CONFIG_FOLDER/$APP_NAME.conf"

if [[ $APP_NAME == "worldserver" ]]; then
    # the worldserver additionally needs extracted client data under ./data
    sed -i 's/DataDir = \".\"/DataDir = \".\/data"/' "./env/dist/$CONFIG_FOLDER/$APP_NAME.conf"
    git clone --depth=1 --branch=master --single-branch https://github.com/ac-data/ac-data.git "./env/dist/$BIN_FOLDER/data"
fi
|
||||
107
apps/ci/ci-install-modules.sh
Executable file
107
apps/ci/ci-install-modules.sh
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/bin/bash
# CI helper: clone every catalogued community module into modules/ so the
# CI build compiles the core together with all of them. Shallow clones
# (--depth=1) of a single branch keep this fast. Modules known to break
# the build are kept in the list but commented out, with the reason above
# each one.

set -e

echo "install modules"
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-1v1-arena modules/mod-1v1-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-account-mounts modules/mod-account-mounts
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ah-bot modules/mod-ah-bot
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-anticheat modules/mod-anticheat
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-antifarming modules/mod-antifarming
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-arena-3v3-solo-queue modules/mod-arena-3v3-solo-queue
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-arena-replay modules/mod-arena-replay
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-auto-revive modules/mod-auto-revive
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-autobalance modules/mod-autobalance
# NOTE: disabled because it causes DB error
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-azerothshard.git modules/mod-azerothshard
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-better-item-reloading modules/mod-better-item-reloading
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-item-reward modules/mod-bg-item-reward
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-reward modules/mod-bg-reward
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-boss-announcer modules/mod-boss-announcer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-breaking-news-override modules/mod-breaking-news-override
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-buff-command modules/mod-buff-command
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cfbg modules/mod-cfbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-character-tools modules/mod-character-tools
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-login modules/mod-chat-login
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-transmitter modules/mod-chat-transmitter
# NOTE: disabled because it causes DB startup error
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chromie-xp modules/mod-chromie-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-congrats-on-level modules/mod-congrats-on-level
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-costumes modules/mod-costumes
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cta-switch modules/mod-cta-switch
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-custom-login modules/mod-custom-login
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-desertion-warnings modules/mod-desertion-warnings
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-detailed-logging modules/mod-detailed-logging
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-dmf-switch modules/mod-dmf-switch
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-duel-reset modules/mod-duel-reset
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-dynamic-xp modules/mod-dynamic-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ale modules/mod-ale
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-emblem-transfer modules/mod-emblem-transfer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-fireworks-on-level modules/mod-fireworks-on-level
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-global-chat modules/mod-global-chat
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guild-zone-system modules/mod-guild-zone-system
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guildhouse modules/mod-guildhouse
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-individual-xp modules/mod-individual-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instance-reset modules/mod-instance-reset
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instanced-worldbosses modules/mod-instanced-worldbosses
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ip-tracker modules/mod-ip-tracker
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-item-level-up modules/mod-item-level-up
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-keep-out modules/mod-keep-out
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-highest-talent modules/mod-learn-highest-talent
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-spells modules/mod-learn-spells
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-low-level-arena modules/mod-low-level-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-low-level-rbg modules/mod-low-level-rbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-mall-teleport modules/mod-mall-teleport
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morph-all-players modules/mod-morph-all-players
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morphsummon modules/mod-morphsummon
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-multi-client-check modules/mod-multi-client-check
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-notify-muted modules/mod-notify-muted
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-all-mounts modules/mod-npc-all-mounts
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-beastmaster modules/mod-npc-beastmaster
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-buffer modules/mod-npc-buffer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-codebox modules/mod-npc-codebox
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-enchanter modules/mod-npc-enchanter
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-free-professions modules/mod-npc-free-professions
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-gambler modules/mod-npc-gambler
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-morph modules/mod-npc-morph
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-services modules/mod-npc-services
# not yet on azerothcore github
git clone --depth=1 --branch=master https://github.com/gozzim/mod-npc-spectator modules/mod-npc-spectator
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-talent-template modules/mod-npc-talent-template
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-titles-tokens modules/mod-npc-titles-tokens
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-phased-duels modules/mod-phased-duels
# outdated
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-playerbots modules/mod-playerbots
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pocket-portal modules/mod-pocket-portal
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-premium modules/mod-premium
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-progression-system.git modules/mod-progression-system
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-promotion-azerothcore modules/mod-promotion-azerothcore
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-quests modules/mod-pvp-quests
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-titles modules/mod-pvp-titles
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-zones modules/mod-pvp-zones
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpscript modules/mod-pvpscript
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpstats-announcer modules/mod-pvpstats-announcer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quest-status modules/mod-quest-status
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-queue-list-cache modules/mod-queue-list-cache
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quick-teleport modules/mod-quick-teleport
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-racial-trait-swap modules/mod-racial-trait-swap
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-random-enchants modules/mod-random-enchants
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-rdf-expansion modules/mod-rdf-expansion
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-resurrection-scroll modules/mod-resurrection-scroll
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-played-time modules/mod-reward-played-time
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-shop modules/mod-reward-shop
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-server-auto-shutdown.git modules/mod-server-auto-shutdown
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-solocraft modules/mod-solocraft
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-skip-dk-starting-area modules/mod-skip-dk-starting-area
# has core patch file
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-spell-regulator modules/mod-spell-regulator
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-starter-guild modules/mod-starter-guild
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-system-vip modules/mod-system-vip
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-tic-tac-toe modules/mod-tic-tac-toe
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-top-arena modules/mod-top-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-transmog modules/mod-transmog
# archived / outdated
#git clone --depth=1 --branch=master https://github.com/azerothcore/mod-war-effort modules/mod-war-effort
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-weekend-xp modules/mod-weekend-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-who-logged modules/mod-who-logged
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-zone-difficulty modules/mod-zone-difficulty
|
||||
74
apps/ci/ci-install.sh
Normal file
74
apps/ci/ci-install.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/bin/bash
# CI helper: write the base build configuration (servers + tools, testing
# enabled, no PCH), install the build prerequisites, then install and
# record the compiler requested via the $COMPILER environment variable.

set -e

# Append the CI build settings to the acore config. The heredoc delimiter
# is unquoted, so $(grep ...) expands now: use all cores plus two threads.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CSCRIPTS=static
CBUILD_TESTING=ON
CSERVERS=ON
CTOOLS=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Base packages required before acore.sh can install the rest.
time sudo apt-get update -y
# time sudo apt-get upgrade -y
time sudo apt-get install -y git lsb-release sudo
time ./acore.sh install-deps

# Install the requested compiler and append its name to the config.
case $COMPILER in

    # this is in order to use the "default" gcc version of the OS, without forcing a specific version
    "gcc" )
        time sudo apt-get install -y gcc g++
        echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
        ;;

    "gcc8" )
        time sudo apt-get install -y gcc-8 g++-8
        echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
        ;;

    "gcc10" )
        time sudo apt-get install -y gcc-10 g++-10
        echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
        ;;

    # this is in order to use the "default" clang version of the OS, without forcing a specific version
    "clang" )
        time sudo apt-get install -y clang
        echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
        ;;

    "clang10" )
        time sudo apt-get install -y clang-10
        echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
        ;;

    "clang11" )
        time sudo apt-get install -y clang-11
        echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
        ;;

    "clang12" )
        time sudo apt-get install -y clang-12
        echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
        echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
        ;;

    * )
        echo "Unknown compiler $COMPILER"
        exit 1
        ;;
esac
|
||||
54
apps/ci/ci-pending-changelogs.ts
Normal file
54
apps/ci/ci-pending-changelogs.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import * as semver from "https://deno.land/x/semver/mod.ts";
|
||||
|
||||
// specify the needed paths here
|
||||
const CHANGELOG_PATH = "doc/changelog";
|
||||
const CHANGELOG_PENDING_PATH = `${CHANGELOG_PATH}/pendings`;
|
||||
const CHANGELOG_MASTER_FILE = `${CHANGELOG_PATH}/master.md`;
|
||||
const ACORE_JSON = "./acore.json";
|
||||
|
||||
// read the acore.json file to work with the versioning
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
const data = await Deno.readFile(ACORE_JSON);
|
||||
const acoreInfo = JSON.parse(decoder.decode(data));
|
||||
|
||||
let changelogText = await Deno.readTextFile(CHANGELOG_MASTER_FILE);
|
||||
|
||||
const currentVersion = acoreInfo.version;
|
||||
|
||||
const res=Deno.run({ cmd: [ "git", "rev-parse",
|
||||
"HEAD"],
|
||||
stdout: 'piped',
|
||||
stderr: 'piped',
|
||||
stdin: 'null' });
|
||||
await res.status();
|
||||
const gitVersion = new TextDecoder().decode(await res.output());
|
||||
|
||||
|
||||
for await (const dirEntry of Deno.readDir(CHANGELOG_PENDING_PATH)) {
|
||||
if (!dirEntry.isFile || !dirEntry.name.endsWith(".md")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Upgrade the prerelease version number (e.g. 1.0.0-dev.1 -> 1.0.0-dev.2)
|
||||
acoreInfo.version = semver.inc(acoreInfo.version, "prerelease", {
|
||||
includePrerelease: true,
|
||||
});
|
||||
|
||||
// read the pending file found and add it at the beginning of the changelog text
|
||||
const data = await Deno.readTextFile(
|
||||
`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`,
|
||||
);
|
||||
changelogText = `## ${acoreInfo.version} | Commit: [${gitVersion}](https://github.com/azerothcore/azerothcore-wotlk/commit/${gitVersion}\n\n${data}\n${changelogText}`;
|
||||
|
||||
// remove the pending file
|
||||
await Deno.remove(`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`);
|
||||
}
|
||||
|
||||
// write to acore.json and master.md only if new version is available
|
||||
if (currentVersion != acoreInfo.version) {
|
||||
console.log(`Changelog version upgraded from ${currentVersion} to ${acoreInfo.version}`)
|
||||
Deno.writeTextFile(CHANGELOG_MASTER_FILE, changelogText);
|
||||
Deno.writeTextFile(ACORE_JSON, JSON.stringify(acoreInfo, null, 2)+"\n");
|
||||
} else {
|
||||
console.log("No changelogs to add")
|
||||
}
|
||||
74
apps/ci/ci-pending-sql.sh
Normal file
74
apps/ci/ci-pending-sql.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env bash
# CI helper: move pending SQL update files into the dated
# data/sql/updates/db_* layout (YYYY_MM_DD_INDEX.sql).

set -euo pipefail

# Absolute directory of this script, so it can be run from anywhere
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Provides AC_PATH_ROOT among other shared variables
source "$CURRENT_PATH/../bash_shared/includes.sh"

UPDATES_PATH="$AC_PATH_ROOT/data/sql/updates"
|
||||
|
||||
# get_next_index "data/sql/updates/db_world/2024_10_14_22.sql"
# => 23
# get_next_index ""
# => 00
# Prints the zero-padded (width 2) daily index that should follow the
# given update file; an empty argument yields the first index, "00".
function get_next_index() {
    if [[ -n "$1" ]]; then
        local prev_count
        # basename strips the directory and the ".sql" suffix; the 4th
        # "_"-separated field is the daily counter. "[0-9]+$" (instead of
        # the old "[1-9][0-9]*$") also matches an all-zero counter such as
        # "_00" — which this very function emits for the first file of a
        # day — where grep would otherwise find nothing, return non-zero
        # and abort the whole script under 'set -euo pipefail'.
        prev_count="$(
            basename "$1" .sql |
                cut -f4 -d_ |
                grep -oE "[0-9]+$"
        )"

        # 10# forces base 10 so zero-padded counters like "09" are not
        # rejected as invalid octal literals by the arithmetic expansion
        printf '%02d' "$((10#$prev_count + 1))"
    else
        echo "00"
    fi
}
|
||||
|
||||
# lists all SQL files in the appropriate data/sql/updates/db_$1, and then moves them to a standard format, ordered by date and how many imports have happened that day. The name should be in this format:
|
||||
#
|
||||
# /path/to/data/sql/updates/db_NAME/YYYY_MM_DD_INDEX.sql
|
||||
#
|
||||
# Where INDEX is a number with a minimum with a minimum width (0-padded) of 2
|
||||
#
|
||||
# for example, "data/sql/updates/db_world/2024_10_01_03.sql" translates to "the third update in the world database from October 01, 2024"
|
||||
|
||||
TODAY="$(date +%Y_%m_%d)"
|
||||
# Move every pending SQL file for database "$1" (world/characters/auth) from
# data/sql/updates/pending_db_$1 into data/sql/updates/db_$1, renaming each
# to YYYY_MM_DD_INDEX.sql and prepending a header comment that records the
# previous -> new update name. Relies on $AC_PATH_ROOT, $UPDATES_PATH,
# $TODAY and get_next_index() defined earlier in this script.
function import() {
    PENDING_PATH="$AC_PATH_ROOT/data/sql/updates/pending_db_$1"
    UPDATES_DIR="$UPDATES_PATH/db_$1"

    # Get the most recent SQL file applied to this database. Used for the header comment
    # NOTE(review): if UPDATES_DIR does not exist, `find` fails and, with
    # `set -euo pipefail` active, aborts the script — confirm the directory
    # always exists in the repository layout.
    LATEST_UPDATE="$(find "$UPDATES_DIR" -iname "*.sql" | sort -h | tail -n 1)"
    # Get latest SQL file applied to this database, today. This could be empty.
    LATEST_UPDATE_TODAY="$(find "$UPDATES_DIR" -iname "$TODAY*.sql" | sort -h | tail -n 1)"

    for entry in "$PENDING_PATH"/*.sql; do
        # With no pending files the glob stays literal; the -f test skips it.
        if [[ -f "$entry" ]]; then
            INDEX="$(get_next_index "$LATEST_UPDATE_TODAY")"
            OUTPUT_FILE="${UPDATES_DIR}/${TODAY}_${INDEX}.sql"

            # ensure a note is added as a header comment
            echo "-- DB update $(basename "$LATEST_UPDATE" .sql) -> $(basename "$OUTPUT_FILE" .sql)" >"$OUTPUT_FILE"
            # fill in the SQL contents under that
            cat "$entry" >>"$OUTPUT_FILE"
            # remove the unneeded file
            rm -f "$entry"
            # set the newest file to the file we just moved
            LATEST_UPDATE_TODAY="$OUTPUT_FILE"
            LATEST_UPDATE="$OUTPUT_FILE"
        fi
    done

}
|
||||
|
||||
# Process the pending updates for each of the three databases.
import "world"
import "characters"
import "auth"

echo "Done."
|
||||
3
apps/ci/ci-run-unit-tests.sh
Executable file
3
apps/ci/ci-run-unit-tests.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash

# Run the compiled unit-test binary and report how long it took.
# Assumes the tree was built with -DBUILD_TESTING=1 so that
# var/build/obj/src/test/unit_tests exists relative to the CWD.
time var/build/obj/src/test/unit_tests
|
||||
47
apps/ci/mac/ci-compile.sh
Executable file
47
apps/ci/mac/ci-compile.sh
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env bash

# macOS CI compile script: configures and builds AzerothCore with ccache,
# Homebrew-provided OpenSSL/MySQL/readline, and warnings treated as errors.

# Point CMake's OpenSSL detection at the Homebrew openssl@3 keg.
export OPENSSL_ROOT_DIR=$(brew --prefix openssl@3)

# ccache tuning: CPP2 avoids diagnostics differing between cache hit/miss;
# cap the cache at 500M and compress stored objects aggressively.
export CCACHE_CPP2=true
export CCACHE_MAXSIZE='500M'
export CCACHE_COMPRESS=1
export CCACHE_COMPRESSLEVEL=9
ccache -s

cd var/build/obj

mysql_include_path=$(brew --prefix mysql)/include/mysql
mysql_lib_path=$(brew --prefix mysql)/lib/libmysqlclient.dylib

if [ ! -d "$mysql_include_path" ]; then
    echo "Original mysql include directory doesn't exist. Lets try to use the first available folder in mysql dir."
    # Fall back to the first versioned keg directory under the mysql Cellar.
    base_dir=$(brew --cellar mysql)/$(basename $(ls -d $(brew --cellar mysql)/*/ | head -n 1))
    echo "Trying the next mysql base dir: $base_dir"
    mysql_include_path=$base_dir/include/mysql
    mysql_lib_path=$base_dir/lib/libmysqlclient.dylib
fi

# Configure: tools + unit tests + statically linked scripts, Release build,
# ccache as compiler launcher, PCH disabled, -Werror everywhere.
time cmake ../../../ \
    -DTOOLS=1 \
    -DBUILD_TESTING=1 \
    -DSCRIPTS=static \
    -DCMAKE_BUILD_TYPE=Release \
    -DMYSQL_ADD_INCLUDE_PATH=$mysql_include_path \
    -DMYSQL_LIBRARY=$mysql_lib_path \
    -DREADLINE_INCLUDE_DIR=$(brew --prefix readline)/include \
    -DREADLINE_LIBRARY=$(brew --prefix readline)/lib/libreadline.dylib \
    -DOPENSSL_INCLUDE_DIR="$OPENSSL_ROOT_DIR/include" \
    -DOPENSSL_SSL_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libssl.dylib" \
    -DOPENSSL_CRYPTO_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libcrypto.dylib" \
    -DWITH_WARNINGS=1 \
    -DCMAKE_C_FLAGS="-Werror" \
    -DCMAKE_CXX_FLAGS="-Werror" \
    -DCMAKE_C_COMPILER_LAUNCHER=ccache \
    -DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
    -DUSE_SCRIPTPCH=0 \
    -DUSE_COREPCH=0 \
    ;

# Build using all cores plus two extra jobs to keep the pipeline saturated.
time make -j $(($(sysctl -n hw.ncpu ) + 2))

# Print ccache statistics again so the CI log shows the hit rate.
ccache -s
|
||||
263
apps/codestyle/codestyle-cpp.py
Normal file
263
apps/codestyle/codestyle-cpp.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
# Get the src directory of the project
src_directory = os.path.join(os.getcwd(), 'src')

# Global variables
# error_handler flips to True when any check fails; parsing_file() uses it
# to decide the process exit code.
error_handler = False
# Human-readable outcome per check, printed as the final summary.
results = {
    "Multiple blank lines check": "Passed",
    "Trailing whitespace check": "Passed",
    "GetCounter() check": "Passed",
    "Misc codestyle check": "Passed",
    "GetTypeId() check": "Passed",
    "NpcFlagHelpers check": "Passed",
    "ItemFlagHelpers check": "Passed",
    "ItemTemplateFlagHelpers check": "Passed"
}
|
||||
|
||||
# Main function to parse all the files of the project
|
||||
def parsing_file(directory: str) -> None:
    """Walk `directory` and run every codestyle check on each readable file.

    Prints the per-check summary from the global `results` at the end and
    exits with status 1 when any check failed (global `error_handler`).
    """
    print("Starting AzerothCore CPP Codestyle check...")
    print(" ")
    print("Please read the C++ Code Standards for AzerothCore:")
    print("https://www.azerothcore.org/wiki/cpp-code-standards")
    print(" ")
    for root, _, files in os.walk(directory):
        for file_name in files:
            if not file_name.endswith('.ico'):  # Skip .ico files that cannot be read
                file_path = os.path.join(root, file_name)
                try:
                    # The original rebound the loop variable `file` to the
                    # open handle; `handle` avoids that shadowing.
                    with open(file_path, 'r', encoding='utf-8') as handle:
                        multiple_blank_lines_check(handle, file_path)
                        trailing_whitespace_check(handle, file_path)
                        get_counter_check(handle, file_path)
                        # CMake files legitimately use syntax the misc check flags
                        if not file_name.endswith('.cmake') and file_name != 'CMakeLists.txt':
                            misc_codestyle_check(handle, file_path)
                        # Each helper-definition header is exempt from its own check
                        if file_name != 'Object.h':
                            get_typeid_check(handle, file_path)
                        if file_name != 'Unit.h':
                            npcflags_helpers_check(handle, file_path)
                        if file_name != 'Item.h':
                            itemflag_helpers_check(handle, file_path)
                        if file_name != 'ItemTemplate.h':
                            itemtemplateflag_helpers_check(handle, file_path)
                except UnicodeDecodeError:
                    print(f"\nCould not decode file {file_path}")
                    sys.exit(1)
    # Output the results
    print("")
    for check, result in results.items():
        print(f"{check} : {result}")
    if error_handler:
        print("\nPlease fix the codestyle issues above.")
        sys.exit(1)
    else:
        # plain string; the original used an f-string with no placeholders
        print("\nEverything looks good")
|
||||
|
||||
# Codestyle patterns checking for multiple blank lines
|
||||
def multiple_blank_lines_check(file: io, file_path: str) -> None:
    """Flag runs of more than one blank line, and any blank line at EOF."""
    global error_handler, results
    file.seek(0)  # rewind so earlier checks do not affect this scan
    found_issue = False
    blank_run = 0
    for line_number, line in enumerate(file, start = 1):
        if not line.strip():
            blank_run += 1
            if blank_run > 1:
                print(f"Multiple blank lines found in {file_path} at line {line_number - 1}")
                found_issue = True
        else:
            blank_run = 0
    # A file must not end with any blank line
    if blank_run >= 1:
        print(f"Multiple blank lines found at the end of: {file_path}")
        found_issue = True
    if found_issue:
        error_handler = True
        results["Multiple blank lines check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for whitespace at the end of the lines
|
||||
def trailing_whitespace_check(file: io, file_path: str) -> None:
    """Flag lines that end in a space right before the newline."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False
    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        if line.endswith(' \n'):
            print(f"Trailing whitespace found: {file_path} at line {line_number}")
            check_failed = True
    # Record the failure unconditionally; the original only updated
    # `results` while flipping error_handler, so a file scanned after an
    # earlier failed check could leave this entry marked "Passed".
    if check_failed:
        error_handler = True
        results["Trailing whitespace check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ObjectGuid::GetCounter()
|
||||
def get_counter_check(file: io, file_path: str) -> None:
    """Flag usages of ObjectGuid::GetCounter(); ToString() is preferred."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False
    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        if 'ObjectGuid::GetCounter()' in line:
            print(f"Please use ObjectGuid::ToString().c_str() instead ObjectGuid::GetCounter(): {file_path} at line {line_number}")
            check_failed = True
    # Use the same check_failed bookkeeping as the sibling checks so the
    # result entry is always recorded, even when error_handler was already
    # set by a previous check.
    if check_failed:
        error_handler = True
        results["GetCounter() check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for GetTypeId()
|
||||
def get_typeid_check(file: io, file_path: str) -> None:
    """Flag GetTypeId() comparisons that have a dedicated Is*() helper."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False
    # (TYPEID constant, preferred helper) pairs; both the == and != forms
    # are flagged. Table-driven to remove five copy-pasted branches while
    # printing exactly the same messages.
    replacements = (
        ('TYPEID_ITEM', 'IsItem()'),
        ('TYPEID_UNIT', 'IsCreature()'),
        ('TYPEID_PLAYER', 'IsPlayer()'),
        ('TYPEID_GAMEOBJECT', 'IsGameObject()'),
        ('TYPEID_DYNOBJECT', 'IsDynamicObject()'),
    )
    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        for type_id, helper in replacements:
            if f'GetTypeId() == {type_id}' in line or f'GetTypeId() != {type_id}' in line:
                print(f"Please use {helper} instead of GetTypeId(): {file_path} at line {line_number}")
                check_failed = True
    # Handle the script error and update the result output
    if check_failed:
        error_handler = True
        results["GetTypeId() check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for NpcFlag helpers
|
||||
def npcflags_helpers_check(file: io, file_path: str) -> None:
    """Flag raw UNIT_NPC_FLAGS field access that should use the Npc flag helpers."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    failed = False
    # searched substring -> (helper to suggest, raw form echoed in the message)
    suggestions = (
        ('GetUInt32Value(UNIT_NPC_FLAGS)', 'GetNpcFlags()', 'GetUInt32Value(UNIT_NPC_FLAGS)'),
        ('HasFlag(UNIT_NPC_FLAGS,', 'HasNpcFlag()', 'HasFlag(UNIT_NPC_FLAGS, ...)'),
        ('SetUInt32Value(UNIT_NPC_FLAGS,', 'ReplaceAllNpcFlags()', 'SetUInt32Value(UNIT_NPC_FLAGS, ...)'),
        ('SetFlag(UNIT_NPC_FLAGS,', 'SetNpcFlag()', 'SetFlag(UNIT_NPC_FLAGS, ...)'),
        ('RemoveFlag(UNIT_NPC_FLAGS,', 'RemoveNpcFlag()', 'RemoveFlag(UNIT_NPC_FLAGS, ...)'),
    )
    for line_number, line in enumerate(file, start = 1):
        for needle, helper, shown in suggestions:
            if needle in line:
                print(
                    f"Please use {helper} instead of {shown}: {file_path} at line {line_number}")
                failed = True
    if failed:
        error_handler = True
        results["NpcFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ItemFlag helpers
|
||||
def itemflag_helpers_check(file: io, file_path: str) -> None:
    """Flag raw ITEM_FIELD_FLAGS queries that should use the Item helpers."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    failed = False
    # flag constant -> helper to suggest instead of the raw HasFlag call
    helper_by_flag = {
        'ITEM_FIELD_FLAG_REFUNDABLE': 'IsRefundable()',
        'ITEM_FIELD_FLAG_BOP_TRADEABLE': 'IsBOPTradable()',
        'ITEM_FIELD_FLAG_WRAPPED': 'IsWrapped()',
    }
    for line_number, line in enumerate(file, start = 1):
        for flag, helper in helper_by_flag.items():
            raw_call = f'HasFlag(ITEM_FIELD_FLAGS, {flag})'
            if raw_call in line:
                print(
                    f"Please use {helper} instead of {raw_call}: {file_path} at line {line_number}")
                failed = True
    if failed:
        error_handler = True
        results["ItemFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ItemTemplate helpers
|
||||
def itemtemplateflag_helpers_check(file: io, file_path: str) -> None:
    """Flag raw bitmask tests on ItemTemplate flags; HasFlag* helpers exist."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    failed = False
    # searched substring -> (suggested helper, raw form echoed in the message)
    suggestions = (
        ('Flags & ITEM_FLAG', 'HasFlag(ItemFlag)', "'Flags & ITEM_FLAG_'"),
        ('Flags2 & ITEM_FLAG2', 'HasFlag2(ItemFlag2)', "'Flags2 & ITEM_FLAG2_'"),
        ('FlagsCu & ITEM_FLAGS_CU', 'HasFlagCu(ItemFlagsCustom)', "'FlagsCu & ITEM_FLAGS_CU_'"),
    )
    for line_number, line in enumerate(file, start = 1):
        for needle, helper, raw_form in suggestions:
            if needle in line:
                print(
                    f"Please use {helper} instead of {raw_form}: {file_path} at line {line_number}")
                failed = True
    if failed:
        error_handler = True
        results["ItemTemplateFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for various codestyle issues
|
||||
def misc_codestyle_check(file: io, file_path: str) -> None:
    """Catch assorted C++ style violations: const placement, `if(` spacing,
    leading/trailing curly braces around if/else, double semicolons, tabs."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False

    # used to check for "if/else (...) {" "} else" ignores "if/else (...) {...}" "#define ... if/else (...) {"
    # NOTE(review): "[^#define]" is a character class (first char not one of
    # {#,d,e,f,i,n}), not a literal "#define" exclusion — verify intent.
    ifelse_curlyregex = r"^[^#define].*\s+(if|else)(\s*\(.*\))?\s*{[^}]*$|}\s*else(\s*{[^}]*$)"
    # used to catch double semicolons ";;" ignores "(;;)"
    double_semiregex = r"(?<!\()\s*;;(?!\))"
    # used to catch tabs
    # NOTE(review): used with re.match below, so only a tab at the very
    # start of a line is flagged — confirm that is the intended scope.
    tab_regex = r"\t"

    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        if 'const auto&' in line:
            print(
                f"Please use the 'auto const&' syntax instead of 'const auto&': {file_path} at line {line_number}")
            check_failed = True
        if re.search(r'\bconst\s+\w+\s*\*\b', line):
            print(
                f"Please use the 'Class/ObjectType const*' syntax instead of 'const Class/ObjectType*': {file_path} at line {line_number}")
            check_failed = True
        if [match for match in [' if(', ' if ( '] if match in line]:
            print(
                f"Please use the 'if (XXXX)' syntax instead of 'if(XXXX)': {file_path} at line {line_number}")
            check_failed = True
        if re.match(ifelse_curlyregex, line):
            print(
                f"Curly brackets are not allowed to be leading or trailing if/else statements. Place it on a new line: {file_path} at line {line_number}")
            check_failed = True
        if re.search(double_semiregex, line):
            print(
                f"Double semicolon (;;) found in {file_path} at line {line_number}")
            check_failed = True
        if re.match(tab_regex, line):
            print(
                f"Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
            check_failed = True

    # Handle the script error and update the result output
    if check_failed:
        error_handler = True
        results["Misc codestyle check"] = "Failed"
|
||||
|
||||
# Main function
# Entry point: run all checks over ./src when the script is executed.
parsing_file(src_directory)
|
||||
411
apps/codestyle/codestyle-sql.py
Normal file
411
apps/codestyle/codestyle-sql.py
Normal file
@@ -0,0 +1,411 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import glob
|
||||
import subprocess
|
||||
|
||||
base_dir = os.getcwd()

# Get the pending directory of the project
pattern = os.path.join(base_dir, 'data/sql/updates/pending_db_*')
src_directory = glob.glob(pattern)

# Get files from base dir
base_pattern = os.path.join(base_dir, 'data/sql/base/db_*')
base_directory = glob.glob(base_pattern)

# Get files from archive dir
archive_pattern = os.path.join(base_dir, 'data/sql/archive/db_*')
archive_directory = glob.glob(archive_pattern)

# Global variables
# error_handler flips to True when any check fails; parsing_file() uses it
# to decide the process exit code.
error_handler = False
# Human-readable outcome per check, printed as the final summary.
results = {
    "Multiple blank lines check": "Passed",
    "Trailing whitespace check": "Passed",
    "SQL codestyle check": "Passed",
    "INSERT & DELETE safety usage check": "Passed",
    "Missing semicolon check": "Passed",
    "Backtick check": "Passed",
    "Directory check": "Passed",
    "Table engine check": "Passed"
}
|
||||
|
||||
# Collect all files in all directories
|
||||
def collect_files_from_directories(directories: list) -> list:
    """Recursively gather every file under `directories`, skipping *.sh scripts."""
    collected = []
    for base in directories:
        for root, _, names in os.walk(base):
            collected.extend(
                os.path.join(root, name)
                for name in names
                if not name.endswith('.sh')  # shell helpers are not SQL
            )
    return collected
|
||||
|
||||
# Used to find changed or added files compared to master.
|
||||
def get_changed_files() -> list:
    """Return repo-relative paths of files added (A) or modified (M) vs origin/master.

    Fetches origin/master first so the diff is against an up-to-date ref.
    NOTE(review): the second subprocess call has no check=True — a failed
    `git diff` leaves stdout empty and this silently returns [].
    """
    subprocess.run(["git", "fetch", "origin", "master"], check=True)
    result = subprocess.run(
        ["git", "diff", "--name-status", "origin/master"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    changed_files = []
    for line in result.stdout.strip().splitlines():
        if not line:
            continue
        # Each line looks like "M\tpath/to/file"; keep only added/modified.
        status, path = line.split(maxsplit=1)
        if status in ("A", "M"):
            changed_files.append(path)
    return changed_files
|
||||
|
||||
# Main function to parse all the files of the project
|
||||
def parsing_file(files: list) -> None:
    """Run the SQL codestyle checks over `files` and print a summary.

    Pending-update files get the full content checks; files under base/ or
    archive/ only get directory_check, and only when they actually changed
    versus origin/master. Exits with status 1 when any check failed.
    """
    print("Starting AzerothCore SQL Codestyle check...")
    print(" ")
    print("Please read the SQL Standards for AzerothCore:")
    print("https://www.azerothcore.org/wiki/sql-standards")
    print(" ")

    # Iterate over all files in data/sql/updates/pending_db_*
    for file_path in files:
        if "base" not in file_path and "archive" not in file_path:
            try:
                with open(file_path, 'r', encoding='utf-8') as file:
                    multiple_blank_lines_check(file, file_path)
                    trailing_whitespace_check(file, file_path)
                    sql_check(file, file_path)
                    insert_delete_safety_check(file, file_path)
                    semicolon_check(file, file_path)
                    backtick_check(file, file_path)
                    non_innodb_engine_check(file, file_path)
            except UnicodeDecodeError:
                print(f"\n❌ Could not decode file {file_path}")
                sys.exit(1)

    # Make sure we only check changed or added files when we work with base/archive paths
    changed_files = get_changed_files()
    # Iterate over all file paths
    for file_path in changed_files:
        if "base" in file_path or "archive" in file_path:
            try:
                with open(file_path, "r", encoding="utf-8") as f:
                    directory_check(f, file_path)
            except UnicodeDecodeError:
                print(f"\n❌ Could not decode file {file_path}")
                sys.exit(1)

    # Output the results
    print("\n ")
    for check, result in results.items():
        print(f"{check} : {result}")
    if error_handler:
        print("\n ")
        print("\n❌ Please fix the codestyle issues above.")
        sys.exit(1)
    else:
        print("\n ")
        print(f"\n✅ Everything looks good")
|
||||
|
||||
# Codestyle patterns checking for multiple blank lines
|
||||
def multiple_blank_lines_check(file: io, file_path: str) -> None:
    """Flag runs of more than one blank line and any blank line at EOF."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    failed = False
    run_length = 0
    for line_number, line in enumerate(file, start = 1):
        if not line.strip():
            run_length += 1
            if run_length > 1:
                print(f"❌ Multiple blank lines found in {file_path} at line {line_number - 1}")
                failed = True
        else:
            run_length = 0
    # Files may not end with any blank line
    if run_length >= 1:
        print(f"❌ Multiple blank lines found at the end of: {file_path}")
        failed = True
    if failed:
        error_handler = True
        results["Multiple blank lines check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for whitespace at the end of the lines
|
||||
def trailing_whitespace_check(file: io, file_path: str) -> None:
    """Flag lines ending in a space right before the newline."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    offenders = []
    for line_number, line in enumerate(file, start = 1):
        if line.endswith(' \n'):
            offenders.append(line_number)
            print(f"❌ Trailing whitespace found: {file_path} at line {line_number}")
    if offenders:
        error_handler = True
        results["Trailing whitespace check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for various codestyle issues
|
||||
def sql_check(file: io, file_path: str) -> None:
    """General SQL content checks: protected broadcast_text table, column
    casing, double semicolons, tabs, and a final-newline requirement."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False
    last_line = None  # remember the final line to verify the trailing newline

    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        if 'broadcast_text' in line:
            print(
                f"❌ DON'T EDIT broadcast_text TABLE UNLESS YOU KNOW WHAT YOU ARE DOING!\nThis error can safely be ignored if the changes are approved to be sniffed: {file_path} at line {line_number}")
            check_failed = True
        if "EntryOrGuid" in line:
            print(
                f"❌ Please use entryorguid syntax instead of EntryOrGuid in {file_path} at line {line_number}\nWe recommend to use keira to have the right syntax in auto-query generation")
            check_failed = True
        if ';;' in line:
            print(
                f"❌ Double semicolon (;;) found in {file_path} at line {line_number}")
            check_failed = True
        if re.match(r"\t", line):  # NOTE: only flags a tab at the start of a line
            print(
                f"❌ Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
            check_failed = True
        last_line = line

    # The file must end with a newline. The original indexed `line[-1]`
    # after the loop, which raised NameError for an empty file and missed
    # files ending in trailing whitespace without a newline.
    if last_line is not None and not last_line.endswith('\n'):
        print(
            f"❌ The last line is not a newline. Please add a newline: {file_path}")
        check_failed = True

    # Handle the script error and update the result output
    if check_failed:
        error_handler = True
        results["SQL codestyle check"] = "Failed"
|
||||
|
||||
def insert_delete_safety_check(file: io, file_path: str) -> None:
    """Ensure INSERTs are preceded by a DELETE and protected tables are never deleted."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    protected_tables = ("creature_template", "gameobject_template", "item_template", "quest_template")
    delete_pattern = re.compile(r"DELETE FROM\s+`([^`]+)`", re.IGNORECASE)
    failed = False
    prev = ""

    for line_number, line in enumerate(file, start = 1):
        # SQL comments neither count as "previous line" nor get checked
        if line.startswith("--"):
            continue
        if "INSERT" in line and "DELETE" not in prev:
            print(f"❌ No DELETE keyword found before the INSERT in {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
            failed = True
        prev = line
        hit = delete_pattern.match(line)
        if hit and hit.group(1) in protected_tables:
            print(
                f"❌ Entries from {hit.group(1)} should not be deleted! {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
            failed = True

    if failed:
        error_handler = True
        results["INSERT & DELETE safety usage check"] = "Failed"
|
||||
|
||||
def semicolon_check(file: io, file_path: str) -> None:
    """Verify statement termination: every query ends with ';' and every
    non-final row of a multi-line VALUES block ends with ','.

    Scans with three pieces of state:
      query_open          - a statement started and no ';' seen yet
      in_block_comment    - currently inside a /* ... */ comment
      inside_values_block - inside a multi-row INSERT/REPLACE ... VALUES list
    """
    global error_handler, results

    file.seek(0)  # Reset file pointer to the start
    check_failed = False

    query_open = False
    in_block_comment = False
    inside_values_block = False

    lines = file.readlines()
    total_lines = len(lines)

    def get_next_non_blank_line(start):
        """ Get the next non-blank, non-comment line starting from `start` """
        for idx in range(start, total_lines):
            next_line = lines[idx].strip()
            if next_line and not next_line.startswith('--') and not next_line.startswith('/*'):
                return next_line
        return None

    for line_number, line in enumerate(lines, start=1):
        stripped_line = line.strip()

        # Skip single-line comments
        if stripped_line.startswith('--'):
            continue

        # Handle block comments
        if in_block_comment:
            if '*/' in stripped_line:
                in_block_comment = False
                stripped_line = stripped_line.split('*/', 1)[1].strip()
            else:
                continue
        else:
            if '/*' in stripped_line:
                query_open = False  # Reset query state at start of block comment
                in_block_comment = True
                stripped_line = stripped_line.split('/*', 1)[0].strip()

        # Skip empty lines (unless inside values block)
        if not stripped_line and not inside_values_block:
            continue

        # Remove inline comments after SQL
        stripped_line = stripped_line.split('--', 1)[0].strip()

        # SET statements must be terminated on their own line
        if stripped_line.upper().startswith("SET") and not stripped_line.endswith(";"):
            print(f"❌ Missing semicolon in {file_path} at line {line_number}")
            check_failed = True

        # Detect query start
        if not query_open and any(keyword in stripped_line.upper() for keyword in ["SELECT", "INSERT", "UPDATE", "DELETE", "REPLACE"]):
            query_open = True

        # Detect start of multi-line VALUES block
        if any(kw in stripped_line.upper() for kw in ["INSERT", "REPLACE"]) and "VALUES" in stripped_line.upper():
            inside_values_block = True
            query_open = True  # Ensure query is marked open too

        if inside_values_block:
            if not stripped_line:
                continue  # Allow blank lines inside VALUES block

            if stripped_line.startswith('('):
                # Get next non-blank line to detect if we're at the last row
                next_line = get_next_non_blank_line(line_number)

                if next_line and next_line.startswith('('):
                    # Expect comma if another row follows
                    if not stripped_line.endswith(','):
                        print(f"❌ Missing comma in {file_path} at line {line_number}")
                        check_failed = True
                else:
                    # Expect semicolon if this is the final row
                    if not stripped_line.endswith(';'):
                        print(f"❌ Missing semicolon in {file_path} at line {line_number}")
                        check_failed = True
                    inside_values_block = False
                    query_open = False
            else:
                inside_values_block = False  # Close block if semicolon was found

        elif query_open and not inside_values_block:
            # Normal query handling (outside multi-row VALUES block)
            if line_number == total_lines and not stripped_line.endswith(';'):
                print(f"❌ Missing semicolon in {file_path} at the last line {line_number}")
                check_failed = True
                query_open = False
            elif stripped_line.endswith(';'):
                query_open = False

    if check_failed:
        error_handler = True
        results["Missing semicolon check"] = "Failed"
|
||||
|
||||
def backtick_check(file: io, file_path: str) -> None:
    """Require backticks around every identifier in common SQL clauses.

    Heuristic: extracts clause bodies with a regex, strips quoted string
    values, then demands that every remaining bare word (that is not a
    known MySQL keyword or an @variable) appears backtick-quoted somewhere
    in the same clause. NOTE(review): operates line-by-line, so multi-line
    statements may produce false positives — treat hits as advisory.
    """
    global error_handler, results
    file.seek(0)
    check_failed = False

    # Find SQL clauses
    pattern = re.compile(
        r'\b(SELECT|FROM|JOIN|WHERE|GROUP BY|ORDER BY|DELETE FROM|UPDATE|INSERT INTO|SET|REPLACE|REPLACE INTO)\s+(.*?)(?=;$|(?=\b(?:WHERE|SET|VALUES)\b)|$)',
        re.IGNORECASE | re.DOTALL
    )

    # Make sure to ignore values enclosed in single- and doublequotes
    quote_pattern = re.compile(r"'(?:\\'|[^'])*'|\"(?:\\\"|[^\"])*\"")

    for line_number, line in enumerate(file, start=1):
        # Ignore comments
        if line.startswith('--'):
            continue

        # Sanitize single- and doublequotes to prevent false positives
        sanitized_line = quote_pattern.sub('', line)
        matches = pattern.findall(sanitized_line)

        for clause, content in matches:
            # Find all words and exclude @variables
            words = re.findall(r'\b(?<!@)([a-zA-Z_][a-zA-Z0-9_]*)\b', content)

            for word in words:
                # Skip MySQL keywords
                if word.upper() in {"SELECT", "FROM", "JOIN", "WHERE", "GROUP", "BY", "ORDER",
                                    "DELETE", "UPDATE", "INSERT", "INTO", "SET", "VALUES", "AND",
                                    "IN", "OR", "REPLACE", "NOT", "BETWEEN",
                                    "DISTINCT", "HAVING", "LIMIT", "OFFSET", "AS", "ON", "INNER",
                                    "LEFT", "RIGHT", "FULL", "OUTER", "CROSS", "NATURAL",
                                    "EXISTS", "LIKE", "IS", "NULL", "UNION", "ALL", "ASC", "DESC",
                                    "CASE", "WHEN", "THEN", "ELSE", "END", "CREATE", "TABLE",
                                    "ALTER", "DROP", "DATABASE", "INDEX", "VIEW", "TRIGGER",
                                    "PROCEDURE", "FUNCTION", "PRIMARY", "KEY", "FOREIGN", "REFERENCES",
                                    "CONSTRAINT", "DEFAULT", "AUTO_INCREMENT", "UNIQUE", "CHECK",
                                    "SHOW", "DESCRIBE", "EXPLAIN", "USE", "GRANT", "REVOKE",
                                    "BEGIN", "COMMIT", "ROLLBACK", "SAVEPOINT", "LOCK", "UNLOCK",
                                    "WITH", "RECURSIVE", "COLUMN", "ENGINE", "CHARSET", "COLLATE",
                                    "IF", "ELSEIF", "LOOP", "WHILE", "DO", "HANDLER", "LEAVE",
                                    "ITERATE", "DECLARE", "CURSOR", "FETCH", "OPEN", "CLOSE"}:
                    continue

                # Make sure the word is enclosed in backticks
                if not re.search(rf'`{re.escape(word)}`', content):
                    print(f"❌ Missing backticks around ({word}). {file_path} at line {line_number}")
                    check_failed = True

    if check_failed:
        error_handler = True
        results["Backtick check"] = "Failed"
|
||||
|
||||
def directory_check(file: io, file_path: str) -> None:
    """Warn when a changed file lives under a protected base/ or archive/ directory."""
    global error_handler, results
    file.seek(0)  # kept for interface parity; only the path is inspected
    # Normalize so both / and \ separators split cleanly
    segments = os.path.normpath(file_path).split(os.sep)
    failed = False
    for protected in ("base", "archive"):
        if protected in segments:
            print(f"❗ {file_path} is changed/added in the {protected} directory.\nIf this is intended, please notify a maintainer.")
            failed = True
    if failed:
        error_handler = True
        results["Directory check"] = "Failed"
|
||||
|
||||
def non_innodb_engine_check(file: io, file_path: str) -> None:
    """Flag ENGINE= clauses that select a storage engine other than InnoDB."""
    global error_handler, results
    file.seek(0)  # rewind before scanning
    failed = False
    engine_re = re.compile(r'ENGINE\s*=\s*([a-zA-Z0-9_]+)', re.IGNORECASE)
    for line_number, line in enumerate(file, start=1):
        hit = engine_re.search(line)
        if hit is None:
            continue
        engine_name = hit.group(1).lower()
        if engine_name != "innodb":
            print(f"❌ Non-InnoDB engine found: '{engine_name}' in {file_path} at line {line_number}")
            failed = True
    if failed:
        error_handler = True
        results["Table engine check"] = "Failed"
|
||||
|
||||
# Collect all files from matching directories
|
||||
all_files = collect_files_from_directories(src_directory) + collect_files_from_directories(base_directory) + collect_files_from_directories(archive_directory)
|
||||
|
||||
# Main function
|
||||
parsing_file(all_files)
|
||||
2
apps/compiler/.gitignore
vendored
Normal file
2
apps/compiler/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
config.sh
|
||||
|
||||
32
apps/compiler/README.md
Normal file
32
apps/compiler/README.md
Normal file
@@ -0,0 +1,32 @@
|
||||
## How to compile:
|
||||
|
||||
First of all, if you need some custom configuration, copy
/conf/dist/config.sh to /conf/config.sh and configure it.
|
||||
|
||||
* for a "clean" compilation you must run all scripts in their order:
|
||||
|
||||
./1-clean.sh
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you add/rename/delete some sources and you need to compile it you have to run:
|
||||
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you have modified code only, you just need to run
|
||||
|
||||
./3-build.sh
|
||||
|
||||
|
||||
## compiler.sh
|
||||
|
||||
compiler.sh script contains an interactive menu to clean/compile/build. You can also run actions directly by command lines specifying the option.
|
||||
Ex:
|
||||
./compiler.sh 3
|
||||
|
||||
It will start the build process (it's equivalent to ./3-build.sh)
|
||||
|
||||
## Note:
|
||||
|
||||
For an optimal development process and **much faster** compilation times, it is suggested to use clang instead of gcc.
|
||||
65
apps/compiler/compiler.sh
Executable file
65
apps/compiler/compiler.sh
Executable file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env bash

# Interactive entry point for the AzerothCore compiler tooling.
# Abort immediately if any command fails.
set -e

# Absolute directory of this script, regardless of the caller's CWD.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Pull in the compiler helpers (defines/functions/config) and the shared
# menu framework used to drive the interactive menu below.
source "$CURRENT_PATH/includes/includes.sh"
source "$AC_PATH_APPS/bash_shared/menu_system.sh"

# Menu definition using the new system
# Format: "key|short|description"
comp_menu_items=(
    "build|b|Configure and compile"
    "clean|cl|Clean build files"
    "configure|cfg|Run CMake"
    "compile|cmp|Compile only"
    "all|a|clean, configure and compile"
    "ccacheClean|cc|Clean ccache files, normally not needed"
    "ccacheShowStats|cs|show ccache statistics"
    "quit|q|Close this menu"
)
|
||||
|
||||
# Menu command handler - called by menu system for each command
|
||||
# Dispatch one menu selection to its comp_* implementation.
# $1 is the menu key; remaining arguments are discarded.
# Returns 0 on success/quit, 1 on an unknown key.
function handle_compiler_command() {
    local key="$1"
    shift

    case "$key" in
        build|clean|configure|compile|all|ccacheClean|ccacheShowStats)
            # Every regular action maps 1:1 onto a comp_<key> function.
            "comp_$key"
            ;;
        quit)
            echo "Closing compiler menu..."
            return 0
            ;;
        *)
            echo "Invalid option. Use --help to see available commands."
            return 1
            ;;
    esac
}
|
||||
|
||||
# Hook support (preserved from original)
# Lets user-defined hooks inject extra behaviour before the menu starts.
runHooks "ON_AFTER_OPTIONS" # you can create your custom options

# Run the menu system
# Hands control to the shared menu loop: it either executes direct commands
# from "$@" or shows the interactive menu, invoking handle_compiler_command
# for each choice.
menu_run_with_items "ACORE COMPILER" handle_compiler_command -- "${comp_menu_items[@]}" -- "$@"
|
||||
7
apps/compiler/includes/defines.sh
Normal file
7
apps/compiler/includes/defines.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
# you can choose build type from cmd argument
# Usage: defines.sh [buildtype]  (e.g. "release" -> CCTYPE=Release)
# `[ -n "${1-}" ]` replaces the original unquoted `[ ! -z $1 ]`, which
# word-split arguments containing spaces and errored under `set -u`
# when no argument was given.
if [ -n "${1-}" ]
then
    CCTYPE=$1
    CCTYPE=${CCTYPE^} # capitalize first letter if it's not yet
fi
|
||||
|
||||
205
apps/compiler/includes/functions.sh
Normal file
205
apps/compiler/includes/functions.sh
Normal file
@@ -0,0 +1,205 @@
|
||||
#!/usr/bin/env bash

# Shared build helper functions for the compiler app.

# shellcheck source=../../../deps/acore/bash-lib/src/common/boolean.sh
source "$AC_BASH_LIB_PATH/common/boolean.sh"

# Set SUDO variable - one liner
SUDO=""

# Whether cmake install / permission steps may escalate privileges (0/1).
IS_SUDO_ENABLED=${AC_ENABLE_ROOT_CMAKE_INSTALL:-0}

# Allow callers to opt-out from privilege escalation during install/perms adjustments
# Prefix with "sudo" only when escalation is enabled AND we are not already root.
if [[ $IS_SUDO_ENABLED == 1 ]]; then
    SUDO=$([ "$EUID" -ne 0 ] && echo "sudo" || echo "")
fi
|
||||
|
||||
#######################################
# Remove everything inside the build directory (the directory itself is kept).
# Globals:   BUILDPATH (read) - build dir, defaults to var/build/obj
# Returns:   0 when the directory exists (even if already empty),
#            non-zero when it does not exist.
#######################################
function comp_clean() {
    DIRTOCLEAN=${BUILDPATH:-var/build/obj}

    echo "Cleaning build files in $DIRTOCLEAN"

    # Quote the directory (it may contain spaces — the original unquoted
    # $PATTERN word-split here) but keep the glob outside the quotes so
    # the shell expands the directory's contents.
    [ -d "$DIRTOCLEAN" ] && rm -rf -- "$DIRTOCLEAN"/*
}
|
||||
|
||||
# Export the ccache tuning environment and route both compilers through
# ccache via CMake launcher flags. No-op unless AC_CCACHE is exactly "true".
# Every variable keeps a caller-provided value; defaults apply otherwise.
function comp_ccacheEnable() {
    if [ "$AC_CCACHE" != true ]; then
        return
    fi

    export CCACHE_MAXSIZE="${CCACHE_MAXSIZE:-1000MB}"
    #export CCACHE_DEPEND=true
    export CCACHE_SLOPPINESS="${CCACHE_SLOPPINESS:-pch_defines,time_macros,include_file_mtime}"
    export CCACHE_CPP2="${CCACHE_CPP2:-true}" # optimization for clang
    export CCACHE_COMPRESS="${CCACHE_COMPRESS:-1}"
    export CCACHE_COMPRESSLEVEL="${CCACHE_COMPRESSLEVEL:-9}"
    export CCACHE_COMPILERCHECK="${CCACHE_COMPILERCHECK:-content}"
    export CCACHE_LOGFILE="${CCACHE_LOGFILE:-$CCACHE_DIR/cache.debug}"
    #export CCACHE_NODIRECT=true

    # Tell CMake to invoke both the C and C++ compilers through ccache.
    export CCUSTOMOPTIONS="$CCUSTOMOPTIONS -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
}
|
||||
|
||||
# Wipe the entire ccache (-C) and print the resulting statistics (-s).
# When ccache support is off, just say so and do nothing.
comp_ccacheClean() {
    if [ "$AC_CCACHE" != true ]; then
        echo "ccache is disabled"
        return
    fi

    echo "Cleaning ccache"
    ccache -C
    ccache -s
}
|
||||
|
||||
# Reset ccache counters before a build: -z zeroes the statistics and
# -c runs a cache cleanup. Silent no-op when ccache support is off.
comp_ccacheResetStats() {
    if [ "$AC_CCACHE" != true ]; then
        return
    fi

    ccache -zc
}
|
||||
|
||||
# Print ccache hit/miss statistics. Silent no-op when ccache support is off.
comp_ccacheShowStats() {
    if [ "$AC_CCACHE" != true ]; then
        return
    fi

    ccache -s
}
|
||||
|
||||
#######################################
# Run CMake configuration inside $BUILDPATH.
# Globals (read): BUILDPATH, SRCPATH, BINPATH, CONFDIR, CTYPE, CDEBUG,
#                 AC_CCACHE, and the C* option variables set by defines/config.
# NOTE(review): most expansions below are deliberately unquoted because the
# option variables may hold several space-separated cmake flags that must
# word-split; do not blindly quote them.
#######################################
function comp_configure() {
    # Remember the caller's directory so we can restore it at the end.
    CWD=$(pwd)

    cd $BUILDPATH

    echo "Build path: $BUILDPATH"
    echo "DEBUG info: $CDEBUG"
    echo "Compilation type: $CTYPE"
    echo "CCache: $AC_CCACHE"
    # -DCMAKE_BUILD_TYPE=$CCTYPE disable optimization "slow and huge amount of ram"
    # -DWITH_COREDEBUG=$CDEBUG compiled with debug information

    # Legacy per-map script options, kept for reference:
    #-DSCRIPTS_COMMANDS=$CSCRIPTS -DSCRIPTS_CUSTOM=$CSCRIPTS -DSCRIPTS_EASTERNKINGDOMS=$CSCRIPTS -DSCRIPTS_EVENTS=$CSCRIPTS -DSCRIPTS_KALIMDOR=$CSCRIPTS \
    #-DSCRIPTS_NORTHREND=$CSCRIPTS -DSCRIPTS_OUTDOORPVP=$CSCRIPTS -DSCRIPTS_OUTLAND=$CSCRIPTS -DSCRIPTS_PET=$CSCRIPTS -DSCRIPTS_SPELLS=$CSCRIPTS -DSCRIPTS_WORLD=$CSCRIPTS \
    #-DAC_WITH_UNIT_TEST=$CAC_UNIT_TEST -DAC_WITH_PLUGINS=$CAC_PLG \

    # Only pass -DCONF_DIR when the caller provided a config directory.
    local DCONF=""
    if [ ! -z "$CONFDIR" ]; then
        DCONF="-DCONF_DIR=$CONFDIR"
    fi

    # Exports the CCACHE_* env and appends launcher flags to CCUSTOMOPTIONS.
    comp_ccacheEnable

    OSOPTIONS=""

    # Platform-specific library/include hints (paths assume Homebrew on
    # macOS and Chocolatey's MySQL layout on Windows/msys).
    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
        darwin*)
            OSOPTIONS=" -DMYSQL_ADD_INCLUDE_PATH=/usr/local/include -DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib -DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include -DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl@3/include -DOPENSSL_SSL_LIBRARIES=/usr/local/opt/openssl@3/lib/libssl.dylib -DOPENSSL_CRYPTO_LIBRARIES=/usr/local/opt/openssl@3/lib/libcrypto.dylib "
            ;;
        msys*)
            OSOPTIONS=" -DMYSQL_INCLUDE_DIR=C:\tools\mysql\current\include -DMYSQL_LIBRARY=C:\tools\mysql\current\lib\mysqlclient.lib "
            ;;
    esac

    cmake $SRCPATH -DCMAKE_INSTALL_PREFIX=$BINPATH $DCONF \
        -DAPPS_BUILD=$CAPPS_BUILD \
        -DTOOLS_BUILD=$CTOOLS_BUILD \
        -DSCRIPTS=$CSCRIPTS \
        -DMODULES=$CMODULES \
        -DBUILD_TESTING=$CBUILD_TESTING \
        -DUSE_SCRIPTPCH=$CSCRIPTPCH \
        -DUSE_COREPCH=$CCOREPCH \
        -DCMAKE_BUILD_TYPE=$CTYPE \
        -DWITH_WARNINGS=$CWARNINGS \
        -DCMAKE_C_COMPILER=$CCOMPILERC \
        -DCMAKE_CXX_COMPILER=$CCOMPILERCXX \
        $CBUILD_APPS_LIST $CBUILD_TOOLS_LIST $OSOPTIONS $CCUSTOMOPTIONS

    # Restore the caller's working directory before firing hooks.
    cd $CWD

    runHooks "ON_AFTER_CONFIG"
}
|
||||
|
||||
#######################################
# Build (and on supported platforms install) the project from $BUILDPATH.
# Globals (read): MTHREADS, BUILDPATH, CTYPE, OSTYPE, AC_BINPATH_FULL,
#                 CONFDIR, SUDO, IS_SUDO_ENABLED, AC_ENABLE_CONF_COPY_ON_INSTALL
# Side effects: installs binaries, may chown/chmod them and copy default
#               configuration files; fires the ON_AFTER_BUILD hook.
#######################################
function comp_compile() {
    # MTHREADS=0 means "auto": CPU count + 2. NOTE(review): `==` inside [ ]
    # is a bashism and /proc/cpuinfo is Linux-only — fine for bash on Linux.
    [ $MTHREADS == 0 ] && MTHREADS=$(grep -c ^processor /proc/cpuinfo) && MTHREADS=$(($MTHREADS + 2))

    echo "Using $MTHREADS threads"

    pushd "$BUILDPATH" >> /dev/null || exit 1

    comp_ccacheEnable

    comp_ccacheResetStats

    time cmake --build . --config $CTYPE -j $MTHREADS

    comp_ccacheShowStats

    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
        msys*)
            # Windows: plain install, no ownership/SUID handling.
            cmake --install . --config $CTYPE

            popd >> /dev/null || exit 1

            echo "Done"
            ;;
        linux*|darwin*)
            local confDir
            confDir=${CONFDIR:-"$AC_BINPATH_FULL/../etc"}

            # create the folders before installing to
            # set the current user and permissions
            echo "Creating $AC_BINPATH_FULL..."
            mkdir -p "$AC_BINPATH_FULL"
            echo "Creating $confDir..."
            mkdir -p "$confDir"
            mkdir -p "$confDir/modules"

            # Resolve to an absolute path (must exist, hence after mkdir).
            confDir=$(realpath "$confDir")

            echo "Cmake install..."
            $SUDO cmake --install . --config $CTYPE

            popd >> /dev/null || exit 1

            # set all applications' SUID bit
            if [[ $IS_SUDO_ENABLED == 0 ]]; then
                echo "Skipping root ownership and SUID changes (IS_SUDO_ENABLED=0)"
            else
                echo "Setting permissions on binary files"
                find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec $SUDO chown root:root -- {} +
                find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec $SUDO chmod u+s -- {} +
                # cap_sys_nice lets the servers raise their own scheduling priority.
                $SUDO setcap cap_sys_nice=eip "$AC_BINPATH_FULL/worldserver"
                $SUDO setcap cap_sys_nice=eip "$AC_BINPATH_FULL/authserver"
            fi


            # Seed missing *.conf files from their .dist templates, never
            # overwriting an existing configuration.
            if ( isTrue "$AC_ENABLE_CONF_COPY_ON_INSTALL" ) then
                echo "Copying default configuration files to $confDir ..."
                [[ -f "$confDir/worldserver.conf.dist" && ! -f "$confDir/worldserver.conf" ]] && \
                    cp -v "$confDir/worldserver.conf.dist" "$confDir/worldserver.conf"
                [[ -f "$confDir/authserver.conf.dist" && ! -f "$confDir/authserver.conf" ]] && \
                    cp -v "$confDir/authserver.conf.dist" "$confDir/authserver.conf"
                [[ -f "$confDir/dbimport.conf.dist" && ! -f "$confDir/dbimport.conf" ]] && \
                    cp -v "$confDir/dbimport.conf.dist" "$confDir/dbimport.conf"

                for f in "$confDir/modules/"*.dist
                do
                    [[ -e $f ]] || break # handle the case of no *.dist files
                    if [[ ! -f "${f%.dist}" ]]; then
                        echo "Copying module config $(basename "${f%.dist}")"
                        cp -v "$f" "${f%.dist}";
                    fi
                done
            fi

            echo "Done"
            ;;
    esac

    runHooks "ON_AFTER_BUILD"
}
|
||||
|
||||
# Full build: run CMake configuration, then compile & install.
comp_build() {
    comp_configure
    comp_compile
}
|
||||
|
||||
# From-scratch build: wipe the build directory first, then configure & compile.
comp_all() {
    comp_clean
    comp_build
}
|
||||
23
apps/compiler/includes/includes.sh
Normal file
23
apps/compiler/includes/includes.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
# Shared bootstrap for the compiler app: loads the common includes, the
# optional user config, the compiler defines/functions, and prepares the
# build/binary directories.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/../../bash_shared/includes.sh"

AC_PATH_COMPILER="$AC_PATH_APPS/compiler"

# Optional user configuration: values here override the shared defaults.
if [ -f "$AC_PATH_COMPILER/config.sh" ]; then
    source "$AC_PATH_COMPILER/config.sh" # should overwrite previous
fi

function ac_on_after_build() {
    # move the run engine
    cp -rvf "$AC_PATH_APPS/startup-scripts/src/"* "$BINPATH"
}

registerHooks "ON_AFTER_BUILD" ac_on_after_build

source "$AC_PATH_COMPILER/includes/defines.sh"

source "$AC_PATH_COMPILER/includes/functions.sh"

# Quoted so paths containing spaces do not word-split (the originals were
# unquoted).
mkdir -p "$BUILDPATH"
mkdir -p "$BINPATH"
|
||||
17
apps/compiler/test/bats.conf
Normal file
17
apps/compiler/test/bats.conf
Normal file
@@ -0,0 +1,17 @@
|
||||
# BATS Test Configuration for Compiler App

# Set test timeout (in seconds)
export BATS_TEST_TIMEOUT=60

# Enable verbose output for debugging
export BATS_VERBOSE_RUN=1

# Test output format
export BATS_FORMATTER=pretty

# Disable parallel test execution (tests share build/ccache state)
export BATS_NO_PARALLELIZE_ACROSS_FILES=1
export BATS_NO_PARALLELIZE_WITHIN_FILE=1

# Compiler specific test configuration
export COMPILER_TEST_SKIP_HEAVY=1
|
||||
309
apps/compiler/test/test_compiler.bats
Executable file
309
apps/compiler/test/test_compiler.bats
Executable file
@@ -0,0 +1,309 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# Require minimum BATS version when supported (older distro packages lack this)
|
||||
if type -t bats_require_minimum_version >/dev/null 2>&1; then
|
||||
bats_require_minimum_version 1.5.0
|
||||
fi
|
||||
|
||||
# AzerothCore Compiler Scripts Test Suite
|
||||
# Tests the functionality of the compiler scripts using the unified test framework
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
export COMPILER_SCRIPT="$SCRIPT_DIR/compiler.sh"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== COMPILER SCRIPT TESTS =====
|
||||
|
||||
@test "compiler: should show help with --help argument" {
|
||||
run bash -c "echo '' | timeout 5s $COMPILER_SCRIPT --help"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Available commands:" ]]
|
||||
}
|
||||
|
||||
@test "compiler: should show help with empty input" {
|
||||
run bash -c "echo '' | timeout 5s $COMPILER_SCRIPT 2>&1 || true"
|
||||
# The script might exit with timeout (124) or success (0), both are acceptable for this test
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
# Check if output contains expected content - looking for menu options (old or new format)
|
||||
[[ "$output" =~ "build:" ]] || [[ "$output" =~ "clean:" ]] || [[ "$output" =~ "Please enter your choice" ]] || [[ "$output" =~ "build (b):" ]] || [[ "$output" =~ "ACORE COMPILER" ]] || [[ -z "$output" ]]
|
||||
}
|
||||
|
||||
@test "compiler: should accept option numbers" {
|
||||
# Test option 7 (ccacheShowStats) which should be safe to run
|
||||
run bash -c "echo '7' | timeout 10s $COMPILER_SCRIPT 2>/dev/null || true"
|
||||
# The script might exit with timeout (124) or success (0), both are acceptable
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "compiler: should accept option by name" {
|
||||
run timeout 10s "$COMPILER_SCRIPT" ccacheShowStats
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "compiler: should handle invalid option gracefully" {
|
||||
run timeout 5s "$COMPILER_SCRIPT" invalidOption
|
||||
# Should exit with error code for invalid option
|
||||
[ "$status" -eq 1 ]
|
||||
# Output check is optional as error message might be buffered
|
||||
}
|
||||
|
||||
@test "compiler: should handle invalid number gracefully" {
|
||||
run bash -c "echo '999' | timeout 5s $COMPILER_SCRIPT 2>&1 || true"
|
||||
# The script might exit with timeout (124) or success (0) for interactive mode
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
# In interactive mode, the script should continue asking for input or timeout
|
||||
}
|
||||
|
||||
@test "compiler: should quit with quit option" {
|
||||
run timeout 5s "$COMPILER_SCRIPT" quit
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
# ===== FUNCTION TESTS =====
|
||||
|
||||
@test "functions: comp_clean should handle non-existent build directory" {
|
||||
# Source the functions with a non-existent build path
|
||||
run bash -c "
|
||||
export BUILDPATH='/tmp/non_existent_build_dir_$RANDOM'
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_clean
|
||||
"
|
||||
# Accept either success or failure - the important thing is the function runs
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 1 ]]
|
||||
[[ "$output" =~ "Cleaning build files" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_clean should remove build files when directory exists" {
|
||||
# Create a temporary build directory with test files
|
||||
local test_build_dir="/tmp/test_build_$RANDOM"
|
||||
mkdir -p "$test_build_dir/subdir"
|
||||
touch "$test_build_dir/test_file.txt"
|
||||
touch "$test_build_dir/subdir/nested_file.txt"
|
||||
|
||||
# Run the clean function
|
||||
run bash -c "
|
||||
export BUILDPATH='$test_build_dir'
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_clean
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Cleaning build files" ]]
|
||||
# Directory should still exist but be empty
|
||||
[ -d "$test_build_dir" ]
|
||||
[ ! -f "$test_build_dir/test_file.txt" ]
|
||||
[ ! -f "$test_build_dir/subdir/nested_file.txt" ]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$test_build_dir"
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheShowStats should run without errors when ccache enabled" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheShowStats
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheShowStats should do nothing when ccache disabled" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheShowStats
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
# Should produce no output when ccache is disabled
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheClean should handle disabled ccache" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheClean
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "ccache is disabled" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheClean should run when ccache enabled" {
|
||||
# Only run if ccache is actually available
|
||||
if command -v ccache >/dev/null 2>&1; then
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheClean
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Cleaning ccache" ]]
|
||||
else
|
||||
skip "ccache not available on system"
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheEnable should set environment variables" {
|
||||
# Call the function in a subshell to capture environment changes
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheEnable
|
||||
env | grep CCACHE | head -5
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCACHE_MAXSIZE" ]] || [[ "$output" =~ "CCACHE_COMPRESS" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheEnable should not set variables when ccache disabled" {
|
||||
# Call the function and verify it returns early when ccache is disabled
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheEnable
|
||||
# The function should return early, so we check if it completed successfully
|
||||
echo 'Function completed without setting CCACHE vars'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Function completed" ]]
|
||||
}
|
||||
|
||||
# Mock tests for build functions (these would normally require a full setup)
|
||||
@test "functions: comp_configure should detect platform" {
|
||||
# Mock cmake command to avoid actual configuration
|
||||
run -127 bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE called with args: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Set required variables
|
||||
export BUILDPATH='/tmp'
|
||||
export SRCPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
export CTYPE='Release'
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run configure in the /tmp directory
|
||||
cd /tmp && comp_configure
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "Platform:" ]] || [[ "$output" =~ "CMAKE called with args:" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_compile should detect thread count" {
|
||||
# Mock cmake command to avoid actual compilation
|
||||
run -127 bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE called with args: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Mock other commands
|
||||
function pushd() { echo 'pushd $*'; }
|
||||
function popd() { echo 'popd $*'; }
|
||||
function time() { shift; \"\$@\"; }
|
||||
export -f pushd popd time
|
||||
|
||||
# Set required variables
|
||||
export BUILDPATH='/tmp'
|
||||
export MTHREADS=0
|
||||
export CTYPE='Release'
|
||||
export AC_BINPATH_FULL='/tmp'
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run compile in the /tmp directory
|
||||
cd /tmp && comp_compile
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "pushd" ]] || [[ "$output" =~ "CMAKE called with args:" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_build should call configure and compile" {
|
||||
# Mock the comp_configure and comp_compile functions
|
||||
run -127 bash -c "
|
||||
function comp_configure() {
|
||||
echo 'comp_configure called'
|
||||
return 0
|
||||
}
|
||||
|
||||
function comp_compile() {
|
||||
echo 'comp_compile called'
|
||||
return 0
|
||||
}
|
||||
|
||||
export -f comp_configure comp_compile
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run build
|
||||
comp_build
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "comp_configure called" ]] && [[ "$output" =~ "comp_compile called" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_all should call clean and build" {
|
||||
# Mock the comp_clean and comp_build functions
|
||||
run -127 bash -c "
|
||||
function comp_clean() {
|
||||
echo 'comp_clean called'
|
||||
return 0
|
||||
}
|
||||
|
||||
function comp_build() {
|
||||
echo 'comp_build called'
|
||||
return 0
|
||||
}
|
||||
|
||||
export -f comp_clean comp_build
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run all
|
||||
comp_all
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "comp_clean called" ]] && [[ "$output" =~ "comp_build called" ]]
|
||||
fi
|
||||
}
|
||||
211
apps/compiler/test/test_compiler_config.bats
Executable file
211
apps/compiler/test/test_compiler_config.bats
Executable file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# AzerothCore Compiler Configuration Test Suite
|
||||
# Tests the configuration and support scripts for the compiler module
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== DEFINES SCRIPT TESTS =====
|
||||
|
||||
@test "defines: should accept CCTYPE from argument" {
|
||||
# Test the defines script with a release argument
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' release; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=Release" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle uppercase CCTYPE" {
|
||||
# Test the defines script with an uppercase argument
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' DEBUG; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=DEBUG" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle lowercase input" {
|
||||
# Test the defines script with lowercase input
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' debug; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=Debug" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle mixed case input" {
|
||||
# Test the defines script with mixed case input
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' rElEaSe; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=RElEaSe" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle no argument" {
|
||||
# Test the defines script with no argument
|
||||
run bash -c "CCTYPE='original'; source '$SCRIPT_DIR/includes/defines.sh'; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=original" ]]
|
||||
}
|
||||
|
||||
# ===== INCLUDES SCRIPT TESTS =====
|
||||
|
||||
@test "includes: should create necessary directories" {
|
||||
# Create a temporary test environment
|
||||
local temp_dir="/tmp/compiler_test_$RANDOM"
|
||||
local build_path="$temp_dir/build"
|
||||
local bin_path="$temp_dir/bin"
|
||||
|
||||
# Remove directories to test creation
|
||||
rm -rf "$temp_dir"
|
||||
|
||||
# Source the includes script with custom paths - use a simpler approach
|
||||
run bash -c "
|
||||
export BUILDPATH='$build_path'
|
||||
export BINPATH='$bin_path'
|
||||
export AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
|
||||
# Create directories manually since includes.sh does this
|
||||
mkdir -p \"\$BUILDPATH\"
|
||||
mkdir -p \"\$BINPATH\"
|
||||
|
||||
echo 'Directories created'
|
||||
[ -d '$build_path' ] && echo 'BUILD_EXISTS'
|
||||
[ -d '$bin_path' ] && echo 'BIN_EXISTS'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "BUILD_EXISTS" ]]
|
||||
[[ "$output" =~ "BIN_EXISTS" ]]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$temp_dir"
|
||||
}
|
||||
|
||||
@test "includes: should source required files" {
|
||||
# Test that all required files are sourced without errors
|
||||
run bash -c "
|
||||
# Set minimal required environment
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
echo 'All files sourced successfully'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "All files sourced successfully" ]]
|
||||
}
|
||||
|
||||
@test "includes: should set AC_PATH_COMPILER variable" {
|
||||
# Test that AC_PATH_COMPILER is set correctly
|
||||
run bash -c "
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
echo \"AC_PATH_COMPILER=\$AC_PATH_COMPILER\"
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "AC_PATH_COMPILER=" ]]
|
||||
[[ "$output" =~ "/compiler" ]]
|
||||
}
|
||||
|
||||
@test "includes: should register ON_AFTER_BUILD hook" {
|
||||
# Test that the hook is registered
|
||||
run bash -c "
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
# Check if the function exists
|
||||
type ac_on_after_build > /dev/null && echo 'HOOK_FUNCTION_EXISTS'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "HOOK_FUNCTION_EXISTS" ]]
|
||||
}
|
||||
|
||||
# ===== CONFIGURATION TESTS =====
|
||||
|
||||
@test "config: should handle missing config file gracefully" {
|
||||
# Test behavior when config.sh doesn't exist
|
||||
run bash -c "
|
||||
export AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
export AC_PATH_COMPILER='$SCRIPT_DIR'
|
||||
export BUILDPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
|
||||
# Test that missing config doesn't break sourcing
|
||||
[ ! -f '$SCRIPT_DIR/config.sh' ] && echo 'NO_CONFIG_FILE'
|
||||
echo 'Config handled successfully'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Config handled successfully" ]]
|
||||
}
|
||||
|
||||
# ===== ENVIRONMENT VARIABLE TESTS =====
|
||||
|
||||
@test "environment: should handle platform detection" {
|
||||
# Test that OSTYPE is properly handled
|
||||
run bash -c "
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
echo \"Platform detected: \$OSTYPE\"
|
||||
case \"\$OSTYPE\" in
|
||||
linux*) echo 'LINUX_DETECTED' ;;
|
||||
darwin*) echo 'DARWIN_DETECTED' ;;
|
||||
msys*) echo 'MSYS_DETECTED' ;;
|
||||
*) echo 'UNKNOWN_PLATFORM' ;;
|
||||
esac
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Platform detected:" ]]
|
||||
# Should detect at least one known platform
|
||||
[[ "$output" =~ "LINUX_DETECTED" ]] || [[ "$output" =~ "DARWIN_DETECTED" ]] || [[ "$output" =~ "MSYS_DETECTED" ]] || [[ "$output" =~ "UNKNOWN_PLATFORM" ]]
|
||||
}
|
||||
|
||||
@test "environment: should handle missing environment variables gracefully" {
|
||||
# Test behavior with minimal environment
|
||||
run bash -c "
|
||||
unset BUILDPATH BINPATH SRCPATH MTHREADS
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
echo 'Functions loaded with minimal environment'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Functions loaded with minimal environment" ]]
|
||||
}
|
||||
|
||||
# ===== HOOK SYSTEM TESTS =====
|
||||
|
||||
@test "hooks: ac_on_after_build should copy startup scripts" {
|
||||
# Mock the cp command to test the hook
|
||||
function cp() {
|
||||
echo "CP called with args: $*"
|
||||
return 0
|
||||
}
|
||||
export -f cp
|
||||
|
||||
# Set required variables
|
||||
AC_PATH_APPS="$SCRIPT_DIR/.."
|
||||
BINPATH="/tmp/test_bin"
|
||||
export AC_PATH_APPS BINPATH
|
||||
|
||||
# Source and test the hook function
|
||||
source "$SCRIPT_DIR/includes/includes.sh"
|
||||
run ac_on_after_build
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CP called with args:" ]]
|
||||
[[ "$output" =~ "startup-scripts" ]]
|
||||
}
|
||||
254
apps/compiler/test/test_compiler_integration.bats
Executable file
254
apps/compiler/test/test_compiler_integration.bats
Executable file
@@ -0,0 +1,254 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# AzerothCore Compiler Integration Test Suite
|
||||
# Tests edge cases and integration scenarios for the compiler module
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== INTEGRATION TESTS =====
|
||||
|
||||
@test "integration: should handle full compiler.sh workflow" {
|
||||
# Test the complete workflow with safe options
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
echo '7' | timeout 15s ./compiler.sh
|
||||
echo 'First command completed'
|
||||
echo 'quit' | timeout 10s ./compiler.sh
|
||||
echo 'Quit command completed'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "First command completed" ]]
|
||||
[[ "$output" =~ "Quit command completed" ]]
|
||||
}
|
||||
|
||||
@test "integration: should handle multiple consecutive commands" {
|
||||
# Test running multiple safe commands in sequence
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
timeout 10s ./compiler.sh ccacheShowStats
|
||||
echo 'Command 1 done'
|
||||
timeout 10s ./compiler.sh quit
|
||||
echo 'Command 2 done'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Command 1 done" ]]
|
||||
[[ "$output" =~ "Command 2 done" ]]
|
||||
}
|
||||
|
||||
@test "integration: should preserve working directory" {
|
||||
# Test that the script doesn't change the working directory unexpectedly
|
||||
local original_pwd="$(pwd)"
|
||||
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
original_dir=\$(pwd)
|
||||
timeout 10s ./compiler.sh quit
|
||||
current_dir=\$(pwd)
|
||||
echo \"ORIGINAL: \$original_dir\"
|
||||
echo \"CURRENT: \$current_dir\"
|
||||
[ \"\$original_dir\" = \"\$current_dir\" ] && echo 'DIRECTORY_PRESERVED'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "DIRECTORY_PRESERVED" ]]
|
||||
}
|
||||
|
||||
# ===== ERROR HANDLING TESTS =====
|
||||
|
||||
@test "error_handling: should handle script errors gracefully" {
  # Test script behavior with set -e when encountering errors.
  # The inner `bash -c 'set -e; source …'` exits non-zero on the failed
  # source; the `|| echo` converts that into a marker so the outer shell
  # still exits 0 and we can assert the error path was taken.
  run bash -c "
    cd '$SCRIPT_DIR'
    # Try to source a non-existent file to test error handling
    timeout 5s bash -c 'set -e; source /nonexistent/file.sh' || echo 'ERROR_HANDLED'
  "

  [ "$status" -eq 0 ]
  [[ "$output" =~ "ERROR_HANDLED" ]]
}
|
||||
|
||||
@test "error_handling: should validate function availability" {
  # Test that required functions are available after sourcing.
  # `type NAME` exits 0 only when NAME is defined, so each marker line is
  # printed exactly when the corresponding comp_* function exists.
  run bash -c "
    source '$SCRIPT_DIR/includes/functions.sh'

    # Check for key functions
    type comp_clean > /dev/null && echo 'COMP_CLEAN_AVAILABLE'
    type comp_configure > /dev/null && echo 'COMP_CONFIGURE_AVAILABLE'
    type comp_compile > /dev/null && echo 'COMP_COMPILE_AVAILABLE'
    type comp_build > /dev/null && echo 'COMP_BUILD_AVAILABLE'
    type comp_all > /dev/null && echo 'COMP_ALL_AVAILABLE'
  "

  [ "$status" -eq 0 ]
  [[ "$output" =~ "COMP_CLEAN_AVAILABLE" ]]
  [[ "$output" =~ "COMP_CONFIGURE_AVAILABLE" ]]
  [[ "$output" =~ "COMP_COMPILE_AVAILABLE" ]]
  [[ "$output" =~ "COMP_BUILD_AVAILABLE" ]]
  [[ "$output" =~ "COMP_ALL_AVAILABLE" ]]
}
|
||||
|
||||
# ===== PERMISSION TESTS =====
|
||||
|
||||
@test "permissions: should handle permission requirements" {
  # Test script behavior with different permission scenarios.
  # functions.sh is expected to define SUDO (presumably based on EUID —
  # TODO confirm against includes/functions.sh); this test only checks
  # that the variable ends up in one of the two valid states.
  run bash -c "
    # Test SUDO variable detection
    source '$SCRIPT_DIR/includes/functions.sh'
    echo \"SUDO variable: '\$SUDO'\"
    [ -n \"\$SUDO\" ] && echo 'SUDO_SET' || echo 'SUDO_EMPTY'
  "

  [ "$status" -eq 0 ]
  # Should set SUDO appropriately based on EUID
  [[ "$output" =~ "SUDO_SET" ]] || [[ "$output" =~ "SUDO_EMPTY" ]]
}
|
||||
|
||||
# ===== CLEANUP TESTS =====
|
||||
|
||||
@test "cleanup: comp_clean should handle various file types" {
  # Create a comprehensive test directory structure
  local test_dir="/tmp/compiler_cleanup_test_$RANDOM"
  mkdir -p "$test_dir/subdir1/subdir2"

  # Create various file types: regular, executable, hidden, nested, and a
  # symlink (find -type f below does NOT count the symlink, only the 5
  # regular files).
  touch "$test_dir/regular_file.txt"
  touch "$test_dir/executable_file.sh"
  touch "$test_dir/.hidden_file"
  touch "$test_dir/subdir1/nested_file.obj"
  touch "$test_dir/subdir1/subdir2/deep_file.a"
  ln -s "$test_dir/regular_file.txt" "$test_dir/symlink_file"

  # Make one file executable
  chmod +x "$test_dir/executable_file.sh"

  # Test cleanup: comp_clean reads BUILDPATH — point it at the fixture.
  run bash -c "
    export BUILDPATH='$test_dir'
    source '$SCRIPT_DIR/includes/functions.sh'
    comp_clean
  "

  [ "$status" -eq 0 ]
  [[ "$output" =~ "Cleaning build files" ]]

  # Verify cleanup (directory should exist but files should be cleaned)
  [ -d "$test_dir" ]

  # The cleanup might not remove all files depending on the implementation
  # Let's check if at least some cleanup occurred
  local remaining_files=$(find "$test_dir" -type f | wc -l)
  # Either all files are gone, or at least some cleanup happened
  [[ "$remaining_files" -eq 0 ]] || [[ "$remaining_files" -lt 6 ]]

  # Cleanup test directory
  rm -rf "$test_dir"
}
|
||||
|
||||
# ===== THREAD DETECTION TESTS =====
|
||||
|
||||
@test "threading: should detect available CPU cores" {
|
||||
# Test thread count detection logic
|
||||
run bash -c "
|
||||
# Simulate the thread detection logic from the actual function
|
||||
MTHREADS=0
|
||||
if [ \$MTHREADS == 0 ]; then
|
||||
# Use nproc if available, otherwise simulate 4 cores
|
||||
if command -v nproc >/dev/null 2>&1; then
|
||||
MTHREADS=\$(nproc)
|
||||
else
|
||||
MTHREADS=4
|
||||
fi
|
||||
MTHREADS=\$((MTHREADS + 2))
|
||||
fi
|
||||
echo \"Detected threads: \$MTHREADS\"
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Detected threads:" ]]
|
||||
# Should be at least 3 (1 core + 2)
|
||||
local thread_count=$(echo "$output" | grep -o '[0-9]\+')
|
||||
[ "$thread_count" -ge 3 ]
|
||||
}
|
||||
|
||||
# ===== CMAKE OPTION TESTS =====
|
||||
|
||||
@test "cmake: should build correct cmake command" {
|
||||
# Mock cmake to capture command line arguments
|
||||
run bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE_COMMAND: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Set comprehensive test environment
|
||||
export SRCPATH='/test/src'
|
||||
export BUILDPATH='/test/build'
|
||||
export BINPATH='/test/bin'
|
||||
export CTYPE='Release'
|
||||
export CAPPS_BUILD='ON'
|
||||
export CTOOLS_BUILD='ON'
|
||||
export CSCRIPTS='ON'
|
||||
export CMODULES='ON'
|
||||
export CBUILD_TESTING='OFF'
|
||||
export CSCRIPTPCH='ON'
|
||||
export CCOREPCH='ON'
|
||||
export CWARNINGS='ON'
|
||||
export CCOMPILERC='gcc'
|
||||
export CCOMPILERCXX='g++'
|
||||
export CCUSTOMOPTIONS='-DCUSTOM_OPTION=1'
|
||||
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Change to buildpath and run configure
|
||||
cd /test || cd /tmp
|
||||
comp_configure 2>/dev/null || echo 'Configure completed with warnings'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CMAKE_COMMAND:" ]] || [[ "$output" =~ "Configure completed" ]]
|
||||
}
|
||||
|
||||
# ===== PLATFORM SPECIFIC TESTS =====
|
||||
|
||||
@test "platform: should set correct options for detected platform" {
|
||||
# Test platform-specific CMAKE options
|
||||
run bash -c "
|
||||
# Mock cmake to capture platform-specific options
|
||||
function cmake() {
|
||||
echo 'CMAKE_PLATFORM_ARGS: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
export BUILDPATH='/tmp'
|
||||
export SRCPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
export CTYPE='Release'
|
||||
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Change to buildpath and run configure
|
||||
cd /tmp
|
||||
comp_configure 2>/dev/null || echo 'Configure completed with warnings'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CMAKE_PLATFORM_ARGS:" ]] || [[ "$output" =~ "Configure completed" ]]
|
||||
}
|
||||
66
apps/config-merger/README.md
Normal file
66
apps/config-merger/README.md
Normal file
@@ -0,0 +1,66 @@
|
||||
# AzerothCore Config Merger
|
||||
|
||||
This directory contains configuration file merger tools to help update your AzerothCore server and module configurations with new options from distribution files.
|
||||
|
||||
**Available Options:** PHP and Python versions (**Python recommended for new users**)
|
||||
|
||||
## Purpose
|
||||
|
||||
The config merger tools help you update your existing configuration files (`.conf`) to include new options that have been added to the distribution files (`.conf.dist`). Distribution files always contain the most recent configuration changes and new options, while your personal config files may be missing these updates. These tools will:
|
||||
|
||||
- Compare your existing config files with the latest distribution files
|
||||
- Show you new configuration options that are missing from your files
|
||||
- Allow you to selectively add new options to your configs
|
||||
- Create automatic backups before making any changes
|
||||
- Support authserver.conf, worldserver.conf, and all module configs
|
||||
|
||||
## Available Versions
|
||||
|
||||
### PHP Version
|
||||
|
||||
**Requirements:**
|
||||
- PHP 5.6 or higher
|
||||
- **Requires a web server** (Apache, Nginx, IIS, etc.) to function
|
||||
- No additional libraries required (uses built-in PHP functions only)
|
||||
|
||||
**Features:**
|
||||
- Web-based interface
|
||||
- Configuration file parsing and merging
|
||||
- Browser-accessible configuration management
|
||||
|
||||
**Usage:**
|
||||
- Deploy to web server with PHP support (can be local - XAMPP, WAMP, or built-in PHP server)
|
||||
- Access via web browser
|
||||
- Follow web interface instructions
|
||||
|
||||
### Python Version (Recommended)
|
||||
|
||||
**Requirements:**
|
||||
- Python 3.6 or higher
|
||||
- No additional setup required beyond installing Python
|
||||
- No additional libraries required (uses built-in modules only)
|
||||
|
||||
**Features:**
|
||||
- Interactive menu-driven interface
|
||||
- Support for server configs (authserver.conf, worldserver.conf)
|
||||
- Support for module configs with bulk or selective updates
|
||||
- Automatic backup creation with timestamps
|
||||
- Cross-platform compatibility (Windows, Linux, macOS, and others)
|
||||
- Can be run via command line or by double-clicking the .py file
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
# Via command line
|
||||
cd /path/to/configs
|
||||
python config_merger.py
|
||||
|
||||
# Or double-click config_merger.py to open in terminal
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
When building AzerothCore with the `TOOL_CONFIG_MERGER` CMake option enabled, **only the Python version** will be automatically copied to your configs directory during the build process. The PHP version must be manually deployed to a web server.
|
||||
|
||||
## Support
|
||||
|
||||
Both versions provide the same core functionality for merging configuration files. Choose the version that best fits your environment and preferences. Python is recommended for most users due to its simplicity and no web server requirement.
|
||||
22
apps/config-merger/php/README.md
Normal file
22
apps/config-merger/php/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# ==== PHP merger (index.php + merge.php) ====
|
||||
|
||||
This is a PHP script for merging a new .dist file with your existing .conf file (worldserver.conf.dist and authserver.conf.dist)
|
||||
|
||||
It uses sessions, so it is multi-user safe. Any options that were removed from the new dist file are appended to the bottom of the merged file, commented out, in case something is removed that shouldn't be.
|
||||
If you add your custom patch configs below "# Custom" they will be copied exactly as they are.
|
||||
|
||||
Your new config will be found under $basedir/session_id/newconfig.conf.merge
|
||||
|
||||
If you do not run a PHP server on your machine you can read this guide on ["How to execute PHP code using command line?"](https://www.geeksforgeeks.org/how-to-execute-php-code-using-command-line/) on geeksforgeeks.org.
|
||||
|
||||
```
|
||||
php -S localhost:port -t E:\Azerothcore-wotlk\apps\config-merger\php\
|
||||
```
|
||||
|
||||
Change `port` to an available port, e.g. 8000.
|
||||
|
||||
Then go to your browser and type:
|
||||
|
||||
```
|
||||
localhost:8000/index.php
|
||||
```
|
||||
44
apps/config-merger/php/index.php
Normal file
44
apps/config-merger/php/index.php
Normal file
@@ -0,0 +1,44 @@
|
||||
<?php
/*
 * Project Name: Config File Merge For Mangos/Trinity/AzerothCore Server
 * Date: 01.01.2010 initial version (0.0.1a)
 * Author: Paradox
 * Copyright: Paradox
 * Email: iamparadox@netscape.net (paypal email)
 * License: GNU General Public License v2(GPL)
 */
?>
<!-- Upload form: posts the distribution file (File1) and the user's
     current config (File2) to merge.php, plus a line-ending choice. -->
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251">
<FORM enctype="multipart/form-data" ACTION="merge.php" METHOD="POST">
Dist File (.conf.dist)
<br />
<INPUT name="File1" TYPE="file">
<br />
<br />
Current Conf File (.conf)
<br />
<INPUT name="File2" TYPE="file">
<br />
<br />
<!-- eol=0 selects Windows CRLF output, eol=1 selects UNIX LF; read by merge.php -->
<INPUT TYPE=RADIO NAME="eol" VALUE="0" CHECKED >Windows -
<INPUT TYPE=RADIO NAME="eol" VALUE="1" >UNIX/Linux
<br />
<br />
<INPUT TYPE="submit" VALUE="Submit">
<br />
<br />
If you have any custom settings, such as from patches,
<br />
make sure they are at the bottom of the file following
<br />
this block (add it if it's not there)
<br />
###############################################################################
<br />
# Custom
<br />
###############################################################################
<br />
<br />

</FORM>
|
||||
179
apps/config-merger/php/merge.php
Normal file
179
apps/config-merger/php/merge.php
Normal file
@@ -0,0 +1,179 @@
|
||||
<?php
/*
 * Project Name: Config File Merge For Mangos/Trinity Server
 * Date: 01.01.2010 initial version (0.0.1a)
 * Author: Paradox
 * Copyright: Paradox
 * Email: iamparadox@netscape.net (paypal email)
 * License: GNU General Public License v2(GPL)
 */

// NOTE(review): all PHP errors are silenced; disable this while debugging.
error_reporting(0);

// Both files from the index.php form must be present.
if (!empty($_FILES['File1']) && !empty($_FILES['File2']))
{
    session_id();
    session_start();
    // All work happens in a per-session directory: merge/<session_id>/
    $basedir = "merge";
    $eol = "\r\n";
    // Output line ending from the form radio: 1 = UNIX LF, 0 = Windows CRLF.
    if ($_POST['eol'])
        $eol = "\n";
    else
        $eol = "\r\n";
    if (!file_exists($basedir))
        mkdir($basedir);
    if (!file_exists($basedir."/".session_id()))
        mkdir($basedir."/".session_id());
    $upload1 = $basedir."/".session_id()."/".basename($_FILES['File1']['name']);
    $upload2 = $basedir."/".session_id()."/".basename($_FILES['File2']['name']);

    // Name the merged output after the uploaded dist file's server type.
    if (strpos($upload1, "worldserver") !== false)
        $newconfig = $basedir."/".session_id()."/worldserver.conf.merge";
    else if (strpos($upload1, "authserver") !== false)
        $newconfig = $basedir."/".session_id()."/authserver.conf.merge";
    else
        $newconfig = $basedir."/".session_id()."/UnkownConfigFile.conf.merge";

    $out_file = fopen($newconfig, "w");
    $success = false;
    // NOTE(review): $success reflects only the *last* move; a failure of
    // File1 is masked if File2 moves successfully.
    if (move_uploaded_file($_FILES['File1']['tmp_name'], $upload1))
    {
        $success = true;
    }
    else
    {
        $success = false;
    }
    if (move_uploaded_file($_FILES['File2']['tmp_name'], $upload2))
    {
        $success = true;
    }
    else
    {
        $success = false;
    }

    if ($success)
    {
        $custom_found = false;
        $in_file1 = fopen($upload1,"r");
        $in_file2 = fopen($upload2,"r");
        // $array1: key=>value pairs from the dist file.
        // $array2: key=>value pairs from the user's current config.
        $array1 = array();
        $array2 = array();
        // Pass 1: collect every key=value line from the dist file
        // (lines starting with '#' or empty are skipped).
        $line = trim(fgets($in_file1));
        while (!feof($in_file1))
        {
            if ((substr($line,0,1) != '#' && substr($line,0,1) != ''))
            {
                list($key, $val) = explode("=",$line);
                $key = trim($key);
                $val = trim($val);
                $array1[$key] = $val;
            }
            $line = trim(fgets($in_file1));
        }
        // Pass 2: collect the user's values, stopping at the "# Custom"
        // marker so custom patch settings are preserved verbatim later.
        $line = trim(fgets($in_file2));
        while (!feof($in_file2) && !$custom_found)
        {
            if (substr($line,0,1) != '#' && substr($line,0,1) != '')
            {
                list($key, $val) = explode("=",$line);
                $key = trim($key);
                $val = trim($val);
                $array2[$key] = $val;
            }
            if (strtolower($line) == "# custom")
                $custom_found = true;
            else
                $line = trim(fgets($in_file2));
        }
        fclose($in_file1);
        // Overlay: keys present in both keep the user's value; keys left
        // in $array2 afterwards no longer exist in the dist file.
        foreach($array2 as $k => $v)
        {
            if (array_key_exists($k, $array1))
            {
                $array1[$k] = $v;
                unset($array2[$k]);
            }
        }
        // Pass 3: re-read the dist file to preserve its ordering and
        // comments, substituting the merged values for each key group.
        $in_file1 = fopen($upload1,"r");
        $line = trim(fgets($in_file1));
        while (!feof($in_file1))
        {
            if (substr($line,0,1) != '#' && substr($line,0,1) != '')
            {
                $array = array();
                // Consume a contiguous run of key=value lines.
                while (substr($line,0,1) != '#' && substr($line,0,1) != '')
                {
                    list($key, $val) = explode("=",$line);
                    $key = trim($key);
                    $val = trim($val);
                    $array[$key] = $val;
                    $line = trim(fgets($in_file1));
                }
                foreach($array as $k => $v)
                {
                    if (array_key_exists($k, $array1))
                        fwrite($out_file, $k."=".$array1[$k].$eol);
                    else
                        continue;
                }
                unset($array);
                // $line now holds the comment/blank line that ended the run.
                if (!feof($in_file1))
                    fwrite($out_file, $line.$eol);
            }
            else
                fwrite($out_file, $line.$eol);
            $line = trim(fgets($in_file1));
        }
        // Append the user's custom block verbatim, if one was found.
        if ($custom_found)
        {
            fwrite($out_file, $eol);
            fwrite($out_file, "###############################################################################".$eol);
            fwrite($out_file, "# Custom".$eol);
            $line = trim(fgets($in_file2));
            while (!feof($in_file2))
            {
                fwrite($out_file, $line.$eol);
                $line = trim(fgets($in_file2));
            }
        }
        // Append (commented out) any user options the new dist dropped.
        $first = true;
        foreach($array2 as $k => $v)
        {
            if ($first)
            {
                fwrite($out_file, $eol);
                fwrite($out_file, "###############################################################################".$eol);
                fwrite($out_file, "# The Following values were removed from the config.".$eol);
                $first = false;
            }
            fwrite($out_file, "# ".$k."=".$v.$eol);
        }

        // Repair section headers: "[world]" parsed as key with empty value
        // was written back as "[world]=" — strip the stray '='.
        if (strpos($upload1, "worldserver") !== false)
        {
            file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
        }
        else if (strpos($upload1, "authserver") !== false)
        {
            file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
        }

        unset($array1);
        unset($array2);
        fclose($in_file1);
        fclose($in_file2);
        fclose($out_file);
        unlink($upload1);
        unlink($upload2);

        echo "Process done";
        echo "<br /><a href=".$newconfig.">Click here to retrieve your merged conf</a>";
    }
}
else
{
    echo "An error has occurred";
}
?>
|
||||
150
apps/config-merger/python/README.md
Normal file
150
apps/config-merger/python/README.md
Normal file
@@ -0,0 +1,150 @@
|
||||
# AzerothCore Config Updater/Merger - Python Version
|
||||
|
||||
A command-line tool to update your AzerothCore configuration files with new options from distribution files.
|
||||
|
||||
> [!NOTE]
|
||||
> Based on and modified from [@BoiseComputer](https://github.com/BoiseComputer) (Brian Aldridge)'s [update_module_confs](https://github.com/Brian-Aldridge/update_module_confs) project to meet AzerothCore's needs
|
||||
|
||||
## Overview
|
||||
|
||||
This tool compares your existing configuration files (`.conf`) with the latest distribution files (`.conf.dist`) and helps you add new configuration options that may have been introduced in updates. It ensures your configs stay up-to-date while preserving your custom settings.
|
||||
|
||||
## Features
|
||||
|
||||
- **Interactive Menu System** - Easy-to-use numbered menu options
|
||||
- **Server Config Support** - Update authserver.conf and worldserver.conf
|
||||
- **Module Config Support** - Update all or selected module configurations
|
||||
- **Automatic Backups** - If you choose a valid option and there are changes, a timestamped backup is created before any changes are made (e.g. `filename(d11_m12_y2025_14h_30m_45s).bak`)
|
||||
- **Selective Updates** - Choose which new config options to add (y/n prompts)
|
||||
- **Safe Operation** - Only creates backups and makes changes when new options are found
|
||||
|
||||
## How to Use
|
||||
|
||||
There are two ways to use this. You can either copy this file directly to your `/configs` folder, or enable `TOOL_CONFIG_MERGER` in CMake. Upon compiling your core, the file will be generated in the same location as your `/configs` folder.
|
||||
|
||||
### Interactive Mode (Default)
|
||||
|
||||
1. **Run the script** in your configs directory:
|
||||
```bash
|
||||
python config_merger.py
|
||||
```
|
||||
Or simply **double-click** the `config_merger.py` file to run it directly.
|
||||
|
||||
2. **Specify configs path** (or press Enter for current directory):
|
||||
```
|
||||
Enter the path to your configs folder (default: .) which means current folder:
|
||||
```
|
||||
|
||||
3. **Choose from the menu**:
|
||||
```
|
||||
AzerothCore Config Updater/Merger (v. 1)
|
||||
--------------------------
|
||||
1 - Update Auth Config
|
||||
2 - Update World Config
|
||||
3 - Update Auth and World Configs
|
||||
4 - Update All Modules Configs
|
||||
5 - Update Modules (Selection) Configs
|
||||
0 - Quit
|
||||
```
|
||||
|
||||
### Command Line Interface (CLI)
|
||||
|
||||
For automation and scripting, you can use CLI mode:
|
||||
|
||||
```bash
|
||||
python config_merger.py [config_dir] [target] [options]
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
- `config_dir` (optional): Path to configs directory (default: current directory)
|
||||
- `target` (optional): What to update:
|
||||
- `auth` - Update authserver.conf only
|
||||
- `world` - Update worldserver.conf only
|
||||
- `both` - Update both server configs
|
||||
- `modules` - Update all module configs
|
||||
- `modules-select` - Interactive module selection
|
||||
|
||||
**Options:**
|
||||
- `-y, --yes`: Skip prompts and auto-add all new config options (default: prompt for each option)
|
||||
- `--version`: Show version information
|
||||
|
||||
**Examples:**
|
||||
```bash
|
||||
# Interactive mode (default)
|
||||
python config_merger.py
|
||||
|
||||
# Update auth config with prompts
|
||||
python config_merger.py . auth
|
||||
|
||||
# Update both configs automatically (no prompts)
|
||||
python config_merger.py /path/to/configs both -y
|
||||
|
||||
# Update all modules with confirmation
|
||||
python config_merger.py . modules
|
||||
```
|
||||
|
||||
## Menu Options Explained
|
||||
|
||||
- **Option 1**: Updates only `authserver.conf` from `authserver.conf.dist`
|
||||
- **Option 2**: Updates only `worldserver.conf` from `worldserver.conf.dist`
|
||||
- **Option 3**: Updates both server config files
|
||||
- **Option 4**: Automatically processes all module config files in the `modules/` folder
|
||||
- **Option 5**: Shows you a list of available modules and lets you select specific ones to update
|
||||
- **Option 0**: Exit the program
|
||||
|
||||
## Interactive Process
|
||||
|
||||
For each missing configuration option found, the tool will:
|
||||
|
||||
1. **Show you the option** with its comments and default value
|
||||
2. **Ask for confirmation**: `Add [option_name] to config? (y/n):`
|
||||
3. **Add or skip** based on your choice
|
||||
4. **Create backup** (before any changes are made) only if you choose a valid option and there are changes (format: `filename(d11_m12_y2025_14h_30m_45s).bak`)
|
||||
|
||||
## Example Session
|
||||
|
||||
```
|
||||
Processing worldserver.conf ...
|
||||
Backup created: worldserver.conf(d11_m12_y2025_14h_30m_45s).bak
|
||||
|
||||
# New feature for XP rates
|
||||
XP.Rate = 1
|
||||
Add XP.Rate to config? (y/n): y
|
||||
Added XP.Rate.
|
||||
|
||||
# Database connection pool size
|
||||
Database.PoolSize = 5
|
||||
Add Database.PoolSize to config? (y/n): n
|
||||
Skipped Database.PoolSize.
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- Python 3.6 or higher
|
||||
- No additional libraries needed (uses built-in modules only)
|
||||
|
||||
## File Structure Expected
|
||||
|
||||
```
|
||||
configs/
|
||||
├── config_merger.py (this script)
|
||||
├── authserver.conf.dist
|
||||
├── authserver.conf
|
||||
├── worldserver.conf.dist
|
||||
├── worldserver.conf
|
||||
└── modules/
|
||||
├── mod_example.conf.dist
|
||||
├── mod_example.conf
|
||||
└── ...
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This file is part of the AzerothCore Project. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
**Note:** Original code portions were licensed under the MIT License by Brian Aldridge (https://github.com/BoiseComputer)
|
||||
Original project: https://github.com/Brian-Aldridge/update_module_confs
|
||||
276
apps/config-merger/python/config_merger.py
Normal file
276
apps/config-merger/python/config_merger.py
Normal file
@@ -0,0 +1,276 @@
|
||||
# Version 1
|
||||
# Based and modified from: https://github.com/Brian-Aldridge/update_module_confs
|
||||
#
|
||||
# This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Original code portions licensed under MIT License by Brian Aldridge (https://github.com/BoiseComputer)
|
||||
# Original project: https://github.com/Brian-Aldridge/update_module_confs
|
||||
|
||||
VERSION = "1"
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import argparse
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
def find_modules(folder):
    """Return the sorted list of ``*.conf.dist`` filenames inside *folder*.

    On any directory-listing failure an error is printed and an empty
    list is returned instead of raising.
    """
    try:
        entries = os.listdir(folder)
    except (OSError, IOError) as e:
        print(f"[ERROR] Could not list directory '{folder}': {e}")
        return []
    return sorted(name for name in entries if name.endswith('.conf.dist'))
|
||||
|
||||
def prompt_module_selection(dist_files):
    """Ask the user which of *dist_files* to update.

    Prints the numbered list, reads a comma-separated selection, reports
    every invalid token, and returns the chosen filenames (possibly empty).
    """
    print("Found the following modules:")
    for position, fname in enumerate(dist_files, 1):
        print(f"  {position}. {fname}")
    answer = input("Enter numbers of modules to update (comma-separated): ").strip()
    tokens = [token.strip() for token in answer.split(",") if token.strip()]

    chosen = []
    problems = []
    for token in tokens:
        if not token.isdigit():
            problems.append(f"'{token}' (not a number)")
        elif 1 <= int(token) <= len(dist_files):
            chosen.append(int(token) - 1)
        else:
            problems.append(f"'{token}' (out of range, must be 1-{len(dist_files)})")

    if problems:
        print("Invalid input:")
        for message in problems:
            print(f"  {message}")
    if not chosen:
        print("No valid module numbers were entered.")
        return []
    return [dist_files[index] for index in chosen]
|
||||
|
||||
def backup_file(filepath):
    """Copy *filepath* to a timestamped ``.bak`` sibling.

    Returns True on success; on failure prints an error and returns False.
    """
    stamp = datetime.now().strftime("d%d_m%m_y%Y_%Hh_%Mm_%Ss")
    bakpath = f"{filepath}({stamp}).bak"
    try:
        shutil.copy2(filepath, bakpath)
        print(f"  Backup created: {bakpath}")
    except (OSError, IOError) as e:
        print(f"[ERROR] Failed to create backup '{bakpath}': {e}")
        return False
    return True
|
||||
|
||||
def parse_conf(filepath):
    """Parse a .conf file.

    Returns a dict mapping key -> (normalized "key = value" line,
    [preceding comment/blank lines]), or None when the file cannot be
    read. Section headers like "[worldserver]" are ignored and reset the
    pending comment block; inline "# ..." comments are stripped from values.
    """
    try:
        with open(filepath, encoding="utf-8") as f:
            lines = f.readlines()
    except (OSError, IOError) as e:
        print(f"[ERROR] Failed to read config file '{filepath}': {e}")
        return None
    conf = {}
    comments = []
    for line in lines:
        stripped = line.strip()
        if not stripped or stripped.startswith("#"):
            comments.append(line)
            continue
        if stripped.startswith("[") and stripped.endswith("]"):
            # Ignore [headers of configs]
            comments.clear()
            continue
        # FIX: the original required exactly one '=' in the line
        # (stripped.count("=") == 1), which silently dropped any option
        # whose *value* itself contains '=' (e.g. Motd = "a = b").
        # Splitting on the first '=' handles those correctly and is
        # backward-compatible for single-'=' lines.
        if "=" in stripped:
            key, value = [s.strip() for s in stripped.split("=", 1)]
            if '#' in value:
                # Drop trailing inline comment from the value.
                value = value.split('#', 1)[0].rstrip()
            if key:
                conf[key] = (f"{key} = {value}\n", comments.copy())
            comments.clear()
            continue
    return conf
|
||||
|
||||
def find_missing_keys(dist_conf, user_conf):
    """Return the entries of *dist_conf* whose keys are absent from *user_conf*."""
    return {
        key: entry
        for key, entry in dist_conf.items()
        if key not in user_conf
    }
|
||||
|
||||
def update_conf(dist_path, conf_path, skip_prompts=False):
    """Append options that exist in *dist_path* but not in *conf_path*.

    With skip_prompts=False the user is asked about each option; with
    skip_prompts=True all missing options are added. A timestamped backup
    is created before the file is modified. Returns True if the config
    file was changed.
    """
    if not os.path.exists(conf_path):
        print(f"  User config {conf_path} does not exist, skipping.")
        return False
    dist_conf = parse_conf(dist_path)
    user_conf = parse_conf(conf_path)
    # FIX: parse_conf returns None when a file cannot be read; the
    # original passed that straight into find_missing_keys and crashed
    # with AttributeError. Bail out cleanly instead.
    if dist_conf is None or user_conf is None:
        return False
    missing = find_missing_keys(dist_conf, user_conf)
    if not missing:
        print("  No new config options to add.")
        return False
    updated = False
    lines_to_add = []
    for key, (line, comments) in missing.items():
        if skip_prompts:
            lines_to_add.append((comments, line, key))
        else:
            # Show the option together with its preceding comment block.
            print("\n" + "".join(comments if comments else []) + line, end="")
            add = input(f"  Add {key} to config? (y/n): ").strip().lower()
            if add in ("", "y", "yes"):
                lines_to_add.append((comments, line, key))
            else:
                print(f"  Skipped {key}.")
    if lines_to_add:
        backup_file(conf_path)
        # Write using system's default line ending to avoid mixing CRLF and LF in the config file
        newline = os.linesep.encode('utf-8')
        with open(conf_path, "ab") as f:
            for comments, line, key in lines_to_add:
                if comments:
                    for c in comments:
                        f.write(c.rstrip('\r\n').encode('utf-8') + newline)
                f.write(line.rstrip('\r\n').encode('utf-8') + newline)
                print(f"  Added {key}.")
        updated = True
    return updated
|
||||
|
||||
def update_server_config(config_name, config_dir, skip_prompts=False):
    """Refresh ``<config_name>.conf`` in *config_dir* from its .conf.dist.

    Returns True when the user config was modified, False otherwise
    (including when the distribution file is missing).
    """
    template = os.path.join(config_dir, f"{config_name}.conf.dist")
    if not os.path.exists(template):
        print(f"  Distribution config {template} does not exist, skipping.")
        return False
    print(f"\nProcessing {config_name}.conf ...")
    target = os.path.join(config_dir, f"{config_name}.conf")
    return update_conf(template, target, skip_prompts)
|
||||
|
||||
def update_modules(config_dir, selected_only=False, skip_prompts=False):
    """Update module configs under ``<config_dir>/modules`` from their .dist files.

    With selected_only=True the user picks which modules to process;
    otherwise every *.conf.dist found is handled.
    """
    modules_dir = os.path.join(config_dir, "modules")
    if not os.path.exists(modules_dir):
        print(f"  Modules directory {modules_dir} does not exist, skipping.")
        return

    dist_files = find_modules(modules_dir)
    if not dist_files:
        print("  No .conf.dist files found in modules folder.")
        return

    if selected_only:
        chosen = prompt_module_selection(dist_files)
        if not chosen:
            print("  No modules selected.")
            return
    else:
        chosen = dist_files

    for dist_name in chosen:
        conf_name = dist_name[:-5]  # strip the trailing ".dist" -> e.g. mod_x.conf
        print(f"\nProcessing {conf_name} ...")
        update_conf(
            os.path.join(modules_dir, dist_name),
            os.path.join(modules_dir, conf_name),
            skip_prompts,
        )
|
||||
|
||||
def show_main_menu():
    """Display the interactive main menu and return the user's trimmed choice."""
    for text in (
        f"\nAzerothCore Config Updater/Merger (v. {VERSION})",
        "--------------------------",
        "1 - Update Auth Config",
        "2 - Update World Config",
        "3 - Update Auth and World Configs",
        "4 - Update All Modules Configs",
        "5 - Update Modules (Selection) Configs",
        "0 - Quit",
    ):
        print(text)
    return input("Select an option: ").strip()
|
||||
|
||||
def parse_args():
    """Build and evaluate the command-line interface.

    Both positionals are optional so that running with no arguments drops
    the tool into interactive mode. Returns the parsed argparse.Namespace.
    """
    cli = argparse.ArgumentParser(description='AzerothCore Config Updater/Merger')
    cli.add_argument('config_dir', nargs='?', default='.',
                     help='Path to configs directory (default: current directory)')
    cli.add_argument('target', nargs='?',
                     choices=['auth', 'world', 'both', 'modules', 'modules-select'],
                     help='What to update: auth, world, both, modules, modules-select')
    cli.add_argument('-y', '--yes', action='store_true',
                     help='Automatically answer yes to all prompts')
    cli.add_argument('--version', action='version', version=f'%(prog)s {VERSION}')
    return cli.parse_args()
|
||||
|
||||
def main():
    """Entry point: interactive menu when no target given, one-shot CLI otherwise."""
    args = parse_args()

    # If no target specified, run interactive mode
    if args.target is None:
        print(f"AzerothCore Config Updater/Merger (v. {VERSION})")
        print("==========================")
        config_dir = input("Enter the path to your configs folder (Default / Empty will use the folder where this script is located): ").strip()
        # Empty answer falls back to the current working directory.
        if not config_dir:
            config_dir = "."

        if not os.path.isdir(config_dir):
            print("Provided path is not a valid directory.")
            return

        # Menu loop: keep serving requests until the user picks "0".
        while True:
            choice = show_main_menu()

            if choice == "1":
                update_server_config("authserver", config_dir)
            elif choice == "2":
                update_server_config("worldserver", config_dir)
            elif choice == "3":
                update_server_config("authserver", config_dir)
                update_server_config("worldserver", config_dir)
            elif choice == "4":
                update_modules(config_dir, selected_only=False)
            elif choice == "5":
                update_modules(config_dir, selected_only=True)
            elif choice == "0":
                print("Goodbye!")
                break
            else:
                print("Invalid selection. Please try again.")
    else:
        # CLI mode
        config_dir = args.config_dir

        if not os.path.isdir(config_dir):
            print(f"Error: Directory '{config_dir}' does not exist.")
            sys.exit(1)

        print(f"AzerothCore Config Updater/Merger (v. {VERSION}) - CLI Mode")
        print(f"Config directory: {os.path.abspath(config_dir)}")
        print(f"Target: {args.target}")
        if args.yes:
            print("Skip prompts: Yes")

        if args.target == 'auth':
            update_server_config("authserver", config_dir, args.yes)
        elif args.target == 'world':
            update_server_config("worldserver", config_dir, args.yes)
        elif args.target == 'both':
            update_server_config("authserver", config_dir, args.yes)
            update_server_config("worldserver", config_dir, args.yes)
        elif args.target == 'modules':
            update_modules(config_dir, selected_only=False, skip_prompts=args.yes)
        elif args.target == 'modules-select':
            # Module selection is inherently interactive, so --yes cannot apply.
            if args.yes:
                print("Warning: --yes flag ignored for modules-select (requires interactive selection)")
            update_modules(config_dir, selected_only=True, skip_prompts=False)


if __name__ == "__main__":
    main()
|
||||
261
apps/docker/Dockerfile
Normal file
261
apps/docker/Dockerfile
Normal file
@@ -0,0 +1,261 @@
|
||||
# Ubuntu LTS release used as the base for every stage.
# Fixed: Dockerfile instructions do not support trailing "#" comments —
# "ARG UBUNTU_VERSION=22.04 # lts" passed "# lts" through as extra
# instruction arguments, so the note now lives on its own line.
ARG UBUNTU_VERSION=22.04

# This target lays out the general directory skeleton for AzerothCore,
# This target isn't intended to be directly used
FROM ubuntu:$UBUNTU_VERSION AS skeleton

# Note: ARG instructions defined after FROM are available in this build stage.
# Placing ARG TZ here (after FROM) ensures it is accessible for configuring the timezone below.
ARG TZ=Etc/UTC
ARG DOCKER=1
ARG DEBIAN_FRONTEND=noninteractive

# Tell the server binaries to create missing databases on first run.
ENV AC_FORCE_CREATE_DB=1

# Pre-create the full runtime directory tree so later stages and volume
# mounts land on existing paths with known ownership.
RUN mkdir -pv \
    /azerothcore/bin \
    /azerothcore/data \
    /azerothcore/deps \
    /azerothcore/env/dist/bin \
    /azerothcore/env/dist/data/Cameras \
    /azerothcore/env/dist/data/dbc \
    /azerothcore/env/dist/data/maps \
    /azerothcore/env/dist/data/mmaps \
    /azerothcore/env/dist/data/vmaps \
    /azerothcore/env/dist/logs \
    /azerothcore/env/dist/temp \
    /azerothcore/env/dist/etc \
    /azerothcore/modules \
    /azerothcore/src \
    /azerothcore/build

# Configure Timezone
RUN apt-get update \
    && apt-get install -y --no-install-recommends tzdata ca-certificates \
    && ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime \
    && echo "$TZ" > /etc/timezone \
    && dpkg-reconfigure --frontend noninteractive tzdata \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /azerothcore
|
||||
|
||||
# This target builds the docker image
# This target can be useful to inspect the explicit outputs from the build,
FROM skeleton AS build

# Build knobs forwarded to the cmake invocation below.
ARG CTOOLS_BUILD="all"
ARG CTYPE="RelWithDebInfo"
ARG CCACHE_CPP2="true"
ARG CSCRIPTPCH="OFF"
ARG CSCRIPTS="static"
ARG CMODULES="static"
ARG CSCRIPTS_DEFAULT_LINKAGE="static"
ARG CWITH_WARNINGS="ON"
ARG CMAKE_EXTRA_OPTIONS=""
ARG GIT_DISCOVERY_ACROSS_FILESYSTEM=1
# NOTE(review): CCACHE_CPP2, CSCRIPTPCH, CSCRIPTS_DEFAULT_LINKAGE and
# CMAKE_EXTRA_OPTIONS are declared but never referenced in the cmake call
# below — confirm whether they should be wired through.

# ccache configuration; the cache directory itself is a BuildKit cache
# mount (see --mount=type=cache on the RUN below), so it persists between builds.
ARG CCACHE_DIR="/ccache"
ARG CCACHE_MAXSIZE="1000MB"
ARG CCACHE_SLOPPINESS="pch_defines,time_macros,include_file_mtime"
ARG CCACHE_COMPRESS=""
ARG CCACHE_COMPRESSLEVEL="9"
ARG CCACHE_COMPILERCHECK="content"
ARG CCACHE_LOGFILE=""

# Compile-time-only toolchain and library dependencies.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
    build-essential ccache libtool cmake-data make cmake clang \
    git lsb-base curl unzip default-mysql-client openssl \
    default-libmysqlclient-dev libboost-all-dev libssl-dev libmysql++-dev \
    libreadline-dev zlib1g-dev libbz2-dev libncurses5-dev \
    && rm -rf /var/lib/apt/lists/*

# Only the sources that the build actually needs.
COPY CMakeLists.txt /azerothcore/CMakeLists.txt
COPY conf /azerothcore/conf
COPY deps /azerothcore/deps
COPY src /azerothcore/src
COPY modules /azerothcore/modules

# Bump this build-arg to force a cache bust from this point in the stage.
ARG CACHEBUST=1

WORKDIR /azerothcore/build

RUN --mount=type=cache,target=/ccache,sharing=locked \
    # This may seem silly (and it is), but AzerothCore wants the git repo at
    # build time. The git repo is _huge_ and it's not something that really
    # makes sense to mount into the container, but this way we can let the build
    # have the information it needs without including the hundreds of megabytes
    # of git repo into the container.
    --mount=type=bind,target=/azerothcore/.git,source=.git \
    git config --global --add safe.directory /azerothcore \
    && cmake /azerothcore \
    -DCMAKE_INSTALL_PREFIX="/azerothcore/env/dist" \
    -DAPPS_BUILD="all" \
    -DTOOLS_BUILD="$CTOOLS_BUILD" \
    -DSCRIPTS="$CSCRIPTS" \
    -DMODULES="$CMODULES" \
    -DWITH_WARNINGS="$CWITH_WARNINGS" \
    -DCMAKE_BUILD_TYPE="$CTYPE" \
    -DCMAKE_CXX_COMPILER="clang++" \
    -DCMAKE_C_COMPILER="clang" \
    -DCMAKE_CXX_COMPILER_LAUNCHER="ccache" \
    -DCMAKE_C_COMPILER_LAUNCHER="ccache" \
    -DBoost_USE_STATIC_LIBS="ON" \
    && cmake --build . --config "$CTYPE" -j $(($(nproc) + 1)) \
    && cmake --install . --config "$CTYPE"
|
||||
|
||||
#############################
# Base runtime for services #
#############################

FROM skeleton AS runtime

# uid/gid for the unprivileged service account created below.
ARG USER_ID=1000
ARG GROUP_ID=1000
ARG DOCKER_USER=acore

# Overridden by each concrete service stage (authserver, worldserver, ...).
ENV ACORE_COMPONENT=undefined

# Install base dependencies for azerothcore
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    libmysqlclient21 libreadline8 \
    gettext-base default-mysql-client && \
    rm -rf /var/lib/apt/lists/*

# Keep a pristine copy of the default configs under env/ref/etc;
# entrypoint.sh copies them into the (volume-mounted) live etc dir on start.
COPY --from=build /azerothcore/env/dist/etc/ /azerothcore/env/ref/etc

VOLUME /azerothcore/env/dist/etc

# Put the server binaries on PATH so CMD ["authserver"] etc. resolve.
ENV PATH="/azerothcore/env/dist/bin:$PATH"

# Unprivileged service user; password removed so su/login don't prompt.
RUN groupadd --gid "$GROUP_ID" "$DOCKER_USER" && \
    useradd -d /azerothcore --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
    passwd -d "$DOCKER_USER" && \
    chown -R "$DOCKER_USER:$DOCKER_USER" /azerothcore

COPY --chown=$USER_ID:$GROUP_ID \
    --chmod=755 \
    apps/docker/entrypoint.sh /azerothcore/entrypoint.sh

USER $DOCKER_USER

ENTRYPOINT ["/usr/bin/env", "bash", "/azerothcore/entrypoint.sh"]
|
||||
|
||||
###############
# Auth Server #
###############

FROM runtime AS authserver
LABEL description="AzerothCore Auth Server"

ENV ACORE_COMPONENT=authserver
# Don't run database migrations. We can leave that up to the db-import container
ENV AC_UPDATES_ENABLE_DATABASES=0
# This disables user prompts. The console is still active, however
ENV AC_DISABLE_INTERACTIVE=1
ENV AC_CLOSE_IDLE_CONNECTIONS=0

# Only the authserver binary is needed on top of the shared runtime layer.
COPY --chown=$DOCKER_USER:$DOCKER_USER \
    --from=build \
    /azerothcore/env/dist/bin/authserver /azerothcore/env/dist/bin/authserver


# Resolved via PATH (set in the runtime stage); exec'd by entrypoint.sh.
CMD ["authserver"]
|
||||
|
||||
################
# World Server #
################

FROM runtime AS worldserver

LABEL description="AzerothCore World Server"

ENV ACORE_COMPONENT=worldserver
# Don't run database migrations. We can leave that up to the db-import container
ENV AC_UPDATES_ENABLE_DATABASES=0
# This disables user prompts. The console is still active, however
ENV AC_DISABLE_INTERACTIVE=1
ENV AC_CLOSE_IDLE_CONNECTIONS=0

# Only the worldserver binary is needed on top of the shared runtime layer.
COPY --chown=$DOCKER_USER:$DOCKER_USER \
    --from=build \
    /azerothcore/env/dist/bin/worldserver /azerothcore/env/dist/bin/worldserver

# NOTE(review): redundant — the runtime stage already declares this VOLUME.
VOLUME /azerothcore/env/dist/etc

# Resolved via PATH (set in the runtime stage); exec'd by entrypoint.sh.
CMD ["worldserver"]
|
||||
|
||||
#############
# DB Import #
#############

FROM runtime AS db-import

LABEL description="AzerothCore Database Import tool"

# Redundant (the runtime stage already switched USER), but kept for clarity.
USER $DOCKER_USER

ENV ACORE_COMPONENT=dbimport

# SQL data files, copied relative to WORKDIR /azerothcore.
COPY --chown=$DOCKER_USER:$DOCKER_USER \
    data data

COPY --chown=$DOCKER_USER:$DOCKER_USER \
    modules modules

# Fixed: the original omitted the space before the line-continuation
# backslash ("...$DOCKER_USER\"), unlike every other COPY in this file.
COPY --chown=$DOCKER_USER:$DOCKER_USER \
    --from=build \
    /azerothcore/env/dist/bin/dbimport /azerothcore/env/dist/bin/dbimport

CMD [ "/azerothcore/env/dist/bin/dbimport" ]
|
||||
|
||||
###############
# Client Data #
###############

FROM skeleton AS client-data

LABEL description="AzerothCore client-data"

# Destination where the downloaded maps/dbc/vmaps/mmaps archives get unpacked.
ENV DATAPATH=/azerothcore/env/dist/data

RUN apt-get update && \
    apt-get install -y curl unzip && \
    rm -rf /var/lib/apt/lists/*

# NOTE(review): this stage builds on "skeleton", which never declares
# USER_ID/GROUP_ID/DOCKER_USER nor creates the user — so $DOCKER_USER
# expands to an empty string in the COPY --chown and USER instructions
# below. Confirm this stage builds, or add the ARGs/useradd like the
# runtime stage does.
COPY --chown=$DOCKER_USER:$DOCKER_USER apps apps

VOLUME /azerothcore/env/dist/data

USER $DOCKER_USER

CMD ["bash", "-c", "source /azerothcore/apps/installer/includes/functions.sh && inst_download_client_data" ]
|
||||
|
||||
##################
# Map Extractors #
##################

FROM runtime AS tools

LABEL description="AzerothCore Tools"

WORKDIR /azerothcore/env/dist/

# Output directories for the extraction tools (siblings of WORKDIR).
RUN mkdir -pv /azerothcore/env/dist/Cameras \
    /azerothcore/env/dist/dbc \
    /azerothcore/env/dist/maps \
    /azerothcore/env/dist/mmaps \
    /azerothcore/env/dist/vmaps

# Pull just the four extractor binaries out of the build stage.
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
    /azerothcore/env/dist/bin/map_extractor /azerothcore/env/dist/bin/map_extractor

COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
    /azerothcore/env/dist/bin/mmaps_generator /azerothcore/env/dist/bin/mmaps_generator

COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
    /azerothcore/env/dist/bin/vmap4_assembler /azerothcore/env/dist/bin/vmap4_assembler

COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
    /azerothcore/env/dist/bin/vmap4_extractor /azerothcore/env/dist/bin/vmap4_extractor
|
||||
108
apps/docker/Dockerfile.dev-server
Normal file
108
apps/docker/Dockerfile.dev-server
Normal file
@@ -0,0 +1,108 @@
|
||||
#syntax=docker/dockerfile:1.2

#================================================================
#
# DEV: Stage used for the development environment
# and the locally built services
#
#=================================================================

# Fixed: "as" uppercased to "AS" for consistency with the main Dockerfile
# (and to silence BuildKit's FromAsCasing warning).
FROM ubuntu:24.04 AS dev
ARG USER_ID=1000
ARG GROUP_ID=1000
ARG DOCKER_USER=acore
ARG TZ=Etc/UTC

LABEL description="AC base image for dev containers"

# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones

ENV DOCKER=1

# Ensure ac-dev-server can properly pull versions
ENV GIT_DISCOVERY_ACROSS_FILESYSTEM=1

# set timezone environment variable
ENV TZ=$TZ

# set noninteractive mode so tzdata doesn't ask to set timezone on install
ENV DEBIAN_FRONTEND=noninteractive

# Toolchain + runtime dependencies for building and running AC in-container.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
    # Classic install
    git \
    clang lldb lld clang-format clang-tidy \
    make cmake \
    gcc g++ \
    libmysqlclient-dev \
    libssl-dev \
    libbz2-dev \
    libreadline-dev \
    libncurses-dev \
    mysql-server \
    libboost-all-dev \
    # Other
    curl \
    unzip \
    sudo \
    gdb gdbserver \
    libtool \
    build-essential \
    cmake-data \
    openssl \
    google-perftools libgoogle-perftools-dev \
    libmysql++-dev \
    ccache \
    tzdata \
    # Utility for column command used by dashboard
    util-linux \
    # Certificates for downloading client data
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Ensure git will work with the AzerothCore source directory
RUN git config --global --add safe.directory /azerothcore

# change timezone in container
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime \
    && echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata

# Create a non-root user
# (Ubuntu 24.04 ships a default "ubuntu" user with uid 1000; remove it so
# the uid/gid requested via build args are free to use.)
RUN userdel --remove ubuntu \
    && addgroup --gid "$GROUP_ID" "$DOCKER_USER" \
    && adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" \
    && passwd -d "$DOCKER_USER" \
    && echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers

# must be created to set the correct permissions on them
RUN mkdir -p \
    /azerothcore/env/dist/bin \
    /azerothcore/env/dist/data/Cameras \
    /azerothcore/env/dist/data/dbc \
    /azerothcore/env/dist/data/maps \
    /azerothcore/env/dist/data/mmaps \
    /azerothcore/env/dist/data/vmaps \
    /azerothcore/env/dist/logs \
    /azerothcore/env/dist/temp \
    /azerothcore/env/dist/etc \
    /azerothcore/var/build/obj

# Correct permissions for non-root operations
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore /run /opt /azerothcore

USER $DOCKER_USER

# copy only necessary files for the acore dashboard
COPY --chown=$DOCKER_USER:$DOCKER_USER apps /azerothcore/apps
COPY --chown=$DOCKER_USER:$DOCKER_USER bin /azerothcore/bin
COPY --chown=$DOCKER_USER:$DOCKER_USER conf /azerothcore/conf
COPY --chown=$DOCKER_USER:$DOCKER_USER data /azerothcore/data
COPY --chown=$DOCKER_USER:$DOCKER_USER deps /azerothcore/deps
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.json /azerothcore/acore.json
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.sh /azerothcore/acore.sh

# Download deno and make sure the dashboard works
RUN bash /azerothcore/acore.sh quit

WORKDIR /azerothcore
|
||||
41
apps/docker/README.md
Normal file
41
apps/docker/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Docker
|
||||
|
||||
Full documentation is [on our wiki](https://www.azerothcore.org/wiki/install-with-docker#installation)
|
||||
|
||||
## Building
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Ensure that you have docker, docker compose (v2), and the docker buildx command
|
||||
installed.
|
||||
|
||||
It's all bundled with [Docker Desktop](https://docs.docker.com/get-docker/),
|
||||
though if you're using Linux you can install them through your distribution's
|
||||
package manager or by using the [documentation from docker](https://docs.docker.com/engine/install/)
|
||||
|
||||
### Running the Build
|
||||
|
||||
1. Build containers with command
|
||||
|
||||
```console
|
||||
$ docker compose build
|
||||
```
|
||||
|
||||
1. Note that the initial build will take a long time, though subsequent builds should be faster
|
||||
|
||||
2. Start containers with command
|
||||
|
||||
```console
|
||||
$ docker compose up -d
|
||||
# Or build images as part of startup (lets you skip the separate build step)
|
||||
$ docker compose up -d --build
|
||||
```
|
||||
|
||||
1. Note that this command may take a while the first time, for the database import
|
||||
|
||||
3. (on first install) You'll need to attach to the worldserver and create an Admin account
|
||||
|
||||
```console
|
||||
$ docker compose attach ac-worldserver
|
||||
AC> account create admin password 3 -1
|
||||
```
|
||||
216
apps/docker/docker-cmd.sh
Normal file
216
apps/docker/docker-cmd.sh
Normal file
@@ -0,0 +1,216 @@
|
||||
#!/bin/bash

# TODO(michaeldelago) decide if we need a wrapper like this around docker
# commands.
#
# Running the docker commands should be simple and familiar.
# Introducing extra steps through the dashboard can cause issues with people
# getting started, especially if they already know docker.
#
# If a new user knows docker, they will feel (pretty close) to right at home.
# If a new user doesn't know docker, it's easy to learn and the knowledge
# applies to much more than azerothcore

set -euo pipefail

# Fixed: these must be exported — a plain shell assignment is invisible to
# the docker/compose child processes this script spawns, so BuildKit was
# never actually enabled by the old code.
export COMPOSE_DOCKER_CLI_BUILD="1"
export DOCKER_BUILDKIT="1"
# BUILDKIT_INLINE_CACHE="1"
|
||||
|
||||
# Print the wrapper's help text to stdout.
function usage () {
  cat <<EOF
Wrapper for shell scripts around docker

usage: $(basename "$0") ACTION [ ACTION... ] [ ACTION_ARG... ]

actions:
EOF
  # the `-s` will remove the "#" and properly space the action and description
  # (fixed: the dev:up line had its description split across the action
  # column — "dev:up start # the dev server" — and basename's arg is now quoted)
  cat <<EOF | column -t -l2 -s'#'
> start:app  # Start the development worldserver and authserver
> start:app:d  # Start the development worldserver and authserver in detached mode
> build  # build the development worldserver and authserver
> pull  # pull the development worldserver and authserver
> build:nocache  # build the development worldserver and authserver without cache
> clean:build  # clean build artifacts from the dev server
> client-data  # download client data in the dev server
> dev:up  # start the dev server
> dev:build  # compile azerothcore using the dev server
> dev:dash  # execute the dashboard in the dev server container
> dev:shell [ ARGS... ]  # open a bash shell in the dev server
> prod:build  # Build the service containers used by acore-docker
> prod:pull  # Pull the containers used by acore-docker
> prod:up  # Start the services used by acore-docker
> prod:up:d  # start the services used by acore-docker in the background
> attach SERVICE  # attach to a service currently running in docker compose
EOF
}
|
||||
|
||||
# If no args, just spit usage and exit
[[ $# -eq 0 ]] && usage && exit

# loop through commands passed; each iteration consumes the head of the
# argument queue, so several actions can be chained in one invocation.
while [[ $# -gt 0 ]]; do
  case "$1" in
    start:app)
      set -x
      docker compose up
      set +x
      # pop the head off of the queue of args
      # After this, the value of $1 is the value of $2
      shift
      ;;

    start:app:d)
      set -x
      docker compose up -d
      set +x
      shift
      ;;

    build)
      set -x
      docker compose build
      set +x
      shift
      ;;

    pull)
      set -x
      docker compose pull
      set +x
      shift
      ;;

    build:nocache)
      set -x
      docker compose build --no-cache
      set +x
      shift
      ;;

    clean:build)
      set -x
      # Don't run 'docker buildx prune' since it may "escape" our bubble
      # and affect other projects on the user's workstation/server
      cat <<EOF
This command has been deprecated, and at the moment does not do anything.
If you'd like to build without cache, use the command './acore.sh docker build:nocache' or look into the 'docker buildx prune command'

> https://docs.docker.com/engine/reference/commandline/buildx_prune/
EOF
      set +x
      shift
      ;;

    client-data)
      set -x
      docker compose up ac-client-data-init
      set +x
      shift
      ;;

    dev:up)
      set -x
      docker compose --profile dev up ac-dev-server -d
      set +x
      shift
      ;;

    dev:build)
      set -x
      docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh compiler build
      set +x
      shift
      ;;

    dev:dash)
      set -x
      # Fixed: "${@:2}" is quoted so forwarded arguments containing spaces
      # or glob characters survive intact (SC2068).
      docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh "${@:2}"
      set +x
      shift
      ;;

    dev:shell)
      set -x
      docker compose --profile dev up -d ac-dev-server
      # Fixed: quoted "${@:2}" (see dev:dash above).
      docker compose --profile dev exec ac-dev-server bash "${@:2}"
      set +x
      shift
      ;;

    build:prod|prod:build)
      cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:

./acore.sh docker build

The build will continue in 3 seconds
EOF
      sleep 3
      set -x
      docker compose build
      set +x
      shift
      ;;

    pull:prod|prod:pull)
      cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:

./acore.sh docker pull

The image pull will continue in 3 seconds
EOF
      sleep 3
      set -x
      docker compose pull
      set +x
      shift
      ;;

    prod:up|start:prod)
      cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:

./acore.sh docker start:app

The containers will start in 3 seconds
EOF
      sleep 3
      set -x
      docker compose up
      set +x
      shift
      ;;

    prod:up:d|start:prod:d)
      cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:

./acore.sh docker start:app:d

The containers will start in 3 seconds
EOF
      sleep 3
      set -x
      docker compose up -d
      set +x
      shift
      ;;

    attach)
      # NOTE: under `set -u` a missing service name aborts with an
      # "unbound variable" error here, which is the intended failure mode.
      SERVICE="$2"
      set -x
      docker compose attach "$SERVICE"
      set +x
      shift
      shift # Second to pass the argument
      ;;

    *)
      echo "Unknown or empty arg"
      usage
      exit 1
  esac
done
|
||||
54
apps/docker/entrypoint.sh
Normal file
54
apps/docker/entrypoint.sh
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# Paths are overridable via env so the script also works outside the stock
# image layout.
CONF_DIR="${CONF_DIR:-/azerothcore/env/dist/etc}"
LOGS_DIR="${LOGS_DIR:-/azerothcore/env/dist/logs}"

# Probe both directories for writability up front; a loud warning here beats
# a confusing service crash later (root-owned host mounts are a common
# failure mode when the services run as an unprivileged user).
if ! touch "$CONF_DIR/.write-test" || ! touch "$LOGS_DIR/.write-test"; then
  cat <<EOF
===== WARNING =====
The current user doesn't have write permissions for
the configuration dir ($CONF_DIR) or logs dir ($LOGS_DIR).
It's likely that services will fail due to this.

This is usually caused by cloning the repository as root,
so the files are owned by root (uid 0).

To resolve this, you can set the ownership of the
configuration directory with the command on the host machine.
Note that if the files are owned as root, the ownership must
be changed as root (hence sudo).

$ sudo chown -R $(id -u):$(id -g) /path/to$CONF_DIR /path/to$LOGS_DIR

Alternatively, you can set the DOCKER_USER environment
variable (on the host machine) to "root", though this
isn't recommended.

$ DOCKER_USER=root docker-compose up -d
====================
EOF
fi

# Remove the probe files (they may not exist if the touch above failed).
[[ -f "$CONF_DIR/.write-test" ]] && rm -f "$CONF_DIR/.write-test"
[[ -f "$LOGS_DIR/.write-test" ]] && rm -f "$LOGS_DIR/.write-test"

# Copy all default config files to env/dist/etc if they don't already exist
# -r == recursive
# -n == no clobber (don't overwrite)
# -v == be verbose
# NOTE(review): if /azerothcore/env/ref/etc is empty the unexpanded glob
# makes cp fail and, with set -e, aborts startup — confirm the ref dir is
# always populated (the runtime image copies the dist configs there).
cp -rnv /azerothcore/env/ref/etc/* "$CONF_DIR"

# ACORE_COMPONENT is set by each service image (authserver/worldserver/...).
CONF="$CONF_DIR/$ACORE_COMPONENT.conf"
CONF_DIST="$CONF_DIR/$ACORE_COMPONENT.conf.dist"

# Copy the "dist" file to the "conf" if the conf doesn't already exist
if [[ -f "$CONF_DIST" ]]; then
  cp -vn "$CONF_DIST" "$CONF"
else
  touch "$CONF"
fi

echo "Starting $ACORE_COMPONENT..."

# Hand off to the requested service binary (the image CMD), preserving
# PID 1 / signal-handling semantics.
exec "$@"
|
||||
83
apps/extractor/extractor.bat
Normal file
83
apps/extractor/extractor.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
CLS

:MENU
ECHO.
ECHO ...............................................
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
ECHO ...............................................
ECHO PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT.
ECHO ...............................................
ECHO.
ECHO WARNING! when extracting the vmaps extractor will
ECHO output the text below, it's intended and not an error:
ECHO ..........................................
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
ECHO No such file.
ECHO Couldn't open RootWmo!!!
ECHO Done!
ECHO ..........................................
ECHO.
ECHO Press 1, 2, 3 or 4 to start extracting or 5 to exit.
ECHO 1 - Extract base files (NEEDED) and cameras.
ECHO 2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)
ECHO 3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)
ECHO 4 - Extract all (may take hours)
ECHO 5 - EXIT
ECHO.
SET /P M=Type 1, 2, 3, 4 or 5 then press ENTER:
REM Fixed: both comparison sides are quoted -- the bare form (IF %M%==1)
REM turns an empty answer into "IF ==1", a batch syntax error that kills
REM the script when the user just presses ENTER.
IF "%M%"=="1" GOTO MAPS
IF "%M%"=="2" GOTO VMAPS
IF "%M%"=="3" GOTO MMAPS
IF "%M%"=="4" GOTO ALL
IF "%M%"=="5" GOTO :EOF
REM Fixed: unknown answers used to fall straight through into :MAPS;
REM redisplay the menu instead.
GOTO MENU

:MAPS
start /b /w map_extractor.exe
GOTO MENU

:VMAPS
start /b /w vmap4_extractor.exe
if exist vmaps\ (
    echo folder found.
) else (
    echo creating folder "vmaps".
    mkdir "vmaps"
)
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
GOTO MENU

:MMAPS
ECHO This may take a few hours to complete. Please be patient.
PAUSE
if exist mmaps\ (
    echo folder found.
) else (
    echo creating folder "mmaps".
    mkdir "mmaps"
)
start /b /w mmaps_generator.exe
GOTO MENU

:ALL
ECHO This may take a few hours to complete. Please be patient.
PAUSE
if exist vmaps\ (
    echo folder found.
) else (
    echo creating folder "vmaps".
    mkdir "vmaps"
)
if exist mmaps\ (
    echo folder found.
) else (
    echo creating folder "mmaps".
    mkdir "mmaps"
)
start /b /w map_extractor.exe
start /b /w vmap4_extractor.exe
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
start /b /w mmaps_generator.exe
GOTO MENU
|
||||
85
apps/extractor/extractor.sh
Executable file
85
apps/extractor/extractor.sh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/bin/bash

# Extract the base client data (dbc, maps, Cameras), wiping any previous
# output first, then return to the menu.
function Base {
  printf '%s\n' "Extract Base"
  rm -rf dbc maps Cameras
  ./map_extractor
  Menu
}
|
||||
|
||||
# Extract and assemble vmaps (collision geometry). Requires base maps to
# have been extracted first.
function VMaps {
  echo "Extract VMaps"
  mkdir -p Buildings vmaps
  rm -rf Buildings/* vmaps/*
  ./vmap4_extractor
  ./vmap4_assembler Buildings vmaps
  # Fixed: was "rmdir -rf Buildings" — rmdir has no -r/-f options, so the
  # command failed and the intermediate Buildings dir was never removed.
  rm -rf Buildings
  Menu
}
|
||||
|
||||
# Generate mmaps (pathfinding data); by far the slowest extraction step.
function MMaps {
  printf '%s\n' "This may take a few hours to complete. Please be patient."
  mkdir -p mmaps
  rm -rf mmaps/*
  ./mmaps_generator
  Menu
}
|
||||
|
||||
# Run every extraction step in sequence: base maps, vmaps, then mmaps.
function All {
  echo "This may take a few hours to complete. Please be patient."
  rm -rf dbc maps Cameras
  mkdir -p Buildings vmaps mmaps
  rm -rf Buildings/* vmaps/* mmaps/*
  ./map_extractor
  ./vmap4_extractor
  ./vmap4_assembler Buildings vmaps
  # Fixed: was "rmdir -rf Buildings" — rmdir has no -r/-f options, so the
  # command failed and the intermediate Buildings dir was never removed.
  rm -rf Buildings
  ./mmaps_generator
  Menu
}
|
||||
|
||||
# Show the task menu, read one answer, and dispatch to the matching action.
function Menu {
  echo ""
  echo "..............................................."
  echo "AzerothCore dbc, maps, vmaps, mmaps extractor"
  echo "..............................................."
  echo "PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT."
  echo "..............................................."
  echo ""
  echo "WARNING! when extracting the vmaps extractor will"
  echo "output the text below, it's intended and not an error:"
  echo ".........................................."
  echo "Extracting World\Wmo\Band\Final_Stage.wmo"
  echo "No such file."
  echo "Couldn't open RootWmo!!!"
  echo "Done!"
  echo " .........................................."
  echo ""
  echo "Press 1, 2, 3 or 4 to start extracting or 5 to exit."
  echo "1 - Extract base files (NEEDED) and cameras."
  echo "2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)"
  echo "3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)"
  echo "4 - Extract all (may take hours)"
  echo "5 - EXIT"
  echo ""

  read -rp "Type 1, 2, 3, 4 or 5 then press ENTER: " choice

  case $choice in
    1) Base ;;
    2) VMaps ;;
    3) MMaps ;;
    4) All ;;
    5) exit 0 ;;
    # Fixed: the old invalid-choice arm performed a second `read` whose
    # answer was silently discarded (the case never re-ran), so the user's
    # retry did nothing. Redisplay the menu instead.
    *) echo "Invalid choice."; Menu ;;
  esac
}
|
||||
|
||||
# Entry point: only start the menu when the client Data folder and all four
# extractor binaries are present in the current working directory.
required_tools=(map_extractor vmap4_extractor vmap4_assembler mmaps_generator)
everything_present=1
[ -d "./Data" ] || everything_present=0
for tool in "${required_tools[@]}"; do
  [ -f "$tool" ] || everything_present=0
done

if [ "$everything_present" -eq 1 ]; then
  echo "The required files and folder exist in the current directory."
  chmod +x "${required_tools[@]}"
  Menu
else
  echo "One or more of the required files or folder is missing from the current directory."
  echo "Place map_extractor vmap4_extractor vmap4_assembler mmaps_generator"
  echo "In your WoW folder with WoW.exe"
fi
|
||||
83
apps/extractor/extractor_es.bat
Normal file
83
apps/extractor/extractor_es.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
REM AzerothCore client-data extractor menu (Spanish locale).
REM Run from the WoW client folder, next to the extractor executables.
CLS

:MENU
ECHO.
ECHO ...............................................
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
ECHO ...............................................
ECHO PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR.
ECHO ...............................................
ECHO.
ECHO ADVERTENCIA: al extraer los vmaps del extractor
ECHO la salida del texto de abajo, es intencional y no un error:
ECHO ..........................................
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
ECHO No such file.
ECHO Couldn't open RootWmo!!!
ECHO Done!
ECHO ..........................................
ECHO.
ECHO Pulse 1, 2, 3 o 4 para iniciar la extraccion o 5 para salir.
ECHO 1 - Extraer los archivos base (NECESARIOS) y las cámaras.
ECHO 2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)
ECHO 3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)
ECHO 4 - Extraer todo (puede llevar varias horas)
ECHO 5 - SALIR
ECHO.
SET /P M=Escriba 1, 2, 3, 4 o 5 y pulse ENTER:
REM BUGFIX: quote both sides of the comparison so an empty or multi-word
REM answer cannot produce a syntax error at runtime.
IF "%M%"=="1" GOTO MAPS
IF "%M%"=="2" GOTO VMAPS
IF "%M%"=="3" GOTO MMAPS
IF "%M%"=="4" GOTO ALL
IF "%M%"=="5" GOTO :EOF
REM BUGFIX: any unrecognised input previously fell through into :MAPS and
REM started the extraction; redisplay the menu instead.
GOTO MENU

:MAPS
REM Extract dbc/maps/cameras (base client data).
start /b /w map_extractor.exe
GOTO MENU

:VMAPS
start /b /w vmap4_extractor.exe
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
start /b /w vmap4_assembler.exe Buildings vmaps
REM Buildings is only an intermediate staging folder; remove it afterwards.
rmdir Buildings /s /q
GOTO MENU

:MMAPS
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
PAUSE
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w mmaps_generator.exe
GOTO MENU

:ALL
REM Run every extraction stage in sequence: maps, vmaps, then mmaps.
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
PAUSE
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w map_extractor.exe
start /b /w vmap4_extractor.exe
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
start /b /w mmaps_generator.exe
GOTO MENU
|
||||
85
apps/extractor/extractor_es.sh
Executable file
85
apps/extractor/extractor_es.sh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/bin/bash
|
||||
# Extract the base client files (dbc, maps, Cameras), then return to the menu.
Base() {
    echo "Extrayendo archivos base"
    # Wipe any previous extraction output before regenerating it.
    rm -rf dbc maps Cameras
    ./map_extractor
    Menu
}
|
||||
|
||||
# Extract and assemble the vision maps (vmaps); maps must exist already.
function VMaps {
    echo "Extrayendo VMaps"
    # Start from clean staging (Buildings) and output (vmaps) directories.
    mkdir -p Buildings vmaps
    rm -rf Buildings/* vmaps/*
    ./vmap4_extractor
    ./vmap4_assembler Buildings vmaps
    # BUGFIX: the original used 'rmdir -rf Buildings', which is invalid —
    # rmdir has no -rf option and only removes empty directories, so the
    # staging tree was never deleted. Use 'rm -rf' instead.
    rm -rf Buildings
    Menu
}
|
||||
|
||||
# Generate the movement maps (mmaps); long-running. Vmaps must already exist.
MMaps() {
    echo "Esto puede tardar unas horas en completarse. Por favor, tenga paciencia."
    # Ensure an empty mmaps output directory before generating.
    mkdir -p mmaps
    rm -rf mmaps/*
    ./mmaps_generator
    Menu
}
|
||||
|
||||
# Run every extraction stage in sequence: base files, vmaps, then mmaps.
function All {
    echo "Esto puede tardar varias horas en completarse. Por favor, tenga paciencia."
    rm -rf dbc maps Cameras
    # Fresh staging and output directories for vmaps/mmaps.
    mkdir -p Buildings vmaps mmaps
    rm -rf Buildings/* vmaps/* mmaps/*
    ./map_extractor
    ./vmap4_extractor
    ./vmap4_assembler Buildings vmaps
    # BUGFIX: the original used 'rmdir -rf Buildings', which is invalid —
    # rmdir has no -rf option and only removes empty directories, so the
    # staging tree was never deleted. Use 'rm -rf' instead.
    rm -rf Buildings
    ./mmaps_generator
    Menu
}
|
||||
|
||||
# Interactive menu: show the task list, then dispatch on the user's choice.
function Menu {
    echo ""
    echo "..............................................."
    echo "Extractor de dbc, maps, vmaps, mmaps de AzerothCore"
    echo "..............................................."
    echo "PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR."
    echo "..............................................."
    echo ""
    echo "ADVERTENCIA: al extraer los vmaps del extractor"
    echo "la salida del texto de abajo, es intencional y no un error:"
    echo ".........................................."
    echo "Extracting World\Wmo\Band\Final_Stage.wmo"
    echo "No such file."
    echo "Couldn't open RootWmo!!!"
    echo "Done!"
    echo ".........................................."
    echo ""
    echo "Presione 1, 2, 3 o 4 para iniciar la extracción o 5 para salir."
    echo "1 - Extraer los archivos base (NECESARIOS) y las cámaras."
    echo "2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)"
    echo "3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)"
    echo "4 - Extraer todo (puede llevar varias horas)"
    echo "5 - SALIR"
    echo ""

    # BUGFIX: the original handled invalid input by re-reading 'choice' and
    # then discarding it (the case statement had already finished), so a typo
    # silently ended the program. Loop until a recognised option is entered.
    while true; do
        read -rp "Escriba 1, 2, 3, 4 o 5 y pulse ENTER: " choice

        case $choice in
            1) Base; return ;;
            2) VMaps; return ;;
            3) MMaps; return ;;
            4) All; return ;;
            5) exit 0 ;;
            *) echo "Opción inválida." ;;
        esac
    done
}
|
||||
|
||||
# Entry point: run the menu only when the WoW client Data folder and all
# four extractor binaries are present in the current directory.
required_tools=(map_extractor vmap4_extractor vmap4_assembler mmaps_generator)
missing=0
[ -d "./Data" ] || missing=1
for tool in "${required_tools[@]}"; do
    [ -f "$tool" ] || missing=1
done

if [ "$missing" -eq 0 ]; then
    echo "Los archivos y carpetas requeridos existen en el directorio actual."
    chmod +x map_extractor vmap4_extractor vmap4_assembler mmaps_generator
    Menu
else
    echo "Uno o más archivos o carpetas requeridos no se encuentran en el directorio actual."
    echo "Coloque map_extractor vmap4_extractor vmap4_assembler mmaps_generator"
    echo "en su directorio de WoW junto con WoW.exe"
fi
|
||||
5
apps/git_tools/setup_git_commit_template.sh
Normal file
5
apps/git_tools/setup_git_commit_template.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash

## Set a local git commit template
# Points this repository's commit.template at the tracked template file.
# --local scopes the setting to this clone only, never the user's global config.
git config --local commit.template ".git_commit_template.txt"
echo "--- Successfully set the default commit template for this repository only. Verify with: git config -e"
|
||||
34
apps/git_tools/subrepo-update.sh
Executable file
34
apps/git_tools/subrepo-update.sh
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env bash

#######################
#
# README
#
# This script is used to automatically update
# submodules and subrepos included in this project
# Subrepo are updated in bidirectional way (pull + push)
# because they are intended to be developed by this organization
#
# NOTE: only maintainers and CI should run this script and
# keep it updated
#
#######################

# Abort on the first failing command. (No -u/-o pipefail here; the sourced
# git-subrepo libraries may not be strict-mode clean.)
set -e
# Repository root, resolved relative to this script's own location.
ROOT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../"
# update all submodules
# NOTE(review): the submodule commands run in the caller's CWD, not in
# $ROOT_PATH — this assumes the script is invoked from the repo; confirm.
git submodule update --init --recursive
git submodule foreach git pull origin master
# include libraries for git subrepo
# These define the git-subrepo command and the subrepoUpdate helper used below.
source "$ROOT_PATH/deps/git-subrepo/.rc"
source "$ROOT_PATH/deps/acore/bash-lib/src/git-utils/subrepo.sh"

echo "> Pulling and update all subrepos"

# subrepoUpdate <remote-url> <branch> <local-path> — provided by the sourced
# bash-lib; performs the bidirectional (pull + push) sync described above.
subrepoUpdate https://github.com/azerothcore/bash-lib master deps/acore/bash-lib

subrepoUpdate https://github.com/azerothcore/cmake-utils master deps/acore/cmake-utils

subrepoUpdate https://github.com/azerothcore/mysql-tools master deps/acore/mysql-tools

subrepoUpdate https://github.com/azerothcore/joiner master deps/acore/joiner
|
||||
1319
apps/grafana/1_General.json
Normal file
1319
apps/grafana/1_General.json
Normal file
File diff suppressed because it is too large
Load Diff
691
apps/grafana/2_Maps.json
Normal file
691
apps/grafana/2_Maps.json
Normal file
@@ -0,0 +1,691 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 6,
|
||||
"iteration": 1595939001794,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 2,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"transform": "negative-Y"
|
||||
}
|
||||
],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Load tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'LoadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
},
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'UnloadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Map",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Pathfinding queries",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"mmap_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'CalculatePath' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "MMap",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 14
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 4,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_creatures",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Creatures",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 5,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_gameobjects",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Gameobjects",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Maps, vmaps and mmaps",
|
||||
"uid": "6IhqWiWGz",
|
||||
"version": 2
|
||||
}
|
||||
280
apps/grafana/3_Network.json
Normal file
280
apps/grafana/3_Network.json
Normal file
@@ -0,0 +1,280 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 7,
|
||||
"iteration": 1595939048589,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Processed packets",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT sum(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "sum"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"alias": "Processed packets / mean per session",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT mean(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Processed packets",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Network",
|
||||
"uid": "_QtkMmWMk",
|
||||
"version": 2
|
||||
}
|
||||
1677
apps/grafana/4_Performance_profiling.json
Normal file
1677
apps/grafana/4_Performance_profiling.json
Normal file
File diff suppressed because it is too large
Load Diff
9
apps/installer/includes/config/config-main.sh
Normal file
9
apps/installer/includes/config/config-main.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env bash

# Thin entry point for the configuration-management menu: resolves this
# script's directory, loads config.sh (which defines acore_dash_config),
# and forwards all CLI arguments to it.

CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" || exit ; pwd )

# shellcheck source=./config.sh
source "$CURRENT_PATH/config.sh"

acore_dash_config "$@"
|
||||
|
||||
60
apps/installer/includes/config/config.sh
Normal file
60
apps/installer/includes/config/config.sh
Normal file
@@ -0,0 +1,60 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" || exit ; pwd )
|
||||
|
||||
# shellcheck source=../../../bash_shared/includes.sh
|
||||
source "$CURRENT_PATH/../../../bash_shared/includes.sh"
|
||||
# shellcheck source=../includes.sh
|
||||
source "$CURRENT_PATH/../includes.sh"
|
||||
# shellcheck source=../../../bash_shared/menu_system.sh
|
||||
source "$AC_PATH_APPS/bash_shared/menu_system.sh"
|
||||
|
||||
#######################################
# Print the value of a single configuration variable by name.
# Arguments: $1 - variable name to display
# Outputs:   "NAME=value" or "NAME is not set." on stdout
# Returns:   1 (with usage text) when the argument count is wrong
#######################################
function acore_dash_configShowValue() {
    [ $# -eq 1 ] || { echo "Usage: show <VAR_NAME>"; return 1; }

    local name="$1"
    # ${!name} is bash indirect expansion: the value of the variable whose
    # name is stored in $name.
    local value="${!name}"

    if [ -n "$value" ]; then
        echo "$name=$value"
    else
        echo "$name is not set."
    fi
}
|
||||
|
||||
# Source all configuration variables into the caller's shell session.
# Delegates to acore_common_loadConfig (defined in the bash_shared includes
# sourced at the top of this file).
function acore_dash_configLoad() {
    acore_common_loadConfig
    echo "Configuration loaded into the current shell session."
}
|
||||
|
||||
# Configuration management menu definition
|
||||
# Format: "key|short|description"
|
||||
# Menu entries consumed by menu_run_with_items. "show" and "load" are
# dispatched by handle_config_command below; "help" and "quit" are
# presumably handled by the menu system itself — verify in menu_system.sh.
config_menu_items=(
    "show|s|Show configuration variable value"
    "load|l|Load configurations variables within the current shell session"
    "help|h|Show detailed help"
    "quit|q|Close this menu"
)
|
||||
|
||||
# Menu command handler for configuration operations
|
||||
#######################################
# Dispatch a configuration-menu selection to its implementation.
# Arguments: $1 - menu key; remaining args are forwarded to the handler
#######################################
function handle_config_command() {
    local selection="$1"
    shift

    case "$selection" in
        show) acore_dash_configShowValue "$@" ;;
        load) acore_dash_configLoad ;;
    esac
}
|
||||
|
||||
# Entry point for the configuration manager: run the shared menu system with
# our item list and handler, forwarding any CLI arguments. The function's
# exit status is that of menu_run_with_items (status of the last command).
function acore_dash_config() {
    menu_run_with_items "CONFIG MANAGER" handle_config_command -- "${config_menu_items[@]}" -- "$@"
}
|
||||
|
||||
187
apps/installer/includes/functions.sh
Normal file
187
apps/installer/includes/functions.sh
Normal file
@@ -0,0 +1,187 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Set SUDO variable - one liner
|
||||
# Decide whether privileged commands must be prefixed with sudo.
# On MSYS (Windows) there is no sudo at all; elsewhere use it only when we
# are not already running as root (EUID != 0).
if [[ "$OSTYPE" == msys* ]]; then
    SUDO=""
elif [ "$EUID" -ne 0 ]; then
    SUDO="sudo"
else
    SUDO=""
fi
|
||||
|
||||
#######################################
# Detect the host platform/distribution and source the matching OS
# configuration script from includes/os_configs/.
# Globals:   OSTYPE (read), OSDISTRO (read, optional user override),
#            DISTRO (written), AC_PATH_INSTALLER (read)
# Outputs:   platform/distro diagnostics on stdout
#######################################
function inst_configureOS() {
    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
        solaris*) echo "Solaris is not supported yet" ;;
        darwin*) source "$AC_PATH_INSTALLER/includes/os_configs/osx.sh" ;;
        linux*)
            # If $OSDISTRO is set, use this value (from config.sh)
            if [ ! -z "$OSDISTRO" ]; then
                DISTRO=$OSDISTRO
            # If available, use LSB to identify distribution
            elif command -v lsb_release >/dev/null 2>&1 ; then
                DISTRO=$(lsb_release -is)
            # Otherwise, use release info file
            # (globs /etc/*-release style files and strips the path/suffix)
            else
                DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1)
            fi

            # Normalize known Debian/Ubuntu derivatives to a canonical name.
            case $DISTRO in
                # add here distro that are debian or ubuntu based
                # TODO: find a better way, maybe checking the existance
                # of a package manager
                "neon" | "ubuntu" | "Ubuntu")
                    DISTRO="ubuntu"
                    ;;
                "debian" | "Debian")
                    DISTRO="debian"
                    ;;
                *)
                    echo "Distro: $DISTRO, is not supported. If your distribution is based on debian or ubuntu,
                    please set the 'OSDISTRO' environment variable to one of these distro (you can use config.sh file)"
                    ;;
            esac

            # Lowercase the distro name (bash 4+ case conversion).
            DISTRO=${DISTRO,,}

            echo "Distro: $DISTRO"

            # TODO: implement different configurations by distro
            # NOTE(review): on an unsupported distro execution still reaches
            # this source and will fail on a missing file — confirm whether
            # the unsupported branch above should return early instead.
            source "$AC_PATH_INSTALLER/includes/os_configs/$DISTRO.sh"
            ;;
        *bsd*) echo "BSD is not supported yet" ;;
        msys*) source "$AC_PATH_INSTALLER/includes/os_configs/windows.sh" ;;
        *) echo "This platform is not supported" ;;
    esac
}
|
||||
|
||||
# Use the data/sql/create/create_mysql.sql to initialize the database
function inst_dbCreate() {
    # Creates the auth/characters/world databases by piping the bundled
    # create_mysql.sql into the MySQL root account.
    # Connection strategies, in order:
    #   1. $MYSQL_ROOT_PASSWORD when set
    #   2. passwordless root (CI environments only)
    #   3. interactive prompt (mysql -p)
    # Exits the shell (exit 1) when all strategies fail.
    local create_sql="$AC_PATH_ROOT/data/sql/create/create_mysql.sql"

    echo "Creating database..."

    # Attempt to connect with MYSQL_ROOT_PASSWORD
    if [ -n "${MYSQL_ROOT_PASSWORD:-}" ]; then
        if $SUDO mysql -u root -p"$MYSQL_ROOT_PASSWORD" < "$create_sql" 2>/dev/null; then
            echo "Database created successfully."
            return 0
        else
            echo "Failed to connect with provided password, falling back to interactive mode..."
        fi
    fi

    # In CI environments or when no password is set, try without password first
    if [[ "${CONTINUOUS_INTEGRATION:-}" == "true" ]]; then
        echo "CI environment detected, attempting connection without password..."

        if $SUDO mysql -u root < "$create_sql" 2>/dev/null; then
            echo "Database created successfully."
            return 0
        else
            echo "Failed to connect without password, falling back to interactive mode..."
        fi
    fi

    # Try with password (interactive mode). Checking the command directly
    # instead of $? keeps the failure handling correct under set -e.
    echo "Please enter your sudo and your MySQL root password if prompted."
    if ! $SUDO mysql -u root -p < "$create_sql"; then
        echo "Database creation failed. Please check your MySQL server and credentials."
        exit 1
    fi
    echo "Database created successfully."
}
function inst_updateRepo() {
    # Pull the latest changes for the AzerothCore repository.
    # $INSTALLER_PULL_FROM (optional) overrides the branch to pull;
    # otherwise the currently checked-out branch is used.
    cd "$AC_PATH_ROOT" || { echo "Cannot enter $AC_PATH_ROOT" >&2; return 1; }
    if [ -n "${INSTALLER_PULL_FROM:-}" ]; then
        git pull "$ORIGIN_REMOTE" "$INSTALLER_PULL_FROM"
    else
        git pull "$ORIGIN_REMOTE" "$(git rev-parse --abbrev-ref HEAD)"
    fi
}
function inst_resetRepo() {
    # DESTRUCTIVE: discard all local changes and untracked files,
    # resetting the working tree to the current branch head.
    # Guard the cd so we never reset/clean from the wrong directory.
    cd "$AC_PATH_ROOT" || { echo "Cannot enter $AC_PATH_ROOT" >&2; return 1; }
    git reset --hard "$(git rev-parse --abbrev-ref HEAD)"
    git clean -f
}
# Build the project: CMake configuration followed by the actual build.
# Both helpers come from the compiler app includes.
function inst_compile() {
    comp_configure
    comp_build
}
# Wipe previous build artifacts, then configure and build from scratch.
function inst_cleanCompile() {
    comp_clean
    inst_compile
}
# Full first-time installation: OS dependencies, build, database
# creation, and client data download — in that order.
function inst_allInOne() {
    inst_configureOS
    inst_compile
    inst_dbCreate
    inst_download_client_data
}
############################################################
# Module helpers and dispatcher                            #
############################################################

# Returns the default branch name of a GitHub repo in the azerothcore org.
# Queries the GitHub API; if the call fails or yields nothing, falls back
# to "master".
function inst_get_default_branch() {
    local repo="$1"
    local branch
    branch=$(curl --silent "https://api.github.com/repos/azerothcore/${repo}" \
        | "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.default_branch')
    # Empty result (API failure, missing field) -> default to master.
    echo "${branch:-master}"
}
# =============================================================================
# Module Management System
# =============================================================================
# Load the module manager functions from the dedicated modules-manager directory
# (defines inst_module and the inst_module_* operations used by the dashboard).
source "$AC_PATH_INSTALLER/includes/modules-manager/modules.sh"
# Launch the given server binary (worldserver/authserver) under the
# simple-restarter wrapper, which relaunches it if it stops.
function inst_simple_restarter {
    local binary="$1"
    echo "Running $binary ..."
    bash "$AC_PATH_APPS/startup-scripts/src/simple-restarter" "$AC_BINPATH_FULL" "$binary"
    echo
    # kept disabled, as in the original:
    #disown -a
    #jobs -l
}
function inst_download_client_data {
    # Download and unpack the client data archive (maps/vmaps/mmaps/dbc)
    # required by the worldserver. Skips the download when the requested
    # version is already installed, as tracked by the data-version file.
    # change the following version when needed
    local VERSION=v19

    echo "#######################"
    echo "Client data downloader"
    echo "#######################"

    # first check if it's defined in env, otherwise use the default
    local path="${DATAPATH:-$AC_BINPATH_FULL}"
    local zipPath="${DATAPATH_ZIP:-"$path/data.zip"}"

    dataVersionFile="$path/data-version"

    # Loads INSTALLED_VERSION if a previous download recorded it.
    [ -f "$dataVersionFile" ] && source "$dataVersionFile"

    # create the path if doesn't exists
    mkdir -p "$path"

    # ${INSTALLED_VERSION:-} keeps this safe when no version file exists yet.
    if [ "$VERSION" == "${INSTALLED_VERSION:-}" ]; then
        echo "Data $VERSION already installed. If you want to force the download remove the following file: $dataVersionFile"
        return
    fi

    echo "Downloading client data in: $zipPath ..."
    # -f makes curl fail on HTTP errors (e.g. 404) instead of saving the
    # error page as data.zip and wrongly recording the version as installed.
    curl -f -L "https://github.com/wowgaming/client-data/releases/download/$VERSION/data.zip" > "$zipPath" \
        && echo "unzip downloaded file in $path..." && unzip -q -o "$zipPath" -d "$path/" \
        && echo "Remove downloaded file" && rm "$zipPath" \
        && echo "INSTALLED_VERSION=$VERSION" > "$dataVersionFile"
}
23
apps/installer/includes/includes.sh
Normal file
23
apps/installer/includes/includes.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
# Entry include for the installer app: wires up shared includes, the joiner
# dependency, the compiler includes, semver helpers and the installer
# functions. Safe to source multiple times thanks to the guard below.
[[ ${INSTALLER_GUARDYVAR:-} -eq 1 ]] && return || readonly INSTALLER_GUARDYVAR=1 # include it once

CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd )

# shellcheck source=../../bash_shared/includes.sh
source "$CURRENT_PATH/../../bash_shared/includes.sh"

# Root directory of the installer app.
AC_PATH_INSTALLER="$AC_PATH_APPS/installer"

# Joiner: git-repository manager used for module installation.
J_PATH="$AC_PATH_DEPS/acore/joiner"
J_PATH_MODULES="$AC_PATH_MODULES"

# shellcheck source=../../../deps/acore/joiner/joiner.sh
source "$J_PATH/joiner.sh"

# shellcheck source=../../compiler/includes/includes.sh
source "$AC_PATH_APPS/compiler/includes/includes.sh"

# shellcheck source=../../../deps/semver_bash/semver.sh
source "$AC_PATH_DEPS/semver_bash/semver.sh"

# shellcheck source=../includes/functions.sh
source "$AC_PATH_INSTALLER/includes/functions.sh"
311
apps/installer/includes/modules-manager/README.md
Normal file
311
apps/installer/includes/modules-manager/README.md
Normal file
@@ -0,0 +1,311 @@
|
||||
# AzerothCore Module Manager
|
||||
|
||||
This directory contains the module management system for AzerothCore, providing advanced functionality for installing, updating, and managing server modules.
|
||||
|
||||
## 🚀 Features
|
||||
|
||||
- **Advanced Syntax**: Support for `repo[:dirname][@branch[:commit]]` format
|
||||
- **Cross-Format Recognition**: Intelligent matching across URLs, SSH, and simple names
|
||||
- **Custom Directory Naming**: Prevent conflicts with custom directory names
|
||||
- **Duplicate Prevention**: Smart detection and prevention of duplicate installations
|
||||
- **Multi-Host Support**: GitHub, GitLab, and other Git hosts
|
||||
- **Module Exclusion**: Support for excluding modules via environment variable
|
||||
- **Interactive Menu System**: Easy-to-use menu interface for module management
|
||||
- **Colored Output**: Enhanced terminal output with color support (respects NO_COLOR)
|
||||
- **Flat Directory Structure**: Uses flat module installation (no owner subfolders)
|
||||
|
||||
## 📁 File Structure
|
||||
|
||||
```
|
||||
modules-manager/
|
||||
├── modules.sh # Core module management functions
|
||||
└── README.md # This documentation file
|
||||
```
|
||||
|
||||
## 🔧 Module Specification Syntax
|
||||
|
||||
The module manager supports flexible syntax for specifying modules:
|
||||
|
||||
### New Syntax Format
|
||||
```bash
|
||||
repo[:dirname][@branch[:commit]]
|
||||
```
|
||||
|
||||
### Examples
|
||||
|
||||
| Specification | Description |
|
||||
|---------------|-------------|
|
||||
| `mod-transmog` | Simple module name, uses default branch and directory |
|
||||
| `mod-transmog:my-custom-dir` | Custom directory name |
|
||||
| `mod-transmog@develop` | Specific branch |
|
||||
| `mod-transmog:custom@develop:abc123` | Custom directory, branch, and commit |
|
||||
| `https://github.com/owner/repo.git@main` | Full URL with branch |
|
||||
| `git@github.com:owner/repo.git:custom-dir` | SSH URL with custom directory |
|
||||
|
||||
## 🎯 Usage Examples
|
||||
|
||||
### Installing Modules
|
||||
|
||||
```bash
|
||||
# Simple module installation
|
||||
./acore.sh module install mod-transmog
|
||||
|
||||
# Install with custom directory name
|
||||
./acore.sh module install mod-transmog:my-transmog-dir
|
||||
|
||||
# Install specific branch
|
||||
./acore.sh module install mod-transmog@develop
|
||||
|
||||
# Install with full specification
|
||||
./acore.sh module install mod-transmog:custom-dir@develop:abc123
|
||||
|
||||
# Install from URL
|
||||
./acore.sh module install https://github.com/azerothcore/mod-transmog.git@main
|
||||
|
||||
# Install multiple modules
|
||||
./acore.sh module install mod-transmog mod-ale:custom-eluna
|
||||
|
||||
# Install all modules from list
|
||||
./acore.sh module install --all
|
||||
```
|
||||
|
||||
### Updating Modules
|
||||
|
||||
```bash
|
||||
# Update specific module
|
||||
./acore.sh module update mod-transmog
|
||||
|
||||
# Update all modules
|
||||
./acore.sh module update --all
|
||||
|
||||
# Update with branch specification
|
||||
./acore.sh module update mod-transmog@develop
|
||||
```
|
||||
|
||||
### Removing Modules
|
||||
|
||||
```bash
|
||||
# Remove by simple name (cross-format recognition)
|
||||
./acore.sh module remove mod-transmog
|
||||
|
||||
# Remove by URL (recognizes same module)
|
||||
./acore.sh module remove https://github.com/azerothcore/mod-transmog.git
|
||||
|
||||
# Remove multiple modules
|
||||
./acore.sh module remove mod-transmog mod-ale
|
||||
```
|
||||
|
||||
### Searching Modules
|
||||
|
||||
```bash
|
||||
# Search for modules
|
||||
./acore.sh module search transmog
|
||||
|
||||
# Search with multiple terms
|
||||
./acore.sh module search auction house
|
||||
|
||||
# Search with input prompt
|
||||
./acore.sh module search
|
||||
```
|
||||
|
||||
### Listing Installed Modules
|
||||
|
||||
```bash
|
||||
# List all installed modules
|
||||
./acore.sh module list
|
||||
```
|
||||
|
||||
### Interactive Menu
|
||||
|
||||
```bash
|
||||
# Start interactive menu system
|
||||
./acore.sh module
|
||||
|
||||
# Menu options:
|
||||
# s - Search for available modules
|
||||
# i - Install one or more modules
|
||||
# u - Update installed modules
|
||||
# r - Remove installed modules
|
||||
# l - List installed modules
|
||||
# h - Show detailed help
|
||||
# q - Close this menu
|
||||
```
|
||||
|
||||
## 🔍 Cross-Format Recognition
|
||||
|
||||
The system intelligently recognizes the same module across different specification formats:
|
||||
|
||||
```bash
|
||||
# These all refer to the same module:
|
||||
mod-transmog
|
||||
azerothcore/mod-transmog
|
||||
https://github.com/azerothcore/mod-transmog.git
|
||||
git@github.com:azerothcore/mod-transmog.git
|
||||
```
|
||||
|
||||
This allows:
|
||||
- Installing with one format and removing with another
|
||||
- Preventing duplicates regardless of specification format
|
||||
- Consistent module tracking across different input methods
|
||||
|
||||
## 🛡️ Conflict Prevention
|
||||
|
||||
The system prevents common conflicts:
|
||||
|
||||
### Directory Conflicts
|
||||
```bash
|
||||
# If 'mod-transmog' directory already exists:
|
||||
$ ./acore.sh module install mod-transmog:mod-transmog
|
||||
Possible solutions:
|
||||
1. Use a different directory name: mod-transmog:my-custom-name
|
||||
2. Remove the existing directory first
|
||||
3. Use the update command if this is the same module
|
||||
```
|
||||
|
||||
### Duplicate Module Prevention
|
||||
The system uses intelligent owner/name matching to prevent installing the same module multiple times, even when specified in different formats.
|
||||
|
||||
## 🚫 Module Exclusion
|
||||
|
||||
You can exclude modules from installation using the `MODULES_EXCLUDE_LIST` environment variable:
|
||||
|
||||
```bash
|
||||
# Exclude specific modules (space-separated)
|
||||
export MODULES_EXCLUDE_LIST="mod-test-module azerothcore/mod-dev-only"
|
||||
./acore.sh module install --all # Will skip excluded modules
|
||||
|
||||
# Supports cross-format matching
|
||||
export MODULES_EXCLUDE_LIST="https://github.com/azerothcore/mod-transmog.git"
|
||||
./acore.sh module install mod-transmog # Will be skipped as excluded
|
||||
```
|
||||
|
||||
The exclusion system:
|
||||
- Uses the same cross-format recognition as other module operations
|
||||
- Works with all installation methods (`install`, `install --all`)
|
||||
- Provides clear feedback when modules are skipped
|
||||
- Supports URLs, owner/name format, and simple names
|
||||
|
||||
## 🎨 Color Support
|
||||
|
||||
The module manager provides enhanced terminal output with colors:
|
||||
|
||||
- **Info**: Cyan text for informational messages
|
||||
- **Success**: Green text for successful operations
|
||||
- **Warning**: Yellow text for warnings
|
||||
- **Error**: Red text for errors
|
||||
- **Headers**: Bold cyan text for section headers
|
||||
|
||||
Color support is automatically disabled when:
|
||||
- Output is not to a terminal (piped/redirected)
|
||||
- `NO_COLOR` environment variable is set
|
||||
- Terminal doesn't support colors
|
||||
|
||||
You can force color output with:
|
||||
```bash
|
||||
export FORCE_COLOR=1
|
||||
```
|
||||
|
||||
## 🔄 Integration
|
||||
|
||||
### Including in Scripts
|
||||
```bash
|
||||
# Source the module functions
|
||||
source "$AC_PATH_INSTALLER/includes/modules-manager/modules.sh"
|
||||
|
||||
# Use module functions
|
||||
inst_module_install "mod-transmog:custom-dir@develop"
|
||||
```
|
||||
|
||||
### Testing
|
||||
The module system is tested through the main installer test suite:
|
||||
```bash
|
||||
./apps/installer/test/test_module_commands.bats
|
||||
```
|
||||
|
||||
## 📋 Module List Format
|
||||
|
||||
Modules are tracked in `conf/modules.list` with the format:
|
||||
```
|
||||
# Comments start with #
|
||||
repo_reference branch commit
|
||||
|
||||
# Examples:
|
||||
azerothcore/mod-transmog master abc123def456
|
||||
https://github.com/custom/mod-custom.git develop def456abc789
|
||||
mod-ale:custom-eluna-dir main 789abc123def
|
||||
```
|
||||
|
||||
The list maintains:
|
||||
- **Alphabetical ordering** by normalized owner/name for consistency
|
||||
- **Original format preservation** of how modules were specified
|
||||
- **Automatic deduplication** across different specification formats
|
||||
- **Custom directory tracking** when specified
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `MODULES_LIST_FILE` | Override default modules list path | `$AC_PATH_ROOT/conf/modules.list` |
|
||||
| `MODULES_EXCLUDE_LIST` | Space-separated list of modules to exclude | - |
|
||||
| `J_PATH_MODULES` | Modules installation directory | `$AC_PATH_ROOT/modules` |
|
||||
| `AC_PATH_ROOT` | AzerothCore root path | - |
|
||||
| `NO_COLOR` | Disable colored output | - |
|
||||
| `FORCE_COLOR` | Force colored output even when not TTY | - |
|
||||
|
||||
### Default Paths
|
||||
- **Modules list**: `$AC_PATH_ROOT/conf/modules.list`
|
||||
- **Installation directory**: `$J_PATH_MODULES` (flat structure, no owner subfolders)
|
||||
|
||||
## 🏗️ Architecture
|
||||
|
||||
### Core Functions
|
||||
|
||||
| Function | Purpose |
|
||||
|----------|---------|
|
||||
| `inst_module()` | Main dispatcher and interactive menu |
|
||||
| `inst_parse_module_spec()` | Parse advanced module syntax |
|
||||
| `inst_extract_owner_name()` | Normalize modules for cross-format recognition |
|
||||
| `inst_mod_list_*()` | Module list management (read/write/update) |
|
||||
| `inst_module_*()` | Module operations (install/update/remove/search) |
|
||||
|
||||
### Key Features
|
||||
|
||||
- **Flat Directory Structure**: All modules install directly under `modules/` without owner subdirectories
|
||||
- **Smart Conflict Detection**: Prevents directory name conflicts with helpful suggestions
|
||||
- **Cross-Platform Compatibility**: Works on Linux, macOS, and Windows (Git Bash)
|
||||
- **Version Compatibility**: Checks `acore-module.json` for AzerothCore version compatibility
|
||||
- **Git Integration**: Uses Joiner system for Git repository management
|
||||
|
||||
### Debug Mode
|
||||
|
||||
For debugging module operations, you can examine the generated commands:
|
||||
```bash
|
||||
# Check what Joiner commands would be executed
|
||||
tail -f /tmp/joiner_called.txt # In test environments
|
||||
```
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
When modifying the module manager:
|
||||
|
||||
1. **Maintain backwards compatibility** with existing module list format
|
||||
2. **Update tests** in `test_module_commands.bats` for new functionality
|
||||
3. **Update this documentation** for any new features or changes
|
||||
4. **Test cross-format recognition** thoroughly across all supported formats
|
||||
5. **Ensure helpful error messages** for common user mistakes
|
||||
6. **Test exclusion functionality** with various module specification formats
|
||||
7. **Verify color output** works correctly in different terminal environments
|
||||
|
||||
### Testing Guidelines
|
||||
|
||||
```bash
|
||||
# Run all module-related tests
|
||||
cd apps/installer
|
||||
bats test/test_module_commands.bats
|
||||
|
||||
# Test with different environments
|
||||
NO_COLOR=1 ./acore.sh module list
|
||||
FORCE_COLOR=1 ./acore.sh module help
|
||||
```
|
||||
7
apps/installer/includes/modules-manager/module-main.sh
Normal file
7
apps/installer/includes/modules-manager/module-main.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env bash

# Standalone entry point for the module manager: sources modules.sh from
# this directory and forwards all CLI arguments to the inst_module dispatcher.

CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" || exit ; pwd )

source "$CURRENT_PATH/modules.sh"

inst_module "$@"
1066
apps/installer/includes/modules-manager/modules.sh
Normal file
1066
apps/installer/includes/modules-manager/modules.sh
Normal file
File diff suppressed because it is too large
Load Diff
46
apps/installer/includes/os_configs/debian.sh
Normal file
46
apps/installer/includes/os_configs/debian.sh
Normal file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash

# Install the build/runtime dependencies for AzerothCore on Debian.
# Debian >= 12 is expected; older releases only get a warning (the
# install still proceeds, matching previous behavior).

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Set SUDO variable - one liner
SUDO=$([ "$EUID" -ne 0 ] && echo "sudo" || echo "")

# lsb_release is required below to detect the Debian version
if ! command -v lsb_release &>/dev/null ; then
    $SUDO apt-get install -y lsb-release
fi

DEBIAN_VERSION=$(lsb_release -sr)
DEBIAN_VERSION_MIN="12"

if [[ "$DEBIAN_VERSION" -lt "$DEBIAN_VERSION_MIN" ]]; then
    echo "########## ########## ##########"
    echo ""
    echo " using unsupported Debian version" "$DEBIAN_VERSION"
    echo " please update to Debian" "$DEBIAN_VERSION_MIN" "or later"
    echo ""
    echo "########## ########## ##########"
fi

$SUDO apt-get update -y

$SUDO apt-get install -y gdbserver gdb unzip curl \
    libncurses-dev libreadline-dev clang g++ \
    gcc git cmake make ccache \
    libssl-dev libbz2-dev \
    libboost-all-dev gnupg wget jq screen tmux expect

VAR_PATH="$CURRENT_PATH/../../../../var"

# run noninteractive install for MYSQL
# Version
MYSQL_APT_CONFIG_VERSION=0.8.36-1
# # # # #
# Abort if we cannot enter the download dir: dpkg/rm below must not run
# against the wrong working directory.
mkdir -p "$VAR_PATH/mysqlpackages" && cd "$VAR_PATH/mysqlpackages" || exit 1
# Download
wget "https://dev.mysql.com/get/mysql-apt-config_${MYSQL_APT_CONFIG_VERSION}_all.deb"
# Install. Use $SUDO (not a hardcoded sudo) so the script also works when
# running as root in containers where sudo is not installed.
$SUDO DEBIAN_FRONTEND="noninteractive" dpkg -i ./mysql-apt-config_${MYSQL_APT_CONFIG_VERSION}_all.deb
$SUDO apt update
$SUDO DEBIAN_FRONTEND="noninteractive" apt install -y mysql-server libmysqlclient-dev
# Cleanup
rm -v mysql-apt-config_${MYSQL_APT_CONFIG_VERSION}_all* && unset MYSQL_APT_CONFIG_VERSION
34
apps/installer/includes/os_configs/osx.sh
Normal file
34
apps/installer/includes/os_configs/osx.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
##########################################
## workaround for python upgrade issue https://github.com/actions/runner-images/issues/6817
# Remove the conflicting python-related symlinks one by one; '|| true'
# keeps going when a link is absent (same tolerance as before, but the
# 20 copy-pasted rm lines are collapsed into a single loop).
for py_link in \
    2to3 2to3-3.10 2to3-3.11 2to3-3.12 \
    idle3 idle3.10 idle3.11 idle3.12 \
    pydoc3 pydoc3.10 pydoc3.11 pydoc3.12 \
    python3 python3.10 python3.11 python3.12 \
    python3-config python3.10-config python3.11-config python3.12-config; do
    rm "/usr/local/bin/$py_link" || true
done
##########################################

brew update

##########################################
## workaround for cmake already being installed in the github runners
if ! command -v cmake &>/dev/null ; then
    brew install cmake
fi
##########################################

brew install openssl@3 readline boost bash-completion curl unzip mysql ccache expect tmux screen jq
56
apps/installer/includes/os_configs/ubuntu.sh
Normal file
56
apps/installer/includes/os_configs/ubuntu.sh
Normal file
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env bash

# Install the build/runtime dependencies for AzerothCore on Ubuntu.
# Supported releases: 22.04 and 24.04 (others only get a warning).

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Set SUDO variable - one liner
SUDO=$([ "$EUID" -ne 0 ] && echo "sudo" || echo "")

if ! command -v lsb_release &>/dev/null ; then
    $SUDO apt-get install -y lsb-release
fi

UBUNTU_VERSION=$(lsb_release -sr);

case $UBUNTU_VERSION in
    "22.04")
        ;;
    "24.04")
        ;;
    *)
        echo "########## ########## ##########"
        echo ""
        echo " using unsupported Ubuntu version " "$UBUNTU_VERSION"
        echo " please update to Ubuntu 22.04 or later"
        echo ""
        echo "########## ########## ##########"
        ;;
esac

$SUDO apt update

# shared deps
# NOTE: DEBIAN_FRONTEND is passed AFTER $SUDO so sudo forwards it to
# apt-get ('VAR=x sudo cmd' does not propagate through sudo's env reset).
$SUDO DEBIAN_FRONTEND="noninteractive" \
    apt-get -y install ccache clang cmake curl google-perftools libmysqlclient-dev make unzip jq screen tmux \
    libreadline-dev libncurses5-dev libncursesw5-dev libbz2-dev git gcc g++ libssl-dev \
    libncurses-dev libboost-all-dev gdb gdbserver expect

VAR_PATH="$CURRENT_PATH/../../../../var"


# Do not install MySQL if we are in docker (It will be used a docker container instead) or we are explicitly skipping it.
if [[ "${DOCKER:-}" != 1 && "${SKIP_MYSQL_INSTALL:-}" != 1 ]]; then
    # run noninteractive install for MYSQL 8.4 LTS
    wget https://dev.mysql.com/get/mysql-apt-config_0.8.35-1_all.deb -P "$VAR_PATH"
    # resolve expired key issue (use $SUDO, not hardcoded sudo, so running
    # as root in containers without sudo also works)
    $SUDO apt-key adv --keyserver keyserver.ubuntu.com --recv-keys A8D3785C
    $SUDO DEBIAN_FRONTEND="noninteractive" dpkg -i "$VAR_PATH/mysql-apt-config_0.8.35-1_all.deb"
    $SUDO apt-get update
    $SUDO DEBIAN_FRONTEND="noninteractive" apt-get install -y mysql-server
fi


if [[ -n "${CONTINUOUS_INTEGRATION:-}" ]]; then
    $SUDO systemctl enable mysql.service
    $SUDO systemctl start mysql.service
fi
29
apps/installer/includes/os_configs/windows.sh
Normal file
29
apps/installer/includes/os_configs/windows.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
# install chocolatey before

# powershell.exe -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"

# install automatically following packages:
# cmake
# git
# microsoft-build-tools
# mysql

INSTALL_ARGS=()

# In CI, suppress download progress noise; locally, refresh the cmake
# install so the env vars are (re)set and pull in git + Visual Studio.
if [[ -n "${CONTINUOUS_INTEGRATION:-}" ]]; then
    INSTALL_ARGS+=(--no-progress)
else
    { # try
        choco uninstall -y -n cmake.install cmake # needed to make sure that following install set the env properly
    } || { # catch
        echo "nothing to do"
    }

    choco install -y --skip-checksums "${INSTALL_ARGS[@]}" git visualstudio2022community
fi

# (removed a duplicated '-y' that was passed twice on the cmake line)
choco install -y --skip-checksums "${INSTALL_ARGS[@]}" cmake.install --installargs 'ADD_CMAKE_TO_PATH=System'
choco install -y --skip-checksums "${INSTALL_ARGS[@]}" visualstudio2022-workload-nativedesktop
choco install -y --skip-checksums "${INSTALL_ARGS[@]}" openssl --force --version=3.5.4
choco install -y --skip-checksums "${INSTALL_ARGS[@]}" boost-msvc-14.3 --force --version=1.87.0
choco install -y --skip-checksums "${INSTALL_ARGS[@]}" mysql --force --version=8.4.6
119
apps/installer/main.sh
Normal file
119
apps/installer/main.sh
Normal file
@@ -0,0 +1,119 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Dashboard Script
#
# This script provides an interactive menu system for AzerothCore management
# using the unified menu system library.
#
# Usage:
#   ./acore.sh                    - Interactive mode with numeric and text selection
#   ./acore.sh <command> [args]   - Direct command execution (only text commands, no numbers)
#
# Interactive Mode:
#   - Select options by number (1, 2, 3...), command name (init, compiler, etc.),
#     or short alias (i, c, etc.)
#   - All selection methods work in interactive mode
#
# Direct Command Mode:
#   - Only command names and short aliases are accepted (e.g., './acore.sh compiler build', './acore.sh c build')
#   - Numeric selection is disabled to prevent confusion with command arguments
#   - Examples: './acore.sh init', './acore.sh compiler clean', './acore.sh module install mod-name'
#
# Menu System:
#   - Uses unified menu system from bash_shared/menu_system.sh
#   - Single source of truth for menu definitions
#   - Consistent behavior across all AzerothCore tools

# Resolve this script's directory so it can be launched from anywhere.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_PATH/includes/includes.sh"
source "$AC_PATH_APPS/bash_shared/menu_system.sh"
# Menu: single ordered source of truth (no functions in strings)
# Format: "key|short|description"
# NOTE: short aliases must be unique. 'setup-db' previously reused 'r',
# colliding with 'reset' and making the alias ambiguous; it is now 'db'.
menu_items=(
    "init|i|First Installation"
    "install-deps|d|Configure OS dep"
    "pull|u|Update Repository"
    "reset|r|Reset & Clean Repository"
    "setup-db|db|Install db only"
    "compiler|c|Run compiler tool"
    "module|m|Module manager (search/install/update/remove)"
    "client-data|gd|download client data from github repository (beta)"
    "run-worldserver|rw|execute a simple restarter for worldserver"
    "run-authserver|ra|execute a simple restarter for authserver"
    "test|t|Run test framework"
    "docker|dr|Run docker tools"
    "version|v|Show AzerothCore version"
    "service-manager|sm|Run service manager to run authserver and worldserver in background"
    "config|cf|Configuration manager"
    "quit|q|Exit from this menu"
)
# Menu command handler - invoked by the menu system once per selection.
# $1 is the menu key; remaining arguments are forwarded to the chosen tool.
# Returns non-zero for unknown keys; docker/version/service-manager/quit
# terminate the shell directly via exit.
function handle_menu_command() {
    local selected="$1"
    shift

    case "$selected" in
        init)            inst_allInOne ;;
        install-deps)    inst_configureOS ;;
        pull)            inst_updateRepo ;;
        reset)           inst_resetRepo ;;
        setup-db)        inst_dbCreate ;;
        compiler)        bash "$AC_PATH_APPS/compiler/compiler.sh" "$@" ;;
        module)          bash "$AC_PATH_APPS/installer/includes/modules-manager/module-main.sh" "$@" ;;
        client-data)     inst_download_client_data ;;
        run-worldserver) inst_simple_restarter worldserver ;;
        run-authserver)  inst_simple_restarter authserver ;;
        test)            bash "$AC_PATH_APPS/test-framework/test-main.sh" "$@" ;;
        docker)
            DOCKER=1 bash "$AC_PATH_ROOT/apps/docker/docker-cmd.sh" "$@"
            exit
            ;;
        version)
            printf "AzerothCore Rev. %s\n" "$ACORE_VERSION"
            exit
            ;;
        service-manager)
            bash "$AC_PATH_APPS/startup-scripts/src/service-manager.sh" "$@"
            exit
            ;;
        config)          bash "$AC_PATH_APPS/installer/includes/config/config-main.sh" "$@" ;;
        quit)
            echo "Goodbye!"
            exit
            ;;
        *)
            echo "Invalid option. Use --help to see available commands."
            return 1
            ;;
    esac
}
# Run the menu system.
# Items before the first '--' define the menu; arguments after the second
# '--' are the user's CLI args (empty in interactive mode).
menu_run_with_items "ACORE DASHBOARD" handle_menu_command -- "${menu_items[@]}" -- "$@"
14
apps/installer/test/bats.conf
Normal file
14
apps/installer/test/bats.conf
Normal file
@@ -0,0 +1,14 @@
|
||||
# BATS Test Configuration

# Set test timeout (in seconds)
export BATS_TEST_TIMEOUT=30

# Enable verbose output for debugging
export BATS_VERBOSE_RUN=1

# Test output format
export BATS_FORMATTER=pretty

# Disable parallel execution so tests that share fixture files run deterministically
export BATS_NO_PARALLELIZE_ACROSS_FILES=1
export BATS_NO_PARALLELIZE_WITHIN_FILE=1
755
apps/installer/test/test_module_commands.bats
Executable file
755
apps/installer/test/test_module_commands.bats
Executable file
@@ -0,0 +1,755 @@
|
||||
#!/usr/bin/env bats

# Tests for installer module commands (search/install/update/remove)
# Focused on installer:module install behavior using a mocked joiner

# Shared BATS helpers: temp sandbox setup/teardown and assertion utilities.
load '../../test-framework/bats_libs/acore-support'
load '../../test-framework/bats_libs/acore-assert'
setup() {
    # Creates $TEST_DIR (a throwaway sandbox) via the shared helper.
    acore_test_setup
    # Point to the installer src directory (not needed in this test)

    # Set installer/paths environment for the test
    export AC_PATH_APPS="$TEST_DIR/apps"
    export AC_PATH_ROOT="$TEST_DIR"
    export AC_PATH_DEPS="$TEST_DIR/deps"
    export AC_PATH_MODULES="$TEST_DIR/modules"
    export MODULES_LIST_FILE="$TEST_DIR/conf/modules.list"

    # Create stubbed deps: joiner.sh (sourced by includes) and semver.
    # The stubs record their invocation into $TEST_DIR/joiner_called.txt so
    # each test can assert which joiner operation ran and with what args.
    mkdir -p "$TEST_DIR/deps/acore/joiner"
    cat > "$TEST_DIR/deps/acore/joiner/joiner.sh" << 'EOF'
#!/usr/bin/env bash
# Stub joiner functions used by installer
Joiner:add_repo() {
    # arguments: url name branch basedir
    echo "ADD $@" > "$TEST_DIR/joiner_called.txt"
    return 0
}
Joiner:upd_repo() {
    echo "UPD $@" > "$TEST_DIR/joiner_called.txt"
    return 0
}
Joiner:remove() {
    echo "REM $@" > "$TEST_DIR/joiner_called.txt"
    return 0
}
EOF
    chmod +x "$TEST_DIR/deps/acore/joiner/joiner.sh"

    mkdir -p "$TEST_DIR/deps/semver_bash"
    # Minimal semver stub
    cat > "$TEST_DIR/deps/semver_bash/semver.sh" << 'EOF'
#!/usr/bin/env bash
# semver stub
semver::satisfies() { return 0; }
EOF
    chmod +x "$TEST_DIR/deps/semver_bash/semver.sh"

    # Provide a minimal compiler includes file expected by installer
    mkdir -p "$TEST_DIR/apps/compiler/includes"
    touch "$TEST_DIR/apps/compiler/includes/includes.sh"

    # Provide minimal bash_shared includes to satisfy installer include
    mkdir -p "$TEST_DIR/apps/bash_shared"
    cat > "$TEST_DIR/apps/bash_shared/includes.sh" << 'EOF'
#!/usr/bin/env bash
# minimal stub
EOF

    # Copy the menu system needed by modules.sh
    cp "$AC_TEST_ROOT/apps/bash_shared/menu_system.sh" "$TEST_DIR/apps/bash_shared/"

    # Copy the real installer app into the test apps dir
    mkdir -p "$TEST_DIR/apps"
    cp -r "$(cd "$AC_TEST_ROOT/apps/installer" && pwd)" "$TEST_DIR/apps/installer"
}
teardown() {
    # Removes the $TEST_DIR sandbox created by acore_test_setup.
    acore_test_teardown
}
@test "module install should call joiner and record entry in modules list" {
|
||||
cd "$TEST_DIR"
|
||||
|
||||
# Source installer includes and call the install function directly to avoid menu interaction
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_install example-module@main:abcd1234"
|
||||
|
||||
# Check that joiner was called
|
||||
[ -f "$TEST_DIR/joiner_called.txt" ]
|
||||
grep -q "ADD" "$TEST_DIR/joiner_called.txt"
|
||||
|
||||
# Check modules list was created and contains the repo_ref and branch
|
||||
[ -f "$TEST_DIR/conf/modules.list" ]
|
||||
grep -q "azerothcore/example-module main" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
@test "module install with owner/name format should work" {
|
||||
cd "$TEST_DIR"
|
||||
|
||||
# Test with owner/name format
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_install myorg/mymodule"
|
||||
|
||||
# Check that joiner was called with correct URL
|
||||
[ -f "$TEST_DIR/joiner_called.txt" ]
|
||||
grep -q "ADD https://github.com/myorg/mymodule mymodule" "$TEST_DIR/joiner_called.txt"
|
||||
|
||||
# Check modules list contains the entry
|
||||
[ -f "$TEST_DIR/conf/modules.list" ]
|
||||
grep -q "myorg/mymodule" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
@test "module remove should call joiner remove and update modules list" {
|
||||
cd "$TEST_DIR"
|
||||
|
||||
# First install a module
|
||||
bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_install test-module"
|
||||
|
||||
# Then remove it
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_remove test-module"
|
||||
|
||||
# Check that joiner remove was called
|
||||
[ -f "$TEST_DIR/joiner_called.txt" ]
|
||||
# With flat structure, basedir is empty; ensure name is present
|
||||
grep -q "REM test-module" "$TEST_DIR/joiner_called.txt"
|
||||
|
||||
# Check modules list no longer contains the entry
|
||||
[ -f "$TEST_DIR/conf/modules.list" ]
|
||||
! grep -q "azerothcore/test-module" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
# Tests for intelligent module management (duplicate prevention and cross-format removal)
|
||||
|
||||
@test "inst_extract_owner_name should extract owner/name from various formats" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test simple name
|
||||
run inst_extract_owner_name "mod-transmog"
|
||||
[ "$output" = "azerothcore/mod-transmog" ]
|
||||
|
||||
# Test owner/name format
|
||||
run inst_extract_owner_name "azerothcore/mod-transmog"
|
||||
[ "$output" = "azerothcore/mod-transmog" ]
|
||||
|
||||
# Test HTTPS URL
|
||||
run inst_extract_owner_name "https://github.com/azerothcore/mod-transmog.git"
|
||||
[ "$output" = "azerothcore/mod-transmog" ]
|
||||
|
||||
# Test SSH URL
|
||||
run inst_extract_owner_name "git@github.com:azerothcore/mod-transmog.git"
|
||||
[ "$output" = "azerothcore/mod-transmog" ]
|
||||
|
||||
# Test GitLab URL
|
||||
run inst_extract_owner_name "https://gitlab.com/myorg/mymodule.git"
|
||||
[ "$output" = "myorg/mymodule" ]
|
||||
}
|
||||
|
||||
@test "inst_extract_owner_name should handle URLs with ports correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test HTTPS URL with port
|
||||
run inst_extract_owner_name "https://example.com:8080/user/repo.git"
|
||||
[ "$output" = "user/repo" ]
|
||||
|
||||
# Test SSH URL with port
|
||||
run inst_extract_owner_name "ssh://git@example.com:2222/owner/module"
|
||||
[ "$output" = "owner/module" ]
|
||||
|
||||
# Test URL with port and custom directory (should ignore the directory part)
|
||||
run inst_extract_owner_name "https://gitlab.internal:9443/team/project.git:custom-dir"
|
||||
[ "$output" = "team/project" ]
|
||||
|
||||
# Test complex URL with port (should extract owner/name correctly)
|
||||
run inst_extract_owner_name "https://git.company.com:8443/department/awesome-module.git"
|
||||
[ "$output" = "department/awesome-module" ]
|
||||
}
|
||||
|
||||
@test "duplicate module entries should be prevented across different formats" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Add module via simple name
|
||||
inst_mod_list_upsert "mod-transmog" "master" "abc123"
|
||||
|
||||
# Verify it's in the list
|
||||
grep -q "mod-transmog master abc123" "$TEST_DIR/conf/modules.list"
|
||||
|
||||
# Add same module via owner/name format - should replace, not duplicate
|
||||
inst_mod_list_upsert "azerothcore/mod-transmog" "dev" "def456"
|
||||
|
||||
# Should only have one entry (the new one)
|
||||
[ "$(grep -c "azerothcore/mod-transmog" "$TEST_DIR/conf/modules.list")" -eq 1 ]
|
||||
grep -q "azerothcore/mod-transmog dev def456" "$TEST_DIR/conf/modules.list"
|
||||
! grep -q "mod-transmog master abc123" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
@test "module installed via URL should be recognized when checking with different formats" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Install via HTTPS URL
|
||||
inst_mod_list_upsert "https://github.com/azerothcore/mod-transmog.git" "master" "abc123"
|
||||
|
||||
# Should be detected as installed using simple name
|
||||
run inst_mod_is_installed "mod-transmog"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should be detected as installed using owner/name
|
||||
run inst_mod_is_installed "azerothcore/mod-transmog"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should be detected as installed using SSH URL
|
||||
run inst_mod_is_installed "git@github.com:azerothcore/mod-transmog.git"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Non-existent module should not be detected
|
||||
run inst_mod_is_installed "mod-nonexistent"
|
||||
[ "$status" -ne 0 ]
|
||||
}
|
||||
|
||||
@test "module installed via URL with port should be recognized correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Install via URL with port
|
||||
inst_mod_list_upsert "https://gitlab.internal:9443/myorg/my-module.git" "master" "abc123"
|
||||
|
||||
# Should be detected as installed using normalized owner/name
|
||||
run inst_mod_is_installed "myorg/my-module"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should be detected when checking with different URL format
|
||||
run inst_mod_is_installed "ssh://git@gitlab.internal:9443/myorg/my-module"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should be detected when checking with custom directory syntax
|
||||
run inst_mod_is_installed "myorg/my-module:custom-dir"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Different module should not be detected
|
||||
run inst_mod_is_installed "myorg/different-module"
|
||||
[ "$status" -ne 0 ]
|
||||
}
|
||||
|
||||
@test "cross-format module removal should work" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Install via SSH URL
|
||||
inst_mod_list_upsert "git@github.com:azerothcore/mod-transmog.git" "master" "abc123"
|
||||
|
||||
# Verify it's installed
|
||||
grep -q "git@github.com:azerothcore/mod-transmog.git" "$TEST_DIR/conf/modules.list"
|
||||
|
||||
# Remove using simple name
|
||||
inst_mod_list_remove "mod-transmog"
|
||||
|
||||
# Should be completely removed
|
||||
! grep -q "azerothcore/mod-transmog" "$TEST_DIR/conf/modules.list"
|
||||
! grep -q "git@github.com:azerothcore/mod-transmog.git" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
@test "module installation should prevent duplicates when already installed" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Install via simple name first
|
||||
inst_mod_list_upsert "mod-worldchat" "master" "abc123"
|
||||
|
||||
# Try to install same module via URL - should detect it's already installed
|
||||
run inst_mod_is_installed "https://github.com/azerothcore/mod-worldchat.git"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Add via URL should replace the existing entry
|
||||
inst_mod_list_upsert "https://github.com/azerothcore/mod-worldchat.git" "dev" "def456"
|
||||
|
||||
# Should only have one entry
|
||||
[ "$(grep -c "azerothcore/mod-worldchat" "$TEST_DIR/conf/modules.list")" -eq 1 ]
|
||||
grep -q "https://github.com/azerothcore/mod-worldchat.git dev def456" "$TEST_DIR/conf/modules.list"
|
||||
}
|
||||
|
||||
@test "module update --all uses flat structure (no branch subfolders)" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Prepare modules.list with one entry and a matching local directory
|
||||
mkdir -p "$TEST_DIR/conf"
|
||||
echo "azerothcore/mod-transmog master abc123" > "$TEST_DIR/conf/modules.list"
|
||||
mkdir -p "$TEST_DIR/modules/mod-transmog"
|
||||
|
||||
# Run update all
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_update --all"
|
||||
|
||||
# Verify Joiner:upd_repo received flat structure args (no basedir)
|
||||
[ -f "$TEST_DIR/joiner_called.txt" ]
|
||||
grep -q "UPD https://github.com/azerothcore/mod-transmog mod-transmog master" "$TEST_DIR/joiner_called.txt"
|
||||
}
|
||||
|
||||
@test "module update specific uses flat structure with override branch" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Create local directory so update proceeds
|
||||
mkdir -p "$TEST_DIR/modules/mymodule"
|
||||
|
||||
# Run update specifying owner/name and branch
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_update myorg/mymodule@dev"
|
||||
|
||||
# Should call joiner with name 'mymodule' and branch 'dev' (no basedir)
|
||||
[ -f "$TEST_DIR/joiner_called.txt" ]
|
||||
grep -q "UPD https://github.com/myorg/mymodule mymodule dev" "$TEST_DIR/joiner_called.txt"
|
||||
}
|
||||
|
||||
@test "custom directory names should work with new syntax" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test parsing with custom directory name
|
||||
run inst_parse_module_spec "mod-transmog:my-custom-dir@develop:abc123"
|
||||
[ "$status" -eq 0 ]
|
||||
# Should output: repo_ref owner name branch commit url dirname
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "azerothcore/mod-transmog" ]
|
||||
[ "$owner" = "azerothcore" ]
|
||||
[ "$name" = "mod-transmog" ]
|
||||
[ "$branch" = "develop" ]
|
||||
[ "$commit" = "abc123" ]
|
||||
[ "$url" = "https://github.com/azerothcore/mod-transmog" ]
|
||||
[ "$dirname" = "my-custom-dir" ]
|
||||
}
|
||||
|
||||
@test "directory conflict detection should work" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Create a fake existing directory
|
||||
mkdir -p "$TEST_DIR/modules/existing-dir"
|
||||
|
||||
# Should detect conflict
|
||||
run inst_check_module_conflict "existing-dir" "mod-test"
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "Directory 'existing-dir' already exists" ]]
|
||||
[[ "$output" =~ "Use a different directory name: mod-test:my-custom-name" ]]
|
||||
|
||||
# Should not detect conflict for non-existing directory
|
||||
run inst_check_module_conflict "non-existing-dir" "mod-test"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "module update should work with custom directories" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# First add module with custom directory to list
|
||||
inst_mod_list_upsert "azerothcore/mod-transmog:custom-dir" "master" "abc123"
|
||||
|
||||
# Create fake module directory structure
|
||||
mkdir -p "$TEST_DIR/modules/custom-dir/.git"
|
||||
echo "ref: refs/heads/master" > "$TEST_DIR/modules/custom-dir/.git/HEAD"
|
||||
|
||||
# Mock git commands in the fake module directory
|
||||
cat > "$TEST_DIR/modules/custom-dir/.git/config" << 'EOF'
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
[remote "origin"]
|
||||
url = https://github.com/azerothcore/mod-transmog
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
[branch "master"]
|
||||
remote = origin
|
||||
merge = refs/heads/master
|
||||
EOF
|
||||
|
||||
# Test update with custom directory should work
|
||||
# Note: This would require more complex mocking for full integration test
|
||||
# For now, just test the parsing recognizes the custom directory
|
||||
run inst_parse_module_spec "azerothcore/mod-transmog:custom-dir"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$dirname" = "custom-dir" ]
|
||||
}
|
||||
|
||||
@test "URL formats should be properly normalized" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test various URL formats produce same owner/name
|
||||
run inst_extract_owner_name "https://github.com/azerothcore/mod-transmog"
|
||||
local url_format="$output"
|
||||
|
||||
run inst_extract_owner_name "https://github.com/azerothcore/mod-transmog.git"
|
||||
local url_git_format="$output"
|
||||
|
||||
run inst_extract_owner_name "git@github.com:azerothcore/mod-transmog.git"
|
||||
local ssh_format="$output"
|
||||
|
||||
run inst_extract_owner_name "azerothcore/mod-transmog"
|
||||
local owner_name_format="$output"
|
||||
|
||||
run inst_extract_owner_name "mod-transmog"
|
||||
local simple_format="$output"
|
||||
|
||||
# All should normalize to the same owner/name
|
||||
[ "$url_format" = "azerothcore/mod-transmog" ]
|
||||
[ "$url_git_format" = "azerothcore/mod-transmog" ]
|
||||
[ "$ssh_format" = "azerothcore/mod-transmog" ]
|
||||
[ "$owner_name_format" = "azerothcore/mod-transmog" ]
|
||||
[ "$simple_format" = "azerothcore/mod-transmog" ]
|
||||
}
|
||||
|
||||
# Tests for module exclusion functionality
|
||||
|
||||
@test "module exclusion should work with MODULES_EXCLUDE_LIST" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test exclusion with simple name
|
||||
export MODULES_EXCLUDE_LIST="mod-test-module"
|
||||
run inst_mod_is_excluded "mod-test-module"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Test exclusion with owner/name format
|
||||
export MODULES_EXCLUDE_LIST="azerothcore/mod-test"
|
||||
run inst_mod_is_excluded "mod-test"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Test exclusion with space-separated list
|
||||
export MODULES_EXCLUDE_LIST="mod-one mod-two mod-three"
|
||||
run inst_mod_is_excluded "mod-two"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Test exclusion with newline-separated list
|
||||
export MODULES_EXCLUDE_LIST="
|
||||
mod-alpha
|
||||
mod-beta
|
||||
mod-gamma
|
||||
"
|
||||
run inst_mod_is_excluded "mod-beta"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Test exclusion with URL format
|
||||
export MODULES_EXCLUDE_LIST="https://github.com/azerothcore/mod-transmog.git"
|
||||
run inst_mod_is_excluded "mod-transmog"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Test non-excluded module
|
||||
export MODULES_EXCLUDE_LIST="mod-other"
|
||||
run inst_mod_is_excluded "mod-transmog"
|
||||
[ "$status" -eq 1 ]
|
||||
|
||||
# Test empty exclusion list
|
||||
unset MODULES_EXCLUDE_LIST
|
||||
run inst_mod_is_excluded "mod-transmog"
|
||||
[ "$status" -eq 1 ]
|
||||
}
|
||||
|
||||
@test "install --all should skip excluded modules" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Setup modules list with excluded module
|
||||
mkdir -p "$TEST_DIR/conf"
|
||||
cat > "$TEST_DIR/conf/modules.list" << 'EOF'
|
||||
azerothcore/mod-transmog master abc123
|
||||
azerothcore/mod-excluded master def456
|
||||
EOF
|
||||
|
||||
# Set exclusion list
|
||||
export MODULES_EXCLUDE_LIST="mod-excluded"
|
||||
|
||||
# Mock the install process to capture output
|
||||
run bash -c "source '$TEST_DIR/apps/installer/includes/includes.sh' && inst_module_install --all 2>&1"
|
||||
|
||||
# Should show that excluded module was skipped
|
||||
[[ "$output" == *"azerothcore/mod-excluded"* && "$output" == *"Excluded by MODULES_EXCLUDE_LIST"* && "$output" == *"skipping"* ]]
|
||||
}
|
||||
|
||||
@test "exclusion should work with multiple formats in same list" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test multiple exclusion formats
|
||||
export MODULES_EXCLUDE_LIST="mod-test https://github.com/azerothcore/mod-transmog.git custom/mod-other"
|
||||
|
||||
run inst_mod_is_excluded "mod-test"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run inst_mod_is_excluded "azerothcore/mod-transmog"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run inst_mod_is_excluded "custom/mod-other"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run inst_mod_is_excluded "mod-allowed"
|
||||
[ "$status" -eq 1 ]
|
||||
}
|
||||
|
||||
# Tests for color support functionality
|
||||
|
||||
@test "color functions should work correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test that print functions exist and work
|
||||
run print_info "test message"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run print_warn "test warning"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run print_error "test error"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run print_success "test success"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run print_skip "test skip"
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
run print_header "test header"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "color support should respect NO_COLOR environment variable" {
|
||||
cd "$TEST_DIR"
|
||||
|
||||
# Test with NO_COLOR set
|
||||
export NO_COLOR=1
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Colors should be empty when NO_COLOR is set
|
||||
[ -z "$C_RED" ]
|
||||
[ -z "$C_GREEN" ]
|
||||
[ -z "$C_RESET" ]
|
||||
}
|
||||
|
||||
# Tests for interactive menu system
|
||||
|
||||
@test "module help should display comprehensive help" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
run inst_module_help
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should contain key sections
|
||||
[[ "$output" =~ "Module Manager Help" ]]
|
||||
[[ "$output" =~ "Usage:" ]]
|
||||
[[ "$output" =~ "Module Specification Syntax:" ]]
|
||||
[[ "$output" =~ "Examples:" ]]
|
||||
}
|
||||
|
||||
@test "module list should show installed modules correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Setup modules list
|
||||
mkdir -p "$TEST_DIR/conf"
|
||||
cat > "$TEST_DIR/conf/modules.list" << 'EOF'
|
||||
azerothcore/mod-transmog master abc123
|
||||
custom/mod-test develop def456
|
||||
EOF
|
||||
|
||||
run inst_module_list
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
# Should show both modules
|
||||
[[ "$output" =~ "mod-transmog" ]]
|
||||
[[ "$output" =~ "custom/mod-test" ]]
|
||||
[[ "$output" =~ "master" ]]
|
||||
[[ "$output" =~ "develop" ]]
|
||||
}
|
||||
|
||||
@test "module list should handle empty list gracefully" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Ensure empty modules list
|
||||
mkdir -p "$TEST_DIR/conf"
|
||||
touch "$TEST_DIR/conf/modules.list"
|
||||
|
||||
run inst_module_list
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "No modules installed" ]]
|
||||
}
|
||||
|
||||
# Tests for advanced parsing edge cases
|
||||
|
||||
@test "parsing should handle complex URL formats" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test GitLab URL with custom directory and branch
|
||||
run inst_parse_module_spec "https://gitlab.com/myorg/mymodule.git:custom-dir@develop:abc123"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "https://gitlab.com/myorg/mymodule.git" ]
|
||||
[ "$owner" = "myorg" ]
|
||||
[ "$name" = "mymodule" ]
|
||||
[ "$branch" = "develop" ]
|
||||
[ "$commit" = "abc123" ]
|
||||
[ "$dirname" = "custom-dir" ]
|
||||
}
|
||||
|
||||
@test "parsing should handle URLs with ports correctly (fix for port/dirname confusion)" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test HTTPS URL with port - should NOT treat port as dirname
|
||||
run inst_parse_module_spec "https://example.com:8080/user/repo.git"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "https://example.com:8080/user/repo.git" ]
|
||||
[ "$owner" = "user" ]
|
||||
[ "$name" = "repo" ]
|
||||
[ "$branch" = "-" ]
|
||||
[ "$commit" = "-" ]
|
||||
[ "$url" = "https://example.com:8080/user/repo.git" ]
|
||||
[ "$dirname" = "repo" ] # Should default to repo name, NOT port number
|
||||
}
|
||||
|
||||
@test "parsing should handle URLs with ports and custom directory correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test URL with port AND custom directory - should parse custom directory correctly
|
||||
run inst_parse_module_spec "https://example.com:8080/user/repo.git:custom-dir"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "https://example.com:8080/user/repo.git" ]
|
||||
[ "$owner" = "user" ]
|
||||
[ "$name" = "repo" ]
|
||||
[ "$branch" = "-" ]
|
||||
[ "$commit" = "-" ]
|
||||
[ "$url" = "https://example.com:8080/user/repo.git" ]
|
||||
[ "$dirname" = "custom-dir" ] # Should be custom-dir, not port number
|
||||
}
|
||||
|
||||
@test "parsing should handle SSH URLs with ports correctly" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test SSH URL with port
|
||||
run inst_parse_module_spec "ssh://git@example.com:2222/user/repo"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "ssh://git@example.com:2222/user/repo" ]
|
||||
[ "$owner" = "user" ]
|
||||
[ "$name" = "repo" ]
|
||||
[ "$dirname" = "repo" ] # Should be repo name, not port number
|
||||
}
|
||||
|
||||
@test "parsing should handle SSH URLs with ports and custom directory" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test SSH URL with port and custom directory
|
||||
run inst_parse_module_spec "ssh://git@example.com:2222/user/repo:my-custom-dir@develop"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "ssh://git@example.com:2222/user/repo" ]
|
||||
[ "$owner" = "user" ]
|
||||
[ "$name" = "repo" ]
|
||||
[ "$branch" = "develop" ]
|
||||
[ "$dirname" = "my-custom-dir" ]
|
||||
}
|
||||
|
||||
@test "parsing should handle complex URLs with ports, custom dirs, and branches" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Test comprehensive URL with port, custom directory, branch, and commit
|
||||
run inst_parse_module_spec "https://gitlab.example.com:9443/myorg/myrepo.git:custom-name@feature-branch:abc123def"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "https://gitlab.example.com:9443/myorg/myrepo.git" ]
|
||||
[ "$owner" = "myorg" ]
|
||||
[ "$name" = "myrepo" ]
|
||||
[ "$branch" = "feature-branch" ]
|
||||
[ "$commit" = "abc123def" ]
|
||||
[ "$url" = "https://gitlab.example.com:9443/myorg/myrepo.git" ]
|
||||
[ "$dirname" = "custom-name" ]
|
||||
}
|
||||
|
||||
@test "URL port parsing regression test - ensure ports are not confused with directory names" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# These are the problematic cases that the fix addresses
|
||||
local test_cases=(
|
||||
"https://example.com:8080/repo.git"
|
||||
"https://gitlab.internal:9443/group/project.git"
|
||||
"ssh://git@server.com:2222/owner/repo"
|
||||
"https://git.company.com:8443/team/module.git"
|
||||
)
|
||||
|
||||
for spec in "${test_cases[@]}"; do
|
||||
run inst_parse_module_spec "$spec"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
|
||||
# Critical: dirname should NEVER be a port number
|
||||
[[ ! "$dirname" =~ ^[0-9]+$ ]] || {
|
||||
echo "FAIL: Port number '$dirname' incorrectly parsed as directory name for spec: $spec"
|
||||
return 1
|
||||
}
|
||||
|
||||
# dirname should be the repository name by default
|
||||
local expected_name
|
||||
if [[ "$spec" =~ /([^/]+)(\.git)?$ ]]; then
|
||||
expected_name="${BASH_REMATCH[1]}"
|
||||
expected_name="${expected_name%.git}"
|
||||
fi
|
||||
[ "$dirname" = "$expected_name" ] || {
|
||||
echo "FAIL: Expected dirname '$expected_name' but got '$dirname' for spec: $spec"
|
||||
return 1
|
||||
}
|
||||
done
|
||||
}
|
||||
|
||||
@test "parsing should handle URL with custom directory but no branch" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
run inst_parse_module_spec "https://github.com/owner/repo.git:my-dir"
|
||||
[ "$status" -eq 0 ]
|
||||
IFS=' ' read -r repo_ref owner name branch commit url dirname <<< "$output"
|
||||
[ "$repo_ref" = "https://github.com/owner/repo.git" ]
|
||||
[ "$dirname" = "my-dir" ]
|
||||
[ "$branch" = "-" ]
|
||||
}
|
||||
|
||||
@test "modules list should maintain alphabetical order" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
# Add modules in random order
|
||||
inst_mod_list_upsert "zeta/mod-z" "master" "abc"
|
||||
inst_mod_list_upsert "alpha/mod-a" "master" "def"
|
||||
inst_mod_list_upsert "beta/mod-b" "master" "ghi"
|
||||
|
||||
# Read the list and verify alphabetical order
|
||||
local entries=()
|
||||
while read -r repo_ref branch commit; do
|
||||
[[ -z "$repo_ref" ]] && continue
|
||||
entries+=("$repo_ref")
|
||||
done < <(inst_mod_list_read)
|
||||
|
||||
# Should be in alphabetical order by owner/name
|
||||
[ "${entries[0]}" = "alpha/mod-a" ]
|
||||
[ "${entries[1]}" = "beta/mod-b" ]
|
||||
[ "${entries[2]}" = "zeta/mod-z" ]
|
||||
}
|
||||
|
||||
@test "module dispatcher should handle unknown commands gracefully" {
|
||||
cd "$TEST_DIR"
|
||||
source "$TEST_DIR/apps/installer/includes/includes.sh"
|
||||
|
||||
run inst_module "unknown-command"
|
||||
[ "$status" -eq 1 ]
|
||||
[[ "$output" =~ "Invalid option" ]]
|
||||
}
|
||||
685
apps/startup-scripts/README.md
Normal file
685
apps/startup-scripts/README.md
Normal file
@@ -0,0 +1,685 @@
|
||||
# AzerothCore Startup Scripts
|
||||
|
||||
A comprehensive suite of scripts for managing AzerothCore server instances with advanced session management, automatic restart capabilities, and production-ready service management.
|
||||
|
||||
## 📋 Table of Contents
|
||||
|
||||
- [Overview](#overview)
|
||||
- [Components](#components)
|
||||
- [Quick Start](#quick-start)
|
||||
- [Configuration](#configuration)
|
||||
- [Detailed Usage](#detailed-usage)
|
||||
- [Multiple Realms Setup](#multiple-realms-setup)
|
||||
- [Service Management](#service-management)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
|
||||
## 🎯 Overview
|
||||
|
||||
The AzerothCore startup scripts provide multiple approaches to running server instances:
|
||||
|
||||
1. **Development/Testing**: Simple execution for debugging and development
|
||||
2. **Production with Restarts**: Automatic restart on crashes with crash detection
|
||||
3. **Background Services**: Production-ready service management with PM2 or systemd
|
||||
4. **Session Management**: Interactive console access via tmux/screen
|
||||
|
||||
All scripts are integrated into the `acore.sh` dashboard for easy access.
|
||||
|
||||
### 📦 Automatic Deployment
|
||||
|
||||
**Important**: When you compile AzerothCore using the acore dashboard (`./acore.sh compiler build`), all startup scripts are automatically copied from `apps/startup-scripts/src/` to your `bin/` folder. This means:
|
||||
|
||||
- ✅ **Portable Deployment**: You can copy the entire `bin/` folder to different servers
|
||||
- ✅ **Self-Contained**: All restart and service management tools travel with your binaries
|
||||
- ✅ **No Additional Setup**: Scripts work immediately after deployment
|
||||
- ✅ **Production Ready**: Deploy to production servers without needing the full source code
|
||||
|
||||
This makes it easy to deploy your compiled binaries along with the management scripts to production environments where you may not have the full AzerothCore source code.
|
||||
|
||||
## 🔧 Components
|
||||
|
||||
### Core Scripts
|
||||
|
||||
- **`run-engine`**: Advanced script with session management and configuration priority
|
||||
- **`simple-restarter`**: Wrapper around starter with restart functionality (legacy compatibility)
|
||||
- **`starter`**: Basic binary execution with optional GDB support
|
||||
- **`service-manager.sh`**: Production service management with PM2/systemd
|
||||
|
||||
### Configuration
|
||||
|
||||
- **`conf.sh.dist`**: Default configuration template
|
||||
- **`conf.sh`**: User configuration (create from .dist)
|
||||
- **`gdb.conf`**: GDB debugging configuration
|
||||
|
||||
### Examples
|
||||
|
||||
- **`restarter-auth.sh`**: Auth server restart example
|
||||
- **`restarter-world.sh`**: World server restart example
|
||||
- **`starter-auth.sh`**: Auth server basic start example
|
||||
- **`starter-world.sh`**: World server basic start example
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### 1. Basic Server Start (Development)
|
||||
|
||||
```bash
|
||||
# Start authserver directly
|
||||
./starter /path/to/bin authserver
|
||||
|
||||
# Start worldserver with config
|
||||
./starter /path/to/bin worldserver "" /path/to/worldserver.conf
|
||||
```
|
||||
|
||||
### 2. Start with Auto-Restart
|
||||
|
||||
```bash
|
||||
# Using simple-restarter (legacy)
|
||||
./simple-restarter /path/to/bin authserver
|
||||
|
||||
# Using run-engine (recommended)
|
||||
./run-engine restart authserver --bin-path /path/to/bin
|
||||
```
|
||||
|
||||
### 3. Production Service Management
|
||||
|
||||
```bash
|
||||
# Create and start a service
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin
|
||||
|
||||
# List all services
|
||||
./service-manager.sh list
|
||||
|
||||
# Stop a service
|
||||
./service-manager.sh stop auth
|
||||
```
|
||||
|
||||
### 4. Using acore.sh Dashboard
|
||||
|
||||
```bash
|
||||
# Interactive dashboard
|
||||
./acore.sh
|
||||
|
||||
# Direct commands
|
||||
./acore.sh run-authserver # Start authserver with restart
|
||||
./acore.sh run-worldserver # Start worldserver with restart
|
||||
./acore.sh service-manager # Access service manager
|
||||
```
|
||||
|
||||
## ⚙️ Configuration
|
||||
|
||||
### Configuration Priority (Highest to Lowest)
|
||||
|
||||
1. **`conf.sh`** - User configuration file
|
||||
2. **Command line arguments** - Runtime parameters
|
||||
3. **Environment variables** - `RUN_ENGINE_*` variables
|
||||
4. **`conf.sh.dist`** - Default configuration
|
||||
|
||||
### Creating Configuration
|
||||
|
||||
```bash
|
||||
# Copy default configuration
|
||||
cp scripts/conf.sh.dist scripts/conf.sh
|
||||
|
||||
# Edit your configuration
|
||||
nano scripts/conf.sh
|
||||
```
|
||||
|
||||
### Key Configuration Options
|
||||
|
||||
```bash
|
||||
# Binary settings
|
||||
export BINPATH="/path/to/azerothcore/bin"
|
||||
export SERVERBIN="worldserver" # or "authserver"
|
||||
export CONFIG="/path/to/worldserver.conf"
|
||||
|
||||
# Session management
|
||||
export SESSION_MANAGER="tmux" # none|auto|tmux|screen
|
||||
export SESSION_NAME="ac-world"
|
||||
|
||||
# Interactive mode control
|
||||
export AC_DISABLE_INTERACTIVE="0" # Set to 1 to disable interactive prompts (useful for non-interactive services)
|
||||
|
||||
# Debugging
|
||||
export GDB_ENABLED="1" # 0 or 1
|
||||
export GDB="/path/to/gdb.conf"
|
||||
|
||||
# Logging
|
||||
export LOGS_PATH="/path/to/logs"
|
||||
export CRASHES_PATH="/path/to/crashes"
|
||||
export LOG_PREFIX_NAME="realm1"
|
||||
```
|
||||
|
||||
## 📖 Detailed Usage
|
||||
|
||||
### 1. Run Engine
|
||||
|
||||
The `run-engine` is the most advanced script with multiple operation modes:
|
||||
|
||||
#### Basic Execution
|
||||
```bash
|
||||
# Start server once
|
||||
./run-engine start worldserver --bin-path /path/to/bin
|
||||
|
||||
# Start with configuration file
|
||||
./run-engine start worldserver --config ./conf-world.sh
|
||||
|
||||
# Start with specific server config
|
||||
./run-engine start worldserver --server-config /path/to/worldserver.conf
|
||||
```
|
||||
|
||||
#### Restart Mode
|
||||
```bash
|
||||
# Automatic restart on crash
|
||||
./run-engine restart worldserver --bin-path /path/to/bin
|
||||
|
||||
# Restart with session management
|
||||
./run-engine restart worldserver --session-manager tmux
|
||||
```
|
||||
|
||||
#### Session Management
|
||||
```bash
|
||||
# Start in tmux session
|
||||
./run-engine start worldserver --session-manager tmux
|
||||
|
||||
# Attach to existing session
|
||||
tmux attach-session -t worldserver
|
||||
|
||||
# Start in screen session
|
||||
./run-engine start worldserver --session-manager screen
|
||||
|
||||
# Attach to screen session
|
||||
screen -r worldserver
|
||||
```
|
||||
|
||||
#### Configuration Options
|
||||
```bash
|
||||
./run-engine restart worldserver \
|
||||
--bin-path /path/to/bin \
|
||||
--server-config /path/to/worldserver.conf \
|
||||
--session-manager tmux \
|
||||
--gdb-enabled 1 \
|
||||
--logs-path /path/to/logs \
|
||||
--crashes-path /path/to/crashes
|
||||
```
|
||||
|
||||
### 2. Simple Restarter
|
||||
|
||||
Legacy-compatible wrapper with restart functionality:
|
||||
|
||||
```bash
|
||||
# Basic restart
|
||||
./simple-restarter /path/to/bin worldserver
|
||||
|
||||
# With full parameters
|
||||
./simple-restarter \
|
||||
/path/to/bin \
|
||||
worldserver \
|
||||
./gdb.conf \
|
||||
/path/to/worldserver.conf \
|
||||
/path/to/system.log \
|
||||
/path/to/system.err \
|
||||
1 \
|
||||
/path/to/crashes
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
1. Binary path (required)
|
||||
2. Binary name (required)
|
||||
3. GDB configuration file (optional)
|
||||
4. Server configuration file (optional)
|
||||
5. System log file (optional)
|
||||
6. System error file (optional)
|
||||
7. GDB enabled flag (0/1, optional)
|
||||
8. Crashes directory path (optional)
|
||||
|
||||
### 3. Starter
|
||||
|
||||
Basic execution script without restart functionality:
|
||||
|
||||
```bash
|
||||
# Simple start
|
||||
./starter /path/to/bin worldserver
|
||||
|
||||
# With GDB debugging
|
||||
./starter /path/to/bin worldserver ./gdb.conf /path/to/worldserver.conf "" "" 1
|
||||
```
|
||||
|
||||
### 4. Service Manager
|
||||
|
||||
Production-ready service management:
|
||||
|
||||
#### Creating Services
|
||||
```bash
|
||||
# Auto-detect provider (PM2 or systemd)
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin
|
||||
|
||||
# Force PM2
|
||||
./service-manager.sh create world worldserver --provider pm2 --bin-path /path/to/bin
|
||||
|
||||
# Force systemd
|
||||
./service-manager.sh create world worldserver --provider systemd --bin-path /path/to/bin
|
||||
|
||||
# Create service with restart policy
|
||||
./service-manager.sh create world worldserver --bin-path /path/to/bin --restart-policy always
|
||||
```
|
||||
|
||||
#### Restart Policies
|
||||
|
||||
Services support two restart policies:
|
||||
|
||||
- **`on-failure`** (default): Restart only on crashes or errors (exit code != 0, only works with PM2 or systemd without tmux/screen)
|
||||
- **`always`**: Restart on any exit, including clean shutdown (exit code 0)
|
||||
|
||||
**Important**: When using `--restart-policy always`, the in-game command `server shutdown X` will behave like `server restart X` - the service will automatically restart after shutdown. Only the shutdown message differs from a restart message.
|
||||
|
||||
```bash
|
||||
# Service that restarts only on crashes (default behavior)
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin --restart-policy on-failure
|
||||
|
||||
# Service that always restarts (even on manual shutdown)
|
||||
./service-manager.sh create world worldserver --bin-path /path/to/bin --restart-policy always
|
||||
|
||||
# Update existing service restart policy
|
||||
./service-manager.sh update worldserver --restart-policy always
|
||||
```
|
||||
|
||||
#### Service Operations
|
||||
```bash
|
||||
# Start/stop services
|
||||
./service-manager.sh start auth
|
||||
./service-manager.sh stop world
|
||||
./service-manager.sh restart auth
|
||||
|
||||
# View logs
|
||||
./service-manager.sh logs world
|
||||
./service-manager.sh logs world --follow
|
||||
|
||||
# Attach to console (interactive)
|
||||
./service-manager.sh attach world
|
||||
|
||||
# List services
|
||||
./service-manager.sh list
|
||||
./service-manager.sh list pm2
|
||||
./service-manager.sh list systemd
|
||||
|
||||
# Delete service
|
||||
./service-manager.sh delete auth
|
||||
```
|
||||
|
||||
#### Health and Console Commands
|
||||
|
||||
Use these commands to programmatically check service health and interact with the console (used by CI workflows):
|
||||
|
||||
```bash
|
||||
# Check if service is currently running (exit 0 if running)
|
||||
./service-manager.sh is-running world
|
||||
|
||||
# Print current uptime in seconds (fails if not running)
|
||||
./service-manager.sh uptime-seconds world
|
||||
|
||||
# Wait until uptime >= 10s (optional timeout 240s)
|
||||
./service-manager.sh wait-uptime world 10 240
|
||||
|
||||
# Send a console command (uses pm2 send or tmux/screen)
|
||||
./service-manager.sh send world "server info"
|
||||
|
||||
# Show provider, configs and run-engine settings
|
||||
./service-manager.sh show-config world
|
||||
```
|
||||
|
||||
Notes:
|
||||
- For `send`, PM2 provider uses `pm2 send` with the process ID; systemd provider requires a session manager (tmux/screen). If no attachable session is configured, the command fails.
|
||||
- `wait-uptime` fails with a non-zero exit code if the service does not reach the requested uptime within the timeout window.
|
||||
|
||||
#### Service Configuration
|
||||
```bash
|
||||
# Update service settings
|
||||
./service-manager.sh update world --session-manager screen --gdb-enabled 1
|
||||
|
||||
# Edit configuration
|
||||
./service-manager.sh edit world
|
||||
|
||||
# Restore missing services from registry
|
||||
./service-manager.sh restore
|
||||
```
|
||||
|
||||
## 🌍 Multiple Realms Setup
|
||||
|
||||
### Method 1: Using Service Manager (Recommended)
|
||||
|
||||
```bash
|
||||
# Create multiple world server instances with different restart policies
|
||||
./service-manager.sh create world1 worldserver \
|
||||
--bin-path /path/to/bin \
|
||||
--server-config /path/to/worldserver-realm1.conf \
|
||||
--restart-policy on-failure
|
||||
|
||||
./service-manager.sh create world2 worldserver \
|
||||
--bin-path /path/to/bin \
|
||||
--server-config /path/to/worldserver-realm2.conf \
|
||||
--restart-policy always
|
||||
|
||||
# Single auth server for all realms (always restart for stability)
|
||||
./service-manager.sh create auth authserver \
|
||||
--bin-path /path/to/bin \
|
||||
--server-config /path/to/authserver.conf \
|
||||
--restart-policy always
|
||||
```
|
||||
|
||||
### Method 2: Using Run Engine with Different Configurations
|
||||
|
||||
Create separate configuration files for each realm:
|
||||
|
||||
**conf-realm1.sh:**
|
||||
```bash
|
||||
export BINPATH="/path/to/bin"
|
||||
export SERVERBIN="worldserver"
|
||||
export CONFIG="/path/to/worldserver-realm1.conf"
|
||||
export SESSION_NAME="ac-realm1"
|
||||
export LOG_PREFIX_NAME="realm1"
|
||||
export LOGS_PATH="/path/to/logs/realm1"
|
||||
```
|
||||
|
||||
**conf-realm2.sh:**
|
||||
```bash
|
||||
export BINPATH="/path/to/bin"
|
||||
export SERVERBIN="worldserver"
|
||||
export CONFIG="/path/to/worldserver-realm2.conf"
|
||||
export SESSION_NAME="ac-realm2"
|
||||
export LOG_PREFIX_NAME="realm2"
|
||||
export LOGS_PATH="/path/to/logs/realm2"
|
||||
```
|
||||
|
||||
Start each realm:
|
||||
```bash
|
||||
./run-engine restart worldserver --config ./conf-realm1.sh
|
||||
./run-engine restart worldserver --config ./conf-realm2.sh
|
||||
```
|
||||
|
||||
### Method 3: Using Examples with Custom Configurations
|
||||
|
||||
Copy and modify the example scripts:
|
||||
|
||||
```bash
|
||||
# Copy examples
|
||||
cp examples/restarter-world.sh restarter-realm1.sh
|
||||
cp examples/restarter-world.sh restarter-realm2.sh
|
||||
|
||||
# Edit each script to point to different configuration files
|
||||
# Then run:
|
||||
./restarter-realm1.sh
|
||||
./restarter-realm2.sh
|
||||
```
|
||||
|
||||
## 🛠️ Service Management
|
||||
|
||||
### Service Registry and Persistence
|
||||
|
||||
The service manager includes a comprehensive registry system that tracks all created services and enables automatic restoration:
|
||||
|
||||
#### Service Registry Features
|
||||
|
||||
- **Automatic Tracking**: All services are automatically registered when created
|
||||
- **Cross-Reboot Persistence**: PM2 services are configured with startup persistence
|
||||
- **Service Restoration**: Missing services can be detected and restored from registry
|
||||
- **Migration Support**: Legacy service configurations can be migrated to the new format
|
||||
|
||||
#### Using the Registry
|
||||
|
||||
```bash
|
||||
# Check for missing services and restore them
|
||||
./service-manager.sh restore
|
||||
|
||||
# List all registered services (includes status)
|
||||
./service-manager.sh list
|
||||
|
||||
# Services are automatically added to registry on creation
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin
|
||||
```
|
||||
|
||||
#### Custom Configuration Directories
|
||||
|
||||
You can customize where service configurations and PM2/systemd files are stored:
|
||||
|
||||
```bash
|
||||
# Set custom directories
|
||||
export AC_SERVICE_CONFIG_DIR="/path/to/your/project/services"
|
||||
|
||||
# Now all service operations will use these custom directories
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin
|
||||
```
|
||||
|
||||
This is particularly useful for:
|
||||
- **Version Control**: Keep service configurations in your project repository
|
||||
- **Multiple Projects**: Separate service configurations per project
|
||||
- **Team Collaboration**: Share service setups across development teams
|
||||
|
||||
#### Service Configuration Portability
|
||||
|
||||
The service manager automatically stores binary and configuration paths as relative paths when they are located under the `AC_SERVICE_CONFIG_DIR`, making service configurations portable across environments:
|
||||
|
||||
```bash
|
||||
# Set up a portable project structure
|
||||
export AC_SERVICE_CONFIG_DIR="/opt/myproject/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"/{bin,etc}
|
||||
|
||||
# Copy your binaries and configs
|
||||
cp /path/to/compiled/authserver "$AC_SERVICE_CONFIG_DIR/bin/"
|
||||
cp /path/to/authserver.conf "$AC_SERVICE_CONFIG_DIR/etc/"
|
||||
|
||||
# Create service - paths under AC_SERVICE_CONFIG_DIR will be stored as relative
|
||||
./service-manager.sh create auth authserver \
|
||||
--bin-path "$AC_SERVICE_CONFIG_DIR/bin" \
|
||||
--server-config "$AC_SERVICE_CONFIG_DIR/etc/authserver.conf"
|
||||
|
||||
# Registry will contain relative paths like "bin/authserver" and "etc/authserver.conf"
|
||||
# instead of absolute paths, making the entire directory portable
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- **Environment Independence**: Move the entire services directory between machines
|
||||
- **Container Friendly**: Perfect for Docker volumes and bind mounts
|
||||
- **Backup/Restore**: Archive and restore complete service configurations
|
||||
- **Development/Production Parity**: Same relative structure across environments
|
||||
|
||||
**How it works:**
|
||||
- Paths under `AC_SERVICE_CONFIG_DIR` are automatically stored as relative paths
|
||||
- Paths outside `AC_SERVICE_CONFIG_DIR` are stored as absolute paths for safety
|
||||
- When services are restored or started, relative paths are resolved from `AC_SERVICE_CONFIG_DIR`
|
||||
- If `AC_SERVICE_CONFIG_DIR` is not set, all paths are stored as absolute paths (traditional behavior)
|
||||
|
||||
#### Migration from Legacy Format
|
||||
|
||||
If you have existing services in the old format, use the migration script:
|
||||
|
||||
```bash
|
||||
# Migrate existing registry to new format
|
||||
./migrate-registry.sh
|
||||
|
||||
# The script will:
|
||||
# - Detect old format automatically
|
||||
# - Create a backup of the old registry
|
||||
# - Convert to new format with proper tracking
|
||||
# - Preserve all existing service information
|
||||
```
|
||||
|
||||
### PM2 Services
|
||||
|
||||
When using PM2 as the service provider:
|
||||
|
||||
* [PM2 CLI Documentation](https://pm2.io/docs/runtime/reference/pm2-cli/)
|
||||
|
||||
**Automatic PM2 Persistence**: The service manager automatically configures PM2 for persistence across reboots by:
|
||||
- Running `pm2 startup` to set up the startup script
|
||||
- Running `pm2 save` after each service creation/modification
|
||||
- This ensures your services automatically start when the system reboots
|
||||
|
||||
NOTE: pm2 cannot run tmux/screen sessions, but you can always use the `attach` command to connect to the service console because pm2 supports interactive mode.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
The startup scripts recognize several environment variables for configuration and runtime behavior:
|
||||
|
||||
#### Configuration Directory Variables
|
||||
|
||||
- **`AC_SERVICE_CONFIG_DIR`**: Override the default configuration directory for services registry and configurations
|
||||
- Default: `${XDG_CONFIG_HOME:-$HOME/.config}/azerothcore/services`
|
||||
- Used for storing service registry and run-engine configurations
|
||||
|
||||
#### Service Detection Variables
|
||||
|
||||
- **`AC_LAUNCHED_BY_PM2`**: Set to `1` when launched by PM2 (automatically set by service-manager)
|
||||
- Disables the use of the `unbuffer` command for output capture
|
||||
- Enables non-interactive mode to prevent prompts
|
||||
- More robust than relying on PM2's internal variables
|
||||
|
||||
- **`AC_DISABLE_INTERACTIVE`**: Controls interactive mode (0=enabled, 1=disabled)
|
||||
- Automatically set based on execution context
|
||||
- Prevents AzerothCore from showing interactive prompts in service environments
|
||||
|
||||
#### Configuration Variables
|
||||
|
||||
- **`RUN_ENGINE_*`**: See [Configuration](#configuration) section for complete list
|
||||
- **`SERVICE_MODE`**: Set to `true` to enable service-specific behavior
|
||||
- **`SESSION_MANAGER`**: Override session manager choice (tmux, screen, none, auto)
|
||||
|
||||
### Systemd Services
|
||||
|
||||
When using systemd as the service provider:
|
||||
|
||||
```bash
|
||||
# Systemd commands
|
||||
systemctl --user status acore-auth # Check status
|
||||
journalctl --user -u acore-auth # View logs
|
||||
systemctl --user restart acore-auth # Restart
|
||||
systemctl --user enable acore-auth # Enable auto-start
|
||||
|
||||
# For system services (requires sudo)
|
||||
sudo systemctl status acore-auth
|
||||
sudo systemctl enable acore-auth
|
||||
```
|
||||
|
||||
**Enhanced systemd Integration:**
|
||||
- **Automatic Service Type**: When using session managers (tmux/screen), services are automatically configured with `Type=forking` for proper daemon behavior
|
||||
- **Smart ExecStop**: Services with session managers get automatic `ExecStop` commands to properly terminate tmux/screen sessions when stopping the service
|
||||
- **Non-Interactive Mode**: Services without session managers automatically set `AC_DISABLE_INTERACTIVE=1` to prevent hanging on prompts
|
||||
|
||||
### Session Management in Services
|
||||
|
||||
Services can be configured with session managers for interactive access:
|
||||
|
||||
```bash
|
||||
# Create service with tmux
|
||||
./service-manager.sh create world worldserver \
|
||||
--bin-path /path/to/bin \
|
||||
--session-manager tmux
|
||||
|
||||
# Attach to the session
|
||||
./service-manager.sh attach world
|
||||
# or directly:
|
||||
tmux attach-session -t worldserver
|
||||
```
|
||||
|
||||
## 🎮 Integration with acore.sh Dashboard
|
||||
|
||||
The startup scripts are fully integrated into the AzerothCore dashboard:
|
||||
|
||||
### Direct Commands
|
||||
|
||||
```bash
|
||||
# Run servers with simple restart (development/testing)
|
||||
./acore.sh run-worldserver # Option 11 or 'rw'
|
||||
./acore.sh run-authserver # Option 12 or 'ra'
|
||||
|
||||
# Access service manager (production)
|
||||
./acore.sh service-manager # Option 15 or 'sm'
|
||||
|
||||
# Examples:
|
||||
./acore.sh rw # Quick worldserver start
|
||||
./acore.sh ra # Quick authserver start
|
||||
./acore.sh sm create auth authserver --bin-path /path/to/bin
|
||||
```
|
||||
|
||||
### What Happens Behind the Scenes
|
||||
|
||||
- **run-worldserver/run-authserver**: Calls `simple-restarter` with appropriate binary
|
||||
- **service-manager**: Provides full access to the service management interface
|
||||
- Scripts automatically use the correct binary path from your build configuration
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### 1. Binary Not Found
|
||||
```bash
|
||||
Error: Binary '/path/to/bin/worldserver' not found
|
||||
```
|
||||
**Solution**: Check binary path and ensure servers are compiled
|
||||
```bash
|
||||
# Check if binary exists
|
||||
ls -la /path/to/bin/worldserver
|
||||
|
||||
# Compile if needed
|
||||
./acore.sh compiler build
|
||||
```
|
||||
|
||||
#### 2. Configuration File Issues
|
||||
```bash
|
||||
Error: Configuration file not found
|
||||
```
|
||||
**Solution**: Create configuration from template
|
||||
```bash
|
||||
cp scripts/conf.sh.dist scripts/conf.sh
|
||||
# Edit conf.sh with correct paths
|
||||
```
|
||||
|
||||
#### 3. Session Manager Not Available
|
||||
```bash
|
||||
Warning: tmux not found, falling back to direct execution
|
||||
```
|
||||
**Solution**: Install required session manager
|
||||
```bash
|
||||
# Ubuntu/Debian
|
||||
sudo apt install tmux screen
|
||||
|
||||
# CentOS/RHEL
|
||||
sudo yum install tmux screen
|
||||
```
|
||||
|
||||
#### 4. Permission Issues (systemd)
|
||||
```bash
|
||||
Failed to create systemd service
|
||||
```
|
||||
**Solution**: Check user permissions or use --system flag
|
||||
```bash
|
||||
# For user services (no sudo required)
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin
|
||||
|
||||
# For system services (requires sudo)
|
||||
./service-manager.sh create auth authserver --bin-path /path/to/bin --system
|
||||
```
|
||||
|
||||
#### 5. PM2 Not Found
|
||||
```bash
|
||||
Error: PM2 is not installed
|
||||
```
|
||||
**Solution**: Install PM2
|
||||
```bash
|
||||
npm install -g pm2
|
||||
# or
|
||||
sudo npm install -g pm2
|
||||
```
|
||||
|
||||
#### 6. Registry Out of Sync
|
||||
```bash
|
||||
# If the service registry shows services that don't actually exist
|
||||
```
|
||||
**Solution**: Use registry sync or restore
|
||||
```bash
|
||||
# Check and restore missing services (also cleans up orphaned entries)
|
||||
./service-manager.sh restore
|
||||
|
||||
# If you have a very old registry format, migrate it
|
||||
./migrate-registry.sh
|
||||
```
|
||||
|
||||
|
||||
1
apps/startup-scripts/src/.gitignore
vendored
Normal file
1
apps/startup-scripts/src/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
logs
|
||||
57
apps/startup-scripts/src/conf.sh.dist
Normal file
57
apps/startup-scripts/src/conf.sh.dist
Normal file
@@ -0,0 +1,57 @@
|
||||
# AzerothCore Run Engine Default Configuration
# This file contains default values that can be overridden by environment variables
# Priority order: conf.sh > environment variables > conf.sh.dist (this file)

# Enable/disable GDB execution
export GDB_ENABLED="${RUN_ENGINE_GDB_ENABLED:-0}"

# [optional] GDB configuration file
# default: gdb.conf
export GDB="${RUN_ENGINE_GDB:-}"

# Directory where binaries are stored
export BINPATH="${RUN_ENGINE_BINPATH:-}"

# Server binary name (e.g., worldserver, authserver)
export SERVERBIN="${RUN_ENGINE_SERVERBIN:-}"

# Path to server configuration file (including the file name)
# ex: /home/user/azerothcore/etc/worldserver.conf
export CONFIG="${RUN_ENGINE_CONFIG:-}"

# Session manager to use: none|auto|tmux|screen
# auto will detect the best available option
export SESSION_MANAGER="${RUN_ENGINE_SESSION_MANAGER:-none}"

# Default session manager (fallback when SESSION_MANAGER is not set)
export DEFAULT_SESSION_MANAGER="${RUN_ENGINE_DEFAULT_SESSION_MANAGER:-none}"

# Path of the crashes directory
# If not specified, it will be created in the same directory as logs named "crashes"
export CRASHES_PATH="${RUN_ENGINE_CRASHES_PATH:-}"

# Path of log files directory
export LOGS_PATH="${RUN_ENGINE_LOGS_PATH:-}"

# Prefix name for log files to avoid collision with other instances
export LOG_PREFIX_NAME="${RUN_ENGINE_LOG_PREFIX_NAME:-}"

# [optional] Name of session (tmux session or screen session)
# If not specified, a default name will be generated based on server binary
export SESSION_NAME="${RUN_ENGINE_SESSION_NAME:-}"

# [optional] Screen-specific options: -A -m -d -S
# WARNING: if you are running it under a systemd service
# please do not remove -m -d arguments from screen if you are using it,
# or keep WITH_CONSOLE=0. Otherwise the journald-logging system will take
# 100% of CPU slowing down the whole machine.
export SCREEN_OPTIONS="${RUN_ENGINE_SCREEN_OPTIONS:-}"

# Enable/disable console output
# If disabled, output will be redirected to logging files
export WITH_CONSOLE="${RUN_ENGINE_WITH_CONSOLE:-0}"

# Restart policy (on-failure|always)
# Follows the same RUN_ENGINE_* override convention as every other setting
# in this file (previously hard-coded to "always", which ignored the
# environment and the documented priority order above).
export RESTART_POLICY="${RUN_ENGINE_RESTART_POLICY:-always}"
|
||||
|
||||
|
||||
48
apps/startup-scripts/src/examples/restarter-auth.sh
Executable file
48
apps/startup-scripts/src/examples/restarter-auth.sh
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Auth Server Restarter Example
# This example shows how to use the run-engine with restart functionality for authserver

PATH_RUNENGINE="./"
CONFIG_FILE="./conf-auth.sh"

# Method 1: Using configuration file (recommended)
# Guard clause: bail out with guidance when the config file is missing.
if [ ! -f "$CONFIG_FILE" ]; then
  echo "Error: Configuration file not found: $CONFIG_FILE"
  echo "Please create $CONFIG_FILE by copying and modifying conf.sh.dist"
  echo "Make sure to set: export SERVERBIN=\"authserver\""
  echo ""
  echo "Alternative: Start with binary path directly"
  echo "Example: $PATH_RUNENGINE/run-engine restart /path/to/bin/authserver"
  echo "Example: $PATH_RUNENGINE/run-engine restart authserver # if in PATH"
  exit 1
fi

echo "Starting authserver with restart loop using config file: $CONFIG_FILE"
# Load the configuration so $SERVERBIN is defined before it is used below.
source "$CONFIG_FILE"
"$PATH_RUNENGINE/run-engine" restart "$SERVERBIN" --config "$CONFIG_FILE"

# Method 2: Direct binary path (full path)
# Uncomment the line below to start with full binary path
#
# "$PATH_RUNENGINE/run-engine" restart /home/user/azerothcore/bin/authserver --server-config /path/to/authserver.conf

# Method 3: Binary name only (system PATH)
# Uncomment the line below if authserver is in your system PATH
#
# "$PATH_RUNENGINE/run-engine" restart authserver --server-config /path/to/authserver.conf

# Method 4: With session manager (tmux/screen)
# Uncomment the line below to use tmux session
#
# "$PATH_RUNENGINE/run-engine" restart authserver --session-manager tmux --server-config /path/to/authserver.conf

# Method 5: Environment variables only
# Uncomment the lines below for environment variable configuration
#
# export RUN_ENGINE_BINPATH="/path/to/your/bin"
# export RUN_ENGINE_SERVERBIN="authserver"
# export RUN_ENGINE_CONFIG="/path/to/authserver.conf"
# "$PATH_RUNENGINE/run-engine" restart authserver
|
||||
|
||||
|
||||
47
apps/startup-scripts/src/examples/restarter-world.sh
Executable file
47
apps/startup-scripts/src/examples/restarter-world.sh
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env bash

# AzerothCore World Server Restarter Example
# This example shows how to use the run-engine with restart functionality for worldserver

PATH_RUNENGINE="./"
CONFIG_FILE="./conf-world.sh"

# Method 1: Using configuration file (recommended)
if [ -f "$CONFIG_FILE" ]; then
  echo "Starting worldserver with restart loop using config file: $CONFIG_FILE"
  # Load the configuration so $SERVERBIN is defined before it is used.
  # (This was missing here, unlike the matching restarter-auth.sh example,
  # leaving $SERVERBIN empty when the run-engine was invoked.)
  source "$CONFIG_FILE"
  "$PATH_RUNENGINE/run-engine" restart "$SERVERBIN" --config "$CONFIG_FILE"
else
  echo "Error: Configuration file not found: $CONFIG_FILE"
  echo "Please create $CONFIG_FILE by copying and modifying conf.sh.dist"
  echo "Make sure to set: export SERVERBIN=\"worldserver\""
  echo ""
  echo "Alternative: Start with binary path directly"
  echo "Example: $PATH_RUNENGINE/run-engine restart /path/to/bin/worldserver"
  echo "Example: $PATH_RUNENGINE/run-engine restart worldserver # if in PATH"
  exit 1
fi

# Method 2: Direct binary path (full path)
# Uncomment the line below to start with full binary path
#
# "$PATH_RUNENGINE/run-engine" restart /home/user/azerothcore/bin/worldserver --server-config /path/to/worldserver.conf

# Method 3: Binary name only (system PATH)
# Uncomment the line below if worldserver is in your system PATH
#
# "$PATH_RUNENGINE/run-engine" restart worldserver --server-config /path/to/worldserver.conf

# Method 4: With session manager (tmux/screen)
# Uncomment the line below to use tmux session
#
# "$PATH_RUNENGINE/run-engine" restart worldserver --session-manager tmux --server-config /path/to/worldserver.conf

# Method 5: Environment variables only
# Uncomment the lines below for environment variable configuration
#
# export RUN_ENGINE_BINPATH="/path/to/your/bin"
# export RUN_ENGINE_SERVERBIN="worldserver"
# export RUN_ENGINE_CONFIG="/path/to/worldserver.conf"
# "$PATH_RUNENGINE/run-engine" restart worldserver
|
||||
|
||||
|
||||
46
apps/startup-scripts/src/examples/starter-auth.sh
Executable file
46
apps/startup-scripts/src/examples/starter-auth.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Auth Server Starter Example
# This example shows how to use the run-engine to start authserver without restart loop

PATH_RUNENGINE="./"
CONFIG_FILE="./conf-auth.sh"

# Method 1: Using configuration file (recommended)
if [ -f "$CONFIG_FILE" ]; then
  echo "Starting authserver (single run) with config file: $CONFIG_FILE"
  # Load the configuration so $SERVERBIN is defined before it is used.
  # (This was missing, unlike the restarter-auth.sh example, leaving
  # $SERVERBIN empty when the run-engine was invoked.)
  source "$CONFIG_FILE"
  "$PATH_RUNENGINE/run-engine" start "$SERVERBIN" --config "$CONFIG_FILE"
else
  echo "Error: Configuration file not found: $CONFIG_FILE"
  echo "Please create $CONFIG_FILE by copying and modifying conf.sh.dist"
  echo "Make sure to set: export SERVERBIN=\"authserver\""
  echo ""
  echo "Alternative: Start with binary path directly"
  echo "Example: $PATH_RUNENGINE/run-engine start /path/to/bin/authserver"
  echo "Example: $PATH_RUNENGINE/run-engine start authserver # if in PATH"
  exit 1
fi

# Method 2: Direct binary path (full path)
# Uncomment the line below to start with full binary path
#
# "$PATH_RUNENGINE/run-engine" start /home/user/azerothcore/bin/authserver --server-config /path/to/authserver.conf

# Method 3: Binary name only (system PATH)
# Uncomment the line below if authserver is in your system PATH
#
# "$PATH_RUNENGINE/run-engine" start authserver --server-config /path/to/authserver.conf

# Method 4: With session manager (tmux/screen)
# Uncomment the line below to use tmux session
#
# "$PATH_RUNENGINE/run-engine" start authserver --session-manager tmux --server-config /path/to/authserver.conf

# Method 5: Environment variables only
# Uncomment the lines below for environment variable configuration
#
# export RUN_ENGINE_BINPATH="/path/to/your/bin"
# export RUN_ENGINE_SERVERBIN="authserver"
# export RUN_ENGINE_CONFIG="/path/to/authserver.conf"
# "$PATH_RUNENGINE/run-engine" start authserver
|
||||
|
||||
47
apps/startup-scripts/src/examples/starter-world.sh
Executable file
47
apps/startup-scripts/src/examples/starter-world.sh
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env bash

# AzerothCore World Server Starter Example
# This example shows how to use the run-engine to start worldserver without restart loop

PATH_RUNENGINE="./"
CONFIG_FILE="./conf-world.sh"

# Method 1: Using configuration file (recommended)
if [ -f "$CONFIG_FILE" ]; then
  echo "Starting worldserver (single run) with config file: $CONFIG_FILE"
  # Load the configuration so $SERVERBIN is defined before it is used.
  # (This was missing, unlike the restarter-auth.sh example, leaving
  # $SERVERBIN empty when the run-engine was invoked.)
  source "$CONFIG_FILE"
  "$PATH_RUNENGINE/run-engine" start "$SERVERBIN" --config "$CONFIG_FILE"
else
  echo "Error: Configuration file not found: $CONFIG_FILE"
  echo "Please create $CONFIG_FILE by copying and modifying conf.sh.dist"
  echo "Make sure to set: export SERVERBIN=\"worldserver\""
  echo ""
  echo "Alternative: Start with binary path directly"
  echo "Example: $PATH_RUNENGINE/run-engine start /path/to/bin/worldserver"
  echo "Example: $PATH_RUNENGINE/run-engine start worldserver # if in PATH"
  exit 1
fi

# Method 2: Direct binary path (full path)
# Uncomment the line below to start with full binary path
#
# "$PATH_RUNENGINE/run-engine" start /home/user/azerothcore/bin/worldserver --server-config /path/to/worldserver.conf

# Method 3: Binary name only (system PATH)
# Uncomment the line below if worldserver is in your system PATH
#
# "$PATH_RUNENGINE/run-engine" start worldserver --server-config /path/to/worldserver.conf

# Method 4: With session manager (tmux/screen)
# Uncomment the line below to use tmux session
#
# "$PATH_RUNENGINE/run-engine" start worldserver --session-manager tmux --server-config /path/to/worldserver.conf

# Method 5: Environment variables only
# Uncomment the lines below for environment variable configuration
#
# export RUN_ENGINE_BINPATH="/path/to/your/bin"
# export RUN_ENGINE_SERVERBIN="worldserver"
# export RUN_ENGINE_CONFIG="/path/to/worldserver.conf"
# "$PATH_RUNENGINE/run-engine" start worldserver
|
||||
|
||||
|
||||
144
apps/startup-scripts/src/migrate-registry.sh
Executable file
144
apps/startup-scripts/src/migrate-registry.sh
Executable file
@@ -0,0 +1,144 @@
|
||||
#!/usr/bin/env bash

# One-time migration script for service registry
# Converts old format to new format

set -euo pipefail # Strict error handling

CONFIG_DIR="${AC_SERVICE_CONFIG_DIR:-${XDG_CONFIG_HOME:-$HOME/.config}/azerothcore/services}"
REGISTRY_FILE="$CONFIG_DIR/service_registry.json"
BACKUP_FILE="$CONFIG_DIR/service_registry.json.backup"

# Colors
readonly YELLOW='\033[1;33m'
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'

echo -e "${BLUE}AzerothCore Service Registry Migration Tool${NC}"
echo "=============================================="

# Check dependencies
if ! command -v jq >/dev/null 2>&1; then
    echo -e "${RED}Error: jq is required but not installed. Please install jq package.${NC}"
    exit 1
fi

# Create config directory if it doesn't exist
mkdir -p "$CONFIG_DIR"

# Check if registry exists
if [ ! -f "$REGISTRY_FILE" ]; then
    echo -e "${YELLOW}No registry file found. Nothing to migrate.${NC}"
    exit 0
fi

# Validate JSON format
if ! jq empty "$REGISTRY_FILE" >/dev/null 2>&1; then
    echo -e "${RED}Error: Registry file contains invalid JSON.${NC}"
    echo "Please check the file: $REGISTRY_FILE"
    exit 1
fi

# Check if it's already new format.
# BUGFIX: explicit parentheses are required around each alternative — jq's
# '|' binds looser than 'or', so 'length == 0 or .[0] | has(...)' parsed as
# '(length == 0 or .[0]) | has(...)', which pipes a boolean into has() for
# an empty array and errors out, misclassifying empty registries.
if jq -e 'type == "array" and ((length == 0) or (.[0] | has("bin_path")))' "$REGISTRY_FILE" >/dev/null 2>&1; then
    echo -e "${GREEN}Registry is already in new format. No migration needed.${NC}"
    exit 0
fi

# Check if it's old format (same precedence fix as above)
if ! jq -e 'type == "array" and ((length == 0) or (.[0] | has("config")))' "$REGISTRY_FILE" >/dev/null 2>&1; then
    echo -e "${YELLOW}Registry format not recognized. Manual review needed.${NC}"
    echo "Current registry content:"
    cat "$REGISTRY_FILE"
    exit 1
fi

echo -e "${YELLOW}Old format detected. Starting migration...${NC}"
# Create a backup of the registry before touching anything
if ! cp "$REGISTRY_FILE" "$BACKUP_FILE"; then
    echo -e "${RED}Error: Failed to create backup file.${NC}"
    exit 1
fi
echo -e "${BLUE}Backup created: $BACKUP_FILE${NC}"

# Build the new-format registry in a scratch file, then swap it in
echo "[]" > "$REGISTRY_FILE.new"

services_migrated=0
while IFS= read -r service; do
    # Skip blank lines and literal nulls emitted by jq
    [ -z "$service" ] && continue
    [ "$service" = "null" ] && continue

    name=$(jq -r '.name // ""' <<<"$service")
    provider=$(jq -r '.provider // ""' <<<"$service")
    type=$(jq -r '.type // ""' <<<"$service")
    config=$(jq -r '.config // ""' <<<"$service")

    # Entries missing any required field are skipped, not fatal
    if [ -z "$name" ] || [ -z "$provider" ] || [ -z "$type" ]; then
        echo -e "${YELLOW}Skipping invalid service entry: $service${NC}"
        continue
    fi

    echo -e "${YELLOW}Migrating service: $name${NC}"

    # Build a new-format entry; fields unknown to the old format get
    # placeholder values ("unknown" bin_path, status "migrated").
    new_entry=$(jq -n \
        --arg name "$name" \
        --arg provider "$provider" \
        --arg type "$type" \
        --arg bin_path "unknown" \
        --arg args "" \
        --arg created "$(date -Iseconds)" \
        --arg status "migrated" \
        --arg systemd_type "--user" \
        --arg restart_policy "always" \
        --arg session_manager "none" \
        --arg gdb_enabled "0" \
        --arg pm2_opts "" \
        --arg server_config "" \
        --arg legacy_config "$config" \
        '{
            name: $name,
            provider: $provider,
            type: $type,
            bin_path: $bin_path,
            args: $args,
            created: $created,
            status: $status,
            systemd_type: $systemd_type,
            restart_policy: $restart_policy,
            session_manager: $session_manager,
            gdb_enabled: $gdb_enabled,
            pm2_opts: $pm2_opts,
            server_config: $server_config,
            legacy_config: $legacy_config
        }')

    # Append the entry to the scratch registry, failing loudly on error
    if ! jq --argjson entry "$new_entry" '. += [$entry]' "$REGISTRY_FILE.new" > "$REGISTRY_FILE.new.tmp"; then
        echo -e "${RED}Error: Failed to add service $name to new registry${NC}"
        rm -f "$REGISTRY_FILE.new" "$REGISTRY_FILE.new.tmp"
        exit 1
    fi
    mv "$REGISTRY_FILE.new.tmp" "$REGISTRY_FILE.new"

    services_migrated=$((services_migrated + 1))
done < <(jq -c '.[]?' "$BACKUP_FILE" 2>/dev/null || echo "")

# Swap the rebuilt registry into place
if ! mv "$REGISTRY_FILE.new" "$REGISTRY_FILE"; then
    echo -e "${RED}Error: Failed to replace old registry with new one${NC}"
    exit 1
fi

echo -e "${GREEN}Migration completed successfully!${NC}"
echo -e "${BLUE}Services migrated: $services_migrated${NC}"
echo -e "${BLUE}Use 'service-manager.sh restore' to review and update services.${NC}"
echo -e "${YELLOW}Note: Migrated services have bin_path='unknown' and need manual recreation.${NC}"
echo ""
echo -e "${BLUE}To recreate services, use commands like:${NC}"
echo " ./service-manager.sh create auth authserver --provider pm2 --bin-path /path/to/your/bin"
echo " ./service-manager.sh create world worldserver --provider systemd --bin-path /path/to/your/bin"
490
apps/startup-scripts/src/run-engine
Executable file
490
apps/startup-scripts/src/run-engine
Executable file
@@ -0,0 +1,490 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Run Engine
# Advanced script for running AzerothCore services with session management
# and restart capabilities.
#
# May be sourced (to expose its functions) or executed directly.
#
# Configuration priority (highest to lowest):
#   1. conf.sh                 - user configuration file
#   2. Command line arguments  (--config, --server-config, etc.)
#   3. Environment variables   (RUN_ENGINE_*)
#   4. conf.sh.dist            - shipped defaults
#
# Environment variables:
#   RUN_ENGINE_CONFIG_FILE     - path to a temporary configuration file (optional)
#   RUN_ENGINE_SESSION_MANAGER - session manager (none|auto|tmux|screen, default: auto)
#   RUN_ENGINE_BINPATH         - binary directory path
#   RUN_ENGINE_SERVERBIN       - server binary name (worldserver|authserver)
#   RUN_ENGINE_CONFIG          - server configuration file path
#   RUN_ENGINE_LOGS_PATH       - directory for log files
#   RUN_ENGINE_CRASHES_PATH    - directory for crash dumps
#   RUN_ENGINE_SESSION_NAME    - session name for tmux/screen

export RUN_ENGINE_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Source the shipped defaults first, then the user's conf.sh so that user
# values override the defaults (conf.sh wins over conf.sh.dist).
for _rc in "$RUN_ENGINE_PATH/conf.sh.dist" "$RUN_ENGINE_PATH/conf.sh"; do
    if [ -e "$_rc" ]; then
        source "$_rc"
    fi
done
unset _rc
||||
# Load configuration
|
||||
# Source an optional run-specific configuration file, then re-apply the
# RUN_ENGINE_* environment variables so they always win over sourced values.
# $1 - explicit config file path (optional); when absent or missing, falls
#      back to $RUN_ENGINE_CONFIG_FILE.
function load_config() {
    local cfg="$1"

    if [ -n "$cfg" ] && [ -e "$cfg" ]; then
        echo "Loading configuration from: $cfg"
        source "$cfg"
    elif [ -n "$RUN_ENGINE_CONFIG_FILE" ] && [ -e "$RUN_ENGINE_CONFIG_FILE" ]; then
        echo "Loading configuration from environment: $RUN_ENGINE_CONFIG_FILE"
        source "$RUN_ENGINE_CONFIG_FILE"
    fi

    # Environment variables take precedence even after sourcing config files
    BINPATH="${RUN_ENGINE_BINPATH:-$BINPATH}"
    SERVERBIN="${RUN_ENGINE_SERVERBIN:-$SERVERBIN}"
    CONFIG="${RUN_ENGINE_CONFIG:-$CONFIG}"
    SESSION_MANAGER="${RUN_ENGINE_SESSION_MANAGER:-$SESSION_MANAGER}"
    LOGS_PATH="${RUN_ENGINE_LOGS_PATH:-$LOGS_PATH}"
    CRASHES_PATH="${RUN_ENGINE_CRASHES_PATH:-$CRASHES_PATH}"
}
||||
|
||||
# Detect available session manager
|
||||
# Echo the first available terminal multiplexer: "tmux", then "screen",
# or "none" when neither is installed.
function detect_session_manager() {
    local mgr="none"
    if command -v tmux >/dev/null 2>&1; then
        mgr="tmux"
    elif command -v screen >/dev/null 2>&1; then
        mgr="screen"
    fi
    echo "$mgr"
}
||||
|
||||
# Determine which session manager to use
|
||||
# Resolve the requested session manager to a concrete choice.
# $1 - requested value: none | auto | tmux | screen (anything else -> none)
# Echoes one of: none | tmux | screen | error
#   "auto"  delegates to detect_session_manager
#   "error" means the requested manager is not installed
function get_session_manager() {
    local want="$1"

    case "$want" in
        auto)
            detect_session_manager
            ;;
        tmux|screen)
            # Explicit request: verify the tool is actually available
            if command -v "$want" >/dev/null 2>&1; then
                echo "$want"
            else
                echo "error"
            fi
            ;;
        none|*)
            echo "none"
            ;;
    esac
}
||||
|
||||
# Configure log files
|
||||
# Derive the per-run log/crash file names from LOGS_PATH and LOG_PREFIX_NAME.
# Sets the TRACE_BEGIN_STRING/TRACE_FILE/ERR_FILE/SYSLOG/SYSERR/LINKS_FILE
# globals consumed by the starter and restarter wrappers.
function configure_files() {
    local base="$LOGS_PATH/$LOG_PREFIX_NAME"
    TRACE_BEGIN_STRING="SIGSEGV" # marker that identifies the start of a crash trace
    TRACE_FILE="${base}_trace.log"
    ERR_FILE="${base}_error.log"
    SYSLOG="${base}_system.log"
    SYSERR="${base}_system.err"
    LINKS_FILE="${base}_crash_links.link"
}
||||
|
||||
# Check if service is running
|
||||
# Probe whether the service appears to be running.
# $1 - session name to check in tmux/screen (optional)
# Returns 1 when a matching gdb wrapper, server process, or session exists;
# returns 0 when nothing was found. NOTE: inverted w.r.t. usual shell truth.
function check_status() {
    local session_name="$1"

    # A gdb-wrapped instance only counts as running when GDB mode is on
    if [[ "$GDB_ENABLED" -eq 1 ]] && [ -n "$(pgrep -f "gdb.*--batch.*$SERVERBIN")" ]; then
        return 1
    fi

    # Bare server binary started with our config file
    if [ -n "$(pgrep -f "$SERVERBIN -c $CONFIG")" ]; then
        return 1
    fi

    # Finally, look for a live tmux/screen session with the given name
    if [ -n "$session_name" ]; then
        case "$(get_session_manager "${SESSION_MANAGER:-auto}")" in
            tmux)
                tmux has-session -t "$session_name" 2>/dev/null && return 1
                ;;
            screen)
                screen -ls "$session_name" 2>/dev/null | grep -q "$session_name" && return 1
                ;;
        esac
    fi

    return 0
}
||||
|
||||
# Run with session manager
|
||||
# Launch a wrapper script, optionally inside a tmux or screen session.
# $1 - session manager: tmux | screen | none
# $2 - session name
# $3 - wrapper to run: "simple-restarter" or "starter"
# $@ - remaining arguments, forwarded verbatim to the wrapper
function run_with_session() {
    local manager="$1"
    local session="$2"
    local wrapper="$3"
    shift 3

    # Pick the wrapper script; anything other than simple-restarter
    # falls back to the plain starter.
    local runner="$RUN_ENGINE_PATH/starter"
    if [ "$wrapper" = "simple-restarter" ]; then
        runner="$RUN_ENGINE_PATH/simple-restarter"
    fi

    case "$manager" in
        tmux)
            echo "> Starting with tmux session: $session - attach with 'tmux attach -t $session'"
            tmux new-session -d -s "$session" -- "$runner" "$@"
            ;;
        screen)
            local opts="-A -m -d -S"
            if [ -n "$SCREEN_OPTIONS" ]; then
                opts="$SCREEN_OPTIONS"
            fi
            echo "> Starting with screen session: $session (options: $opts) - attach with 'screen -r $session'"
            echo "screen $opts \"$session\" -- \"$runner\" $*"
            # shellcheck disable=SC2086 -- $opts is intentionally word-split
            screen $opts "$session" -- "$runner" "$@"
            ;;
        none|*)
            echo "> Starting without session manager"
            "$runner" "$@"
            ;;
    esac
}
||||
|
||||
# Parse command line arguments
|
||||
# Parse command line arguments for the start/restart modes and publish the
# result through exported PARSED_* globals.
# $1 - mode (start|restart)
# $2 - server binary (path or name)
# $@ - named options: --config <file>, --server-config <file>,
#      --session-manager <type>
# Returns 1 (after printing the offending token) on any unknown argument.
function parse_arguments() {
    local mode="$1"
    local serverbin="$2"
    shift 2

    local cfg_file=""
    local server_cfg=""
    local sess_mgr=""

    while [[ $# -gt 0 ]]; do
        case "$1" in
            --config)          cfg_file="$2";   shift 2 ;;
            --server-config)   server_cfg="$2"; shift 2 ;;
            --session-manager) sess_mgr="$2";   shift 2 ;;
            *)
                echo "Unknown argument: $1"
                return 1
                ;;
        esac
    done

    # Export parsed values for use by start_service
    export PARSED_MODE="$mode"
    export PARSED_SERVERBIN="$serverbin"
    export PARSED_CONFIG_FILE="$cfg_file"
    export PARSED_SERVERCONFIG="$server_cfg"
    export PARSED_SESSION_MANAGER="$sess_mgr"

    echo "Parsed arguments:"
    echo " Mode: $PARSED_MODE"
    echo " Server Binary: $PARSED_SERVERBIN"
    echo " Config File: $PARSED_CONFIG_FILE"
    echo " Server Config: $PARSED_SERVERCONFIG"
    echo " Session Manager: $PARSED_SESSION_MANAGER"
}
||||
|
||||
# Start service (single run or with simple-restarter)
|
||||
# Start the service, either once ("starter") or supervised by the
# "simple-restarter" wrapper.
# $1 - run-engine config file path (optional)
# $2 - server binary: full path or bare name (optional if configured elsewhere)
# $3 - server configuration file, passed to the binary via -c (optional)
# $4 - "true" to supervise with simple-restarter (default: "false")
# $5 - session manager choice; falls back to $SESSION_MANAGER when empty
# Returns 1 on validation failure; exits 1 on an unusable session manager.
function start_service() {
    local config_file="$1"
    local serverbin_path="$2"
    local serverconfig="$3"
    local use_restarter="${4:-false}"
    local session_manager_choice="$5"

    # Load configuration first
    load_config "$config_file"

    # If no session manager is specified, take it from the config
    if [ -z "$session_manager_choice" ]; then
        session_manager_choice="$SESSION_MANAGER"
    fi

    # Parse serverbin_path into BINPATH and SERVERBIN
    if [ -n "$serverbin_path" ]; then
        if [[ "$serverbin_path" == */* ]]; then
            # Full path: split into directory and binary name
            BINPATH="$(dirname "$serverbin_path")"
            SERVERBIN="$(basename "$serverbin_path")"
        else
            # Bare name: resolved later via the system PATH
            SERVERBIN="$serverbin_path"
            BINPATH="${BINPATH:-""}" # empty means current directory or system PATH
        fi
    fi

    # Use environment/config values if not set from the command line
    BINPATH="${BINPATH:-$RUN_ENGINE_BINPATH}"
    SERVERBIN="${SERVERBIN:-$RUN_ENGINE_SERVERBIN}"
    CONFIG="${serverconfig:-$CONFIG}"

    echo "SERVERBIN: $SERVERBIN"

    # Validate required parameters
    if [ -z "$SERVERBIN" ]; then
        echo "Error: SERVERBIN is required"
        echo "Could not determine server binary from: $serverbin_path"
        echo "Provide it as:"
        echo " - Full path: $0 <mode> /path/to/bin/worldserver"
        echo " - Binary name: $0 <mode> worldserver"
        echo " - Environment variables: RUN_ENGINE_SERVERBIN"
        echo " - Configuration file with SERVERBIN variable"
        return 1
    fi

    if [ -n "$BINPATH" ]; then
        if [ ! -d "$BINPATH" ]; then
            echo "Error: BINPATH not found: $BINPATH"
            return 1
        fi
        # Logs and crash dumps live next to the binary by default
        LOGS_PATH="${LOGS_PATH:-"$BINPATH/logs"}"
        CRASHES_PATH="${CRASHES_PATH:-"$BINPATH/crashes"}"
    else
        # System binary: locate it on PATH so the starter script can find it
        local binary_location
        binary_location=$(command -v "$SERVERBIN" 2>/dev/null)
        if [ -n "$binary_location" ]; then
            BINPATH="$(dirname "$binary_location")"
            echo "Binary found in system PATH: $binary_location"
            LOGS_PATH="${LOGS_PATH:-"$BINPATH/logs"}"
            CRASHES_PATH="${CRASHES_PATH:-"$BINPATH/crashes"}"
        else
            # Fallback to the current directory for logs and crash dumps.
            # BUGFIX: original used "$./crashes", which expanded to a literal
            # '$./crashes' path.
            LOGS_PATH="${LOGS_PATH:-./logs}"
            CRASHES_PATH="${CRASHES_PATH:-./crashes}"
        fi
    fi
    mkdir -p "$LOGS_PATH" "$CRASHES_PATH"

    # Set up logging names
    LOG_PREFIX_NAME="${LOG_PREFIX_NAME:-${SERVERBIN%server}}"

    # Set up session name (SCREEN_NAME kept for backward compatibility)
    SESSION_NAME="${SESSION_NAME:-${SCREEN_NAME:-}}"
    SESSION_NAME="${SESSION_NAME:-AC-${SERVERBIN%server}}"

    configure_files

    local session_manager
    session_manager=$(get_session_manager "$session_manager_choice")
    if [ "$session_manager" = "error" ]; then
        echo "Error: Invalid session manager specified: $session_manager_choice, is it installed?"
        exit 1
    fi

    echo "Using session manager: $session_manager"
    echo "Starting server: $SERVERBIN"

    if [ -n "$CONFIG" ]; then
        echo "Server config: $CONFIG"
    else
        echo "Server config: default (not specified)"
    fi

    # Disable AzerothCore's interactive console when running as a service
    # without an interactive session manager (prevents prompts under
    # systemd/pm2).
    if [[ "${SERVICE_MODE:-false}" == "true" && "$session_manager" == "none" ]]; then
        export AC_DISABLE_INTERACTIVE=1
        echo "Service mode: Non-interactive mode enabled (AC_DISABLE_INTERACTIVE=1)"
    else
        export AC_DISABLE_INTERACTIVE=0
        if [[ "${SERVICE_MODE:-false}" == "true" ]]; then
            echo "Service mode: Interactive mode enabled (session manager: $session_manager)"
        else
            echo "Direct execution: Interactive mode enabled"
        fi
    fi

    # Both modes take identical arguments; only the wrapper differs
    local wrapper="starter"
    if [ "$use_restarter" = "true" ]; then
        wrapper="simple-restarter"
    fi
    run_with_session "$session_manager" "$SESSION_NAME" "$wrapper" \
        "$BINPATH" "$SERVERBIN" "${GDB:-}" "$CONFIG" "$SYSLOG" "$SYSERR" \
        "${GDB_ENABLED:-0}" "$CRASHES_PATH"
}
||||
|
||||
# Cleanup function
|
||||
# Cleanup hook: kill the tmux/screen session (if any), ignoring errors so
# the exit path never fails.
function finish() {
    local manager
    manager=$(get_session_manager "${SESSION_MANAGER:-auto}")
    if [ -n "$SESSION_NAME" ]; then
        case "$manager" in
            tmux)   tmux kill-session -t "$SESSION_NAME" 2>/dev/null || true ;;
            screen) screen -X -S "$SESSION_NAME" quit 2>/dev/null || true ;;
        esac
    fi
}
||||
|
||||
# Legacy compatibility functions for old examples
|
||||
# Legacy entry point kept for old examples; forwards to start_service in
# supervised (restart-on-crash) mode.
function restarter() {
    echo "Legacy function 'restarter' called - redirecting to new API"
    start_service "" "" "" "true" "${SESSION_MANAGER:-auto}"
}
||||
|
||||
# Legacy entry point kept for old examples; forwards to start_service in
# single-run (no restart) mode.
function starter() {
    echo "Legacy function 'starter' called - redirecting to new API"
    start_service "" "" "" "false" "${SESSION_MANAGER:-auto}"
}
||||
|
||||
# Set trap for cleanup (currently disabled to avoid interfering with systemd)
|
||||
# trap finish EXIT
|
||||
|
||||
# Main execution when script is run directly
|
||||
# Main dispatch: only runs when executed directly (not when sourced)
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    case "${1:-help}" in
        start|restart)
            if [ $# -lt 2 ]; then
                echo "Error: Missing required arguments"
                echo "Usage: $0 <mode> <serverbin> [options]"
                echo "Example: $0 start worldserver --config ./conf-world.sh --server-config worldserver.conf"
                exit 1
            fi

            parse_arguments "$@" || exit 1

            # "restart" mode wraps the server in the simple-restarter loop
            use_restarter="false"
            if [ "$PARSED_MODE" = "restart" ]; then
                use_restarter="true"
            fi

            start_service "$PARSED_CONFIG_FILE" "$PARSED_SERVERBIN" "$PARSED_SERVERCONFIG" "$use_restarter" "$PARSED_SESSION_MANAGER"
            ;;
        help|*)
            cat <<HELP
AzerothCore Run Engine

Usage: $0 <mode> <serverbin> [options]

Modes:
 start - Start service once (no restart on crash)
 restart - Start service with restart on crash (uses simple-restarter)

Required Parameters:
 serverbin - Server binary (full path or binary name)
 Full path: /path/to/bin/worldserver
 Binary name: worldserver (uses system PATH)

Options:
 --config <file> - Path to configuration file
 --server-config <file> - Server configuration file (sets -c parameter)
 --session-manager <type> - Session manager: none|auto|tmux|screen (default: auto)

Configuration Priority (highest to lowest):
 1. conf.sh - User configuration file
 2. Command line arguments (--config, --server-config, etc.)
 3. Environment variables (RUN_ENGINE_*)
 4. conf.sh.dist - Default configuration

Environment Variables:
 RUN_ENGINE_CONFIG_FILE - Config file path
 RUN_ENGINE_SESSION_MANAGER - Session manager (default: auto)
 RUN_ENGINE_BINPATH - Binary directory path
 RUN_ENGINE_SERVERBIN - Server binary name
 RUN_ENGINE_CONFIG - Server configuration file
 RUN_ENGINE_LOGS_PATH - Directory for log files
 RUN_ENGINE_CRASHES_PATH - Directory for crash dumps
 RUN_ENGINE_SESSION_NAME - Session name for tmux/screen

Examples:

 # Using full path to binary
 $0 start /home/user/ac/bin/worldserver

 # Using binary name (system PATH)
 $0 start worldserver

 # With configuration file
 $0 start worldserver --config ./conf-world.sh

 # With server configuration (sets -c parameter)
 $0 start /path/to/bin/worldserver --server-config /etc/worldserver.conf

 # With session manager
 $0 restart worldserver --session-manager tmux

 # Complete example
 $0 restart /home/user/ac/bin/worldserver --config ./conf-world.sh --server-config worldserver.conf --session-manager screen

Binary Resolution:
 - Full path (contains /): Extracts directory and binary name
 - Binary name only: Uses system PATH to find executable
 Auto-detection will check current directory first, then system PATH

Server Config:
 If --server-config is specified, it's passed as -c parameter to the server.
 If not specified, the server will use its default configuration.
HELP
            ;;
    esac
fi
|
||||
|
||||
2668
apps/startup-scripts/src/service-manager.sh
Executable file
2668
apps/startup-scripts/src/service-manager.sh
Executable file
File diff suppressed because it is too large
Load Diff
95
apps/startup-scripts/src/simple-restarter
Executable file
95
apps/startup-scripts/src/simple-restarter
Executable file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Simple Restarter
# Thin supervision loop around the `starter` script: relaunches the server
# when it dies, with instant-crash-loop protection. Maintains compatibility
# with the acore dashboard.
#
# BUGFIX (docs): the original header documented 7 parameters starting at
# "$1 - Binary to execute", but the code actually reads 8 parameters
# starting with the binary *directory*. Corrected below to match the code.
#
# Usage: simple-restarter <binpath> <binfile> [gdb_file] [config] [syslog] [syserr] [gdb_enabled] [crashes_path]
#
# Parameters (forwarded verbatim to starter):
#   $1 - Directory containing the binary (required)
#   $2 - Binary file name (required)
#   $3 - GDB configuration file (optional)
#   $4 - Server configuration file path (optional)
#   $5 - System log file (optional)
#   $6 - System error file (optional)
#   $7 - GDB enabled flag (0/1, optional, default 0)
#   $8 - Crashes directory path (optional)

# Get script directory
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Parameters (same as starter)
BINPATH="$1"
BINFILE="$2"
GDB_FILE="$3"
CONFIG="$4"
SYSLOG="$5"
SYSERR="$6"
GDB_ENABLED="${7:-0}"
CRASHES_PATH="$8"

BINARY="$BINPATH/$BINFILE"

# Default values (same as starter)
DEFAULT_GDB_FILE="$CURRENT_PATH/gdb.conf"

# Set defaults if not provided
GDB_FILE="${GDB_FILE:-$DEFAULT_GDB_FILE}"

# Counters for crash detection
_instant_crash_count=0
_restart_count=0

# Check if starter script exists
STARTER_SCRIPT="$CURRENT_PATH/starter"
if [ ! -f "$STARTER_SCRIPT" ]; then
    echo "Error: starter script not found at $STARTER_SCRIPT"
    exit 1
fi

# Main restart loop
while true; do
    STARTING_TIME=$(date +%s)

    echo "AC_CONFIG_POLICY: $AC_CONFIG_POLICY"

    # Use starter script to launch the binary with all parameters
    "$STARTER_SCRIPT" "$BINPATH" "$BINFILE" "$GDB_FILE" "$CONFIG" "$SYSLOG" "$SYSERR" "$GDB_ENABLED" "$CRASHES_PATH"
    _exit_code=$?

    echo "$(basename "$BINARY") terminated with exit code: $_exit_code"

    # Calculate runtime
    ENDING_TIME=$(date +%s)
    DIFFERENCE=$((ENDING_TIME - STARTING_TIME))

    ((_restart_count++))
    echo "$(basename "$BINARY") terminated after $DIFFERENCE seconds, restart count: $_restart_count"

    # Crash loop detection: a run shorter than 10 seconds is an instant crash
    if [ "$DIFFERENCE" -lt 10 ]; then
        ((_instant_crash_count++))
        echo "Warning: Quick restart detected ($DIFFERENCE seconds) - instant crash count: $_instant_crash_count"
    else
        # Reset count on a successful longer run
        _instant_crash_count=0
    fi

    # Prevent infinite crash loops
    if [ "$_instant_crash_count" -gt 5 ]; then
        echo "Error: $(basename "$BINARY") restarter exited. Infinite crash loop prevented (6 crashes in under 10 seconds each)"
        echo "Please check your system configuration and logs"
        exit 1
    fi

    # Exit cleanly if shutdown was requested by command or SIGINT (exit code 0)
    if [ "$_exit_code" -eq 0 ]; then
        echo "$(basename "$BINARY") shutdown safely"
        exit 0
    fi

    echo "$(basename "$BINARY") will restart in 3 seconds..."
    sleep 3
done
||||
151
apps/startup-scripts/src/starter
Executable file
151
apps/startup-scripts/src/starter
Executable file
@@ -0,0 +1,151 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Starter Script
# Launches an AzerothCore binary once, optionally under GDB with automatic
# crash-dump collection.
#
# Usage: starter <binpath> <binfile> [gdb_file] [config] [syslog] [syserr] [gdb_enabled] [crashes_path]
#
# Parameters:
#   $1 - Binary path (required)
#   $2 - Binary file name (required)
#   $3 - GDB configuration file (optional; generated on the fly when empty)
#   $4 - Configuration file path (optional)
#   $5 - System log file (optional)
#   $6 - System error file (optional)
#   $7 - GDB enabled flag (0/1, optional, default 0)
#   $8 - Crashes directory path (optional)

BINPATH="$1"
BINFILE="$2"
GDB_FILE="$3"
CONFIG="$4"
SYSLOG="$5"
SYSERR="$6"
GDB_ENABLED="${7:-0}"
CRASHES_PATH="$8"

# Default values
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
DEFAULT_CRASHES_PATH=$(realpath "$BINPATH/crashes")
[ -n "$CONFIG" ] && CONFIG_ABS=$(realpath "$CONFIG")

# Set defaults if not provided
CRASHES_PATH="${CRASHES_PATH:-$DEFAULT_CRASHES_PATH}"

# Validate binary
if [ -z "$BINPATH" ] || [ -z "$BINFILE" ]; then
    echo "Error: Binary path and file are required"
    echo "Usage: $0 <binpath> <binfile> [gdb_file] [config] [syslog] [syserr] [gdb_enabled] [crashes_path]"
    exit 1
fi

BINARY="$BINPATH/$BINFILE"
if [ ! -f "$BINARY" ]; then
    echo "Error: Binary '$BINARY' not found"
    exit 1
fi

# Create crashes directory if it doesn't exist
mkdir -p "$CRASHES_PATH"

cd "$BINPATH" || {
    echo "Error: Could not change to binary path '$BINPATH'"
    exit 1
}

EXECPATH=$(realpath "$BINFILE")

if [ "$GDB_ENABLED" -eq 1 ]; then
    echo "Starting $EXECPATH with GDB enabled"

    # Per-run file names for the generated GDB script and its output
    TIMESTAMP=$(date +%Y-%m-%d-%H-%M-%S)
    GDB_TEMP_FILE="$CRASHES_PATH/gdb-$TIMESTAMP.conf"
    GDB_OUTPUT_FILE="$CRASHES_PATH/gdb-$TIMESTAMP.txt"

    # Generate a GDB script on the fly unless the caller supplied one
    if [ -z "$GDB_FILE" ]; then
        cat > "$GDB_TEMP_FILE" << EOF
set logging file $GDB_OUTPUT_FILE
set logging enabled on
set debug timestamp
EOF

        # Run command, with -c <config> when a config file was given
        if [ -n "$CONFIG_ABS" ]; then
            echo "run -c $CONFIG_ABS" >> "$GDB_TEMP_FILE"
        else
            echo "run" >> "$GDB_TEMP_FILE"
        fi

        # Post-mortem commands executed after the process stops
        cat >> "$GDB_TEMP_FILE" << EOF
bt
bt full
info thread
thread apply all backtrace full
EOF

        GDB_FILE="$GDB_TEMP_FILE"
    fi

    # Create log files if specified
    [ -n "$SYSLOG" ] && [ ! -f "$SYSLOG" ] && touch "$SYSLOG"
    [ -n "$SYSERR" ] && [ ! -f "$SYSERR" ] && touch "$SYSERR"

    # Execute under GDB, wiring stdout/stderr to the log files
    if [ "${WITH_CONSOLE:-0}" -eq 0 ] && [ -n "$SYSLOG" ] && [ -n "$SYSERR" ]; then
        gdb -x "$GDB_FILE" --batch "$EXECPATH" >> "$SYSLOG" 2>> "$SYSERR"
    else
        echo "> Console enabled"
        if [ -n "$SYSLOG" ] && [ -n "$SYSERR" ]; then
            gdb -x "$GDB_FILE" --batch "$EXECPATH" > >(tee "$SYSLOG") 2> >(tee "$SYSERR" >&2)
        else
            gdb -x "$GDB_FILE" --batch "$EXECPATH"
        fi
    fi

    # Clean up the generated GDB script, if any
    [ -n "$GDB_TEMP_FILE" ] && rm -f "$GDB_TEMP_FILE"
else
    echo "Starting $BINFILE without GDB"

    # Determine if PM2 is active
    is_pm2_active="0"
    [ "$AC_LAUNCHED_BY_PM2" == "1" ] && is_pm2_active="1"

    # Determine if interactive mode is enabled
    is_interactive_enabled="1"
    [ "$AC_DISABLE_INTERACTIVE" == "1" ] && is_interactive_enabled="0"

    # Use normal execution under PM2 or when interactive mode is enabled
    if [[ "$is_pm2_active" == "1" || "$is_interactive_enabled" == "1" ]]; then
        echo "Running AC"
        "$EXECPATH" ${CONFIG_ABS:+-c "$CONFIG_ABS"}
    else
        # AC_DISABLE_INTERACTIVE=1 without PM2 means systemd with no session
        # manager; use unbuffer for line-buffered output.
        # NOTE: unbuffer doesn't fully support interactive mode.
        if command -v unbuffer >/dev/null 2>&1; then
            echo "Running AC with unbuffer for line-buffered output"
            unbuffer "$EXECPATH" ${CONFIG_ABS:+-c "$CONFIG_ABS"}
        else
            echo "⚠️ unbuffer not found, the output may not be line-buffered. Try installing expect."
            exec "$EXECPATH" ${CONFIG_ABS:+-c "$CONFIG_ABS"}
        fi
    fi
fi
||||
14
apps/startup-scripts/test/bats.conf
Normal file
14
apps/startup-scripts/test/bats.conf
Normal file
@@ -0,0 +1,14 @@
|
||||
# BATS Test Configuration
|
||||
|
||||
# Set test timeout (in seconds)
|
||||
export BATS_TEST_TIMEOUT=30
|
||||
|
||||
# Enable verbose output for debugging
|
||||
export BATS_VERBOSE_RUN=1
|
||||
|
||||
# Test output format
|
||||
export BATS_FORMATTER=pretty
|
||||
|
||||
# Enable colored output
|
||||
export BATS_NO_PARALLELIZE_ACROSS_FILES=1
|
||||
export BATS_NO_PARALLELIZE_WITHIN_FILE=1
|
||||
484
apps/startup-scripts/test/test_startup_scripts.bats
Executable file
484
apps/startup-scripts/test/test_startup_scripts.bats
Executable file
@@ -0,0 +1,484 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# AzerothCore Startup Scripts Test Suite
|
||||
# This script tests the basic functionality of the startup scripts using the unified test framework
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
startup_scripts_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/../src" && pwd)"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== STARTER SCRIPT TESTS =====
|
||||
|
||||
@test "starter: should fail with missing parameters" {
|
||||
run timeout 3s "$SCRIPT_DIR/starter" '' ''
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Error: Binary path and file are required" ]]
|
||||
}
|
||||
|
||||
@test "starter: should start with valid binary" {
|
||||
cd "$TEST_DIR"
|
||||
run timeout 5s "$SCRIPT_DIR/starter" "$TEST_DIR/bin" "test-server" "" "$TEST_DIR/test-server.conf" "" "" 0
|
||||
debug_on_failure
|
||||
# The starter might have issues with the script command, so we check for specific behavior
|
||||
# Either it should succeed or show a specific error we can work with
|
||||
[[ "$output" =~ "Test server starting" ]] || [[ "$output" =~ "script:" ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "starter: should validate binary path exists" {
|
||||
run "$SCRIPT_DIR/starter" "/nonexistent/path" "test-server"
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Binary '/nonexistent/path/test-server' not found" ]]
|
||||
}
|
||||
|
||||
@test "starter: should detect PM2 environment properly" {
|
||||
cd "$TEST_DIR"
|
||||
# Test with AC_LAUNCHED_BY_PM2=1 (should not use script command)
|
||||
AC_LAUNCHED_BY_PM2=1 run timeout 5s "$SCRIPT_DIR/starter" "$TEST_DIR/bin" "test-server" "" "$TEST_DIR/test-server.conf" "" "" 0
|
||||
debug_on_failure
|
||||
# Should start without using script command
|
||||
[[ "$output" =~ "Test server starting" ]]
|
||||
}
|
||||
|
||||
# ===== SIMPLE RESTARTER TESTS =====
|
||||
|
||||
@test "simple-restarter: should fail with missing parameters" {
|
||||
run timeout 3s "$SCRIPT_DIR/simple-restarter" '' ''
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Error: Binary path and file are required" ]]
|
||||
}
|
||||
|
||||
@test "simple-restarter: should fail with missing binary" {
|
||||
run timeout 3s "$SCRIPT_DIR/simple-restarter" "$TEST_DIR/bin" 'nonexistent'
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "not found" ]] || [[ "$output" =~ "terminated with exit code" ]]
|
||||
}
|
||||
|
||||
@test "simple-restarter: should detect starter script" {
|
||||
# Test that it finds the starter script
|
||||
run timeout 1s "$SCRIPT_DIR/simple-restarter" '' ''
|
||||
# Should not fail because starter script is missing
|
||||
[[ ! "$output" =~ "starter script not found" ]]
|
||||
}
|
||||
|
||||
# ===== RUN-ENGINE TESTS =====
|
||||
|
||||
@test "run-engine: should show help" {
|
||||
run "$SCRIPT_DIR/run-engine" help
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "AzerothCore Run Engine" ]]
|
||||
}
|
||||
|
||||
@test "run-engine: should validate parameters for start command" {
|
||||
run "$SCRIPT_DIR/run-engine" start
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Missing required arguments" ]]
|
||||
}
|
||||
|
||||
@test "run-engine: should detect binary with full path" {
|
||||
run timeout 5s "$SCRIPT_DIR/run-engine" start "$TEST_DIR/bin/test-server" --server-config "$TEST_DIR/test-server.conf"
|
||||
debug_on_failure
|
||||
[[ "$output" =~ "Starting server: test-server" ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "run-engine: should detect binary in current directory" {
|
||||
cd "$TEST_DIR/bin"
|
||||
run timeout 5s "$SCRIPT_DIR/run-engine" start test-server --server-config "$TEST_DIR/test-server.conf"
|
||||
debug_on_failure
|
||||
[[ "$output" =~ "Binary found in current directory" ]] || [[ "$output" =~ "Starting server: test-server" ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "run-engine: should support restart mode" {
|
||||
run timeout 5s "$SCRIPT_DIR/run-engine" restart "$TEST_DIR/bin/test-server" --server-config "$TEST_DIR/test-server.conf"
|
||||
debug_on_failure
|
||||
[[ "$output" =~ "Starting server: test-server" ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
# ===== SERVICE MANAGER TESTS =====
|
||||
|
||||
@test "service-manager: should show help" {
|
||||
run "$SCRIPT_DIR/service-manager.sh" help
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "AzerothCore Service Setup" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: should validate create command parameters" {
|
||||
run "$SCRIPT_DIR/service-manager.sh" create
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Missing required arguments" ]] || [[ "$output" =~ "Error:" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: should validate restart policy values" {
|
||||
run "$SCRIPT_DIR/service-manager.sh" create auth test-auth --bin-path /nonexistent --restart-policy invalid
|
||||
[ "$status" -ne 0 ]
|
||||
[[ "$output" =~ "Invalid restart policy" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: should accept valid restart policy values" {
|
||||
# Test on-failure (should be accepted)
|
||||
run "$SCRIPT_DIR/service-manager.sh" create auth test-auth --bin-path /nonexistent --restart-policy on-failure
|
||||
# Should fail due to missing binary, not restart policy validation
|
||||
[[ ! "$output" =~ "Invalid restart policy" ]]
|
||||
|
||||
# Test always (should be accepted)
|
||||
run "$SCRIPT_DIR/service-manager.sh" create auth test-auth2 --bin-path /nonexistent --restart-policy always
|
||||
# Should fail due to missing binary, not restart policy validation
|
||||
[[ ! "$output" =~ "Invalid restart policy" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: should include restart policy in help output" {
|
||||
run "$SCRIPT_DIR/service-manager.sh" help
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "--restart-policy" ]]
|
||||
[[ "$output" =~ "on-failure|always" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: help lists health and console commands" {
|
||||
run "$SCRIPT_DIR/service-manager.sh" help
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "is-running <service-name>" ]]
|
||||
[[ "$output" =~ "uptime-seconds <service-name>" ]]
|
||||
[[ "$output" =~ "wait-uptime <service> <sec>" ]]
|
||||
[[ "$output" =~ "send <service-name>" ]]
|
||||
[[ "$output" =~ "show-config <service-name>" ]]
|
||||
}
|
||||
|
||||
@test "service-manager: pm2 uptime and wait-uptime work with mocked pm2" {
|
||||
command -v jq >/dev/null 2>&1 || skip "jq not installed"
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
# Create registry with pm2 provider service
|
||||
cat > "$AC_SERVICE_CONFIG_DIR/service_registry.json" << 'EOF'
|
||||
[
|
||||
{
|
||||
"name":"test-world",
|
||||
"provider":"pm2",
|
||||
"type":"service",
|
||||
"bin_path":"/bin/worldserver",
|
||||
"args":"",
|
||||
"systemd_type":"--user",
|
||||
"restart_policy":"always",
|
||||
"exec":{
|
||||
"command":"/bin/true",
|
||||
"args":[]
|
||||
}
|
||||
}
|
||||
]
|
||||
EOF
|
||||
# Create minimal service config and run-engine config files required by 'send'
|
||||
echo "RUN_ENGINE_CONFIG_FILE=\"$AC_SERVICE_CONFIG_DIR/test-world-run-engine.conf\"" > "$AC_SERVICE_CONFIG_DIR/test-world.conf"
|
||||
cat > "$AC_SERVICE_CONFIG_DIR/test-world-run-engine.conf" << 'EOF'
|
||||
export SESSION_MANAGER="none"
|
||||
export SESSION_NAME="test-world"
|
||||
EOF
|
||||
# Mock pm2
|
||||
cat > "$TEST_DIR/bin/pm2" << 'EOF'
|
||||
#!/usr/bin/env bash
|
||||
case "$1" in
|
||||
jlist)
|
||||
# Produce a JSON with uptime ~20 seconds
|
||||
if date +%s%N >/dev/null 2>&1; then
|
||||
nowms=$(( $(date +%s%N) / 1000000 ))
|
||||
else
|
||||
nowms=$(( $(date +%s) * 1000 ))
|
||||
fi
|
||||
up=$(( nowms - 20000 ))
|
||||
echo "[{\"name\":\"test-world\",\"pm2_env\":{\"status\":\"online\",\"pm_uptime\":$up}}]"
|
||||
;;
|
||||
id)
|
||||
echo "[1]"
|
||||
;;
|
||||
attach|send|list|describe|logs)
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
EOF
|
||||
chmod +x "$TEST_DIR/bin/pm2"
|
||||
|
||||
run "$SCRIPT_DIR/service-manager.sh" uptime-seconds test-world
|
||||
debug_on_failure
|
||||
[ "$status" -eq 0 ]
|
||||
# Output should be a number >= 10
|
||||
[[ "$output" =~ ^[0-9]+$ ]]
|
||||
[ "$output" -ge 10 ]
|
||||
|
||||
run "$SCRIPT_DIR/service-manager.sh" wait-uptime test-world 10 5
|
||||
debug_on_failure
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "service-manager: send works under pm2 with mocked pm2" {
|
||||
command -v jq >/dev/null 2>&1 || skip "jq not installed"
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
# Create registry and config as in previous test
|
||||
cat > "$AC_SERVICE_CONFIG_DIR/service_registry.json" << 'EOF'
|
||||
[
|
||||
{
|
||||
"name":"test-world",
|
||||
"provider":"pm2",
|
||||
"type":"service",
|
||||
"bin_path":"/bin/worldserver",
|
||||
"args":"",
|
||||
"systemd_type":"--user",
|
||||
"restart_policy":"always",
|
||||
"exec":{
|
||||
"command":"/bin/true",
|
||||
"args":[]
|
||||
}
|
||||
}
|
||||
]
|
||||
EOF
|
||||
echo "RUN_ENGINE_CONFIG_FILE=\"$AC_SERVICE_CONFIG_DIR/test-world-run-engine.conf\"" > "$AC_SERVICE_CONFIG_DIR/test-world.conf"
|
||||
cat > "$AC_SERVICE_CONFIG_DIR/test-world-run-engine.conf" << 'EOF'
|
||||
export SESSION_MANAGER="none"
|
||||
export SESSION_NAME="test-world"
|
||||
EOF
|
||||
# pm2 mock
|
||||
cat > "$TEST_DIR/bin/pm2" << 'EOF'
|
||||
#!/usr/bin/env bash
|
||||
case "$1" in
|
||||
jlist)
|
||||
if date +%s%N >/dev/null 2>&1; then
|
||||
nowms=$(( $(date +%s%N) / 1000000 ))
|
||||
else
|
||||
nowms=$(( $(date +%s) * 1000 ))
|
||||
fi
|
||||
up=$(( nowms - 15000 ))
|
||||
echo "[{\"name\":\"test-world\",\"pm2_env\":{\"status\":\"online\",\"pm_uptime\":$up}}]"
|
||||
;;
|
||||
id)
|
||||
echo "[1]"
|
||||
;;
|
||||
send)
|
||||
# simulate success
|
||||
exit 0
|
||||
;;
|
||||
attach|list|describe|logs)
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
EOF
|
||||
chmod +x "$TEST_DIR/bin/pm2"
|
||||
|
||||
run "$SCRIPT_DIR/service-manager.sh" send test-world "server info"
|
||||
debug_on_failure
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "service-manager: restore helper recreates missing configs" {
|
||||
command -v jq >/dev/null 2>&1 || skip "jq not installed"
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
source "$SCRIPT_DIR/service-manager.sh"
|
||||
|
||||
local service_name="restore-test"
|
||||
local run_engine_config="$AC_SERVICE_CONFIG_DIR/$service_name-run-engine.conf"
|
||||
local service_conf="$AC_SERVICE_CONFIG_DIR/$service_name.conf"
|
||||
rm -f "$run_engine_config" "$service_conf"
|
||||
|
||||
mkdir -p "$TEST_DIR/bin" "$TEST_DIR/etc"
|
||||
touch "$TEST_DIR/bin/worldserver"
|
||||
touch "$TEST_DIR/etc/worldserver.conf"
|
||||
|
||||
ensure_service_configs_restored "$service_name" "world" "systemd" "$TEST_DIR/bin/worldserver" "$TEST_DIR/etc/worldserver.conf" "always" "none" "0" "--user" "" "$run_engine_config"
|
||||
|
||||
[ -f "$run_engine_config" ]
|
||||
[ -f "$service_conf" ]
|
||||
grep -Fq 'export SESSION_MANAGER="none"' "$run_engine_config"
|
||||
grep -Fq 'export BINPATH="'$TEST_DIR'/bin"' "$run_engine_config"
|
||||
grep -Fq "RUN_ENGINE_CONFIG_FILE=\"$run_engine_config\"" "$service_conf"
|
||||
grep -Fq 'RESTART_POLICY="always"' "$service_conf"
|
||||
}
|
||||
|
||||
@test "service-manager: wait-uptime times out for unknown service" {
|
||||
command -v jq >/dev/null 2>&1 || skip "jq not installed"
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
echo "[]" > "$AC_SERVICE_CONFIG_DIR/service_registry.json"
|
||||
run "$SCRIPT_DIR/service-manager.sh" wait-uptime unknown 2 1
|
||||
[ "$status" -ne 0 ]
|
||||
}
|
||||
|
||||
# ===== EXAMPLE SCRIPTS TESTS =====
|
||||
|
||||
@test "examples: restarter-world should show configuration error" {
|
||||
run "$SCRIPT_DIR/examples/restarter-world.sh"
|
||||
[[ "$output" =~ "Configuration file not found" ]]
|
||||
}
|
||||
|
||||
@test "examples: starter-auth should show configuration error" {
|
||||
run "$SCRIPT_DIR/examples/starter-auth.sh"
|
||||
[[ "$output" =~ "Configuration file not found" ]]
|
||||
}
|
||||
|
||||
# ===== PATH PORTABILITY TESTS =====
|
||||
|
||||
@test "service-manager: path conversion functions work correctly" {
|
||||
# Source the service-manager script to access helper functions
|
||||
source "$SCRIPT_DIR/service-manager.sh"
|
||||
|
||||
# Test make_path_relative without AC_SERVICE_CONFIG_DIR
|
||||
unset AC_SERVICE_CONFIG_DIR
|
||||
result=$(make_path_relative "/absolute/path/test")
|
||||
[[ "$result" == "/absolute/path/test" ]]
|
||||
|
||||
# Test make_path_relative with AC_SERVICE_CONFIG_DIR
|
||||
export AC_SERVICE_CONFIG_DIR="/tmp/test-config"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR/subdir"
|
||||
|
||||
result=$(make_path_relative "$AC_SERVICE_CONFIG_DIR/subdir/binary")
|
||||
[[ "$result" == "subdir/binary" ]]
|
||||
|
||||
result=$(make_path_relative "/opt/bin/authserver")
|
||||
[[ "$result" == "../../opt/bin/authserver" ]]
|
||||
|
||||
# Test make_path_absolute
|
||||
result=$(make_path_absolute "subdir/binary")
|
||||
[[ "$result" == "$AC_SERVICE_CONFIG_DIR/subdir/binary" ]]
|
||||
|
||||
result=$(make_path_absolute "../../opt/bin/authserver")
|
||||
[[ "$result" == "/opt/bin/authserver" ]]
|
||||
|
||||
# Test absolute path stays absolute
|
||||
result=$(make_path_absolute "/absolute/path")
|
||||
[[ "$result" == "/absolute/path" ]]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$AC_SERVICE_CONFIG_DIR"
|
||||
unset AC_SERVICE_CONFIG_DIR
|
||||
}
|
||||
|
||||
@test "service-manager: registry stores relative paths when possible" {
|
||||
# Set up test environment
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/service-config"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
|
||||
# Create a temporary service registry in our test directory
|
||||
local test_registry="$AC_SERVICE_CONFIG_DIR/test_registry.json"
|
||||
echo "[]" > "$test_registry"
|
||||
|
||||
# Source the service-manager and override REGISTRY_FILE
|
||||
source "$SCRIPT_DIR/service-manager.sh"
|
||||
REGISTRY_FILE="$test_registry"
|
||||
|
||||
# Create test binary directory under config dir
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR/bin"
|
||||
|
||||
# Test that paths under AC_SERVICE_CONFIG_DIR are stored as relative
|
||||
add_service_to_registry "test-service" "pm2" "auth" "$AC_SERVICE_CONFIG_DIR/bin/authserver" "--config test.conf" "" "always" "none" "0" "" "$AC_SERVICE_CONFIG_DIR/etc/test.conf"
|
||||
|
||||
# Check that paths were stored as relative
|
||||
local stored_bin_path=$(jq -r '.[0].bin_path' "$test_registry")
|
||||
local stored_config_path=$(jq -r '.[0].server_config' "$test_registry")
|
||||
|
||||
[[ "$stored_bin_path" == "bin/authserver" ]]
|
||||
[[ "$stored_config_path" == "etc/test.conf" ]]
|
||||
|
||||
# Test that absolute paths outside config dir are stored as absolute
|
||||
add_service_to_registry "test-service2" "pm2" "auth" "/opt/azerothcore/bin/authserver" "--config test.conf" "" "always" "none" "0" "" "/opt/azerothcore/etc/test.conf"
|
||||
|
||||
local stored_bin_path2=$(jq -r '.[1].bin_path' "$test_registry")
|
||||
local stored_config_path2=$(jq -r '.[1].server_config' "$test_registry")
|
||||
|
||||
local expected_bin_rel=$(make_path_relative "/opt/azerothcore/bin/authserver")
|
||||
local expected_cfg_rel=$(make_path_relative "/opt/azerothcore/etc/test.conf")
|
||||
|
||||
[[ "$stored_bin_path2" == "$expected_bin_rel" ]]
|
||||
[[ "$stored_config_path2" == "$expected_cfg_rel" ]]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$AC_SERVICE_CONFIG_DIR"
|
||||
unset AC_SERVICE_CONFIG_DIR
|
||||
}
|
||||
|
||||
@test "service-manager: restore --sync-only recreates config files" {
|
||||
command -v jq >/dev/null 2>&1 || skip "jq not installed"
|
||||
export AC_SERVICE_CONFIG_DIR="$TEST_DIR/services"
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR"
|
||||
|
||||
cat > "$AC_SERVICE_CONFIG_DIR/service_registry.json" <<'EOF'
|
||||
[
|
||||
{
|
||||
"name": "sync-test",
|
||||
"provider": "pm2",
|
||||
"type": "auth",
|
||||
"bin_path": "bin/authserver",
|
||||
"exec": {
|
||||
"command": "../src/run-engine",
|
||||
"args": [
|
||||
"start",
|
||||
"bin/authserver",
|
||||
"--config",
|
||||
"sync-test-run-engine.conf"
|
||||
]
|
||||
},
|
||||
"args": "",
|
||||
"created": "2025-10-12T20:00:54+02:00",
|
||||
"status": "active",
|
||||
"systemd_type": "--user",
|
||||
"restart_policy": "always",
|
||||
"session_manager": "none",
|
||||
"gdb_enabled": "0",
|
||||
"pm2_opts": " ",
|
||||
"server_config": "etc/authserver.conf"
|
||||
}
|
||||
]
|
||||
EOF
|
||||
|
||||
rm -f "$AC_SERVICE_CONFIG_DIR/sync-test.conf" "$AC_SERVICE_CONFIG_DIR/sync-test-run-engine.conf"
|
||||
|
||||
mkdir -p "$AC_SERVICE_CONFIG_DIR/bin" "$AC_SERVICE_CONFIG_DIR/etc"
|
||||
touch "$AC_SERVICE_CONFIG_DIR/bin/authserver"
|
||||
touch "$AC_SERVICE_CONFIG_DIR/etc/authserver.conf"
|
||||
|
||||
run "$SCRIPT_DIR/service-manager.sh" restore --sync-only
|
||||
debug_on_failure
|
||||
[ "$status" -eq 0 ]
|
||||
|
||||
[ -f "$AC_SERVICE_CONFIG_DIR/sync-test.conf" ]
|
||||
[ -f "$AC_SERVICE_CONFIG_DIR/sync-test-run-engine.conf" ]
|
||||
grep -Fq "RUN_ENGINE_CONFIG_FILE=\"$AC_SERVICE_CONFIG_DIR/sync-test-run-engine.conf\"" "$AC_SERVICE_CONFIG_DIR/sync-test.conf"
|
||||
grep -Fq "export BINPATH=\"$AC_SERVICE_CONFIG_DIR/bin\"" "$AC_SERVICE_CONFIG_DIR/sync-test-run-engine.conf"
|
||||
}
|
||||
|
||||
@test "examples: restarter-auth should show configuration error" {
|
||||
run "$SCRIPT_DIR/examples/restarter-auth.sh"
|
||||
[[ "$output" =~ "Configuration file not found" ]]
|
||||
}
|
||||
|
||||
@test "examples: restarter-world should show alternative suggestions" {
|
||||
run "$SCRIPT_DIR/examples/restarter-world.sh"
|
||||
[[ "$output" =~ "Alternative: Start with binary path directly" ]]
|
||||
}
|
||||
|
||||
# ===== INTEGRATION TESTS =====
|
||||
|
||||
@test "integration: starter and simple-restarter work together" {
|
||||
# Test that simple-restarter can use starter
|
||||
run timeout 5s "$SCRIPT_DIR/simple-restarter" "$TEST_DIR/bin" "test-server"
|
||||
# Should start and then restart at least once
|
||||
[[ "$output" =~ "terminated with exit code" ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "integration: run-engine can handle missing config gracefully" {
|
||||
run timeout 3s "$SCRIPT_DIR/run-engine" start "$TEST_DIR/bin/test-server"
|
||||
# Should either work or give a meaningful error
|
||||
[[ "$status" -eq 124 ]] || [[ "$status" -eq 0 ]] || [[ "$output" =~ "config" ]]
|
||||
}
|
||||
416
apps/test-framework/README.md
Normal file
416
apps/test-framework/README.md
Normal file
@@ -0,0 +1,416 @@
|
||||
# AzerothCore Test Framework
|
||||
|
||||
This is the centralized test framework for all AzerothCore bash scripts. It provides a unified way to write, run, and manage tests across all modules.
|
||||
|
||||
## Structure
|
||||
|
||||
```
|
||||
apps/test-framework/
|
||||
├── test-main.sh # Unified test framework entry point
|
||||
├── run-bash-tests.sh # Bash test runner for BATS tests
|
||||
├── run-core-tests.sh # AzerothCore unit test runner
|
||||
├── README.md # This documentation
|
||||
├── bats_libs/ # Custom BATS libraries
|
||||
│ ├── acore-support.bash # Test setup and helpers
|
||||
│ └── acore-assert.bash # Custom assertions
|
||||
└── helpers/ # Test utilities
|
||||
└── test_common.sh # Common test functions and setup
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Using acore.sh (Recommended):
|
||||
```bash
|
||||
# Run the unified test framework (interactive menu)
|
||||
./acore.sh test
|
||||
|
||||
# Run bash tests directly
|
||||
./acore.sh test bash --all
|
||||
|
||||
# Run AzerothCore unit tests
|
||||
./acore.sh test core
|
||||
```
|
||||
|
||||
### From any module directory:
|
||||
```bash
|
||||
# Run tests for current module
|
||||
../test-framework/run-bash-tests.sh --dir .
|
||||
|
||||
```
|
||||
|
||||
### From test-framework directory:
|
||||
```bash
|
||||
# Run all tests in all modules
|
||||
./run-bash-tests.sh --all
|
||||
|
||||
# Run tests for specific module
|
||||
./run-bash-tests.sh startup-scripts
|
||||
|
||||
# List available modules
|
||||
./run-bash-tests.sh --list
|
||||
|
||||
# Run tests with debug info
|
||||
./run-bash-tests.sh --all --debug
|
||||
```
|
||||
|
||||
### From project root:
|
||||
```bash
|
||||
# Run all tests
|
||||
apps/test-framework/run-bash-tests.sh --all
|
||||
|
||||
# Run specific module
|
||||
apps/test-framework/run-bash-tests.sh startup-scripts
|
||||
|
||||
# Run with verbose output
|
||||
apps/test-framework/run-bash-tests.sh startup-scripts --verbose
|
||||
```
|
||||
|
||||
## Test Types
|
||||
|
||||
The framework now supports two types of tests:
|
||||
|
||||
1. **Bash Tests** - BATS-based tests for bash scripts and functionality
|
||||
2. **Core Tests** - AzerothCore C++ unit tests
|
||||
|
||||
### Unified Test Framework
|
||||
|
||||
The test framework provides a unified entry point through `test-main.sh` which presents an interactive menu:
|
||||
|
||||
- **bash**: Run BATS-based bash script tests
|
||||
- **core**: Run AzerothCore C++ unit tests
|
||||
- **quit**: Exit the test framework
|
||||
|
||||
```bash
|
||||
# Interactive test menu
|
||||
./acore.sh test
|
||||
|
||||
# Direct test execution
|
||||
./acore.sh test bash --all # Run all bash tests
|
||||
./acore.sh test core # Run core unit tests
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Commands
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
./run-bash-tests.sh --all
|
||||
|
||||
# Run tests for specific module
|
||||
./run-bash-tests.sh startup-scripts
|
||||
|
||||
# Run tests matching pattern
|
||||
./run-bash-tests.sh --filter starter
|
||||
|
||||
# Run tests in specific directory
|
||||
./run-bash-tests.sh --dir apps/docker
|
||||
|
||||
# Show available modules
|
||||
./run-bash-tests.sh --list
|
||||
|
||||
# Show test count
|
||||
./run-bash-tests.sh --count
|
||||
```
|
||||
|
||||
### Output Formats
|
||||
|
||||
```bash
|
||||
# Pretty output (default)
|
||||
./run-bash-tests.sh --pretty
|
||||
|
||||
# TAP output for CI/CD
|
||||
./run-bash-tests.sh --tap
|
||||
|
||||
# Verbose output with debug info
|
||||
./run-bash-tests.sh --verbose --debug
|
||||
```
|
||||
|
||||
## Writing Tests
|
||||
|
||||
### Basic Test Structure
|
||||
|
||||
```bash
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
setup() {
|
||||
acore_test_setup # Standard setup
|
||||
# or
|
||||
startup_scripts_setup # For startup scripts
|
||||
# or
|
||||
compiler_setup # For compiler tests
|
||||
# or
|
||||
docker_setup # For docker tests
|
||||
}
|
||||
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
@test "my test description" {
|
||||
run my_command
|
||||
assert_success
|
||||
assert_output "expected output"
|
||||
}
|
||||
```
|
||||
|
||||
### Available Setup Functions
|
||||
|
||||
- `acore_test_setup` - Basic setup for all tests
|
||||
- `startup_scripts_setup` - Setup for startup script tests
|
||||
- `compiler_setup` - Setup for compiler tests
|
||||
- `docker_setup` - Setup for docker tests
|
||||
- `extractor_setup` - Setup for extractor tests
|
||||
|
||||
### Custom Assertions
|
||||
|
||||
```bash
|
||||
# Assert binary exists and is executable
|
||||
assert_binary_exists "$TEST_DIR/bin/authserver"
|
||||
|
||||
# Assert server started correctly
|
||||
assert_acore_server_started "$output" "authserver"
|
||||
|
||||
# Assert config was loaded
|
||||
assert_config_loaded "$output" "authserver.conf"
|
||||
|
||||
# Assert build success
|
||||
assert_build_success "$output"
|
||||
|
||||
# Assert timeout occurred (for long-running processes)
|
||||
assert_timeout "$status"
|
||||
|
||||
# Assert log contains content
|
||||
assert_log_contains "$log_file" "Server started"
|
||||
```
|
||||
|
||||
### Test Environment Variables
|
||||
|
||||
When using the framework, these variables are automatically set:
|
||||
|
||||
- `$TEST_DIR` - Temporary test directory
|
||||
- `$AC_TEST_ROOT` - Project root directory
|
||||
- `$AC_TEST_APPS` - Apps directory
|
||||
- `$BUILDPATH` - Build directory path
|
||||
- `$SRCPATH` - Source directory path
|
||||
- `$BINPATH` - Binary directory path
|
||||
- `$LOGS_PATH` - Logs directory path
|
||||
|
||||
### Helper Functions
|
||||
|
||||
```bash
|
||||
# Create test binary
|
||||
create_test_binary "authserver" 0 2 "Server started"
|
||||
|
||||
# Create test config
|
||||
create_test_config "authserver.conf" "Database.Info = \"127.0.0.1;3306;root;pass;db\""
|
||||
|
||||
# Create AzerothCore specific binaries and configs
|
||||
create_acore_binaries
|
||||
create_acore_configs
|
||||
|
||||
# Run command with timeout
|
||||
run_with_timeout 5s my_command
|
||||
|
||||
# Wait for condition
|
||||
wait_for_condition "test -f $TEST_DIR/ready" 10 1
|
||||
|
||||
# Debug test failure
|
||||
debug_on_failure
|
||||
```
|
||||
|
||||
## Module Integration
|
||||
|
||||
### Adding Tests to a New Module
|
||||
|
||||
1. Create a `test/` directory in your module:
|
||||
```bash
|
||||
mkdir apps/my-module/test
|
||||
```
|
||||
|
||||
2. Create test files (ending in `.bats`):
|
||||
```bash
|
||||
touch apps/my-module/test/test_my_feature.bats
|
||||
```
|
||||
|
||||
3. Write your tests using the framework (see examples above)
|
||||
|
||||
### Running Tests
|
||||
|
||||
From your module directory:
|
||||
```bash
|
||||
../test-framework/run-bash-tests.sh --dir .
|
||||
```
|
||||
|
||||
From the test framework:
|
||||
```bash
|
||||
./run-bash-tests.sh my-module
|
||||
```
|
||||
|
||||
From project root:
|
||||
```bash
|
||||
apps/test-framework/run-bash-tests.sh my-module
|
||||
```
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
For continuous integration, use TAP output:
|
||||
|
||||
```bash
|
||||
# Recommended: Use acore.sh integration
|
||||
./acore.sh test bash --tap --all > test-results.tap
|
||||
|
||||
# Direct script usage
|
||||
cd apps/test-framework
|
||||
./run-bash-tests.sh --all --tap > test-results.tap
|
||||
|
||||
# Or from project root
|
||||
apps/test-framework/run-bash-tests.sh --all --tap > test-results.tap
|
||||
|
||||
# Run core unit tests in CI
|
||||
./acore.sh test core
|
||||
```
|
||||
|
||||
## Core Tests
|
||||
|
||||
The framework now includes support for AzerothCore's C++ unit tests through `run-core-tests.sh`:
|
||||
|
||||
```bash
|
||||
# Run core unit tests
|
||||
./acore.sh test core
|
||||
|
||||
# Direct script usage
|
||||
apps/test-framework/run-core-tests.sh
|
||||
```
|
||||
|
||||
**Prerequisites for Core Tests:**
|
||||
- Project must be built with unit tests enabled (`CBUILD_TESTING="ON"` inside `conf/config.sh` that works with the acore.sh compiler)
|
||||
- Unit test binary should be available at `$BUILDPATH/src/test/unit_tests`
|
||||
|
||||
The core test runner will:
|
||||
1. Check if the unit test binary exists
|
||||
2. Execute the AzerothCore unit tests
|
||||
3. Return appropriate exit codes for CI/CD integration
|
||||
|
||||
## Available Commands
|
||||
|
||||
### Unified Test Framework Commands
|
||||
|
||||
Recommended usage through `acore.sh`:
|
||||
- `./acore.sh test` - Interactive test framework menu
|
||||
- `./acore.sh test bash [options]` - Run bash tests with options
|
||||
- `./acore.sh test core` - Run AzerothCore unit tests
|
||||
|
||||
### Bash Test Commands
|
||||
|
||||
All bash test functionality is available through the `run-bash-tests.sh` script:
|
||||
|
||||
### Basic Test Execution
|
||||
- `./run-bash-tests.sh --all` - Run all tests in all modules
|
||||
- `./run-bash-tests.sh <module>` - Run tests for specific module
|
||||
- `./run-bash-tests.sh --dir <path>` - Run tests in specific directory
|
||||
- `./run-bash-tests.sh --list` - List available modules
|
||||
- `./run-bash-tests.sh --count` - Show test count
|
||||
|
||||
### Output Control
|
||||
- `./run-bash-tests.sh --verbose` - Verbose output with debug info
|
||||
- `./run-bash-tests.sh --tap` - TAP output for CI/CD
|
||||
- `./run-bash-tests.sh --debug` - Debug mode with failure details
|
||||
- `./run-bash-tests.sh --pretty` - Pretty output (default)
|
||||
|
||||
### Test Filtering
|
||||
- `./run-bash-tests.sh --filter <pattern>` - Run tests matching pattern
|
||||
- `./run-bash-tests.sh <module> --filter <pattern>` - Filter within module
|
||||
|
||||
### Utility Functions
|
||||
- `./run-bash-tests.sh --help` - Show help message
|
||||
- Install BATS: Use your system package manager (`apt install bats`, `brew install bats-core`, etc.)
|
||||
|
||||
|
||||
### Direct Script Usage
|
||||
|
||||
## Examples
|
||||
|
||||
### Running Specific Tests
|
||||
```bash
|
||||
# Run only starter-related tests
|
||||
./run-bash-tests.sh --filter starter
|
||||
|
||||
# Run only tests in startup-scripts module
|
||||
./run-bash-tests.sh startup-scripts
|
||||
|
||||
# Run all tests with verbose output
|
||||
./run-bash-tests.sh --all --verbose
|
||||
|
||||
# Run tests in specific directory with debug
|
||||
./run-bash-tests.sh --dir apps/docker --debug
|
||||
```
|
||||
|
||||
### Development Workflow
|
||||
```bash
|
||||
# Recommended: Use acore.sh for unified testing
|
||||
./acore.sh test # Interactive menu
|
||||
./acore.sh test bash --all # All bash tests
|
||||
./acore.sh test core # Core unit tests
|
||||
|
||||
# While developing, run tests frequently from module directory
|
||||
cd apps/my-module
|
||||
../test-framework/run-bash-tests.sh --dir .
|
||||
|
||||
# Debug failing tests
|
||||
../test-framework/run-bash-tests.sh --dir . --debug --verbose
|
||||
|
||||
# Run specific test pattern
|
||||
../test-framework/run-bash-tests.sh --dir . --filter my-feature
|
||||
|
||||
# From project root - run all tests
|
||||
./acore.sh test bash --all # Recommended
|
||||
apps/test-framework/run-bash-tests.sh --all # Direct
|
||||
|
||||
# Quick test count check
|
||||
./acore.sh test bash --count # Recommended
|
||||
apps/test-framework/run-bash-tests.sh --count # Direct
|
||||
```
|
||||
|
||||
## Benefits
|
||||
|
||||
1. **No Boilerplate**: Minimal setup required for new test modules
|
||||
2. **Consistent Environment**: All tests use the same setup/teardown
|
||||
3. **Reusable Utilities**: Common functions available across all tests
|
||||
4. **Centralized Management**: Single place to update test infrastructure
|
||||
5. **Flexible Execution**: Run tests for one module, multiple modules, or all modules
|
||||
6. **CI/CD Ready**: TAP output format supported
|
||||
7. **Easy Debugging**: Built-in debug helpers and verbose output
|
||||
|
||||
## Dependencies
|
||||
|
||||
- [BATS (Bash Automated Testing System)](https://github.com/bats-core/bats-core)
|
||||
- Standard Unix utilities (find, grep, timeout, etc.)
|
||||
|
||||
Install BATS with your system package manager:
|
||||
```bash
|
||||
# Ubuntu/Debian
|
||||
sudo apt update && sudo apt install bats
|
||||
|
||||
# Fedora/RHEL
|
||||
sudo dnf install bats
|
||||
|
||||
# macOS
|
||||
brew install bats-core
|
||||
|
||||
# Arch Linux
|
||||
sudo pacman -S bats
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
When adding new test utilities:
|
||||
|
||||
1. Add common functions to `helpers/test_common.sh`
|
||||
2. Add BATS-specific helpers to `bats_libs/acore-support.bash`
|
||||
3. Add custom assertions to `bats_libs/acore-assert.bash`
|
||||
4. Update this README with new functionality
|
||||
178
apps/test-framework/bats_libs/acore-assert.bash
Normal file
178
apps/test-framework/bats_libs/acore-assert.bash
Normal file
@@ -0,0 +1,178 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# AzerothCore BATS Assertions Library
|
||||
# Custom assertions for AzerothCore testing
|
||||
|
||||
# Assert that a binary exists and is executable
|
||||
# Verify that a path points to an existing, executable binary.
# $1 - binary path, $2 - optional failure message.
# On failure prints a diagnostic plus the message and returns 1.
assert_binary_exists() {
  local bin="$1"
  local failure_note="${2:-Binary should exist and be executable}"
  local problem=""

  if [[ ! -f "$bin" ]]; then
    problem="Binary not found: $bin"
  elif [[ ! -x "$bin" ]]; then
    problem="Binary not executable: $bin"
  fi

  if [[ -n "$problem" ]]; then
    echo "$problem"
    echo "$failure_note"
    return 1
  fi
}
|
||||
|
||||
# Assert that output contains specific AzerothCore patterns
|
||||
# Assert that captured output contains "<server_type>…starting".
# $1 - output, $2 - server type, $3 - optional failure message.
assert_acore_server_started() {
  local out="$1" kind="$2"
  local note="${3:-Server should show startup message}"

  [[ "$out" =~ $kind.*starting ]] && return 0

  echo "Server start message not found for $kind"
  echo "Expected pattern: '$kind.*starting'"
  echo "Actual output: $out"
  echo "$note"
  return 1
}

# Assert that captured output mentions the given configuration file,
# optionally preceded by the word "config".
# $1 - output, $2 - config file name, $3 - optional failure message.
assert_config_loaded() {
  local out="$1" conf="$2"
  local note="${3:-Configuration file should be loaded}"

  # Either pattern suffices (De Morgan of the original negative check)
  if [[ "$out" =~ config.*$conf ]] || [[ "$out" =~ $conf ]]; then
    return 0
  fi

  echo "Configuration file loading not detected: $conf"
  echo "Expected to find: config.*$conf OR $conf"
  echo "Actual output: $out"
  echo "$note"
  return 1
}
|
||||
|
||||
# Assert that a process exited with expected code
|
||||
# Assert that an actual numeric exit code equals the expected one.
# $1 - actual code, $2 - expected code, $3 - optional failure message.
assert_exit_code() {
  local got="$1" want="$2"
  local note="${3:-Process should exit with expected code}"

  if (( got != want )); then
    echo "Expected exit code: $want"
    echo "Actual exit code: $got"
    echo "$note"
    return 1
  fi
}

# Assert that output matches an expected error regex.
# $1 - output, $2 - ERE pattern, $3 - optional failure message.
assert_error_message() {
  local out="$1" pattern="$2"
  local note="${3:-Output should contain expected error message}"

  [[ "$out" =~ $pattern ]] && return 0

  echo "Expected error pattern not found: $pattern"
  echo "Actual output: $out"
  echo "$note"
  return 1
}
|
||||
|
||||
# Assert that a file was created
|
||||
# Fail unless the given regular file exists.
# $1 - file path, $2 - optional failure message.
assert_file_created() {
  local target="$1"
  local note="${2:-File should be created}"

  [[ -f "$target" ]] && return 0
  echo "File not created: $target"
  echo "$note"
  return 1
}

# Fail unless the given directory exists.
# $1 - directory path, $2 - optional failure message.
assert_directory_created() {
  local target="$1"
  local note="${2:-Directory should be created}"

  [[ -d "$target" ]] && return 0
  echo "Directory not created: $target"
  echo "$note"
  return 1
}
|
||||
|
||||
# Assert that output contains success message
|
||||
# Assert that output matches a success regex (default covers common verbs).
# $1 - output, $2 - optional pattern, $3 - optional failure message.
assert_success_message() {
  local out="$1"
  local pattern="${2:-success|completed|finished|done}"
  local note="${3:-Output should contain success message}"

  [[ "$out" =~ $pattern ]] && return 0

  echo "Success message not found"
  echo "Expected pattern: $pattern"
  echo "Actual output: $out"
  echo "$note"
  return 1
}

# Assert that build output reports success; thin wrapper around
# assert_success_message with build-specific patterns.
assert_build_success() {
  local out="$1"
  local note="${2:-Build should complete successfully}"
  local patterns="Build completed|compilation successful|build.*success|make.*success"

  assert_success_message "$out" "$patterns" "$note"
}
|
||||
|
||||
# Assert that server is responsive
|
||||
# Assert that output shows "<type>…initialized" or "<type>…ready".
# $1 - output, $2 - server type, $3 - optional failure message.
assert_server_responsive() {
  local out="$1" kind="$2"
  local note="${3:-Server should be responsive}"

  if [[ "$out" =~ $kind.*initialized ]] || [[ "$out" =~ $kind.*ready ]]; then
    return 0
  fi

  echo "Server responsiveness not detected for $kind"
  echo "Expected pattern: '$kind.*initialized' OR '$kind.*ready'"
  echo "Actual output: $out"
  echo "$note"
  return 1
}

# Assert that an exit status is 124, the code coreutils `timeout` uses
# when the time limit expires.
# $1 - exit code, $2 - optional failure message.
assert_timeout() {
  local code="$1"
  local note="${2:-Process should timeout as expected}"

  (( code == 124 )) && return 0

  echo "Expected timeout (exit code 124)"
  echo "Actual exit code: $code"
  echo "$note"
  return 1
}
|
||||
|
||||
# Assert that log file contains expected content
|
||||
# Assert that a log file exists and contains a line matching the pattern.
# $1 - log file path, $2 - grep pattern, $3 - optional failure message.
# On failure prints diagnostics plus the first 20 log lines and returns 1.
assert_log_contains() {
  local log_file="$1"
  local expected_content="$2"
  local message="${3:-Log file should contain expected content}"

  if [[ ! -f "$log_file" ]]; then
    echo "Log file not found: $log_file"
    echo "$message"
    return 1
  fi

  # `--` protects patterns that start with a dash from being parsed as options
  if ! grep -q -- "$expected_content" "$log_file"; then
    echo "Expected content not found in log: $expected_content"
    echo "Log file: $log_file"
    echo "Log contents:"
    # head reads the file directly; the original piped through `cat` needlessly
    head -n 20 "$log_file"
    echo "$message"
    return 1
  fi
}
|
||||
116
apps/test-framework/bats_libs/acore-support.bash
Normal file
116
apps/test-framework/bats_libs/acore-support.bash
Normal file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# AzerothCore BATS Support Library
|
||||
# Additional helper functions for BATS testing
|
||||
|
||||
# Load common test utilities
|
||||
source "$(dirname "${BASH_SOURCE[0]}")/../helpers/test_common.sh"
|
||||
|
||||
# Standard setup for all AzerothCore tests
|
||||
# Standard per-test setup: scratch environment plus mock AzerothCore
# binaries and configuration files (helpers from test_common.sh).
acore_test_setup() {
  setup_test_env
  create_acore_binaries
  create_acore_configs
}

# Standard per-test teardown: drop the scratch environment again.
acore_test_teardown() {
  cleanup_test_env
}
|
||||
|
||||
# Quick setup for startup script tests
|
||||
# Setup tailored to the startup-script tests: base environment, a
# "test-server" mock binary, its script config, and a static
# test-server.conf inside $TEST_DIR.
startup_scripts_setup() {
  acore_test_setup
  create_test_script_config "test" "test-server"

  # Extra mock binary exercised by the startup scripts under test
  create_test_binary "test-server" 0 2 "Test server starting with config:"

  # Static config the tests read; no expansions needed, so the heredoc
  # delimiter is quoted to make the literal intent explicit.
  cat > "$TEST_DIR/test-server.conf" << 'EOF'
# Test server configuration file
# Generated by AzerothCore test framework
Database.Info = "127.0.0.1;3306;acore;acore;acore_world"
LoginDatabaseInfo = "127.0.0.1;3306;acore;acore;acore_auth"
CharacterDatabaseInfo = "127.0.0.1;3306;acore;acore;acore_characters"
EOF
}
|
||||
|
||||
# Quick setup for compiler tests
|
||||
# Setup for compiler tests: base environment, mock toolchain binaries and a
# minimal CMake cache under $TEST_DIR/build.
compiler_setup() {
  acore_test_setup

  # Mock build tools (name, exit code, runtime seconds)
  create_test_binary "gcc" 0 1
  create_test_binary "g++" 0 1
  create_test_binary "ninja" 0 2

  # Minimal CMake cache. The redirect creates the file itself, so the
  # original `touch` immediately before it was redundant and was removed.
  mkdir -p "$TEST_DIR/build"
  echo "CMAKE_BUILD_TYPE:STRING=RelWithDebInfo" > "$TEST_DIR/build/CMakeCache.txt"
}
|
||||
|
||||
# Quick setup for docker tests
|
||||
# Setup for docker tests: base environment, mock container CLIs and sample
# build files inside $TEST_DIR.
docker_setup() {
  acore_test_setup

  # Mock container tooling with canned output
  create_test_binary "docker" 0 1 "Docker container started"
  create_test_binary "docker-compose" 0 2 "Docker Compose services started"

  # Sample Dockerfile (literal heredoc, no expansion)
  cat > "$TEST_DIR/Dockerfile" << 'EOF'
FROM ubuntu:20.04
RUN apt-get update
EOF

  # Sample compose file (literal heredoc, no expansion)
  cat > "$TEST_DIR/docker-compose.yml" << 'EOF'
version: '3.8'
services:
  test-service:
    image: ubuntu:20.04
EOF
}
|
||||
|
||||
# Quick setup for extractor tests
|
||||
# Setup for extractor tests: base environment plus a fake client-data tree.
extractor_setup() {
  acore_test_setup

  # Mock client data layout expected by the extractors
  mkdir -p "$TEST_DIR/client"/{Maps,vmaps,mmaps,dbc}

  # Seed a couple of data files so non-empty-directory checks pass
  echo "Test map data" > "$TEST_DIR/client/Maps/test.map"
  echo "Test DBC data" > "$TEST_DIR/client/dbc/test.dbc"
}
|
||||
|
||||
# Helper to run command with timeout and capture output
|
||||
# Run a command under coreutils `timeout` via the BATS `run` helper.
# $1 - timeout duration, remaining args - command line.
# NOTE(review): relies on BATS providing `run`; only usable inside tests.
run_with_timeout() {
  local limit="$1"
  shift
  run timeout "$limit" "$@"
}

# Return success if any running process matches the given pattern
# (full command-line match, all output suppressed).
process_running() {
  local pattern="$1"
  pgrep -f "$pattern" >/dev/null 2>&1
}
|
||||
|
||||
# Helper to wait for a condition
|
||||
# Poll an eval'd condition until it succeeds or a timeout elapses.
# $1 - condition string (passed to eval), $2 - timeout seconds (default 10),
# $3 - poll interval in seconds (default 1). Returns 1 on timeout.
wait_for_condition() {
  local condition="$1"
  local limit="${2:-10}"
  local step="${3:-1}"
  local waited=0

  until eval "$condition"; do
    sleep "$step"
    waited=$((waited + step))
    if (( waited >= limit )); then
      return 1
    fi
  done
  return 0
}
|
||||
143
apps/test-framework/helpers/test_common.sh
Normal file
143
apps/test-framework/helpers/test_common.sh
Normal file
@@ -0,0 +1,143 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# AzerothCore Test Common Utilities
|
||||
# Shared functions and setup for all BATS tests
|
||||
|
||||
# Framework version, exported so child processes and tests can report it.
export AC_TEST_FRAMEWORK_VERSION="1.0.0"

# Resolve the framework directory (parent of helpers/) and the repo root.
AC_TEST_FRAMEWORK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
AC_PROJECT_ROOT="$(cd "$AC_TEST_FRAMEWORK_DIR/../.." && pwd)"
|
||||
|
||||
# Common test environment setup
|
||||
# Create an isolated scratch environment for a single test run.
# Exports TEST_DIR plus the standard AzerothCore path variables and puts
# the scratch bin/ first in PATH (previous PATH saved in ORIGINAL_PATH so
# cleanup_test_env can restore it).
setup_test_env() {
  # Split assignment from export so a mktemp failure is not masked (SC2155)
  TEST_DIR="$(mktemp -d)" || return 1
  export TEST_DIR
  export AC_TEST_ROOT="$AC_PROJECT_ROOT"
  export AC_TEST_APPS="$AC_TEST_ROOT/apps"

  # Standard directory layout expected by the mocks and scripts under test
  mkdir -p "$TEST_DIR"/{bin,etc,logs,data,crashes,build}

  # Mock binaries must shadow real ones; remember the old PATH for teardown
  export ORIGINAL_PATH="$PATH"
  export PATH="$TEST_DIR/bin:$PATH"

  # Locations consumed by the AzerothCore bash tooling
  export BUILDPATH="$TEST_DIR/build"
  export SRCPATH="$AC_TEST_ROOT"
  export BINPATH="$TEST_DIR/bin"
  export LOGS_PATH="$TEST_DIR/logs"
}
|
||||
|
||||
# Undo setup_test_env: remove the scratch directory and restore the saved
# PATH. ${VAR:-} guards keep this safe under `set -u` or when setup never
# ran; `--` protects rm from paths that start with a dash.
cleanup_test_env() {
  if [[ -n "${TEST_DIR:-}" && -d "${TEST_DIR:-}" ]]; then
    rm -rf -- "$TEST_DIR"
  fi
  if [[ -n "${ORIGINAL_PATH:-}" ]]; then
    export PATH="$ORIGINAL_PATH"
  fi
}
|
||||
|
||||
# Create standard test binary
|
||||
# Generate a mock executable in $TEST_DIR/bin.
# $1 - name, $2 - exit code (default 0), $3 - runtime seconds (default 2),
# $4 - extra line to print (default none).
# The heredoc delimiter is deliberately unquoted: name/runtime/exit code and
# the extra line are baked into the generated script at creation time, while
# the escaped \$2 remains a runtime positional of the generated script.
create_test_binary() {
  local name="$1"
  local rc="${2:-0}"
  local secs="${3:-2}"
  local extra="${4:-""}"

  cat > "$TEST_DIR/bin/$name" << EOF
#!/usr/bin/env bash
echo "$name starting with config: \$2"
echo "$name running for $secs seconds..."
if [[ -n "$extra" ]]; then
echo "$extra"
fi
sleep $secs
echo "$name exiting with code $rc"
exit $rc
EOF
  chmod +x "$TEST_DIR/bin/$name"
}
|
||||
|
||||
# Create test configuration file
|
||||
# Write a configuration file into $TEST_DIR/etc with a generated header.
# $1 - file name, $2 - body text (expanded via the unquoted heredoc).
create_test_config() {
  local file="$1"
  local body="$2"

  cat > "$TEST_DIR/etc/$file" << EOF
# Test configuration file: $file
# Generated by AzerothCore test framework
$body
EOF
}
|
||||
|
||||
# Create AzerothCore specific test binaries
|
||||
# Generate the full set of mock AzerothCore executables
# (arguments: name, exit code, runtime seconds, banner line).
create_acore_binaries() {
  create_test_binary "authserver" 0 1 "AuthServer initialized"
  create_test_binary "worldserver" 0 2 "WorldServer initialized"
  create_test_binary "cmake" 0 1 "CMake configured"
  create_test_binary "make" 0 2 "Build completed"
  create_test_binary "mapextractor" 0 3 "Map extraction completed"
  create_test_binary "vmap4extractor" 0 2 "VMap extraction completed"
  create_test_binary "vmap4assembler" 0 1 "VMap assembly completed"
  create_test_binary "mmaps_generator" 0 5 "MMap generation completed"
}

# Generate the matching mock configuration files in $TEST_DIR/etc.
create_acore_configs() {
  create_test_config "authserver.conf" 'Database.Info = "127.0.0.1;3306;acore;acore;acore_auth"
LoginDatabaseInfo = "127.0.0.1;3306;acore;acore;acore_auth"'

  create_test_config "worldserver.conf" 'Database.Info = "127.0.0.1;3306;acore;acore;acore_world"
LoginDatabaseInfo = "127.0.0.1;3306;acore;acore;acore_auth"
CharacterDatabaseInfo = "127.0.0.1;3306;acore;acore;acore_characters"'

  create_test_config "config.sh" "export BUILDPATH=\"$TEST_DIR/build\"
export SRCPATH=\"$AC_TEST_ROOT\"
export BINPATH=\"$TEST_DIR/bin\"
export LOGS_PATH=\"$TEST_DIR/logs\""
}
|
||||
|
||||
# Create a test script configuration (for startup scripts)
|
||||
# Write a conf-<name>.sh consumed by the startup scripts under test.
# $1 - script name, $2 - server binary name (default authserver).
# Unquoted heredoc: paths and names are expanded at generation time.
create_test_script_config() {
  local script="$1"
  local server="${2:-authserver}"

  cat > "$TEST_DIR/conf-$script.sh" << EOF
export BINPATH="$TEST_DIR/bin"
export SERVERBIN="$server"
export CONFIG="$TEST_DIR/etc/$server.conf"
export LOGS_PATH="$TEST_DIR/logs"
export LOG_PREFIX_NAME="$script"
export SCREEN_NAME="AC-$script"
export GDB_ENABLED=0
export WITH_CONSOLE=1
EOF
}
|
||||
|
||||
# Debug helper function
|
||||
# Dump failure context to fd 3 (the BATS diagnostics stream) after a `run`.
# Relies on BATS setting $status and $output; no-op when the command
# succeeded. NOTE(review): assumes fd 3 is open — only call inside BATS.
debug_on_failure() {
  if [[ "$status" -ne 0 ]]; then
    echo "Command failed with status: $status" >&3
    echo "Output was:" >&3
    echo "$output" >&3
    if [[ -n "$TEST_DIR" ]]; then
      echo "Test directory contents:" >&3
      ls -la "$TEST_DIR" >&3 2>/dev/null || true
    fi
  fi
}

# Print the key test-environment variables to fd 3 for debugging.
print_test_env() {
  echo "Test Environment:" >&3
  echo "  TEST_DIR: $TEST_DIR" >&3
  echo "  AC_TEST_ROOT: $AC_TEST_ROOT" >&3
  echo "  AC_TEST_APPS: $AC_TEST_APPS" >&3
  echo "  PATH: $PATH" >&3
}

# True when running under BATS or inside a framework-created scratch dir.
# ${VAR:-} keeps the check safe under `set -u`, since both variables are
# legitimately unset outside a test run (the original expansion would abort).
is_test_mode() {
  [[ -n "${BATS_TEST_FILENAME:-}" ]] || [[ -n "${TEST_DIR:-}" ]]
}
|
||||
314
apps/test-framework/run-bash-tests.sh
Executable file
314
apps/test-framework/run-bash-tests.sh
Executable file
@@ -0,0 +1,314 @@
|
||||
#!/usr/bin/env bash

# AzerothCore Universal Test Runner
# Unified entry point for running BATS tests across all modules.

# Resolve our own location and the repository root
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# Detect the core count for parallel execution unless the caller pinned it.
# ${VAR:-} keeps the probe safe if the script ever runs under `set -u`.
if [[ -z "${ACORE_TEST_CORES:-}" ]]; then
  if command -v nproc >/dev/null 2>&1; then
    ACORE_TEST_CORES=$(nproc)              # Linux / coreutils
  elif command -v sysctl >/dev/null 2>&1; then
    ACORE_TEST_CORES=$(sysctl -n hw.ncpu)  # macOS / BSD
  else
    ACORE_TEST_CORES=1                     # fallback when detection fails
  fi
  export ACORE_TEST_CORES
fi
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Print usage, options, known modules and examples to stdout.
show_help() {
  printf '%b\n' "${BLUE}AzerothCore Universal Test Runner${NC}"
  cat << EOF

Usage: $0 [OPTIONS] [TEST_MODULES...]

Options:
  -h, --help       Show this help message
  -v, --verbose    Enable verbose output
  -t, --tap        Use TAP output format (for CI/CD)
  -p, --pretty     Use pretty output format (default)
  -f, --filter     Run only tests matching pattern
  -c, --count      Show test count only
  -d, --debug      Enable debug mode (shows output on failure)
  -l, --list       List available test modules
  -j, --jobs <num> Set number of parallel jobs (default: $ACORE_TEST_CORES)
  --dir <path>     Run tests in specific directory
  --all            Run all tests in all modules

Test Modules:
  startup-scripts - Startup script tests
  compiler - Compiler script tests
  docker - Docker-related tests
  installer - Installer script tests

Examples:
  $0 # Run tests in current directory
  $0 --all # Run all tests in all modules
  $0 startup-scripts # Run startup-scripts tests only
  $0 --dir apps/docker # Run tests in specific directory
  $0 --verbose startup-scripts # Run with verbose output
  $0 --filter starter # Run only tests matching 'starter'
  $0 --tap # Output in TAP format for CI
EOF
}
|
||||
|
||||
# ---- Command line parsing ------------------------------------------------
# Flags default to a "pretty, local run" of the current directory.
VERBOSE=false
TAP=false
PRETTY=true
FILTER=""
COUNT_ONLY=false
DEBUG=false
LIST_MODULES=false
RUN_ALL=false
TEST_DIRS=()
TEST_MODULES=()

while [[ $# -gt 0 ]]; do
  case $1 in
    -h|--help)
      show_help; exit 0 ;;
    -v|--verbose)
      VERBOSE=true; shift ;;
    -t|--tap)
      TAP=true; PRETTY=false; shift ;;
    -p|--pretty)
      PRETTY=true; TAP=false; shift ;;
    -f|--filter)
      FILTER="$2"; shift 2 ;;
    -c|--count)
      COUNT_ONLY=true; shift ;;
    -d|--debug)
      DEBUG=true; shift ;;
    -l|--list)
      LIST_MODULES=true; shift ;;
    --dir)
      TEST_DIRS+=("$2"); shift 2 ;;
    --all)
      RUN_ALL=true; shift ;;
    -j|--jobs)
      # Accept only a plain non-negative integer job count
      if [[ "$2" =~ ^[0-9]+$ ]]; then
        ACORE_TEST_CORES="$2"
        export ACORE_TEST_CORES
        shift 2
      else
        echo -e "${RED}Error: Invalid number of jobs specified: $2${NC}"
        echo "Please provide a valid number."
        exit 1
      fi
      ;;
    *.bats)
      # Individual test files given explicitly on the command line
      TEST_FILES+=("$1"); shift ;;
    *)
      # Anything else is treated as a module name
      TEST_MODULES+=("$1"); shift ;;
  esac
done
|
||||
|
||||
# Bail out early with install hints when BATS is not on the PATH.
if ! command -v bats >/dev/null 2>&1; then
  echo -e "${RED}Error: BATS is not installed${NC}"
  echo "Please install BATS first:"
  echo "  sudo apt install bats # On Ubuntu/Debian"
  echo "  brew install bats-core # On macOS"
  echo "Or run: make install-bats"
  exit 1
fi
|
||||
|
||||
# Decide which test directories to scan based on --all / --dir / module
# arguments, falling back to the current directory. Prints the resulting
# paths on stdout as one space-separated line (the caller word-splits this,
# so directory paths containing whitespace are not supported).
find_test_directories() {
  local search_paths=()
  local dir module

  if [[ "$RUN_ALL" == true ]]; then
    # Every apps/*/test directory in the repository
    mapfile -t search_paths < <(find "$PROJECT_ROOT/apps" -type d -name "test" 2>/dev/null)
  elif [[ ${#TEST_DIRS[@]} -gt 0 ]]; then
    # Explicit --dir arguments, resolved relative to the repo root first
    for dir in "${TEST_DIRS[@]}"; do
      if [[ -d "$PROJECT_ROOT/$dir/test" ]]; then
        search_paths+=("$PROJECT_ROOT/$dir/test")
      elif [[ -d "$dir/test" ]]; then
        search_paths+=("$dir/test")
      elif [[ -d "$dir" ]]; then
        search_paths+=("$dir")
      else
        echo -e "${YELLOW}Warning: Test directory not found: $dir${NC}"
      fi
    done
  elif [[ ${#TEST_MODULES[@]} -gt 0 ]]; then
    # Named modules map to apps/<module>/test
    for module in "${TEST_MODULES[@]}"; do
      if [[ -d "$PROJECT_ROOT/apps/$module/test" ]]; then
        search_paths+=("$PROJECT_ROOT/apps/$module/test")
      else
        echo -e "${YELLOW}Warning: Module test directory not found: $module${NC}"
      fi
    done
  else
    # No selection: prefer ./test, special-casing the framework directory
    if [[ "$(basename "$PWD")" == "test-framework" ]]; then
      search_paths=("$PROJECT_ROOT/apps/startup-scripts/test")
    elif [[ -d "./test" ]]; then
      search_paths=("./test")
    else
      echo -e "${YELLOW}No test directory found. Use --all or specify a module.${NC}"
      exit 0
    fi
  fi

  echo "${search_paths[@]}"
}
|
||||
|
||||
# Print each module that has a test/ directory, with its .bats file count.
list_modules() {
  local test_dir module_name test_count
  echo -e "${BLUE}Available test modules:${NC}"
  find "$PROJECT_ROOT/apps" -type d -name "test" 2>/dev/null | while read -r test_dir; do
    module_name=$(basename "$(dirname "$test_dir")")
    test_count=$(find "$test_dir" -name "*.bats" | wc -l)
    echo -e "  ${GREEN}$module_name${NC} ($test_count test files)"
  done
}
|
||||
|
||||
# --list short-circuits everything else
if [[ "$LIST_MODULES" == true ]]; then
  list_modules
  exit 0
fi

# Resolve the directories to scan. NOTE(review): word-splitting the function
# output means directory paths with spaces are not supported here.
TEST_SEARCH_PATHS=($(find_test_directories))

if [[ ${#TEST_SEARCH_PATHS[@]} -eq 0 ]]; then
  echo -e "${YELLOW}No test directories found.${NC}"
  echo "Use --list to see available modules."
  exit 0
fi
|
||||
|
||||
# Collect all candidate .bats files from the resolved directories.
# Keep any files that were passed directly on the command line: the original
# code reset TEST_FILES here, silently discarding explicit *.bats arguments.
# The ${arr[@]+…} expansion is a no-op that also tolerates an unset array.
TEST_FILES=(${TEST_FILES[@]+"${TEST_FILES[@]}"})
for test_dir in "${TEST_SEARCH_PATHS[@]}"; do
  if [[ -d "$test_dir" ]]; then
    if [[ -n "$FILTER" ]]; then
      # Only files whose content matches the filter pattern
      mapfile -t filtered_files < <(find "$test_dir" -name "*.bats" -exec grep -l "$FILTER" {} \; 2>/dev/null)
      TEST_FILES+=("${filtered_files[@]}")
    else
      # All test files in the directory
      mapfile -t dir_files < <(find "$test_dir" -name "*.bats" 2>/dev/null)
      TEST_FILES+=("${dir_files[@]}")
    fi
  fi
done
||||
|
||||
# Nothing to run: report (mentioning the filter if one was active) and stop.
if [[ ${#TEST_FILES[@]} -eq 0 ]]; then
  if [[ -n "$FILTER" ]]; then
    echo -e "${YELLOW}No test files found matching filter: $FILTER${NC}"
  else
    echo -e "${YELLOW}No test files found in specified directories.${NC}"
  fi
  exit 0
fi
|
||||
|
||||
# --count: print totals without executing anything.
# Counting lives in a helper so the logic is callable and testable alone.
print_test_count() {
  local total_tests=0 file count
  for file in "${TEST_FILES[@]}"; do
    # grep -c prints 0 *and* exits non-zero when there is no match, so the
    # original `|| echo 0` produced "0\n0" and broke the arithmetic below.
    # Capture grep's own count and default to 0 only for unreadable files.
    count=$(grep -c "^@test" "$file" 2>/dev/null)
    count=${count:-0}
    total_tests=$((total_tests + count))
  done
  echo "Total tests: $total_tests"
  echo "Test files: ${#TEST_FILES[@]}"
  echo "Test directories: ${#TEST_SEARCH_PATHS[@]}"
}

if [[ "$COUNT_ONLY" == true ]]; then
  print_test_count
  exit 0
fi
|
||||
|
||||
# ---- Assemble the bats invocation ---------------------------------------
BATS_CMD="bats --jobs $ACORE_TEST_CORES"

# Output format: TAP for machines, pretty for humans (default)
if [[ "$TAP" == true ]]; then
  BATS_CMD+=" --formatter tap"
elif [[ "$PRETTY" == true ]]; then
  BATS_CMD+=" --formatter pretty"
fi

# Extra per-test tracing when requested
if [[ "$VERBOSE" == true ]]; then
  BATS_CMD+=" --verbose-run"
fi

# Forward the filter to bats as a test-name filter as well
if [[ -n "$FILTER" ]]; then
  BATS_CMD+=" --filter '$FILTER'"
fi

BATS_CMD+=" ${TEST_FILES[*]}"

# Summary banner before running
echo -e "${BLUE}Running AzerothCore Tests with ${ACORE_TEST_CORES} jobs${NC}"
echo -e "${YELLOW}Test directories: ${TEST_SEARCH_PATHS[*]}${NC}"
echo -e "${YELLOW}Test files: ${#TEST_FILES[@]}${NC}"
if [[ -n "$FILTER" ]]; then
  echo -e "${YELLOW}Filter: $FILTER${NC}"
fi
echo ""
|
||||
|
||||
# Optionally show the exact command before executing it.
if [[ "$DEBUG" == true ]]; then
  echo -e "${YELLOW}Command: $BATS_CMD${NC}"
  echo ""
fi

# eval is required because BATS_CMD carries quoted arguments (--filter '…');
# propagate the bats exit code on failure.
if eval "$BATS_CMD"; then
  echo ""
  echo -e "${GREEN}✅ All tests passed!${NC}"
  exit 0
else
  exit_code=$?
  echo ""
  echo -e "${RED}❌ Some tests failed!${NC}"

  if [[ "$DEBUG" == true ]]; then
    echo -e "${YELLOW}Tip: Check the output above for detailed error information${NC}"
    echo -e "${YELLOW}You can also run individual tests for more detailed debugging:${NC}"
    echo -e "${YELLOW}  $0 --verbose --filter <test_name>${NC}"
  fi

  exit $exit_code
fi
|
||||
20
apps/test-framework/run-core-tests.sh
Normal file
20
apps/test-framework/run-core-tests.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash

# Run the compiled AzerothCore unit-test binary, forwarding all arguments.

# shellcheck source-path=SCRIPTDIR
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Clean up gcda files to avoid false positives in coverage reports.
# Guarded: the original unconditional `find` printed an error whenever the
# build tree did not exist yet.
if [[ -d var/build/obj ]]; then
  find var/build/obj -name '*.gcda' -delete
fi

# shellcheck source=../bash_shared/includes.sh
source "$CURRENT_PATH/../bash_shared/includes.sh"

# Location of the unit-test binary inside the build tree (BUILDPATH comes
# from the sourced includes)
TEST_PATH="$BUILDPATH/src/test/unit_tests"

if [[ ! -f "$TEST_PATH" ]]; then
  echo "Unit test binary not found at $TEST_PATH"
  echo "Please ensure the project is built with unit tests enabled."
  exit 1
fi

exec "$TEST_PATH" "$@"
|
||||
45
apps/test-framework/test-main.sh
Normal file
45
apps/test-framework/test-main.sh
Normal file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# shellcheck source-path=SCRIPTDIR
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
# shellcheck source=../bash_shared/includes.sh
|
||||
source "$CURRENT_PATH/../bash_shared/includes.sh"
|
||||
# shellcheck source=../bash_shared/menu_system.sh
|
||||
source "$AC_PATH_APPS/bash_shared/menu_system.sh"
|
||||
|
||||
# Menu: single ordered source of truth (no functions in strings)
|
||||
# Format: "key|short|description"
|
||||
# Ordered menu definition, one entry per line: "key|short|description".
menu_items=(
  "bash|b|Run Bash tests"
  "core|c|Run AzerothCore tests"
  "quit|q|Exit from this menu"
)
|
||||
|
||||
|
||||
# Dispatch a single menu selection; remaining args are forwarded to the
# chosen sub-script. Unknown keys print a hint and return 1.
function handle_menu_command() {
  local key="$1"
  shift

  case "$key" in
    "bash")
      # Default to running every bash test suite when no args are supplied
      bash "$CURRENT_PATH/run-bash-tests.sh" "${@:-"--all"}"
      ;;
    "core")
      # shellcheck source=./run-core-tests.sh
      bash "$CURRENT_PATH/run-core-tests.sh" "$@"
      ;;
    "quit")
      echo "Goodbye!"
      exit
      ;;
    *)
      echo "Invalid option. Use --help to see available commands."
      return 1
      ;;
  esac
}
|
||||
|
||||
# Run the menu system
|
||||
menu_run_with_items "TEST FRAMEWORK" handle_menu_command -- "${menu_items[@]}" -- "$@"
|
||||
116
apps/valgrind/helgrind.supp
Normal file
116
apps/valgrind/helgrind.supp
Normal file
@@ -0,0 +1,116 @@
|
||||
{
|
||||
[1] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE3setERKS4_R10ACE_FutureIS4_E
|
||||
fun:_ZN10ACE_FutureIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE3setERKS4_
|
||||
fun:_ZN21PreparedStatementTask7ExecuteEv
|
||||
}
|
||||
{
|
||||
[1] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIP14SQLQueryHolderE3setERKS1_R10ACE_FutureIS1_E
|
||||
fun:_ZN10ACE_FutureIP14SQLQueryHolderE3setERKS1_
|
||||
fun:_ZN18SQLQueryHolderTask7ExecuteEv
|
||||
}
|
||||
{
|
||||
[2] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK14ACE_Future_RepIP14SQLQueryHolderE5readyEv
|
||||
fun:_ZNK10ACE_FutureIP14SQLQueryHolderE5readyEv
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[3] ACE_Future::attach()/detach() false positive in WorldSession::HandleCharEnumOpcode()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE6attachERPS5_
|
||||
}
|
||||
{
|
||||
[4] ACE_Future::get() race in WorldSession::ProcessQueryCallbacks() , a lock is used anyway in ACE_Future::get()/set(), the only case when this is a race is if the same ACE_Future is reused by another thread
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK14ACE_Future_RepIP14SQLQueryHolderE3getERS1_P14ACE_Time_Value
|
||||
fun:_ZNK10ACE_FutureIP14SQLQueryHolderE3getERS1_P14ACE_Time_Value
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[4] ACE_Future::get() race in WorldSession::ProcessQueryCallbacks() , a lock is used anyway in ACE_Future::get()/set(), the only case when this is a race is if the same ACE_Future is reused by another thread
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK10ACE_FutureIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE5readyEv
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN6Player10LoadFromDBEjP14SQLQueryHolder
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14SQLQueryHolder17GetPreparedResultEm
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN12WorldSession15LoadAccountDataEN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEj
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK17PreparedResultSet5FetchEv
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK5Field9GetUInt32Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK5Field8GetUInt8Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[6] False positive of possible race about ACE_Strong_Bound_Ptr
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN17PreparedResultSetD1Ev
|
||||
fun:_ZN20ACE_Strong_Bound_PtrI17PreparedResultSet16ACE_Thread_MutexED1Ev
|
||||
fun:_ZN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexED1Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[7] Race condition on bool in ACE, ignore
|
||||
Helgrind:Race
|
||||
fun:_ZN11WorldSocket12handle_closeEim
|
||||
fun:_ZN20ACE_Dev_Poll_Reactor16remove_handler_iEimP17ACE_Event_Handler
|
||||
}
|
||||
{
|
||||
[7] Race condition on bool in ACE, ignore
|
||||
Helgrind:Race
|
||||
fun:_ZNK11WorldSocket8IsClosedEv
|
||||
fun:_ZN12WorldSession6UpdateEjR12PacketFilter
|
||||
fun:_ZN5World14UpdateSessionsEj
|
||||
}
|
||||
3
apps/whitespace_remover/whitespace_remover.sh
Normal file
3
apps/whitespace_remover/whitespace_remover.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env bash

# Strip trailing whitespace from source files under the current directory.
# Change '*.cpp' and '*.h' to the extension you want to remove whitespaces in.
# Fixed: `find` now receives an explicit start path ('.') — omitting it is a
# GNU-only extension that fails on BSD/macOS find.
# NOTE(review): `sed -i` without a suffix and `xargs -r` are GNU-isms; on
# macOS install GNU sed/findutils or adapt before running.
find . -name '*.cpp' -print0 | xargs -r0 sed -e 's/[[:blank:]]\+$//' -i
find . -name '*.h' -print0 | xargs -r0 sed -e 's/[[:blank:]]\+$//' -i
|
||||
Reference in New Issue
Block a user