Initial commit
This commit is contained in:
57
.devcontainer/devcontainer.json
Normal file
57
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,57 @@
|
||||
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
|
||||
{
|
||||
"name": "ac-dev-server",
|
||||
|
||||
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
|
||||
// set an empty array to automatically solve
|
||||
// the docker-compose files (including the .override.yml)
|
||||
// https://github.com/microsoft/vscode-remote-release/issues/1080#issuecomment-824213014
|
||||
// it requires vscode 1.57+
|
||||
"dockerComposeFile": [],
|
||||
// The 'service' property is the name of the service for the container that VS Code should
|
||||
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
|
||||
"service": "ac-dev-server",
|
||||
|
||||
// The optional 'workspaceFolder' property is the path VS Code should open by default when
|
||||
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
|
||||
"workspaceFolder": "/azerothcore",
|
||||
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"settings": {
|
||||
"terminal.integrated.shell.linux": null
|
||||
},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"ms-vscode-remote.remote-containers",
|
||||
"notskm.clang-tidy",
|
||||
"xaver.clang-format",
|
||||
"bbenoist.doxygen",
|
||||
"ms-vscode.cpptools",
|
||||
"ms-vscode.cmake-tools",
|
||||
"mhutchie.git-graph",
|
||||
"github.vscode-pull-request-github",
|
||||
"eamodio.gitlens",
|
||||
"cschlosser.doxdocgen",
|
||||
"sanaajani.taskrunnercode",
|
||||
"mads-hartmann.bash-ide-vscode"
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Uncomment the next line if you want start specific services in your Docker Compose config.
|
||||
"runServices": ["ac-dev-server", "ac-database"],
|
||||
|
||||
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
|
||||
// "shutdownAction": "none",
|
||||
|
||||
// Uncomment the next line to run commands after the container is created - for example installing curl.
|
||||
// "postCreateCommand": "apt-get update && apt-get install -y curl",
|
||||
|
||||
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "root"
|
||||
}
|
||||
35
.devcontainer/docker-compose.yml
Normal file
35
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,35 @@
|
||||
version: '3.9'
|
||||
services:
|
||||
# Update this to the name of the service you want to work with in your docker-compose.yml file
|
||||
ac-dev-server:
|
||||
# If you want add a non-root user to your Dockerfile, you can use the "remoteUser"
|
||||
# property in devcontainer.json to cause VS Code and its sub-processes (terminals, tasks,
|
||||
# debugging) to execute as the user. Uncomment the next line if you want the entire
|
||||
# container to run as this user instead. Note that, on Linux, you may need to
|
||||
# ensure the UID and GID of the container user you create matches your local user.
|
||||
# See https://aka.ms/vscode-remote/containers/non-root for details.
|
||||
#
|
||||
# user: vscode
|
||||
|
||||
# Uncomment if you want to override the service's Dockerfile to one in the .devcontainer
|
||||
# folder. Note that the path of the Dockerfile and context is relative to the *primary*
|
||||
# docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile"
|
||||
# array). The sample below assumes your primary file is in the root of your project.
|
||||
#
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: .devcontainer/Dockerfile
|
||||
|
||||
#volumes:
|
||||
# Update this to wherever you want VS Code to mount the folder of your project
|
||||
#- .:/workspace:cached
|
||||
|
||||
# Uncomment the next line to use Docker from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker-compose for details.
|
||||
# - /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
# Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
|
||||
# cap_add:
|
||||
# - SYS_PTRACE
|
||||
# security_opt:
|
||||
# - seccomp:unconfined
|
||||
tty: true
|
||||
14
.dockerignore
Normal file
14
.dockerignore
Normal file
@@ -0,0 +1,14 @@
|
||||
/cmake-build-debug/*
|
||||
/build*/
|
||||
/var/*
|
||||
!/var/build/.gitkeep
|
||||
!/var/ccache/.gitkeep
|
||||
/env/dist/*
|
||||
!/env/dist/.gitkeep
|
||||
/env/user/*
|
||||
/.env*
|
||||
.idea
|
||||
!.gitkeep
|
||||
|
||||
# do not ignore the ccache folder (used by the ci)
|
||||
!/var/docker/ccache
|
||||
17
.editorconfig
Normal file
17
.editorconfig
Normal file
@@ -0,0 +1,17 @@
|
||||
[*]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
max_line_length = 80
|
||||
|
||||
[*.{json,ts,js,yml,sh}]
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
tab_width = 2
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
max_line_length = 80
|
||||
53
.git_commit_template.txt
Normal file
53
.git_commit_template.txt
Normal file
@@ -0,0 +1,53 @@
|
||||
### TITLE
|
||||
## Type(Scope/Subscope): Commit ultra short explanation
|
||||
## |---- Write below the examples with a maximum of 50 characters ----|
|
||||
## Example 1: fix(DB/SAI): Missing spell to NPC Hogger
|
||||
## Example 2: fix(CORE/Raid): Phase 2 of Ragnaros
|
||||
## Example 3: feat(CORE/Commands): New GM command to do something
|
||||
|
||||
|
||||
### DESCRIPTION
|
||||
## Explain why this change is being made, what does it fix etc...
|
||||
## |---- Write below the examples with a maximum of 72 characters per lines ----|
|
||||
## Example: Hogger (id: 492) was not charging player when being engaged.
|
||||
|
||||
|
||||
## Provide links to any issue, commit, pull request or other resource
|
||||
## Example 1: Closes AzerothCore issue #23
|
||||
## Example 2: Ported from other project's commit (link)
|
||||
## Example 3: References taken from wowpedia / wowhead / wowwiki / https://wowgaming.altervista.org/aowow/
|
||||
|
||||
|
||||
### CO-AUTHOR(S)
|
||||
## If there are more authors they can be mentioned like this
|
||||
## Co-authored-by: name <name@example.com>
|
||||
|
||||
|
||||
## =======================================================
|
||||
## EXTRA INFOS
|
||||
## =======================================================
|
||||
## "Type" can be:
|
||||
## feat (new feature)
|
||||
## fix (bug fix)
|
||||
## refactor (refactoring production code)
|
||||
## style (formatting, missing semi colons, etc; no code change)
|
||||
## docs (changes to documentation)
|
||||
## test (adding or refactoring tests; no production code change)
|
||||
## chore (updating bash scripts, git files etc; no production code change)
|
||||
## --------------------
|
||||
## Remember to
|
||||
## Capitalize the subject line
|
||||
## Use the imperative mood in the subject line
|
||||
## Do not end the subject line with a period
|
||||
## Separate subject from body with a blank line
|
||||
## Use the body to explain what and why rather than how
|
||||
## Can use multiple lines with "-" for bullet points in body
|
||||
## --------------------
|
||||
## More info here https://www.conventionalcommits.org/en/v1.0.0-beta.2/
|
||||
## =======================================================
|
||||
## "Scope" can be:
|
||||
## CORE (core related, c++)
|
||||
## DB (database related, sql)
|
||||
## =======================================================
|
||||
## "Subscope" is optional and depends on the nature of the commit.
|
||||
## =======================================================
|
||||
30
.gitattributes
vendored
Normal file
30
.gitattributes
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text eol=lf
|
||||
|
||||
# Whitespace rules
|
||||
# strict (no trailing, no tabs)
|
||||
*.cpp whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
|
||||
*.h whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
|
||||
|
||||
# normal (no trailing)
|
||||
*.sql whitespace=trailing-space,space-before-tab,cr-at-eol
|
||||
*.txt whitespace=trailing-space,space-before-tab,cr-at-eol
|
||||
|
||||
# special files which must ignore whitespace
|
||||
*.patch whitespace=-trailing-space eol=lf
|
||||
*.diff whitespace=-trailing-space eol=lf
|
||||
|
||||
# Standard to msysgit
|
||||
*.doc diff=astextplain
|
||||
*.DOC diff=astextplain
|
||||
*.docx diff=astextplain
|
||||
*.DOCX diff=astextplain
|
||||
*.dot diff=astextplain
|
||||
*.DOT diff=astextplain
|
||||
*.pdf diff=astextplain
|
||||
*.PDF diff=astextplain
|
||||
*.rtf diff=astextplain
|
||||
*.RTF diff=astextplain
|
||||
|
||||
# Ignore sql/* files
|
||||
data/sql/* linguist-documentation
|
||||
2
.github/CODEOWNERS
vendored
Normal file
2
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
# Protect dashboard workflow – require explicit review
|
||||
.github/workflows/dashboard-ci.yml @Yehonal
|
||||
138
.github/CODE_OF_CONDUCT.md
vendored
Normal file
138
.github/CODE_OF_CONDUCT.md
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, caste, color, religion, or sexual
|
||||
identity and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, fun, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the overall
|
||||
community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized imagery, and sexual attention or advances of
|
||||
any kind
|
||||
* The use of sexualized language which could reasonably be considered inappropriate.
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email address,
|
||||
without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, which includes but is not limited to AzerothCore
|
||||
managed sites and community spaces, and also applies when an individual is officially representing the
|
||||
community in public spaces.
|
||||
|
||||
Examples of representing our community include:
|
||||
* Using an official e-mail address
|
||||
* Posting via an official social media account
|
||||
* Acting as an appointed representative at an online or offline event
|
||||
* Communicating within the WoW Emulation communities
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at the AzerothCore
|
||||
[https://discord.gg/gkt4y2x][discord].
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private or public, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series of
|
||||
actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or permanent
|
||||
ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within the
|
||||
community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.1, available at
|
||||
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
|
||||
|
||||
Community Impact Guidelines were inspired by
|
||||
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
|
||||
[https://www.contributor-covenant.org/translations][translations].
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
|
||||
[Mozilla CoC]: https://github.com/mozilla/diversity
|
||||
[FAQ]: https://www.contributor-covenant.org/faq
|
||||
[translations]: https://www.contributor-covenant.org/translations
|
||||
[discord]: https://discord.gg/gkt4y2x
|
||||
30
.github/CONTRIBUTING.md
vendored
Normal file
30
.github/CONTRIBUTING.md
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
## CONTRIBUTING
|
||||
|
||||
AzerothCore can also serve as a learning resource for aspiring developers who want to understand how WoW servers work, how MMORPGs are structured, how game server emulators are created, or to improve their C++ and SQL knowledge.
|
||||
|
||||
If you want to contribute to the project, you will find a lot of resources that will guide you in our [wiki](https://www.azerothcore.org/wiki/contribute).
|
||||
|
||||
We also recommend you read our [Contributor Covenant Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md).
|
||||
|
||||
Feel free to join our [Discord server](https://discord.gg/gkt4y2x).
|
||||
|
||||
## AUTHORS & CONTRIBUTORS
|
||||
|
||||
This project exists thanks to the [authors](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS).
|
||||
|
||||
## IMPORTANT LINKS
|
||||
|
||||
- [Doxygen documentation](https://www.azerothcore.org/pages/doxygen/index.html)
|
||||
- [Website](http://www.azerothcore.org/)
|
||||
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue.html "Modules, tools, and other stuff for AzerothCore") (modules, tools, etc...)
|
||||
- [Our Discord server](https://discord.gg/gkt4y2x)
|
||||
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
|
||||
- [Our forum](https://github.com/azerothcore/azerothcore-wotlk/discussions/)
|
||||
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
|
||||
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
|
||||
|
||||
All contributions, big or small, are appreciated <3
|
||||
|
||||
The AzerothCore Staff appreciate all the help and contribution that *you* put your time into.
|
||||
|
||||
Thank you!
|
||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1 @@
|
||||
custom: https://www.paypal.com/donate/?hosted_button_id=L69ANPSR8BJDU
|
||||
95
.github/ISSUE_TEMPLATE/aa_game_issues.yml
vendored
Normal file
95
.github/ISSUE_TEMPLATE/aa_game_issues.yml
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
name: Game issues
|
||||
description: Create a bug report to help us improve.
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
You can read more about the standards for a bug report [here](https://www.azerothcore.org/wiki/issue-tracker-standards).
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Current Behaviour
|
||||
description: |
|
||||
Description of the problem or issue here.
|
||||
Include entries of affected creatures / items / quests / spells etc.
|
||||
Never upload files! Use GIST for text and YouTube for videos!
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected Behaviour
|
||||
description: |
|
||||
Tell us what should happen instead.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: source
|
||||
attributes:
|
||||
label: Source
|
||||
description: |
|
||||
If you have a source that proves how it is supposed to work, please add that to make it easier for devs to fix the issue.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to reproduce the problem
|
||||
description: |
|
||||
What does someone else need to do to encounter the same bug?
|
||||
placeholder: |
|
||||
1. Step 1
|
||||
2. Step 2
|
||||
3. Step 3
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: Extra Notes
|
||||
description: |
|
||||
Do you have any extra notes that can help solve the issue that does not fit any other field?
|
||||
placeholder: |
|
||||
None
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: commit
|
||||
attributes:
|
||||
label: AC rev. hash/commit
|
||||
description: |
|
||||
Paste the entire output result of the `.server debug` command. (If you need to run it from the client get a prat addon)
|
||||
placeholder: |
|
||||
Paste the entire output result of the `.server debug` command. (If you need to run it from the client get a prat addon)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
description: |
|
||||
The Operating System the Server is running on.
|
||||
i.e. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: custom
|
||||
attributes:
|
||||
label: Custom changes or Modules
|
||||
description: |
|
||||
List which custom changes or modules you have applied, i.e. Eluna module, etc.
|
||||
placeholder: |
|
||||
None
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|
||||
86
.github/ISSUE_TEMPLATE/bb_crash_issues.yml
vendored
Normal file
86
.github/ISSUE_TEMPLATE/bb_crash_issues.yml
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
name: Crash / Server Crash issues
|
||||
description: Did your server crash? Post an issue here!
|
||||
title: "Crash: "
|
||||
labels: ["Priority-Critical", "HasBacktrace"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Steps to Reproduce
|
||||
description: |
|
||||
If possible provide detailed steps to reproduce the crash.
|
||||
placeholder: |
|
||||
1. Provide the exact steps to trigger the crash.
|
||||
2. Include any relevant configurations or commands.
|
||||
3. Mention if the crash is consistent or intermittent.
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
NOTE: Make sure your server was compiled in RelWithDebug or Debug mode as crashlogs from Release do not contain enough information.
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Logs and Screenshots
|
||||
description: |
|
||||
Do you have any logs or screenshots that can be useful?
|
||||
Crash logs in text are preferred over screenshots.
|
||||
DO NOT POST THE FULL CRASH LOG IN THE ISSUE BODY. DO NOT UPLOAD TEXT FILES. USE [GITHUB GIST](https://gist.github.com/), PASTEBIN, OR ANY SIMILAR SERVICE INSTEAD.
|
||||
validations:
|
||||
required: false
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating System
|
||||
description: |
|
||||
The Operating System you are having issues on.
|
||||
i.e. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: deps
|
||||
attributes:
|
||||
label: Dependencies & versions
|
||||
description: |
|
||||
Relevant information about dependencies and their versions that can be useful to debug the issue.
|
||||
Example:
|
||||
- OpenSSL ver ...
|
||||
- Boost ver ...
|
||||
- MySQL ver ...
|
||||
- Visual Studio ver ...
|
||||
- GCC ver ...
|
||||
- Clang ver ...
|
||||
- CMake ver ...
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: commit
|
||||
attributes:
|
||||
label: Commit
|
||||
description: |
|
||||
Which commit hash are you using.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: Additional Context
|
||||
description: |
|
||||
Do you have any other relevant information about the issue?
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|
||||
75
.github/ISSUE_TEMPLATE/cc_bta_issues.yml
vendored
Normal file
75
.github/ISSUE_TEMPLATE/cc_bta_issues.yml
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
name: Build/Tools/Apps issues
|
||||
description: Got an issue with build, tools or apps? Create an issue to let us know!
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Current Behaviour
|
||||
description: |
|
||||
What actually happens and how do we reproduce it?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Logs and Screenshots
|
||||
description: |
|
||||
Do you have any logs or screenshots that can be useful?
|
||||
If you have logs in text form please upload them to [Gist](https://gist.github.com/) or PasteBin and upload the link.
|
||||
validations:
|
||||
required: false
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating System
|
||||
description: |
|
||||
The Operating System you are having issues on.
|
||||
i.e. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: deps
|
||||
attributes:
|
||||
label: Dependencies & versions
|
||||
description: |
|
||||
Relevant information about dependencies and their versions that can be useful to debug the issue.
|
||||
Example:
|
||||
- OpenSSL ver ...
|
||||
- Boost ver ...
|
||||
- MySQL ver ...
|
||||
- Visual Studio ver ...
|
||||
- GCC ver ...
|
||||
- Clang ver ...
|
||||
- CMake ver ...
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: commit
|
||||
attributes:
|
||||
label: Commit
|
||||
description: |
|
||||
Which commit hash are you using.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: Additional Context
|
||||
description: |
|
||||
Do you have any other relevant information about the issue?
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|
||||
17
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
17
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Website
|
||||
url: https://www.azerothcore.org
|
||||
about: On the AC website you can find a lot of things, such as existing modules.
|
||||
- name: Wiki
|
||||
url: https://www.azerothcore.org/wiki
|
||||
about: You can find plenty of information on our Wiki.
|
||||
- name: FAQ
|
||||
url: https://www.azerothcore.org/wiki/faq
|
||||
about: Frequently asked questions.
|
||||
- name: Common Errors
|
||||
url: https://www.azerothcore.org/wiki/common-errors
|
||||
about: You can find common errors and their solutions here.
|
||||
- name: Discord
|
||||
url: https://discord.gg/gkt4y2x
|
||||
about: Join the discussions over at our Discord Server.
|
||||
43
.github/ISSUE_TEMPLATE/dd_feature_request.yml
vendored
Normal file
43
.github/ISSUE_TEMPLATE/dd_feature_request.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
name: Feature request
|
||||
description: Suggest an idea for this project
|
||||
title: "Feature: "
|
||||
labels: "Feature"
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to fill out a feature request. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe your feature request or suggestion in detail
|
||||
description: |
|
||||
A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: solution
|
||||
attributes:
|
||||
label: Describe a possible solution to your feature or suggestion in detail
|
||||
description: |
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context or screenshots about the feature request here.
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|
||||
21
.github/ISSUE_TEMPLATE/ee_cc.yml
vendored
Normal file
21
.github/ISSUE_TEMPLATE/ee_cc.yml
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
name: CC Triage
|
||||
description: This template is only used for ChromieCraft
|
||||
labels: ["ChromieCraft Generic"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Triage
|
||||
description: |
|
||||
Paste the issue from ChromieCraft here.
|
||||
value: |
|
||||
|
||||
Issue linked from CC:
|
||||
|
||||
Triage Notes:
|
||||
|
||||
Original Post Below:
|
||||
|
||||
---
|
||||
validations:
|
||||
required: true
|
||||
93
.github/README.md
vendored
Normal file
93
.github/README.md
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
#  AzerothCore
|
||||
|
||||
[](CODE_OF_CONDUCT.md)
|
||||
[](https://www.codefactor.io/repository/github/azerothcore/azerothcore-wotlk)
|
||||
[](https://stackoverflow.com/questions/tagged/azerothcore?sort=newest "Ask / browse questions here")
|
||||
[](https://discord.gg/gkt4y2x "Our community hub on Discord")
|
||||
[](https://www.bountyhub.dev/bounties?repo=azerothcore)
|
||||
|
||||
## Build Status
|
||||
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-nopch.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-pch.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core_modules_build.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/windows_build.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/macos_build.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/docker_build.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/tools_build.yml?query=branch%3Amaster)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/dashboard-ci.yml?query=branch%3Amaster)
|
||||
|
||||
## Introduction
|
||||
|
||||
AzerothCore is an open-source game server application and framework designed for hosting massively multiplayer online role-playing games (MMORPGs). It is based on the popular MMORPG World of Warcraft (WoW) and seeks to recreate the gameplay experience of the original game from patch 3.3.5a.
|
||||
|
||||
The original code is based on MaNGOS, TrinityCore, and SunwellCore and has since then had extensive development to improve stability, in-game mechanics, and modularity to the game. AC has also grown into a community-driven project with a significant number of contributors and developers. It is written in C++ and provides a solid foundation for creating private servers that mimic the mechanics and behavior of the official WoW servers.
|
||||
|
||||
## Philosophy
|
||||
|
||||
Our main goal is to create a playable game server, offering a fully working in-game experience.
|
||||
|
||||
Here are the main points we focus on:
|
||||
|
||||
* Stability
|
||||
* We make sure all changes pass the CIs before being merged into the master branch.
|
||||
|
||||
* Blizzlike content
|
||||
* We strive to make all in-game content to be blizzlike. Therefore we have a high standard for fixes being made.
|
||||
|
||||
* Customization
|
||||
* It is easy to customize your experience using [modules](#modules).
|
||||
|
||||
* Community driven
|
||||
* AzerothCore has an active community of developers, contributors, and users who collaborate, share knowledge, and provide support through forums, Discord channels, and other communication platforms.
|
||||
|
||||
### Modules
|
||||
|
||||
AzerothCore is designed to be highly modular, allowing developers to extend and customize the game to suit their preferences or create unique gameplay experiences. This flexibility enables the addition of custom features, content, and modifications.
|
||||
|
||||
We have a lot of modules already made by the community, many of which can be found in the [Module Catalogue](https://www.azerothcore.org/catalogue.html#/).
|
||||
|
||||
## Installation
|
||||
|
||||
Detailed installation instructions are available [here](http://www.azerothcore.org/wiki/installation).
|
||||
|
||||
## Contributing
|
||||
|
||||
AzerothCore can also serve as a learning resource for aspiring developers who want to understand how WoW servers work, how MMORPGs are structured, how game server emulators are created, or to improve their C++ and SQL knowledge.
|
||||
|
||||
If you want to contribute to the project, you will find a lot of resources that will guide you in our [wiki](https://www.azerothcore.org/wiki/contribute).
|
||||
|
||||
We also recommend you read our [Contributor Covenant Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md).
|
||||
|
||||
Feel free to join our [Discord server](https://discord.gg/gkt4y2x).
|
||||
|
||||
Click on the "⭐ Star" button to help us gain more visibility on Github!
|
||||
|
||||
## Authors & Contributors
|
||||
|
||||
The project was born in 2016 based on SunwellCore. Unfortunately, SunwellCore was published without any git history, so on git there are no credits for all the contributors before 2016.
|
||||
|
||||
You can check the [authors](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS) file for more details.
|
||||
|
||||
## Important Links
|
||||
|
||||
- [Doxygen documentation](https://www.azerothcore.org/pages/doxygen/index.html)
|
||||
- [Website](http://www.azerothcore.org/)
|
||||
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue.html "Modules, tools, and other stuff for AzerothCore") (modules, tools, etc...)
|
||||
- [Our Discord server](https://discord.gg/gkt4y2x)
|
||||
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
|
||||
- [Our forum](https://github.com/azerothcore/azerothcore-wotlk/discussions/)
|
||||
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
|
||||
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
|
||||
|
||||
## License
|
||||
|
||||
- The AzerothCore source code is released under the [GNU GPL v2](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
|
||||
|
||||
It's important to note that AzerothCore is not an official Blizzard Entertainment product, and it is not affiliated with or endorsed by World of Warcraft or Blizzard Entertainment. AzerothCore does not in any case sponsor nor support illegal public servers. If you use this project to run an illegal public server and not for testing and learning it is your own personal choice.
|
||||
|
||||
## Special thanks
|
||||
|
||||
[JetBrains](https://www.jetbrains.com/?from=AzerothCore) is providing free [open-source licenses](https://www.jetbrains.com/community/opensource/) to the AzerothCore developers.
|
||||
|
||||
[](https://jb.gg/OpenSourceSupport)
|
||||
97
.github/SECURITY.md
vendored
Normal file
97
.github/SECURITY.md
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We support the following versions of dependencies.
|
||||
|
||||
| Icon | Meaning |
|
||||
| :------------------- | :---------------: |
|
||||
| :white_check_mark: | **Supported** |
|
||||
| :red_circle: | **NOT** Supported |
|
||||
| :large_blue_diamond: | **Recommended** |
|
||||
|
||||
### Versions of AzerothCore:
|
||||
|
||||
| AzerothCore Branch | Status | Recommended |
|
||||
| ---------------------------- | :----------------: | :------------------: |
|
||||
| **master** | :white_check_mark: | :large_blue_diamond: |
|
||||
| Any non-official fork | :red_circle: | |
|
||||
| Any Playerbots fork | :red_circle: | |
|
||||
| Any NPCBots fork | :red_circle: | |
|
||||
| Any AC (non-official) repack | :red_circle: | |
|
||||
|
||||
### Supported Operating Systems
|
||||
|
||||
| Linux (Ubuntu) | Status | Recommended |
|
||||
| :------------- | :----------------: | :------------------: |
|
||||
| 24.04 | :white_check_mark: | :large_blue_diamond: |
|
||||
| 22.04 | :white_check_mark: | |
|
||||
| 20.04 ≤ | :red_circle: | |
|
||||
|
||||
| macOS | Status | Recommended |
|
||||
| :---- | :----------------: | :------------------: |
|
||||
| 14 | :white_check_mark: | :large_blue_diamond: |
|
||||
| 12 ≤ | :red_circle: | |
|
||||
|
||||
| Windows | Status | Recommended |
|
||||
| :------------ | :----------------: | :------------------: |
|
||||
| Windows 11 | :white_check_mark: | :large_blue_diamond: |
|
||||
| Windows 10 | :white_check_mark: |
|
||||
| Windows 8.1 ≤ | :red_circle: |
|
||||
|
||||
<br>
|
||||
|
||||
### Supported Boost Versions:
|
||||
|
||||
| Boost | Status | Recommended |
|
||||
| :----- | :----------------: | :------------------: |
|
||||
| 1.70 ≥ | :white_check_mark: | :large_blue_diamond: |
|
||||
|
||||
### Supported OpenSSL Versions:
|
||||
|
||||
| OpenSSL | Status | Recommended |
|
||||
| :------ | :----------------: | :------------------: |
|
||||
| 3.X.X ≥ | :white_check_mark: | :large_blue_diamond: |
|
||||
|
||||
### Supported CMake Versions:
|
||||
|
||||
| CMake | Status | Recommended |
|
||||
| :----- | :----------------: | :------------------: |
|
||||
| 3.16 ≥ | :white_check_mark: | :large_blue_diamond: |
|
||||
|
||||
### Supported MySQL Versions:
|
||||
|
||||
| MySQL | Status | Recommended |
|
||||
| :---- | :----------------: | :------------------: |
|
||||
| 8.4 ≥ | :white_check_mark: | :large_blue_diamond: |
|
||||
| 8.0 | :white_check_mark: | |
|
||||
| 8.1 | :red_circle: | |
|
||||
| 8.0 < | :red_circle: | |
|
||||
|
||||
### Supported CLang Versions:
|
||||
|
||||
| CLang | Status | Recommended |
|
||||
| :---- | :----------------: | :------------------: |
|
||||
| 18 | :white_check_mark: | :large_blue_diamond: |
|
||||
| 15 | :white_check_mark: | |
|
||||
| 14 ≤ | :red_circle: | |
|
||||
|
||||
### Supported GCC Versions:
|
||||
|
||||
| GCC | Status | Recommended |
|
||||
| :--- | :----------------: | :------------------: |
|
||||
| 14 | :white_check_mark: | :large_blue_diamond: |
|
||||
| 12 | :white_check_mark: | |
|
||||
| 11 ≤ | :red_circle: | |
|
||||
|
||||
> [!NOTE]
|
||||
> We do **NOT** support any repacks that may or may not have been made based on AzerothCore. This is because they are usually based on older versions and there is no way to know what is in the precompiled binaries. Instead, you should compile your binaries from the AzerothCore source. To get started, read the [Installation Guide](https://www.azerothcore.org/wiki/installation).
|
||||
|
||||
> [!CAUTION]
|
||||
> [Why you should not use repacks to run your WoW server](https://www.mangosrumors.org/why-you-should-not-use-repacks-to-run-your-wow-server/)
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We class a vulnerability to be any hack or exploit that has an impact on the server performance or that gives unfair advantages in the game (e.g. fly hacking or injection tools).
|
||||
|
||||
If a new vulnerability is found you should always create a new [bug report](https://github.com/azerothcore/azerothcore-wotlk/issues/new?assignees=&labels=&projects=&template=bug_report.yml).
|
||||
97
.github/SUPPORT.md
vendored
Normal file
97
.github/SUPPORT.md
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
# How to ask for help
|
||||
|
||||
### Need help?
|
||||
|
||||
Do you need support with AzerothCore? No worries, we're happy to help you!
|
||||
|
||||
Whether you have troubles installing AzerothCore, or you want to ask how-to or generic help questions, **we will help you**.
|
||||
|
||||
We just want you to ask for support in the **proper way**. Please read this document before asking for any help.
|
||||
|
||||
### Why is the "proper way" so important?
|
||||
|
||||
If you ask a question directly in the chat, it can get easily lost and you might never be helped unless there is someone online in that specific moment that can help you.
|
||||
|
||||
If you put your question on StackOverflow it will **stay** there and someone can help you at any moment. You can also link your question in the chat to give it more visibility. Doesn't make sense for you? Just keep reading.
|
||||
|
||||
Also, most of the questions asked by users are repetitive. So we need an efficient way to handle all the support requests. We use StackOverflow for support questions and GitHub for bug reports.
|
||||
|
||||
Sounds complicated? Not at all! **Just keep reading**.
|
||||
|
||||
### Bug reports
|
||||
|
||||
A game feature (e.g. spell/quest/talent/etc..) doesn't work as it is supposed to?
|
||||
|
||||
Congratulations! You've just found a bug, please search among the [existing issues](https://github.com/azerothcore/azerothcore-wotlk/issues). There is a good chance that someone else has already reported the same bug that you found, in such case we kindly ask you to "confirm" it by leaving a comment.
|
||||
|
||||
Example:
|
||||

|
||||
|
||||
If after searching for an existing issue report, you didn't find any, then you should [open a new issue](https://github.com/azerothcore/azerothcore-wotlk/issues/new).
|
||||
|
||||
### Other support requests
|
||||
|
||||
- Getting an error while installing AzerothCore?
|
||||
- Is there something in the documentation that is not clear for you?
|
||||
- Do you want to know how to do something specific with AzerothCore?
|
||||
- Are you trying to do something like implementing a new feature, fixing a bug, etc...?
|
||||
- Do you have questions about C++ or SQL code that is related to AzerothCore or other AC custom modules?
|
||||
|
||||
These kinds of questions are considered support questions and are handled via [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore). **Read below**
|
||||
|
||||
### Do I need to register in StackOverflow?
|
||||
|
||||
**Not necessarily!** If you have a Google or a Facebook account, you can already log in [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore) (click the **_Log in_** button on the top-right corner of the website).
|
||||
|
||||

|
||||
|
||||
|
||||
### Search among the existing questions
|
||||
|
||||
There is a good chance that your question has already been asked by someone else, so please **search** it on [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore).
|
||||
|
||||
For example, are you getting an error from MySQL? Search it under the **[azerothcore]** tag!
|
||||
|
||||

|
||||
|
||||
Oh, there is already a question! And it has an answer! Let's open it!
|
||||
|
||||

|
||||
|
||||
**Another example**, imagine you're wondering whether you can use LUA scripts on AzerothCore:
|
||||
|
||||

|
||||
|
||||
There is already a question and answer for that! Let's upvote the guys who asked and answered:
|
||||
|
||||

|
||||
|
||||
|
||||
## How to ask new questions
|
||||
|
||||
What if you can't find a question related to my specific problem? **Let's [open a new question](https://stackoverflow.com/questions/ask)**.
|
||||
|
||||
Remember to:
|
||||
|
||||
- Add a descriptive message.
|
||||
- **Bad** example `I got DB error plz help me`.
|
||||
- **Good** example: `After importing the sql updates, I get the error "XXX"`
|
||||
|
||||
- Don't forget the **[azerothcore]** tag!
|
||||
- 
|
||||
|
||||
- It's useful to add **4 more tags** according to the category of your question (e.g. `c++`, `docker`, `MySQL`, `sql`, etc... ).
|
||||
|
||||
- Link your question in the #support-general channel of our [Discord chat](https://discordapp.com/channels/217589275766685707/284406375495368704)
|
||||
|
||||
- Read: [stackoverflow.com/help/how-to-ask](https://stackoverflow.com/help/how-to-ask)
|
||||
|
||||
|
||||
### Share your knowledge!
|
||||
|
||||
Do you have anything to share with the community? Do you feel like some information could be useful to someone else (or to yourself in the future)? Have you solved a problem that took a while to figure out?
|
||||
|
||||
Ask a question on StackOverflow and then **answer it yourself**! Is it allowed? Yes, it is! Read this:
|
||||
|
||||
[https://stackoverflow.com/help/self-answer](https://stackoverflow.com/help/self-answer)
|
||||
|
||||
43
.github/actions/docker-tag-and-build/action.yml
vendored
Normal file
43
.github/actions/docker-tag-and-build/action.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
name: docker tag and build
|
||||
description: a helper action to shorten generating docker tags and building
|
||||
inputs:
|
||||
component-name:
|
||||
description: name of the component/docker image (eg worldserver, authserver)
|
||||
type: string
|
||||
required: true
|
||||
push:
|
||||
description: whether to push the image or not
|
||||
type: boolean
|
||||
required: true
|
||||
version:
|
||||
description: version tag to use for docker image
|
||||
required: true
|
||||
type: string
|
||||
dockerfile:
|
||||
description: dockerfile to use
|
||||
required: false
|
||||
default: apps/docker/Dockerfile
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Get Docker Metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: acore/ac-wotlk-${{ inputs.component-name }}
|
||||
tags: |
|
||||
type=raw,value=${{ inputs.version }}
|
||||
type=ref,event=branch
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: ${{ github.workspace }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
push: ${{ inputs.push }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
target: ${{ inputs.component-name }}
|
||||
build-args: |
|
||||
USER_ID=1000
|
||||
GROUP_ID=1000
|
||||
DOCKER_USER=acore
|
||||
224
.github/actions/linux-build/action.yml
vendored
Normal file
224
.github/actions/linux-build/action.yml
vendored
Normal file
@@ -0,0 +1,224 @@
|
||||
name: linux build
|
||||
description: a helper action to shorten running a build on linux
|
||||
inputs:
|
||||
CC:
|
||||
default: clang
|
||||
description: C Compiler to use
|
||||
type: string
|
||||
required: true
|
||||
CXX:
|
||||
default: clang++
|
||||
description: C++ compiler to use
|
||||
type: string
|
||||
required: true
|
||||
modules:
|
||||
default: false
|
||||
description: Flag to install modules or not
|
||||
required: true
|
||||
type: boolean
|
||||
tools:
|
||||
default: none
|
||||
description: Flag to enable tools build
|
||||
required: false
|
||||
type: string
|
||||
pch:
|
||||
default: false
|
||||
description: Flag to enable or disable PCH
|
||||
required: false
|
||||
type: boolean
|
||||
maxerrors:
|
||||
default: 1
|
||||
description: Max allowed error count before compilation stops
|
||||
required: false
|
||||
type: number
|
||||
keepgoing:
|
||||
default: false
|
||||
description: Flag to continue build after errors
|
||||
required: false
|
||||
type: boolean
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: echo cache key
|
||||
shell: bash
|
||||
run: echo "Cache key -> ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}:pch=${{ inputs.pch }}:${{ github.ref_name }}"
|
||||
|
||||
- name: Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/var/ccache
|
||||
key: ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}:pch=${{ inputs.pch }}:${{ github.ref_name }}
|
||||
restore-keys: |
|
||||
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}:pch=${{ inputs.pch }}
|
||||
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}
|
||||
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}
|
||||
|
||||
# This script moves sql files from "data/sql/updates/pending_$DB" to the
|
||||
# proper folder for the db
|
||||
- name: Process pending sql
|
||||
shell: bash
|
||||
run: bash apps/ci/ci-pending-sql.sh
|
||||
|
||||
- name: Install build dependencies
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt remove needrestart #refer: https://github.com/actions/runner-images/issues/9937
|
||||
sudo apt-get -y install ccache clang cmake curl google-perftools \
|
||||
libmysqlclient-dev make unzip build-essential cmake-data \
|
||||
libboost-all-dev libbz2-dev libncurses5-dev libmysql++-dev \
|
||||
libreadline6-dev libssl-dev libtool openssl zlib1g-dev
|
||||
|
||||
# Account for https://github.com/actions/runner-images/issues/8659
|
||||
# based off of https://github.com/actions/runner-images/issues/8659#issuecomment-1852353116
|
||||
UBUNTU_VERSION="$(grep VERSION_ID /etc/os-release | cut -f2 -d\")"
|
||||
source /etc/os-release
|
||||
if [[ "$VERSION_CODENAME" == "jammy" ]]; then
|
||||
if [[ "${{ inputs.CC }}" =~ "clang-" ]]; then
|
||||
CLANG_VERSION="$(echo '${{ inputs.CC }}' | cut -f2 -d\-)"
|
||||
wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
|
||||
sudo add-apt-repository "deb http://apt.llvm.org/$VERSION_CODENAME/ llvm-toolchain-$VERSION_CODENAME-$CLANG_VERSION main"
|
||||
sudo apt-get -qq update
|
||||
sudo apt-get -qq install '${{ inputs.CC }}'
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: setup ccache
|
||||
shell: bash
|
||||
env:
|
||||
CCACHE_DIR: $GITHUB_WORKSPACE/var/ccache
|
||||
run: |
|
||||
mkdir -p "$CCACHE_DIR"
|
||||
cat <<EOF >> "$GITHUB_ENV"
|
||||
CCACHE_BASEDIR=${GITHUB_WORKSPACE}
|
||||
CCACHE_DIR=${{ env.CCACHE_DIR }}
|
||||
CCACHE_HASHDIR=1
|
||||
CCACHE_MAXSIZE=5G
|
||||
CCACHE_SLOPPINESS=pch_defines,time_macros,include_file_mtime
|
||||
CCACHE_COMPRESS=1
|
||||
CCACHE_COMPRESSLEVEL=9
|
||||
CCACHE_COMPILERCHECK=content
|
||||
CCACHE_LOGFILE=${{ env.CCACHE_DIR }}/cache.debug
|
||||
CC=${{ inputs.CC }}
|
||||
CXX=${{ inputs.CXX }}
|
||||
EOF
|
||||
|
||||
- name: ccache config snapshot
|
||||
shell: bash
|
||||
run: |
|
||||
echo "==== Effective ccache configuration ===="
|
||||
ccache -p | egrep 'base_dir|hash_dir|compiler_check|sloppiness|max_size' || true
|
||||
|
||||
echo
|
||||
echo "==== Compiler info ===="
|
||||
which ${{ inputs.CC }} && ${{ inputs.CC }} --version || true
|
||||
which ${{ inputs.CXX }} && ${{ inputs.CXX }} --version || true
|
||||
|
||||
echo
|
||||
echo "==== Previous cache stats ===="
|
||||
ccache -s || true
|
||||
|
||||
echo
|
||||
echo "==== Top cache results ===="
|
||||
grep -o 'result: .*' "$CCACHE_DIR/cache.debug" 2>/dev/null | sort | uniq -c | sort -nr | head || true
|
||||
|
||||
- name: reset ccache stats
|
||||
shell: bash
|
||||
run: ccache -z || true
|
||||
|
||||
- name: Configure
|
||||
shell: bash
|
||||
run: |
|
||||
set -x
|
||||
mkdir build
|
||||
cd build
|
||||
cmake "$GITHUB_WORKSPACE" \
|
||||
-DCMAKE_C_COMPILER="${{ inputs.CC }}" \
|
||||
-DCMAKE_CXX_COMPILER="${{ inputs.CXX }}" \
|
||||
-DCMAKE_INSTALL_PREFIX="$GITHUB_WORKSPACE/env/dist" \
|
||||
-DAPPS_BUILD="all" \
|
||||
-DTOOLS_BUILD=${{ inputs.tools }} \
|
||||
-DSCRIPTS="static" \
|
||||
-DMODULES="static" \
|
||||
-DWITH_WARNINGS="ON" \
|
||||
-DCMAKE_BUILD_TYPE="Release" \
|
||||
-DCMAKE_CXX_COMPILER_LAUNCHER="ccache" \
|
||||
-DCMAKE_C_COMPILER_LAUNCHER="ccache" \
|
||||
-DCMAKE_C_FLAGS="-Werror ${{ startsWith(inputs.CC, 'clang') && '-ferror-limit=' || '-fmax-errors=' }}${{inputs.maxerrors}} -fdebug-prefix-map=${GITHUB_WORKSPACE}=." \
|
||||
-DCMAKE_CXX_FLAGS="-Werror ${{ startsWith(inputs.CXX, 'clang') && '-ferror-limit=' || '-fmax-errors=' }}${{inputs.maxerrors}} -fdebug-prefix-map=${GITHUB_WORKSPACE}=." \
|
||||
-DBUILD_TESTING="ON" \
|
||||
-DUSE_SCRIPTPCH=${{ inputs.pch == 'true' && 'ON' || '' }} \
|
||||
-DUSE_COREPCH=${{ inputs.pch == 'true' && 'ON' || '' }} \
|
||||
${{ inputs.pch == 'true' && '' || '-DNOPCH=true' }}
|
||||
|
||||
- name: build
|
||||
shell: bash
|
||||
working-directory: "${{ github.workspace }}/build"
|
||||
run: |
|
||||
# '--' passes '--keep-going' to the underlying build system (make)
|
||||
cmake --build . --config "Release" -j "$(($(nproc) + 2))" ${{ inputs.keepgoing == 'true' && '-- --keep-going' || '' }}
|
||||
|
||||
- name: install
|
||||
shell: bash
|
||||
working-directory: "${{ github.workspace }}/build"
|
||||
run: cmake --install . --config "Release"
|
||||
|
||||
- name: Setup config
|
||||
shell: bash
|
||||
run: |
|
||||
ls -1 env/dist/etc/*.conf.dist | while read -r dist; do
|
||||
# chop the ".dist" off the end
|
||||
config_name="$(<<< $dist rev | cut -f1 -d\. --complement | rev)"
|
||||
cp -v "$dist" "$config_name"
|
||||
done
|
||||
|
||||
cat <<EOF >> $GITHUB_ENV
|
||||
AC_LOGIN_DATABASE_INFO=localhost;3306;root;root;acore_auth
|
||||
AC_CHARACTER_DATABASE_INFO=localhost;3306;root;root;acore_characters
|
||||
AC_WORLD_DATABASE_INFO=localhost;3306;root;root;acore_world
|
||||
AC_DATA_DIR=env/dist/data
|
||||
AC_LOGS_DIR=env/dist/logs
|
||||
EOF
|
||||
|
||||
- name: get dbc files
|
||||
shell: bash
|
||||
run: |
|
||||
git clone --depth 1 --branch master --single-branch https://github.com/ac-data/ac-data.git "$AC_DATA_DIR"
|
||||
|
||||
- name: Start MySQL container
|
||||
shell: bash
|
||||
run: sudo systemctl start mysql.service
|
||||
|
||||
- name: Dry run authserver
|
||||
shell: bash
|
||||
run: timeout 5m env/dist/bin/authserver --dry-run
|
||||
|
||||
- name: Dry run worldserver
|
||||
shell: bash
|
||||
run: timeout 5m env/dist/bin/worldserver --dry-run
|
||||
|
||||
- name: Check startup errors
|
||||
shell: bash
|
||||
run: |
|
||||
error_log="$AC_LOGS_DIR/Errors.log"
|
||||
# -s checks if the file's size is greater than 0 bytes
|
||||
# ! -s checks if the file's size is less than/equal to 0 bytes
|
||||
# if the error log is empty, exit without error
|
||||
[[ ! -s "$error_log" ]] && exit 0
|
||||
printf "The Errors.log file contains startup errors:\n\n"
|
||||
cat "$error_log"
|
||||
printf "\nPlease solve the startup errors listed above!\n"
|
||||
exit 1
|
||||
|
||||
- name: Run unit tests
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ -f build/obj/src/test/unit_tests ]]; then
|
||||
build/obj/src/test/unit_tests
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
|
||||
- name: ccache stats
|
||||
shell: bash
|
||||
run: ccache -s || true
|
||||
59
.github/labeler.yml
vendored
Normal file
59
.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
file-cpp:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'src/**/*.cpp'
|
||||
- 'src/**/*.h'
|
||||
- 'deps/**/*.cpp'
|
||||
- 'deps/**/*.h'
|
||||
|
||||
DB:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: 'data/**/*.sql'
|
||||
|
||||
CORE:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'src/*'
|
||||
- 'src/common/**/*'
|
||||
- 'src/genrev/**/*'
|
||||
- 'src/server/*'
|
||||
- 'src/server/apps/**/*'
|
||||
- 'src/server/database/**/*'
|
||||
- 'src/server/game/**/*'
|
||||
- 'src/server/shared/**/*'
|
||||
- 'src/tools/**/*'
|
||||
|
||||
Script:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- 'src/server/scripts/**/*.cpp'
|
||||
- 'src/server/scripts/**/*.h'
|
||||
|
||||
UnitTests:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: 'src/test/**/*'
|
||||
|
||||
Documentation:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: '**/*.md'
|
||||
|
||||
Bash:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- '*.sh'
|
||||
- 'apps/**/*.sh'
|
||||
- 'conf/**/*.sh'
|
||||
- 'deps/**/*.sh'
|
||||
- 'modules/**/*.sh'
|
||||
|
||||
CMake:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: '**/*.cmake'
|
||||
|
||||
Workflow:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: '.github/workflows/*'
|
||||
|
||||
Batch:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: 'apps/**/*.bat'
|
||||
106
.github/workflows/add-to-project.yml
vendored
Normal file
106
.github/workflows/add-to-project.yml
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
name: Auto Assign to Project(s)
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened, labeled]
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
jobs:
|
||||
assign_one_project:
|
||||
runs-on: ubuntu-latest
|
||||
name: Assign to One Project
|
||||
steps:
|
||||
|
||||
- name: Assign issues with `ChromieCraft Generic` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, 'ChromieCraft Generic')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/20'
|
||||
|
||||
- name: Assign issues with `1-19` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '1-19')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/14'
|
||||
|
||||
- name: Assign issues with `20-29` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '20-29')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/17'
|
||||
|
||||
- name: Assign issues with `30-39` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '30-39')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/23'
|
||||
|
||||
- name: Assign issues with `40-49` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '40-49')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/24'
|
||||
|
||||
- name: Assign issues with `50-59` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '50-59')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/25'
|
||||
|
||||
- name: Assign issues with `60` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '60')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/22'
|
||||
|
||||
- name: Assign issues with `61-64` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '61-64')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/32'
|
||||
|
||||
- name: Assign issues with `65-69` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '65-69')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/36'
|
||||
|
||||
- name: Assign issues with `70` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '70')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/26'
|
||||
|
||||
- name: Assign issues with `71-74` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '71-74')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/33'
|
||||
|
||||
- name: Assign issues with `75-79` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '75-79')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/37'
|
||||
|
||||
- name: Assign issues with `80` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '80')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/38'
|
||||
|
||||
35
.github/workflows/codestyle.yml
vendored
Normal file
35
.github/workflows/codestyle.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
name: C++ Codestyle
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
paths:
|
||||
- src/**
|
||||
- "!README.md"
|
||||
- "!docs/**"
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
runs-on: ubuntu-latest
|
||||
name: C++
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: AzerothCore codestyle
|
||||
run: python ./apps/codestyle/codestyle-cpp.py
|
||||
- name: C++ Advanced
|
||||
run: |
|
||||
sudo apt update -y
|
||||
sudo apt install -y cppcheck
|
||||
cppcheck --force --inline-suppr --suppressions-list=./.suppress.cppcheck src/ --output-file=report.txt
|
||||
|
||||
if [ -s report.txt ]; then # if file is not empty
|
||||
cat report.txt
|
||||
exit 1 # let github action fails
|
||||
fi
|
||||
54
.github/workflows/core-build-nopch.yml
vendored
Normal file
54
.github/workflows/core-build-nopch.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
name: nopch-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-22.04
|
||||
compiler:
|
||||
CC: clang-15
|
||||
CXX: clang++-15
|
||||
- os: ubuntu-24.04
|
||||
compiler:
|
||||
CC: clang-18
|
||||
CXX: clang++-18
|
||||
- os: ubuntu-24.04
|
||||
compiler:
|
||||
CC: gcc-14
|
||||
CXX: g++-14
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-nopch
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./.github/actions/linux-build
|
||||
with:
|
||||
CC: ${{ matrix.compiler.CC }}
|
||||
CXX: ${{ matrix.compiler.CXX }}
|
||||
pch: false
|
||||
50
.github/workflows/core-build-pch.yml
vendored
Normal file
50
.github/workflows/core-build-pch.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: pch-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-22.04
|
||||
compiler:
|
||||
CC: clang-15
|
||||
CXX: clang++-15
|
||||
- os: ubuntu-24.04
|
||||
compiler:
|
||||
CC: clang-18
|
||||
CXX: clang++-18
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-pch
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./.github/actions/linux-build
|
||||
with:
|
||||
CC: ${{ matrix.compiler.CC }}
|
||||
CXX: ${{ matrix.compiler.CXX }}
|
||||
pch: true
|
||||
64
.github/workflows/core_modules_build.yml
vendored
Normal file
64
.github/workflows/core_modules_build.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
name: nopch-module-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
paths:
|
||||
- 'src/*'
|
||||
- 'src/common/**/*'
|
||||
- 'src/genrev/**/*'
|
||||
- 'src/server/*'
|
||||
- 'src/server/apps/**/*'
|
||||
- 'src/server/database/**/*'
|
||||
- 'src/server/game/**/*'
|
||||
- 'src/server/shared/**/*'
|
||||
- 'src/tools/**/*'
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-modules:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-24.04
|
||||
compiler:
|
||||
CC: clang-18
|
||||
CXX: clang++-18
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-nopch-modules
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# This script installs a general list of modules to compile with
|
||||
# azerothcore. This is useful for ensuring that module compilation
|
||||
# functionality works.
|
||||
- name: Checkout modules
|
||||
run: bash -x ./apps/ci/ci-install-modules.sh
|
||||
- uses: ./.github/actions/linux-build
|
||||
with:
|
||||
CC: ${{ matrix.compiler.CC }}
|
||||
CXX: ${{ matrix.compiler.CXX }}
|
||||
modules: true
|
||||
pch: false
|
||||
maxerrors: 0
|
||||
keepgoing: true
|
||||
286
.github/workflows/dashboard-ci.yml
vendored
Normal file
286
.github/workflows/dashboard-ci.yml
vendored
Normal file
@@ -0,0 +1,286 @@
|
||||
name: Dashboard CI
|
||||
description: |
|
||||
This workflow runs tests and builds for the AzerothCore dashboard.
|
||||
It includes testing of bash scripts and integration testing of the AzerothCore server.
|
||||
Do not remove this if something is broken here and you don't know how to fix it, ping Yehonal instead.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
MYSQL_ROOT_PASSWORD: root
|
||||
|
||||
jobs:
|
||||
test-bash-scripts:
|
||||
name: Test Bash Scripts
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-22.04
|
||||
- os: ubuntu-24.04
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Install requirements
|
||||
run: |
|
||||
sudo apt-get update
|
||||
# Install bats-core >= 1.5.0 to support bats_require_minimum_version
|
||||
sudo apt-get install -y git curl
|
||||
git clone --depth 1 https://github.com/bats-core/bats-core.git /tmp/bats-core
|
||||
sudo /tmp/bats-core/install.sh /usr/local
|
||||
bats --version
|
||||
./acore.sh install-deps
|
||||
|
||||
- name: Run bash script tests for ${{ matrix.test-module }}
|
||||
env:
|
||||
TERM: xterm-256color
|
||||
run: |
|
||||
./acore.sh test bash --tap --all
|
||||
|
||||
build-and-test:
|
||||
name: Build and Integration Test
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-22.04
|
||||
- os: ubuntu-24.04
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Install ccache
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y ccache
|
||||
ccache --version
|
||||
|
||||
# Detect the compilers that acore.sh / CMake will end up using.
|
||||
# We record both the binary name and a short version tag for the cache key.
|
||||
- name: Detect compiler
|
||||
id: detect
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
CC_BIN="${CC:-}"
|
||||
CXX_BIN="${CXX:-}"
|
||||
[[ -z "$CC_BIN" ]] && CC_BIN="$(command -v clang || command -v gcc)"
|
||||
[[ -z "$CXX_BIN" ]] && CXX_BIN="$(command -v clang++ || command -v g++)"
|
||||
|
||||
make_ver_id() {
|
||||
local bin="$1"; local base="$(basename "$bin")"
|
||||
case "$base" in
|
||||
clang)
|
||||
maj="$("$bin" -dumpversion 2>/dev/null | cut -d. -f1)"; [[ -z "$maj" ]] && maj="$( "$bin" --version | sed -n 's/.*version \([0-9][0-9]*\).*/\1/p' | head -1 )"
|
||||
echo "clang-${maj:-unknown}"
|
||||
;;
|
||||
clang++)
|
||||
maj="$("$bin" -dumpversion 2>/dev/null | cut -d. -f1)"; [[ -z "$maj" ]] && maj="$( "$bin" --version | sed -n 's/.*version \([0-9][0-9]*\).*/\1/p' | head -1 )"
|
||||
echo "clang++-${maj:-unknown}"
|
||||
;;
|
||||
gcc)
|
||||
maj="$("$bin" -dumpfullversion -dumpversion 2>/dev/null || "$bin" -dumpversion 2>/dev/null)"; maj="${maj%%.*}"
|
||||
echo "gcc-${maj:-unknown}"
|
||||
;;
|
||||
g++)
|
||||
maj="$("$bin" -dumpfullversion -dumpversion 2>/dev/null || "$bin" -dumpversion 2>/dev/null)"; maj="${maj%%.*}"
|
||||
echo "g++-${maj:-unknown}"
|
||||
;;
|
||||
*)
|
||||
echo "$base"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
echo "cc_id=$(make_ver_id "$CC_BIN")" >> "$GITHUB_OUTPUT"
|
||||
echo "cxx_id=$(make_ver_id "$CXX_BIN")" >> "$GITHUB_OUTPUT"
|
||||
echo "Detected: $CC_BIN, $CXX_BIN"
|
||||
|
||||
- name: Prepare ccache dir
|
||||
shell: bash
|
||||
run: mkdir -p "${{ github.workspace }}/var/ccache"
|
||||
|
||||
- name: Echo cache key
|
||||
shell: bash
|
||||
run: echo "Cache key -> ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:${{ github.ref_name }}"
|
||||
|
||||
- name: Restore ccache
|
||||
id: restore_ccache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/var/ccache
|
||||
key: ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:${{ github.ref_name }}
|
||||
restore-keys: |
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:true:pch=false:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:false:pch=false:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:true:pch=true:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:false:pch=true:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:true:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:false:
|
||||
ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:
|
||||
|
||||
- name: Setup ccache env
|
||||
shell: bash
|
||||
env:
|
||||
CCACHE_DIR: ${{ github.workspace }}/var/ccache
|
||||
run: |
|
||||
mkdir -p "$CCACHE_DIR"
|
||||
cat <<EOF >> "$GITHUB_ENV"
|
||||
CCACHE_BASEDIR=${{ github.workspace }}
|
||||
CCACHE_DIR=${{ github.workspace }}/var/ccache
|
||||
CCACHE_HASHDIR=1
|
||||
CCACHE_MAXSIZE=5G
|
||||
CCACHE_SLOPPINESS=pch_defines,time_macros,include_file_mtime
|
||||
CCACHE_COMPRESS=1
|
||||
CCACHE_COMPRESSLEVEL=9
|
||||
CCACHE_COMPILERCHECK=content
|
||||
CCACHE_LOGFILE=${{ github.workspace }}/var/ccache/cache.debug
|
||||
CMAKE_C_COMPILER_LAUNCHER=ccache
|
||||
CMAKE_CXX_COMPILER_LAUNCHER=ccache
|
||||
EOF
|
||||
|
||||
- name: ccache snapshot (before)
|
||||
shell: bash
|
||||
run: |
|
||||
echo "==== Effective ccache configuration ===="
|
||||
ccache -p | egrep 'base_dir|hash_dir|compiler_check|sloppiness|max_size' || true
|
||||
echo
|
||||
echo "==== Previous cache stats ===="
|
||||
ccache -s || true
|
||||
echo
|
||||
echo "==== Top cache results (from prior runs) ===="
|
||||
grep -o 'result: .*' "${{ github.workspace }}/var/ccache/cache.debug" 2>/dev/null | sort | uniq -c | sort -nr | head || true
|
||||
|
||||
- name: Reset ccache stats
|
||||
shell: bash
|
||||
run: ccache -z || true
|
||||
|
||||
- name: Configure AzerothCore settings
|
||||
run: |
|
||||
touch conf/config.sh
|
||||
echo 'MTHREADS=4' >> conf/config.sh
|
||||
echo 'CBUILD_TESTING=ON' >> conf/config.sh
|
||||
echo 'AC_ENABLE_ROOT_CMAKE_INSTALL=1' >> conf/config.sh
|
||||
echo 'export AC_CONFIG_POLICY=$AC_CONFIG_POLICY_PRESET_ZERO_CONF' >> conf/config.sh
|
||||
echo 'AC_ENABLE_CONF_COPY_ON_INSTALL=0' >> conf/config.sh
|
||||
cat conf/config.sh
|
||||
|
||||
# debug content of AC_CONFIG_POLICY
|
||||
./acore.sh config show AC_CONFIG_POLICY
|
||||
|
||||
- name: Test module commands
|
||||
run: |
|
||||
./acore.sh module install mod-autobalance
|
||||
./acore.sh module install mod-duel-reset
|
||||
|
||||
./acore.sh module list
|
||||
|
||||
./acore.sh module install --all
|
||||
./acore.sh module update mod-autobalance
|
||||
./acore.sh module update --all
|
||||
|
||||
- name: Run complete installation (deps, compile, database, client-data)
|
||||
run: |
|
||||
# This runs: install-deps, compile, database setup, client-data download
|
||||
./acore.sh init
|
||||
sudo npm install -g pm2
|
||||
timeout-minutes: 120
|
||||
|
||||
- name: Test module removal
|
||||
run: |
|
||||
./acore.sh module remove mod-autobalance
|
||||
./acore.sh module list
|
||||
./acore.sh module remove mod-duel-reset
|
||||
./acore.sh module list
|
||||
|
||||
- name: Run core tests
|
||||
run: |
|
||||
./acore.sh test core
|
||||
|
||||
- name: Test authserver dry-run
|
||||
run: |
|
||||
source ./acore.sh config load
|
||||
cd env/dist/bin
|
||||
timeout 5m ./authserver -dry-run
|
||||
continue-on-error: false
|
||||
|
||||
- name: Test worldserver dry-run
|
||||
run: |
|
||||
source ./acore.sh config load
|
||||
cd env/dist/bin
|
||||
timeout 5m ./worldserver -dry-run
|
||||
continue-on-error: false
|
||||
|
||||
|
||||
- name: Test worldserver with startup scripts
|
||||
run: |
|
||||
./acore.sh sm create world worldserver --bin-path ./env/dist/bin --provider pm2
|
||||
./acore.sh sm show-config worldserver
|
||||
./acore.sh sm start worldserver
|
||||
./acore.sh sm wait-uptime worldserver 10 300
|
||||
./acore.sh sm send worldserver "account create tester password 3"
|
||||
./acore.sh sm send worldserver "account set gm tester 3"
|
||||
./acore.sh sm send worldserver "account set addon tester 1"
|
||||
./acore.sh sm wait-uptime worldserver 10 300
|
||||
./acore.sh sm stop worldserver
|
||||
./acore.sh sm delete worldserver
|
||||
timeout-minutes: 30
|
||||
continue-on-error: false
|
||||
|
||||
- name: Test authserver with startup scripts
|
||||
run: |
|
||||
./acore.sh sm create auth authserver --bin-path ./env/dist/bin --provider pm2
|
||||
./acore.sh sm show-config authserver
|
||||
./acore.sh sm start authserver
|
||||
./acore.sh sm wait-uptime authserver 10 300
|
||||
./acore.sh sm stop authserver
|
||||
./acore.sh sm delete authserver
|
||||
timeout-minutes: 30
|
||||
continue-on-error: false
|
||||
|
||||
# save only if we didn't hit the cache
|
||||
- name: Save ccache
|
||||
if: steps.restore_ccache.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ github.workspace }}/var/ccache
|
||||
key: ccache:${{ runner.os }}:${{ steps.detect.outputs.cc_id }}_${{ steps.detect.outputs.cxx_id }}:${{ github.ref_name }}
|
||||
|
||||
- name: ccache stats (after)
|
||||
shell: bash
|
||||
run: ccache -s || true
|
||||
127
.github/workflows/docker_build.yml
vendored
Normal file
127
.github/workflows/docker_build.yml
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
name: docker-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||
DOCKER_BUILDKIT: 1
|
||||
RUNNING_ON_PRIMARY_BRANCH: |
|
||||
${{ (github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master') && 'true' || 'false' }}
|
||||
|
||||
jobs:
|
||||
build-containers:
|
||||
runs-on: "ubuntu-latest"
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref_name == 'master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
# The containers created in this workflow are used by
|
||||
# acore-docker, which has a dependency on mod-ale.
|
||||
#
|
||||
# If you're wanting containers without mod-ale, the best solution is to
|
||||
# build them locally (such as with `docker compose build`)
|
||||
- name: Download Eluna
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: azerothcore/mod-ale
|
||||
path: modules/mod-ale
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Get version
|
||||
id: version
|
||||
run: |
|
||||
version="$(jq -r '.version' acore.json)"
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: build worldserver
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: worldserver
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
|
||||
- name: build authserver
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: authserver
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
|
||||
- name: build db-import
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: db-import
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
|
||||
- name: build client-data
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: client-data
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
|
||||
- name: build tools
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: tools
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
|
||||
- name: build dev-server
|
||||
uses: ./.github/actions/docker-tag-and-build
|
||||
with:
|
||||
component-name: dev
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
|
||||
dockerfile: apps/docker/Dockerfile.dev-server
|
||||
|
||||
- name: Trigger acore-docker CI
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.ACORE_DOCKER_REPO_ACCESS_TOKEN }}
|
||||
repository: azerothcore/acore-docker
|
||||
event-type: azerothcore-new-images
|
||||
client-payload: >
|
||||
{
|
||||
"ref": "${{ github.ref_name }}",
|
||||
"sha": "${{ github.sha }}"
|
||||
}
|
||||
49
.github/workflows/import_pending.yml
vendored
Normal file
49
.github/workflows/import_pending.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
name: import-pending
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
import-pending:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
runs-on: ubuntu-24.04
|
||||
permissions: write-all
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
# If we're fetching all the history in a later step it makes sense to
|
||||
# pre-load it now
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.ref_name }}
|
||||
|
||||
- uses: denoland/setup-deno@v1
|
||||
with:
|
||||
# Specifies latest 1.x
|
||||
deno-version: "~1.0"
|
||||
|
||||
- name: Import and commit pending sql
|
||||
run: |
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
# Get the latest changes from git
|
||||
git pull --rebase origin "${{ github.ref_name }}"
|
||||
bash bin/acore-db-pendings
|
||||
deno run --allow-all --unstable apps/ci/ci-pending-changelogs.ts
|
||||
git add -A .
|
||||
git commit -am "chore(DB): import pending files" -m "Referenced commit(s): ${GITHUB_SHA}" || true
|
||||
env:
|
||||
# Noting that the branch name can only be master, as per the event
|
||||
# triggering this action
|
||||
BRANCH: ${{ github.ref_name }}
|
||||
|
||||
- name: Push changes
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.AC_GITHUB_TOKEN }}
|
||||
# Noting that the branch name can only be master, as per the event
|
||||
# triggering this action
|
||||
branch: ${{ github.ref_name }}
|
||||
16
.github/workflows/issue-labeler.yml
vendored
Normal file
16
.github/workflows/issue-labeler.yml
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
name: "Issue Labeler"
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
issue_labeler:
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
runs-on: ubuntu-latest
|
||||
name: Issue Labeler
|
||||
steps:
|
||||
- name: Issue Labeler
|
||||
id: issue-labeler
|
||||
uses: azerothcore/GitHub-Actions@issue-labeler-1.0.2
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
53
.github/workflows/macos_build.yml
vendored
Normal file
53
.github/workflows/macos_build.yml
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
name: macos-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref_name }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
actions: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
macos-build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- macos-14
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/Library/Caches/ccache
|
||||
key: ccache:${{ matrix.os }}:${{ github.ref_name }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:${{ github.ref_name }}
|
||||
ccache:${{ matrix.os }}
|
||||
- name: reset ccache stats
|
||||
shell: bash
|
||||
run: ccache -z || true
|
||||
- name: Install latest bash
|
||||
run: brew install bash
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
- name: Build
|
||||
run: source ./apps/ci/mac/ci-compile.sh
|
||||
- name: ccache stats
|
||||
shell: bash
|
||||
run: ccache -s || true
|
||||
19
.github/workflows/pr_labeler.yml
vendored
Normal file
19
.github/workflows/pr_labeler.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: PR Labeler
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened, ready_for_review] # Ready for Review = Draft to Live PR
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions: write-all
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: true
|
||||
- uses: actions/labeler@v5
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
configuration-path: .github/labeler.yml
|
||||
sync-labels: false # true = Removes ALL current labels before adding the labels | false = Only adds the Labels
|
||||
25
.github/workflows/sql-codestyle.yml
vendored
Normal file
25
.github/workflows/sql-codestyle.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
name: SQL Codestyle
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
paths:
|
||||
- data/**
|
||||
- "!README.md"
|
||||
- "!docs/**"
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
runs-on: ubuntu-latest
|
||||
name: SQL
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- name: AzerothCore codestyle
|
||||
run: python ./apps/codestyle/codestyle-sql.py
|
||||
45
.github/workflows/tools_build.yml
vendored
Normal file
45
.github/workflows/tools_build.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: tools
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-24.04
|
||||
compiler:
|
||||
CC: clang-18
|
||||
CXX: clang++-18
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: ./.github/actions/linux-build
|
||||
with:
|
||||
CC: ${{ matrix.compiler.CC }}
|
||||
CXX: ${{ matrix.compiler.CXX }}
|
||||
tools: all
|
||||
pch: false
|
||||
50
.github/workflows/windows_build.yml
vendored
Normal file
50
.github/workflows/windows_build.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: windows-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
# One concurrency group per workflow + ref.
|
||||
#
|
||||
# - PRs use `refs/pull/<PR_NUMBER>/merge`, so new commits cancel older
|
||||
# in-progress runs for the same PR.
|
||||
# - When a PR is merged, a push to the target branch starts a new group,
|
||||
# canceling any still-running PR CI.
|
||||
# - Branch pushes are isolated by ref.
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
windows-build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [windows-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}
|
||||
env:
|
||||
BOOST_ROOT: C:\local\boost_1_82_0
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2.13
|
||||
- name: Configure OS
|
||||
shell: bash
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
run: |
|
||||
./acore.sh install-deps
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
export CTOOLS_BUILD=all
|
||||
./acore.sh compiler build
|
||||
104
.gitignore
vendored
Normal file
104
.gitignore
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# AzerothCore
|
||||
#
|
||||
|
||||
/conf/*
|
||||
!/conf/dist
|
||||
/modules/*
|
||||
!/modules/*.md
|
||||
!/modules/*.sh
|
||||
!/modules/CMakeLists.txt
|
||||
!/modules/*.h
|
||||
!/modules/*.cmake
|
||||
/build*/
|
||||
/var/*
|
||||
!/var/build/.gitkeep
|
||||
!/var/ccache/.gitkeep
|
||||
/env/dist/*
|
||||
!/env/dist/.gitkeep
|
||||
/env/user/*
|
||||
/.env*
|
||||
/apps/joiner
|
||||
/deps/deno
|
||||
/data/sql/custom/*
|
||||
/src/server/scripts/Custom/*
|
||||
!/src/server/scripts/Custom/README.md
|
||||
|
||||
/*.override.yml
|
||||
/*.override.yaml
|
||||
|
||||
!.gitkeep
|
||||
|
||||
# default build directory if not specified by CMAKE configuration
|
||||
/out/*
|
||||
|
||||
#
|
||||
#Generic
|
||||
#
|
||||
|
||||
.directory
|
||||
.mailmap
|
||||
*.orig
|
||||
*.rej
|
||||
*~
|
||||
.hg/
|
||||
*.kdev*
|
||||
.DS_Store
|
||||
CMakeLists.txt.user
|
||||
*.bak
|
||||
*.patch
|
||||
*.diff
|
||||
*.REMOTE.*
|
||||
*.BACKUP.*
|
||||
*.BASE.*
|
||||
*.LOCAL.*
|
||||
|
||||
#
|
||||
# IDE & other software
|
||||
#
|
||||
/.settings/
|
||||
/.externalToolBuilders/*
|
||||
# exclude in all levels
|
||||
nbproject/
|
||||
.sync.ffs_db
|
||||
*.kate-swp
|
||||
.browse.VC*
|
||||
.idea
|
||||
cmake-build-*/*
|
||||
coverage-report/
|
||||
.vs
|
||||
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
|
||||
#
|
||||
# Eclipse
|
||||
#
|
||||
*.pydevproject
|
||||
.metadata
|
||||
.gradle
|
||||
tmp/
|
||||
*.tmp
|
||||
*.swp
|
||||
*~.nib
|
||||
local.properties
|
||||
.settings/
|
||||
.loadpath
|
||||
.project
|
||||
.cproject
|
||||
|
||||
|
||||
# ==================
|
||||
#
|
||||
# CUSTOM
|
||||
#
|
||||
# put below your custom ignore rules
|
||||
# for example , if you want to include a
|
||||
# module directly in repositoryyou can do:
|
||||
#
|
||||
# !modules/yourmodule
|
||||
#
|
||||
# ==================
|
||||
1
.suppress.cppcheck
Normal file
1
.suppress.cppcheck
Normal file
@@ -0,0 +1 @@
|
||||
cppcheckError
|
||||
18
.vscode/extensions.json
vendored
Normal file
18
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"ms-vscode-remote.remote-containers",
|
||||
"notskm.clang-tidy",
|
||||
"xaver.clang-format",
|
||||
"bbenoist.doxygen",
|
||||
"ms-vscode.cpptools",
|
||||
"ms-vscode.cmake-tools",
|
||||
"mhutchie.git-graph",
|
||||
"github.vscode-pull-request-github",
|
||||
"eamodio.gitlens",
|
||||
"cschlosser.doxdocgen",
|
||||
"sanaajani.taskrunnercode",
|
||||
"mads-hartmann.bash-ide-vscode",
|
||||
"jetmartin.bats",
|
||||
"ms-vscode.makefile-tools",
|
||||
]
|
||||
}
|
||||
68
.vscode/launch.json
vendored
Normal file
68
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Linux/Docker debug",
|
||||
"type": "cppdbg",
|
||||
"request": "launch",
|
||||
"program": "/azerothcore/env/dist/bin/worldserver",
|
||||
"cwd": "/azerothcore",
|
||||
"args": [],
|
||||
"environment": [],
|
||||
"externalConsole": false,
|
||||
"sourceFileMap": {
|
||||
"/azerothcore": "${workspaceFolder}"
|
||||
},
|
||||
"linux": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "/usr/bin/gdb",
|
||||
"setupCommands": [
|
||||
{
|
||||
"description": "Enable pretty-printing for gdb",
|
||||
"text": "-enable-pretty-printing",
|
||||
"ignoreFailures": false
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "(docker run) Pipe Launch",
|
||||
"type": "cppdbg",
|
||||
"request": "launch",
|
||||
"program": "/azerothcore/env/dist/bin/worldserver",
|
||||
"cwd": "/azerothcore",
|
||||
"args": [],
|
||||
"environment": [],
|
||||
"externalConsole": true,
|
||||
"pipeTransport": {
|
||||
"debuggerPath": "/usr/bin/gdb",
|
||||
"pipeProgram": "docker compose",
|
||||
"pipeArgs": [
|
||||
"exec", "-T", "ac-worldserver", "sh", "-c"
|
||||
],
|
||||
"pipeCwd": "${workspaceFolder}"
|
||||
},
|
||||
"sourceFileMap": {
|
||||
"/azerothcore": "${workspaceFolder}"
|
||||
},
|
||||
"linux": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "/usr/bin/gdb",
|
||||
"setupCommands": [
|
||||
{
|
||||
"description": "Enable pretty-printing for gdb",
|
||||
"text": "-enable-pretty-printing",
|
||||
"ignoreFailures": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"osx": {
|
||||
"MIMode": "lldb"
|
||||
},
|
||||
"windows": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "C:\\MinGw\\bin\\gdb.exe"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
128
.vscode/settings.json
vendored
Normal file
128
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
{
|
||||
"files.associations": {
|
||||
"*.dist": "properties",
|
||||
"*.crash": "properties",
|
||||
"*.wtf": "properties",
|
||||
"*.cnf": "properties",
|
||||
"array": "cpp",
|
||||
"atomic": "cpp",
|
||||
"bit": "cpp",
|
||||
"*.tcc": "cpp",
|
||||
"bitset": "cpp",
|
||||
"cctype": "cpp",
|
||||
"chrono": "cpp",
|
||||
"cinttypes": "cpp",
|
||||
"clocale": "cpp",
|
||||
"cmath": "cpp",
|
||||
"complex": "cpp",
|
||||
"condition_variable": "cpp",
|
||||
"csignal": "cpp",
|
||||
"cstdarg": "cpp",
|
||||
"cstddef": "cpp",
|
||||
"cstdint": "cpp",
|
||||
"cstdio": "cpp",
|
||||
"cstdlib": "cpp",
|
||||
"cstring": "cpp",
|
||||
"ctime": "cpp",
|
||||
"cwchar": "cpp",
|
||||
"cwctype": "cpp",
|
||||
"deque": "cpp",
|
||||
"list": "cpp",
|
||||
"map": "cpp",
|
||||
"set": "cpp",
|
||||
"unordered_map": "cpp",
|
||||
"unordered_set": "cpp",
|
||||
"vector": "cpp",
|
||||
"exception": "cpp",
|
||||
"algorithm": "cpp",
|
||||
"functional": "cpp",
|
||||
"iterator": "cpp",
|
||||
"memory": "cpp",
|
||||
"memory_resource": "cpp",
|
||||
"numeric": "cpp",
|
||||
"optional": "cpp",
|
||||
"random": "cpp",
|
||||
"ratio": "cpp",
|
||||
"string": "cpp",
|
||||
"string_view": "cpp",
|
||||
"system_error": "cpp",
|
||||
"tuple": "cpp",
|
||||
"type_traits": "cpp",
|
||||
"utility": "cpp",
|
||||
"fstream": "cpp",
|
||||
"initializer_list": "cpp",
|
||||
"iomanip": "cpp",
|
||||
"iosfwd": "cpp",
|
||||
"iostream": "cpp",
|
||||
"istream": "cpp",
|
||||
"limits": "cpp",
|
||||
"mutex": "cpp",
|
||||
"new": "cpp",
|
||||
"ostream": "cpp",
|
||||
"shared_mutex": "cpp",
|
||||
"sstream": "cpp",
|
||||
"stdexcept": "cpp",
|
||||
"streambuf": "cpp",
|
||||
"thread": "cpp",
|
||||
"cfenv": "cpp",
|
||||
"typeinfo": "cpp",
|
||||
"codecvt": "cpp",
|
||||
"xstring": "cpp",
|
||||
"variant": "cpp",
|
||||
"any": "cpp",
|
||||
"barrier": "cpp",
|
||||
"charconv": "cpp",
|
||||
"compare": "cpp",
|
||||
"concepts": "cpp",
|
||||
"coroutine": "cpp",
|
||||
"csetjmp": "cpp",
|
||||
"execution": "cpp",
|
||||
"filesystem": "cpp",
|
||||
"format": "cpp",
|
||||
"forward_list": "cpp",
|
||||
"future": "cpp",
|
||||
"ios": "cpp",
|
||||
"latch": "cpp",
|
||||
"locale": "cpp",
|
||||
"numbers": "cpp",
|
||||
"queue": "cpp",
|
||||
"ranges": "cpp",
|
||||
"regex": "cpp",
|
||||
"scoped_allocator": "cpp",
|
||||
"semaphore": "cpp",
|
||||
"source_location": "cpp",
|
||||
"span": "cpp",
|
||||
"stack": "cpp",
|
||||
"stop_token": "cpp",
|
||||
"strstream": "cpp",
|
||||
"syncstream": "cpp",
|
||||
"typeindex": "cpp",
|
||||
"valarray": "cpp",
|
||||
"xfacet": "cpp",
|
||||
"xhash": "cpp",
|
||||
"xiosbase": "cpp",
|
||||
"xlocale": "cpp",
|
||||
"xlocbuf": "cpp",
|
||||
"xlocinfo": "cpp",
|
||||
"xlocmes": "cpp",
|
||||
"xlocmon": "cpp",
|
||||
"xlocnum": "cpp",
|
||||
"xloctime": "cpp",
|
||||
"xmemory": "cpp",
|
||||
"xstddef": "cpp",
|
||||
"xtr1common": "cpp",
|
||||
"xtree": "cpp",
|
||||
"xutility": "cpp",
|
||||
"*.ipp": "cpp",
|
||||
"resumable": "cpp"
|
||||
},
|
||||
"deno.enable": true,
|
||||
"deno.path": "deps/deno/bin/deno",
|
||||
"deno.lint": true,
|
||||
"C_Cpp.default.compileCommands": "${workspaceFolder}/build/compile_commands.json",
|
||||
"C_Cpp.default.cppStandard": "c++17",
|
||||
"C_Cpp.default.configurationProvider": "ms-vscode.cmake-tools",
|
||||
"C_Cpp.default.compilerPath": "/usr/bin/clang",
|
||||
"cmake.sourceDirectory": ["${workspaceFolder}"],
|
||||
"cmake.buildDirectory": "${workspaceFolder}/var/build",
|
||||
}
|
||||
96
.vscode/tasks.json
vendored
Normal file
96
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "AzerothCore: Dashboard",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Download client-data",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh client-data",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Clean build",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh compiler clean",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Build",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh compiler build",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Check codestyle cpp",
|
||||
"type": "shell",
|
||||
"command": "python apps/codestyle/codestyle-cpp.py",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Check codestyle sql",
|
||||
"type": "shell",
|
||||
"command": "python apps/codestyle/codestyle-sql.py",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Run authserver (restarter)",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh run-authserver",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Run worldserver (restarter)",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh run-worldserver",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
||||
50
AUTHORS
Normal file
50
AUTHORS
Normal file
@@ -0,0 +1,50 @@
|
||||
# List of AUTHORS who contributed over time to the AzerothCore project
|
||||
|
||||
## Point of current development
|
||||
The project is currently hosted at https://www.azerothcore.org/ and developed under https://github.com/azerothcore
|
||||
|
||||
## History of development
|
||||
Development of this project dates back to 2004, and was developed under various umbrellas over time:
|
||||
* WoW Daemon Team, 2004
|
||||
* MaNGOS project, 2005-2008, located at http://www.mangosproject.org
|
||||
* MaNGOS project, 2008-2011, located at http://getmangos.com
|
||||
* SD2 project, 2008-2009, located at http://www.scriptdev2.com/
|
||||
* TrinityCore, 2008-2012, located at https://www.trinitycore.org/
|
||||
* SunwellCore 2012-2016, privately developed, more info at https://www.azerothcore.org/pages/sunwell.pl/
|
||||
* AzerothCore, 2016-CURRENT, located at https://www.azerothcore.org/
|
||||
|
||||
## Authorship of the code
|
||||
Authorship is assigned for each commit within the git history, which is stored in these git repositories:
|
||||
* github.com/cmangos/mangos-svn (History from MaNGOS project from 2005-2008, originally hosted at http://mangos.svn.sourceforge.net)
|
||||
* github.com/TrinityCore/TrinityCore
|
||||
* github.com/azerothcore/azerothcore-wotlk
|
||||
|
||||
Unfortunately, we have no detailed information on the history of the WoWD project;
|
||||
if somebody can provide information, please contact us, so that we can make this history available
|
||||
|
||||
SunwellCore was developed privately and has unfortunately no git history.
|
||||
|
||||
## Exceptions with third-party libraries
|
||||
The third-party libraries have their own way of addressing authorship, and the authorship of commits importing/updating
|
||||
a third-party library reflects who did the importing instead of who wrote the code within the commit.
|
||||
|
||||
The Authors of third-party libraries are not explicitly mentioned, and usually is possible to obtain from the files belonging to the third-party libraries.
|
||||
|
||||
## Cross-project collaboration
|
||||
|
||||
At AzerothCore, we actively promote collaboration with other MaNGOS-based open-source projects that are actively maintained. This also includes cherry-picking commits from relevant projects such as:
|
||||
|
||||
* TrinityCore: https://github.com/trinitycore
|
||||
* CMaNGOS: https://github.com/cmangos/
|
||||
* vMaNGOS: https://github.com/vmangos
|
||||
* and others
|
||||
|
||||
It is strictly required for any contributor importing code to credit the original author by:
|
||||
- linking the original PR (or commit)
|
||||
- adding a [Co-authored-by](https://docs.github.com/en/pull-requests/committing-changes-to-your-project/creating-and-editing-commits/creating-a-commit-with-multiple-authors) line in the PR description
|
||||
|
||||
This will automatically include the original author in our list of [Contributors](https://github.com/azerothcore/azerothcore-wotlk/graphs/contributors).
|
||||
|
||||
If you notice a violation of the above, please report it to the Staff immediately.
|
||||
|
||||
We encourage other projects to adopt similar practices to promote healthy cross-project collaboration and proper attribution.
|
||||
180
CMakeLists.txt
Normal file
180
CMakeLists.txt
Normal file
@@ -0,0 +1,180 @@
|
||||
#
|
||||
# This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
#
|
||||
# This file is free software; as a special exception the author gives
|
||||
# unlimited permission to copy and/or distribute it, with or without
|
||||
# modifications, as long as this notice is preserved.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
#
|
||||
|
||||
# Require version Cmake
|
||||
cmake_minimum_required(VERSION 3.16...3.22)
|
||||
|
||||
message(STATUS "CMake version: ${CMAKE_VERSION}")
|
||||
|
||||
# CMake policies (can not be handled elsewhere)
|
||||
cmake_policy(SET CMP0005 NEW)
|
||||
|
||||
if (POLICY CMP0153)
|
||||
cmake_policy(SET CMP0153 OLD)
|
||||
endif()
|
||||
|
||||
# Set projectname (must be done AFTER setting configurationtypes)
|
||||
project(AzerothCore VERSION 3.0.0 LANGUAGES CXX C)
|
||||
|
||||
# add this options before PROJECT keyword
|
||||
set(CMAKE_DISABLE_SOURCE_CHANGES ON)
|
||||
set(CMAKE_DISABLE_IN_SOURCE_BUILD ON)
|
||||
|
||||
# Set RPATH-handing (CMake parameters)
|
||||
set(CMAKE_SKIP_BUILD_RPATH 0)
|
||||
set(CMAKE_BUILD_WITH_INSTALL_RPATH 0)
|
||||
set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
|
||||
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH 1)
|
||||
|
||||
# Export compile commands for IDE support
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
|
||||
|
||||
set(AC_PATH_ROOT "${CMAKE_SOURCE_DIR}")
|
||||
|
||||
# set macro-directory
|
||||
list(APPEND CMAKE_MODULE_PATH
|
||||
"${CMAKE_SOURCE_DIR}/src/cmake/macros")
|
||||
|
||||
include(CheckCXXSourceRuns)
|
||||
include(CheckIncludeFiles)
|
||||
include(ConfigureScripts)
|
||||
include(ConfigureModules)
|
||||
include(ConfigureApplications)
|
||||
include(ConfigureTools)
|
||||
|
||||
# some utils for cmake
|
||||
include(deps/acore/cmake-utils/utils.cmake)
|
||||
|
||||
include(src/cmake/ac_macros.cmake)
|
||||
|
||||
# set default buildoptions and print them
|
||||
include(conf/dist/config.cmake)
|
||||
|
||||
# load custom configurations for cmake if exists
|
||||
if(EXISTS "${CMAKE_SOURCE_DIR}/conf/config.cmake")
|
||||
include(conf/config.cmake)
|
||||
endif()
|
||||
|
||||
#
|
||||
# Loading dyn modules
|
||||
#
|
||||
|
||||
# add modules and dependencies
|
||||
CU_SUBDIRLIST(sub_DIRS "${CMAKE_SOURCE_DIR}/modules" FALSE FALSE)
|
||||
FOREACH(subdir ${sub_DIRS})
|
||||
|
||||
get_filename_component(MODULENAME ${subdir} NAME)
|
||||
|
||||
if (";${DISABLED_AC_MODULES};" MATCHES ";${MODULENAME};")
|
||||
continue()
|
||||
endif()
|
||||
|
||||
STRING(REPLACE "${CMAKE_SOURCE_DIR}/" "" subdir_rel ${subdir})
|
||||
if(EXISTS "${subdir}/CMakeLists.txt")
|
||||
add_subdirectory("${subdir_rel}")
|
||||
endif()
|
||||
ENDFOREACH()
|
||||
|
||||
CU_RUN_HOOK("AFTER_LOAD_CONF")
|
||||
|
||||
# build in Release-mode by default if not explicitly set
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE "RelWithDebInfo")
|
||||
endif()
|
||||
|
||||
# turn off PCH totally if enabled (hidden setting, mainly for devs)
|
||||
if( NOPCH )
|
||||
set(USE_COREPCH 0)
|
||||
set(USE_SCRIPTPCH 0)
|
||||
endif()
|
||||
|
||||
include(ConfigureBaseTargets)
|
||||
include(CheckPlatform)
|
||||
include(GroupSources)
|
||||
include(AutoCollect)
|
||||
include(ConfigInstall)
|
||||
|
||||
CU_RUN_HOOK("AFTER_LOAD_CMAKE_MODULES")
|
||||
|
||||
find_package(PCHSupport)
|
||||
find_package(MySQL REQUIRED)
|
||||
|
||||
if(UNIX AND WITH_PERFTOOLS)
|
||||
find_package(Gperftools)
|
||||
endif()
|
||||
|
||||
if(NOT WITHOUT_GIT)
|
||||
find_package(Git)
|
||||
endif()
|
||||
|
||||
# Find revision ID and hash of the sourcetree
|
||||
include(src/cmake/genrev.cmake)
|
||||
|
||||
# print out the results before continuing
|
||||
include(src/cmake/showoptions.cmake)
|
||||
|
||||
#
|
||||
# Loading framework
|
||||
#
|
||||
|
||||
add_subdirectory(deps)
|
||||
add_subdirectory(src/common)
|
||||
|
||||
#
|
||||
# Loading application sources
|
||||
#
|
||||
|
||||
CU_RUN_HOOK("BEFORE_SRC_LOAD")
|
||||
|
||||
# add core sources
|
||||
add_subdirectory(src)
|
||||
|
||||
if (BUILD_APPLICATION_WORLDSERVER)
|
||||
# add modules sources
|
||||
add_subdirectory(modules)
|
||||
endif()
|
||||
|
||||
CU_RUN_HOOK("AFTER_SRC_LOAD")
|
||||
|
||||
if (BUILD_TESTING AND BUILD_APPLICATION_WORLDSERVER)
|
||||
# we use these flags to get code coverage
|
||||
set(UNIT_TEST_CXX_FLAGS "-fprofile-arcs -ftest-coverage -fno-inline")
|
||||
|
||||
# enable additional flags for GCC.
|
||||
if ( CMAKE_CXX_COMPILER_ID MATCHES GNU )
|
||||
set(UNIT_TEST_CXX_FLAGS "${UNIT_TEST_CXX_FLAGS} -fno-inline-small-functions -fno-default-inline")
|
||||
endif()
|
||||
|
||||
message("Unit tests code coverage: enabling ${UNIT_TEST_CXX_FLAGS}")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${UNIT_TEST_CXX_FLAGS}")
|
||||
|
||||
include(src/cmake/googletest.cmake)
|
||||
fetch_googletest(
|
||||
${PROJECT_SOURCE_DIR}/src/cmake
|
||||
${PROJECT_BINARY_DIR}/googletest
|
||||
)
|
||||
|
||||
enable_testing()
|
||||
add_subdirectory(src/test)
|
||||
|
||||
add_custom_target(coverage DEPENDS coverage_command)
|
||||
|
||||
add_custom_command(OUTPUT coverage_command
|
||||
# Run unit tests.
|
||||
COMMAND ctest
|
||||
# Run the graphical front-end for code coverage.
|
||||
COMMAND lcov --directory src --capture --output-file coverage.info
|
||||
COMMAND lcov --remove coverage.info '/usr/*' '${CMAKE_BINARY_DIR}/googletest/*' '${CMAKE_CURRENT_SOURCE_DIR}/src/test/*' --output-file coverage.info
|
||||
COMMAND genhtml -o ${CMAKE_CURRENT_SOURCE_DIR}/coverage-report coverage.info
|
||||
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
|
||||
)
|
||||
endif()
|
||||
339
LICENSE
Normal file
339
LICENSE
Normal file
@@ -0,0 +1,339 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
||||
23
PreLoad.cmake
Normal file
23
PreLoad.cmake
Normal file
@@ -0,0 +1,23 @@
|
||||
# Copyright (C)
|
||||
#
|
||||
# This file is free software; as a special exception the author gives
|
||||
# unlimited permission to copy and/or distribute it, with or without
|
||||
# modifications, as long as this notice is preserved.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
# This file is run right before CMake starts configuring the sourcetree
|
||||
|
||||
# Example: Force CMAKE_INSTALL_PREFIX to be preloaded with something before
|
||||
# doing the actual first "configure"-part - allows for hardforcing
|
||||
# destinations elsewhere in the CMake buildsystem (commented out on purpose)
|
||||
|
||||
# Override CMAKE_INSTALL_PREFIX on Windows platforms
|
||||
#if( WIN32 )
|
||||
# if( NOT CYGWIN )
|
||||
# set(CMAKE_INSTALL_PREFIX
|
||||
# "" CACHE PATH "Default install path")
|
||||
# endif()
|
||||
#endif()
|
||||
5
acore.json
Normal file
5
acore.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "azerothcore-wotlk",
|
||||
"version": "15.0.0-dev",
|
||||
"license": "GPL2"
|
||||
}
|
||||
8
acore.sh
Executable file
8
acore.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash

# Entry point for the AzerothCore installer.
# Set WITH_ERRORS to a non-empty value to keep running after a command
# fails; otherwise the script aborts on the first error (set -e).
if [ -z "$WITH_ERRORS" ]; then
    set -e
fi

# Absolute path of the directory containing this script, regardless of
# where it is invoked from. (Name kept: the sourced installer uses it.)
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CUR_PATH/apps/installer/main.sh"
|
||||
70
apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh
Normal file
70
apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/bin/bash
set -euo pipefail

# Exports each AzerothCore database (auth / characters / world) into
# per-table .sql base files under data/sql/base, one folder per database.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Convert an MSYS/Cygwin-style path (/c/foo) to a Windows drive path (C:/foo)
# so the output directory resolves correctly on Windows shells.
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

BASE_OUTPUT_DIR="$PROJECT_ROOT/data/sql/base"

read -p "Enter MySQL username: " DB_USER
# -s keeps the password off the terminal (it was previously echoed in clear text).
read -s -p "Enter MySQL password: " DB_PASS
echo
read -p "Enter MySQL host (default: localhost): " DB_HOST
DB_HOST=${DB_HOST:-localhost}
read -p "Enter MySQL port (default: 3306): " DB_PORT
DB_PORT=${DB_PORT:-3306}

# Prompt for database names
read -p "Enter name of Auth database [default: acore_auth]: " DB_AUTH
DB_AUTH=${DB_AUTH:-acore_auth}
read -p "Enter name of Characters database [default: acore_characters]: " DB_CHARACTERS
DB_CHARACTERS=${DB_CHARACTERS:-acore_characters}
read -p "Enter name of World database [default: acore_world]: " DB_WORLD
DB_WORLD=${DB_WORLD:-acore_world}

# Mapping for folder names
declare -A DB_MAP=(
    ["$DB_AUTH"]="db_auth"
    ["$DB_CHARACTERS"]="db_characters"
    ["$DB_WORLD"]="db_world"
)

# Dump each database
for DB_NAME in "${!DB_MAP[@]}"; do
    FOLDER_NAME="${DB_MAP[$DB_NAME]}"
    echo "📦 Dumping database '$DB_NAME' into folder '$FOLDER_NAME'"
    echo "$BASE_OUTPUT_DIR/$FOLDER_NAME"
    mkdir -p "$BASE_OUTPUT_DIR/$FOLDER_NAME"

    TABLES=$(mysql -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" -N -e "SHOW TABLES FROM \`$DB_NAME\`;")

    if [[ -z "$TABLES" ]]; then
        echo "⚠️ No tables found or failed to connect to '$DB_NAME'. Skipping."
        continue
    fi

    while IFS= read -r raw_table; do
        # Strip CRs/quotes and surrounding whitespace from the table name.
        TABLE=$(echo "$raw_table" | tr -d '\r"' | xargs)
        if [[ -n "$TABLE" ]]; then
            echo " ➤ Dumping table: $TABLE"
            # --skip-tz-utc needed to keep TIMESTAMP values as-is.
            # All expansions are quoted: the previous unquoted form word-split
            # (and glob-expanded) passwords/paths containing spaces or '*'.
            mysqldump -u "$DB_USER" -p"$DB_PASS" -h "$DB_HOST" -P "$DB_PORT" --skip-tz-utc --extended-insert "$DB_NAME" "$TABLE" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"

            # cleanup files: put VALUES on its own line, then one row tuple per line
            sed -E '
                s/VALUES[[:space:]]*/VALUES\n/;
                :a
                s/\),\(/\),\n\(/g;
                ta
            ' "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql" > "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql"
            mv "$BASE_OUTPUT_DIR/$FOLDER_NAME/${TABLE}_formatted.sql" "$BASE_OUTPUT_DIR/$FOLDER_NAME/$TABLE.sql"
        fi
    done <<< "$TABLES"
done

echo "✅ Done dumping all specified databases."
|
||||
16
apps/DatabaseSquash/DatabaseExporter/databaseexporter.md
Normal file
16
apps/DatabaseSquash/DatabaseExporter/databaseexporter.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# The AzerothCore Database Exporter for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Requirements
|
||||
|
||||
1. MySQL
|
||||
2. mysqldump
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run DatabaseExporter.sh from the current directory.
|
||||
2. Fill in required data within the CLI.
|
||||
3. The tool will automatically populate the base-file directories.
|
||||
4. Done.
|
||||
52
apps/DatabaseSquash/DatabaseSquash.sh
Normal file
52
apps/DatabaseSquash/DatabaseSquash.sh
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env bash

# Orchestrates a database squash: bumps the version via VersionUpdater.sh,
# then (after the maintainer drops and repopulates the databases) exports
# the base files via DatabaseExporter.sh.

echo "❗CAUTION"
echo "This tool is only supposed to be used by AzerothCore Maintainers."
echo "The tool is used to prepare for, and generate a database squash."
echo
echo "Before you continue make sure you have read"
echo "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
echo
read -p "Are you sure you want to continue (Y/N)?" choice
case "$choice" in
  y|Y ) echo "Starting...";;
  * ) echo "Aborted"; exit 0 ;;
esac

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# Convert an MSYS/Cygwin-style path (/c/foo) to a Windows drive path (C:/foo).
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

# NOTE: the scripts on disk are named VersionUpdater.sh / DatabaseExporter.sh;
# the previous all-lowercase paths only worked on case-insensitive filesystems
# (Windows/macOS) and failed on Linux.
VERSION_UPDATER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh"

"$VERSION_UPDATER_PATH"

echo "✅ VersionUpdater Completed..."
echo
echo "❗IMPORTANT!"
echo "1. Before you continue you need to drop all your databases."
echo "2. Run WorldServer to populate the database."
echo
echo "❗DO NOT continue before you have completed the steps above!"
echo
echo "The next step will export your database and overwrite the base files."
echo
read -p "Are you sure you want to export your database (Y/N)?" choice
case "$choice" in
  y|Y ) echo "Starting...";;
  * ) echo "Aborted"; exit 0 ;;
esac

DATABASE_EXPORTER_PATH="$PROJECT_ROOT/apps/DatabaseSquash/DatabaseExporter/DatabaseExporter.sh"

"$DATABASE_EXPORTER_PATH"

echo "✅ DatabaseExporter Completed..."
echo "✅ DatabaseSquash Completed... "
echo
read -p "Press Enter to exit..."
|
||||
84
apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh
Normal file
84
apps/DatabaseSquash/VersionUpdater/VersionUpdater.sh
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/bin/bash

# Bumps the major version in acore.json and creates a dated db_world update
# SQL file that stamps the new DB version into the `version` table.

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)"

# Convert an MSYS/Cygwin-style path (/c/foo) to a Windows drive path (C:/foo).
if [[ "$PROJECT_ROOT" =~ ^/([a-zA-Z])/(.*) ]]; then
    DRIVE_LETTER="${BASH_REMATCH[1]}"
    PATH_REMAINDER="${BASH_REMATCH[2]}"
    PROJECT_ROOT="${DRIVE_LETTER^^}:/${PATH_REMAINDER}"
fi

ACORE_JSON_PATH="$PROJECT_ROOT/acore.json"
DB_WORLD_UPDATE_DIR="$PROJECT_ROOT/data/sql/updates/db_world"

VERSION_LINE=$(grep '"version"' "$ACORE_JSON_PATH")
VERSION=$(echo "$VERSION_LINE" | sed -E 's/.*"version": *"([^"]+)".*/\1/')

# Parse version into parts: bump MAJOR, reset minor/patch, keep suffix (e.g. "-dev")
if [[ "$VERSION" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)(.*)$ ]]; then
    MAJOR="${BASH_REMATCH[1]}"
    SUFFIX="${BASH_REMATCH[4]}"
    NEW_VERSION="$((MAJOR + 1)).0.0$SUFFIX"

    # Replace version in file (-i.bak keeps sed in-place portable; drop the backup)
    sed -i.bak -E "s/(\"version\": *\")[^\"]+(\" *)/\1$NEW_VERSION\2/" "$ACORE_JSON_PATH"
    rm -f "$ACORE_JSON_PATH.bak"

    echo "✅ Version updated to $NEW_VERSION"
else
    echo "Error: Could not parse version string: $VERSION"
    exit 1
fi

# Extract the new major version from NEW_VERSION
if [[ "$NEW_VERSION" =~ ^([0-9]+)\. ]]; then
    NEW_MAJOR="${BASH_REMATCH[1]}"
else
    echo "Error: Unable to extract major version from $NEW_VERSION"
    exit 1
fi

# Prepare SQL content
DB_VERSION_CONTENT="'ACDB 335.${NEW_MAJOR}-dev'"
SQL_QUERY="UPDATE \`version\` SET \`db_version\`=${DB_VERSION_CONTENT}, \`cache_id\`=${NEW_MAJOR} LIMIT 1;"

# Format date as yyyy_mm_dd
TODAY=$(date +%Y_%m_%d)

# Ensure directory exists
mkdir -p "$DB_WORLD_UPDATE_DIR"

# Determine the next two-digit counter for today's update files.
# A nullglob shell glob avoids the word-splitting that the previous
# unquoted $(find ...) expansion suffered on paths containing spaces.
COUNTER="00"
shopt -s nullglob
existing_files=("$DB_WORLD_UPDATE_DIR"/"${TODAY}"_*.sql)
shopt -u nullglob
if [ ${#existing_files[@]} -gt 0 ]; then
    max=0
    for file in "${existing_files[@]}"; do
        basename=$(basename "$file")
        if [[ "$basename" =~ ^${TODAY}_([0-9]{2})\.sql$ ]]; then
            num=${BASH_REMATCH[1]}
            # force base-10 so "08"/"09" are not rejected as invalid octal
            if [[ "$num" =~ ^[0-9]+$ ]] && (( 10#$num > max )); then
                max=$((10#$num))
            fi
        fi
    done
    COUNTER=$(printf "%02d" $((max + 1)))
fi

# Compose final file path
SQL_FILENAME="${TODAY}_${COUNTER}.sql"
SQL_FILE_PATH="$DB_WORLD_UPDATE_DIR/$SQL_FILENAME"

# Write to file
{
    echo "-- Auto-generated by VersionUpdater.sh on $(date)"
    echo "$SQL_QUERY"
} > "$SQL_FILE_PATH"

echo "✅ SQL file created at $SQL_FILE_PATH"
|
||||
10
apps/DatabaseSquash/VersionUpdater/versionupdater.md
Normal file
10
apps/DatabaseSquash/VersionUpdater/versionupdater.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# The AzerothCore Version Updater for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run VersionUpdater.sh from the current directory.
|
||||
2. The tool will update acore.json and create a new update sql file.
|
||||
3. Done.
|
||||
11
apps/DatabaseSquash/databasesquash.md
Normal file
11
apps/DatabaseSquash/databasesquash.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# The AzerothCore DatabaseSquash tool for Database Squashes
|
||||
|
||||
> [!CAUTION]
|
||||
> These steps are only for project maintainers who intend to update base files.
|
||||
|
||||
## Usage
|
||||
|
||||
1. Run DatabaseSquash.sh from the current directory.
|
||||
2. The tool will run VersionUpdater.sh and DatabaseExporter.sh.
|
||||
3. Follow the instructions in the CLI.
|
||||
4. Done.
|
||||
163
apps/EnumUtils/enumutils_describe.py
Normal file
163
apps/EnumUtils/enumutils_describe.py
Normal file
@@ -0,0 +1,163 @@
|
||||
from re import compile, MULTILINE
|
||||
from os import walk, getcwd
|
||||
|
||||
notice = ('''/*
|
||||
* This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; either version 2 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along
|
||||
* with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
''')
|
||||
|
||||
if not getcwd().endswith('src'):
|
||||
print('Run this from the src directory!')
|
||||
print('(Invoke as \'python ../apps/EnumUtils/enumutils_describe.py\')')
|
||||
exit(1)
|
||||
|
||||
EnumPattern = compile(r'//\s*EnumUtils: DESCRIBE THIS(?:\s*\(in ([^\)]+)\))?\s+enum\s+([0-9A-Za-z]+)[^\n]*\s*{([^}]+)};')
|
||||
EnumValuesPattern = compile(r'\s+\S.+?(,|$)[^\n]*')
|
||||
EnumValueNamePattern = compile(r'^\s*([a-zA-Z0-9_]+)', flags=MULTILINE)
|
||||
EnumValueSkipLinePattern = compile(r'^\s*//')
|
||||
EnumValueCommentPattern = compile(r'//,?[ \t]*([^\n]+)$')
|
||||
CommentMatchFormat = compile(r'^(((TITLE +(.+?))|(DESCRIPTION +(.+?))) *){1,2}$')
|
||||
CommentSkipFormat = compile(r'^SKIP *$')
|
||||
|
||||
def strescape(str):
    """Return str rendered as a double-quoted C string literal.

    Backslash, double-quote, and every non-printable or non-ASCII
    character are emitted as three-digit octal escapes (\\NNN); all other
    printable ASCII characters pass through unchanged.
    """
    pieces = []
    for ch in str:
        printable_ascii = 32 <= ord(ch) < 127
        if printable_ascii and ch not in ('\\', '"'):
            pieces.append(ch)
        else:
            pieces.append('\\%03o' % ord(ch))
    return '"' + ''.join(pieces) + '"'
|
||||
|
||||
def processFile(path, filename):
    """Parse <path>/<filename>.h for enums tagged '// EnumUtils: DESCRIBE THIS'
    and generate <path>/enuminfo_<filename>.cpp containing EnumUtils
    specializations (ToString / Count / FromIndex / ToIndex) for each enum.

    Writes nothing if the header contains no tagged enums.
    """
    # 'with' guarantees the handle is closed even if parsing raises.
    # (The original leaked both file handles; open() raises on failure
    # rather than returning None, so the old 'is None' checks were dead code.)
    with open('%s/%s.h' % (path, filename), 'r') as input:
        file = input.read()

    # Collect (qualified_name, prefix, [(label, title, description), ...]) per enum.
    enums = []
    for enum in EnumPattern.finditer(file):
        prefix = enum.group(1) or ''
        name = enum.group(2)
        values = []
        for value in EnumValuesPattern.finditer(enum.group(3)):
            valueData = value.group(0)

            valueNameMatch = EnumValueNamePattern.search(valueData)
            if valueNameMatch is None:
                # Pure comment lines are expected; anything else is a parse problem.
                if EnumValueSkipLinePattern.search(valueData) is None:
                    print('Name of value not found: %s' % repr(valueData))
                continue
            valueName = valueNameMatch.group(1)

            valueCommentMatch = EnumValueCommentPattern.search(valueData)
            valueComment = None
            if valueCommentMatch:
                valueComment = valueCommentMatch.group(1)

            valueTitle = None
            valueDescription = None

            if valueComment is not None:
                # '// SKIP' omits the value from the generated data entirely.
                if CommentSkipFormat.match(valueComment) is not None:
                    continue
                commentMatch = CommentMatchFormat.match(valueComment)
                if commentMatch is not None:
                    valueTitle = commentMatch.group(4)
                    valueDescription = commentMatch.group(6)
                else:
                    valueDescription = valueComment

            # Fall back to the enumerator name / empty description.
            if valueTitle is None:
                valueTitle = valueName
            if valueDescription is None:
                valueDescription = ''

            values.append((valueName, valueTitle, valueDescription))

        enums.append((prefix + name, prefix, values))
        print('%s.h: Enum %s parsed with %d values' % (filename, name, len(values)))

    if not enums:
        return

    print('Done parsing %s.h (in %s)\n' % (filename, path))
    with open('%s/enuminfo_%s.cpp' % (path, filename), 'w') as output:
        # write output file
        output.write(notice)
        output.write('#include "%s.h"\n' % filename)
        output.write('#include "Define.h"\n')
        output.write('#include "SmartEnum.h"\n')
        output.write('#include <stdexcept>\n')
        output.write('\n')
        output.write('namespace Acore::Impl::EnumUtilsImpl\n')
        output.write('{\n')
        for name, prefix, values in enums:
            tag = ('data for enum \'%s\' in \'%s.h\' auto-generated' % (name, filename))
            output.write('\n')
            output.write('/*' + ('*'*(len(tag)+2)) + '*\\\n')
            output.write('|* ' + tag + ' *|\n')
            output.write('\\*' + ('*'*(len(tag)+2)) + '*/\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT EnumText EnumUtils<%s>::ToString(%s value)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (value)\n')
            output.write(' {\n')
            for label, title, description in values:
                output.write(' case %s: return { %s, %s, %s };\n' % (prefix + label, strescape(label), strescape(title), strescape(description)))
            output.write(' default: throw std::out_of_range("value");\n')
            output.write(' }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::Count() { return %d; }\n' % (name, len(values)))
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT %s EnumUtils<%s>::FromIndex(size_t index)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (index)\n')
            output.write(' {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write(' case %d: return %s;\n' % (i, prefix + label))
            output.write(' default: throw std::out_of_range("index");\n')
            output.write(' }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::ToIndex(%s value)\n' % (name, name))
            output.write('{\n')
            output.write(' switch (value)\n')
            output.write(' {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write(' case %s: return %d;\n' % (prefix + label, i))
            output.write(' default: throw std::out_of_range("value");\n')
            output.write(' }\n')
            output.write('}\n')

        output.write('}\n')
|
||||
|
||||
# Walk the tree rooted at the current directory and process every header file.
FilenamePattern = compile(r'^(.+)\.h$')
for root, dirs, files in walk('.'):
    for entry in files:
        match = FilenamePattern.match(entry)
        if match is not None:
            processFile(root, match.group(1))
|
||||
238
apps/Fmt/FormatReplace.py
Normal file
238
apps/Fmt/FormatReplace.py
Normal file
@@ -0,0 +1,238 @@
|
||||
import pathlib
|
||||
from os import getcwd
|
||||
|
||||
if not getcwd().endswith('src') and not getcwd().endswith('modules'):
|
||||
print('Run this from the src or modules directory!')
|
||||
print('(Invoke as \'python ../apps/Fmt/FormatReplace.py\')')
|
||||
exit(1)
|
||||
|
||||
def isASSERT(line):
    # Predicate: does this line contain an ASSERT macro/call?
    return 'ASSERT' in line
|
||||
|
||||
def isABORTMSG(line):
    # Predicate: does this line contain an ABORT_MSG call?
    return 'ABORT_MSG' in line
|
||||
|
||||
def islog(line):
    # Predicate: does this line contain a LOG_* macro call?
    return 'LOG_' in line
|
||||
|
||||
# def isSendSysMessage(line):
|
||||
# substring = 'SendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
# def isPSendSysMessage(line):
|
||||
# substring = 'PSendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
def isPQuery(line):
    # Predicate: does this line contain a PQuery call?
    return 'PQuery' in line
|
||||
|
||||
def isPExecute(line):
    # Predicate: does this line contain a PExecute call?
    return 'PExecute' in line
|
||||
|
||||
def isPAppend(line):
    # Predicate: does this line contain a PAppend call?
    return 'PAppend' in line
|
||||
|
||||
def isStringFormat(line):
    # Predicate: does this line contain a StringFormat call?
    return 'StringFormat' in line
|
||||
|
||||
def haveDelimeter(line):
    # A ';' on the line marks the end of the C++ statement.
    return ';' in line
|
||||
|
||||
def checkSoloLine(line):
    # Rewrite a self-contained one-line statement (it already carries its ';').
    # Always returns (new_line, False): a solo line never opens a multi-line
    # statement. Check order matters and mirrors the multi-line handler.
    # (SendSysMessage/PSendSysMessage handling is intentionally disabled.)
    if isABORTMSG(line):
        return handleCleanup(line.replace("ABORT_MSG", "ABORT")), False
    if isASSERT(line):
        return handleCleanup(line), False
    if islog(line):
        return handleCleanup(line), False
    if isPExecute(line):
        return handleCleanup(line.replace("PExecute", "Execute")), False
    if isPQuery(line):
        return handleCleanup(line.replace("PQuery", "Query")), False
    if isPAppend(line):
        return handleCleanup(line.replace("PAppend", "Append")), False
    if isStringFormat(line):
        return handleCleanup(line), False
    return line, False
|
||||
|
||||
def startMultiLine(line):
    # First line of a statement that continues onto later lines (no ';' yet).
    # Returns (rewritten_line, True) when the line is one we rewrite, so the
    # caller keeps feeding continuation lines; otherwise (line, False).
    # (SendSysMessage/PSendSysMessage handling is intentionally disabled.)
    if isABORTMSG(line):
        return handleCleanup(line.replace("ABORT_MSG", "ABORT")), True
    if isASSERT(line):
        return handleCleanup(line), True
    if islog(line):
        return handleCleanup(line), True
    if isPQuery(line):
        return handleCleanup(line.replace("PQuery", "Query")), True
    if isPExecute(line):
        return handleCleanup(line.replace("PExecute", "Execute")), True
    if isPAppend(line):
        return handleCleanup(line.replace("PAppend", "Append")), True
    if isStringFormat(line):
        return handleCleanup(line), True
    return line, False
|
||||
|
||||
def continueMultiLine(line, existPrevLine):
    # Continuation line of an open multi-line statement: always cleaned up;
    # the multi-line state ends once the terminating ';' appears.
    still_open = existPrevLine and not haveDelimeter(line)
    return handleCleanup(line), still_open
|
||||
|
||||
def checkTextLine(line, existPrevLine):
    # Route one source line to the right handler:
    #  - continuation of a statement opened on an earlier line,
    #  - a complete one-line statement (has its ';'),
    #  - or the start of a new multi-line statement.
    # Returns (possibly-rewritten line, whether a statement is still open).
    if existPrevLine:
        return continueMultiLine(line, existPrevLine)
    if haveDelimeter(line):
        return checkSoloLine(line)
    return startMultiLine(line)
|
||||
|
||||
def handleCleanup(line):
|
||||
line = line.replace("%s", "{}");
|
||||
line = line.replace("%u", "{}");
|
||||
line = line.replace("%hu", "{}");
|
||||
line = line.replace("%lu", "{}");
|
||||
line = line.replace("%llu", "{}");
|
||||
line = line.replace("%zu", "{}");
|
||||
line = line.replace("%02u", "{:02}");
|
||||
line = line.replace("%03u", "{:03}");
|
||||
line = line.replace("%04u", "{:04}");
|
||||
line = line.replace("%05u", "{:05}");
|
||||
line = line.replace("%02i", "{:02}");
|
||||
line = line.replace("%03i", "{:03}");
|
||||
line = line.replace("%04i", "{:04}");
|
||||
line = line.replace("%05i", "{:05}");
|
||||
line = line.replace("%02d", "{:02}");
|
||||
line = line.replace("%03d", "{:03}");
|
||||
line = line.replace("%04d", "{:04}");
|
||||
line = line.replace("%05d", "{:05}");
|
||||
line = line.replace("%d", "{}");
|
||||
line = line.replace("%i", "{}");
|
||||
line = line.replace("%x", "{:x}");
|
||||
line = line.replace("%X", "{:X}");
|
||||
line = line.replace("%lx", "{:x}");
|
||||
line = line.replace("%lX", "{:X}");
|
||||
line = line.replace("%02X", "{:02X}");
|
||||
line = line.replace("%08X", "{:08X}");
|
||||
line = line.replace("%f", "{}");
|
||||
line = line.replace("%.1f", "{0:.1f}");
|
||||
line = line.replace("%.2f", "{0:.2f}");
|
||||
line = line.replace("%.3f", "{0:.3f}");
|
||||
line = line.replace("%.4f", "{0:.4f}");
|
||||
line = line.replace("%.5f", "{0:.5f}");
|
||||
line = line.replace("%3.1f", "{:3.1f}");
|
||||
line = line.replace("%%", "%");
|
||||
line = line.replace(".c_str()", "");
|
||||
line = line.replace("\" SZFMTD \"", "{}");
|
||||
line = line.replace("\" UI64FMTD \"", "{}");
|
||||
# line = line.replace("\" STRING_VIEW_FMT \"", "{}");
|
||||
# line = line.replace("STRING_VIEW_FMT_ARG", "");
|
||||
return line
|
||||
|
||||
def getDefaultfile(name):
|
||||
file1 = open(name, "r+", encoding="utf8", errors='replace')
|
||||
|
||||
result = ''
|
||||
|
||||
while True:
|
||||
line = file1.readline()
|
||||
|
||||
if not line:
|
||||
break
|
||||
|
||||
result += line
|
||||
|
||||
file1.close
|
||||
return result
|
||||
|
||||
def getModifiedfile(name):
|
||||
file1 = open(name, "r+", encoding="utf8", errors='replace')
|
||||
|
||||
prevLines = False
|
||||
result = ''
|
||||
|
||||
while True:
|
||||
line = file1.readline()
|
||||
|
||||
if not line:
|
||||
break
|
||||
|
||||
line, prevLines = checkTextLine(line, prevLines)
|
||||
result += line
|
||||
|
||||
file1.close
|
||||
return result
|
||||
|
||||
def updModifiedfile(name, text):
|
||||
file = open(name, "w", encoding="utf8", errors='replace')
|
||||
file.write(text)
|
||||
file.close()
|
||||
|
||||
def handlefile(name):
|
||||
oldtext = getDefaultfile(name)
|
||||
newtext = getModifiedfile(name)
|
||||
|
||||
if oldtext != newtext:
|
||||
updModifiedfile(name, newtext)
|
||||
|
||||
p = pathlib.Path('.')
|
||||
for i in p.glob('**/*'):
|
||||
fname = i.absolute()
|
||||
if '.cpp' in i.name:
|
||||
handlefile(fname)
|
||||
if '.h' in i.name:
|
||||
handlefile(fname)
|
||||
29
apps/bash_shared/common.sh
Normal file
29
apps/bash_shared/common.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
function registerHooks() { acore_event_registerHooks "$@"; }
|
||||
function runHooks() { acore_event_runHooks "$@"; }
|
||||
|
||||
function acore_common_loadConfig() {
|
||||
#shellcheck source=../../conf/dist/config.sh
|
||||
source "$AC_PATH_CONF/dist/config.sh" # include dist to avoid missing conf variables
|
||||
|
||||
# first check if it's defined in env, otherwise use the default
|
||||
USER_CONF_PATH=${USER_CONF_PATH:-"$AC_PATH_CONF/config.sh"}
|
||||
|
||||
if [ -f "$USER_CONF_PATH" ]; then
|
||||
source "$USER_CONF_PATH" # should overwrite previous
|
||||
else
|
||||
echo "NOTICE: file <$USER_CONF_PATH> not found, we use default configuration only."
|
||||
fi
|
||||
}
|
||||
|
||||
#
|
||||
# Load modules
|
||||
#
|
||||
|
||||
for entry in "$AC_PATH_MODULES/"*/include.sh
|
||||
do
|
||||
if [ -e "$entry" ]; then
|
||||
source "$entry"
|
||||
fi
|
||||
done
|
||||
|
||||
ACORE_VERSION=$("$AC_PATH_DEPS/jsonpath/JSONPath.sh" -f "$AC_PATH_ROOT/acore.json" -b '$.version')
|
||||
30
apps/bash_shared/defines.sh
Normal file
30
apps/bash_shared/defines.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
unamestr=$(uname)
|
||||
if [[ "$unamestr" == 'Darwin' ]]; then
|
||||
if ! command -v brew &>/dev/null ; then
|
||||
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
|
||||
fi
|
||||
if ! [ "${BASH_VERSINFO}" -ge 4 ]; then
|
||||
brew install bash
|
||||
fi
|
||||
if ! command -v greadlink &>/dev/null ; then
|
||||
brew install coreutils
|
||||
fi
|
||||
AC_PATH_ROOT=$(greadlink -f "$AC_PATH_APPS/../")
|
||||
else
|
||||
AC_PATH_ROOT=$(readlink -f "$AC_PATH_APPS/../")
|
||||
fi
|
||||
|
||||
case $AC_PATH_ROOT in
|
||||
/*) AC_PATH_ROOT=$AC_PATH_ROOT;;
|
||||
*) AC_PATH_ROOT=$PWD/$AC_PATH_ROOT;;
|
||||
esac
|
||||
|
||||
export AC_PATH_CONF="$AC_PATH_ROOT/conf"
|
||||
|
||||
export AC_PATH_MODULES="$AC_PATH_ROOT/modules"
|
||||
|
||||
export AC_PATH_DEPS="$AC_PATH_ROOT/deps"
|
||||
|
||||
export AC_BASH_LIB_PATH="$AC_PATH_DEPS/acore/bash-lib/src"
|
||||
|
||||
export AC_PATH_VAR="$AC_PATH_ROOT/var"
|
||||
25
apps/bash_shared/includes.sh
Normal file
25
apps/bash_shared/includes.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
[[ ${GUARDYVAR:-} -eq 1 ]] && return || readonly GUARDYVAR=1 # include it once
|
||||
|
||||
# force default language for applications
|
||||
LC_ALL=C
|
||||
|
||||
AC_PATH_APPS="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"
|
||||
|
||||
AC_PATH_SHARED="$AC_PATH_APPS/bash_shared"
|
||||
|
||||
# shellcheck source=./defines.sh
|
||||
source "$AC_PATH_SHARED/defines.sh"
|
||||
|
||||
# shellcheck source=../../deps/acore/bash-lib/src/event/hooks.sh
|
||||
source "$AC_PATH_DEPS/acore/bash-lib/src/event/hooks.sh"
|
||||
|
||||
# shellcheck source=./common.sh
|
||||
source "$AC_PATH_SHARED/common.sh"
|
||||
|
||||
acore_common_loadConfig
|
||||
|
||||
if [[ "$OSTYPE" = "msys" ]]; then
|
||||
AC_BINPATH_FULL="$BINPATH"
|
||||
else
|
||||
export AC_BINPATH_FULL="$BINPATH/bin"
|
||||
fi
|
||||
267
apps/bash_shared/menu_system.sh
Normal file
267
apps/bash_shared/menu_system.sh
Normal file
@@ -0,0 +1,267 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# =============================================================================
|
||||
# AzerothCore Menu System Library
|
||||
# =============================================================================
|
||||
# This library provides a unified menu system for AzerothCore scripts.
|
||||
# It supports ordered menu definitions, short commands, numeric selection,
|
||||
# and proper argument handling.
|
||||
#
|
||||
# Features:
|
||||
# - Single source of truth for menu definitions
|
||||
# - Automatic ID assignment (1, 2, 3...)
|
||||
# - Short command aliases (c, i, q, etc.)
|
||||
# - Interactive mode: numbers + long/short commands
|
||||
# - Direct mode: only long/short commands (no numbers)
|
||||
# - Proper argument forwarding
|
||||
#
|
||||
# Usage:
|
||||
# source "path/to/menu_system.sh"
|
||||
# menu_items=("command|short|description" ...)
|
||||
# menu_run "Menu Title" callback_function "${menu_items[@]}" "$@"
|
||||
# =============================================================================
|
||||
|
||||
# Global arrays for menu state (will be populated by menu_define)
|
||||
declare -a _MENU_KEYS=()
|
||||
declare -a _MENU_SHORTS=()
|
||||
declare -a _MENU_OPTIONS=()
|
||||
|
||||
# Parse menu items and populate global arrays
|
||||
# Usage: menu_define array_elements...
|
||||
function menu_define() {
|
||||
# Clear previous state
|
||||
_MENU_KEYS=()
|
||||
_MENU_SHORTS=()
|
||||
_MENU_OPTIONS=()
|
||||
|
||||
# Parse each menu item: "key|short|description"
|
||||
local item key short desc
|
||||
for item in "$@"; do
|
||||
IFS='|' read -r key short desc <<< "$item"
|
||||
_MENU_KEYS+=("$key")
|
||||
_MENU_SHORTS+=("$short")
|
||||
_MENU_OPTIONS+=("$key ($short): $desc")
|
||||
done
|
||||
}
|
||||
|
||||
# Display menu with numbered options
|
||||
# Usage: menu_display "Menu Title"
|
||||
function menu_display() {
|
||||
local title="$1"
|
||||
|
||||
echo "==== $title ===="
|
||||
for idx in "${!_MENU_OPTIONS[@]}"; do
|
||||
local num=$((idx + 1))
|
||||
printf "%2d) %s\n" "$num" "${_MENU_OPTIONS[$idx]}"
|
||||
done
|
||||
echo ""
|
||||
}
|
||||
|
||||
# Find menu index by user input (number, long command, or short command)
|
||||
# Returns: index (0-based) or -1 if not found
|
||||
# Usage: index=$(menu_find_index "user_input")
|
||||
function menu_find_index() {
|
||||
local user_input="$1"
|
||||
|
||||
# Try numeric selection first
|
||||
if [[ "$user_input" =~ ^[0-9]+$ ]]; then
|
||||
local num=$((user_input - 1))
|
||||
if [[ $num -ge 0 && $num -lt ${#_MENU_KEYS[@]} ]]; then
|
||||
echo "$num"
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Try long command name
|
||||
local idx
|
||||
for idx in "${!_MENU_KEYS[@]}"; do
|
||||
if [[ "$user_input" == "${_MENU_KEYS[$idx]}" ]]; then
|
||||
echo "$idx"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
# Try short command
|
||||
for idx in "${!_MENU_SHORTS[@]}"; do
|
||||
if [[ "$user_input" == "${_MENU_SHORTS[$idx]}" ]]; then
|
||||
echo "$idx"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
|
||||
echo "-1"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Handle direct execution (command line arguments)
|
||||
# Disables numeric selection to prevent confusion with command arguments
|
||||
# Usage: menu_direct_execute callback_function "$@"
|
||||
function menu_direct_execute() {
|
||||
local callback="$1"
|
||||
shift
|
||||
local user_input="$1"
|
||||
shift
|
||||
|
||||
# Disable numeric selection in direct mode
|
||||
if [[ "$user_input" =~ ^[0-9]+$ ]]; then
|
||||
echo "Invalid option. Numeric selection is not allowed when passing arguments."
|
||||
echo "Use command name or short alias instead."
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Find command and execute
|
||||
local idx
|
||||
# try-catch
|
||||
{
|
||||
idx=$(menu_find_index "$user_input")
|
||||
} ||
|
||||
{
|
||||
idx=-1
|
||||
}
|
||||
|
||||
if [[ $idx -ge 0 ]]; then
|
||||
"$callback" "${_MENU_KEYS[$idx]}" "$@"
|
||||
return $?
|
||||
else
|
||||
# Handle help requests directly
|
||||
if [[ "$user_input" == "--help" || "$user_input" == "help" || "$user_input" == "-h" ]]; then
|
||||
echo "Available commands:"
|
||||
printf '%s\n' "${_MENU_OPTIONS[@]}"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "Invalid option. Use --help to see available commands." >&2
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Handle interactive menu selection
|
||||
# Usage: menu_interactive callback_function "Menu Title"
|
||||
function menu_interactive() {
|
||||
local callback="$1"
|
||||
local title="$2"
|
||||
|
||||
while true; do
|
||||
menu_display "$title"
|
||||
read -r -p "Please enter your choice: " REPLY
|
||||
|
||||
# Parse input to separate command from arguments
|
||||
local input_parts=()
|
||||
read -r -a input_parts <<< "$REPLY"
|
||||
local user_command="${input_parts[0]}"
|
||||
local user_args=("${input_parts[@]:1}")
|
||||
|
||||
# Find and execute command
|
||||
local idx
|
||||
idx=$(menu_find_index "$user_command")
|
||||
if [[ $idx -ge 0 ]]; then
|
||||
# Pass the command key and any additional arguments
|
||||
"$callback" "${_MENU_KEYS[$idx]}" "${user_args[@]}"
|
||||
local exit_code=$?
|
||||
# Exit loop if callback returns 0 (e.g., quit command)
|
||||
if [[ $exit_code -eq 0 && "${_MENU_KEYS[$idx]}" == "quit" ]]; then
|
||||
break
|
||||
fi
|
||||
else
|
||||
# Handle help request
|
||||
if [[ "$REPLY" == "--help" || "$REPLY" == "help" || "$REPLY" == "h" ]]; then
|
||||
echo "Available commands:"
|
||||
printf '%s\n' "${_MENU_OPTIONS[@]}"
|
||||
echo ""
|
||||
continue
|
||||
fi
|
||||
|
||||
echo "Invalid option. Please try again or use 'help' for available commands." >&2
|
||||
echo ""
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Main menu runner function
|
||||
# Usage: menu_run "Menu Title" callback_function "$@"
|
||||
# The menu items array should be defined globally before calling this function
|
||||
function menu_run() {
|
||||
local title="$1"
|
||||
local callback="$2"
|
||||
shift 2
|
||||
|
||||
# Define menu from globally available menu items array
|
||||
# This expects the calling script to have set up the menu items
|
||||
|
||||
# Handle direct execution if arguments provided
|
||||
if [[ $# -gt 0 ]]; then
|
||||
menu_direct_execute "$callback" "$@"
|
||||
return $?
|
||||
fi
|
||||
|
||||
# Run interactive menu
|
||||
menu_interactive "$callback" "$title"
|
||||
}
|
||||
|
||||
# Alternative menu runner that accepts menu items directly
|
||||
# Usage: menu_run_with_items "Menu Title" callback_function -- "${menu_items_array[@]}" -- "$@"
|
||||
function menu_run_with_items() {
|
||||
local title="$1"
|
||||
local callback="$2"
|
||||
shift 2
|
||||
|
||||
# Parse parameters: menu items are between first and second "--"
|
||||
local menu_items=()
|
||||
local script_args=()
|
||||
|
||||
# Skip first "--"
|
||||
if [[ "$1" == "--" ]]; then
|
||||
shift
|
||||
else
|
||||
echo "Error: menu_run_with_items requires -- separator before menu items" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Collect menu items until second "--"
|
||||
while [[ $# -gt 0 && "$1" != "--" ]]; do
|
||||
menu_items+=("$1")
|
||||
shift
|
||||
done
|
||||
|
||||
# Skip second "--" if present
|
||||
if [[ "$1" == "--" ]]; then
|
||||
shift
|
||||
fi
|
||||
|
||||
# Remaining args are script arguments
|
||||
script_args=("$@")
|
||||
|
||||
# Define menu from provided array
|
||||
menu_define "${menu_items[@]}"
|
||||
|
||||
# Handle direct execution if arguments provided
|
||||
if [[ ${#script_args[@]} -gt 0 ]]; then
|
||||
menu_direct_execute "$callback" "${script_args[@]}"
|
||||
return $?
|
||||
fi
|
||||
|
||||
# Run interactive menu
|
||||
menu_interactive "$callback" "$title"
|
||||
}
|
||||
|
||||
# Utility function to show available commands (for --help)
|
||||
# Usage: menu_show_help
|
||||
function menu_show_help() {
|
||||
echo "Available commands:"
|
||||
printf '%s\n' "${_MENU_OPTIONS[@]}"
|
||||
}
|
||||
|
||||
# Utility function to get command key by index
|
||||
# Usage: key=$(menu_get_key index)
|
||||
function menu_get_key() {
|
||||
local idx="$1"
|
||||
if [[ $idx -ge 0 && $idx -lt ${#_MENU_KEYS[@]} ]]; then
|
||||
echo "${_MENU_KEYS[$idx]}"
|
||||
fi
|
||||
}
|
||||
|
||||
# Utility function to get all command keys
|
||||
# Usage: keys=($(menu_get_all_keys))
|
||||
function menu_get_all_keys() {
|
||||
printf '%s\n' "${_MENU_KEYS[@]}"
|
||||
}
|
||||
8
apps/ci/ci-compile.sh
Executable file
8
apps/ci/ci-compile.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "compile core"
|
||||
export AC_CCACHE=true
|
||||
./acore.sh "compiler" "all"
|
||||
|
||||
69
apps/ci/ci-conf-core-pch.sh
Normal file
69
apps/ci/ci-conf-core-pch.sh
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CTOOLS_BUILD=none
|
||||
CSCRIPTS=static
|
||||
CMODULES=static
|
||||
CBUILD_TESTING=ON
|
||||
CSCRIPTPCH=ON
|
||||
CCOREPCH=ON
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
69
apps/ci/ci-conf-core.sh
Normal file
69
apps/ci/ci-conf-core.sh
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CTOOLS_BUILD=none
|
||||
CSCRIPTS=static
|
||||
CMODULES=static
|
||||
CBUILD_TESTING=ON
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
36
apps/ci/ci-conf-db.sh
Normal file
36
apps/ci/ci-conf-db.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CAPPS_BUILD=none
|
||||
CTOOLS_BUILD=db-only
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
67
apps/ci/ci-conf-tools.sh
Normal file
67
apps/ci/ci-conf-tools.sh
Normal file
@@ -0,0 +1,67 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CAPPS_BUILD=none
|
||||
CTOOLS_BUILD=maps-only
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
15
apps/ci/ci-dry-run.sh
Normal file
15
apps/ci/ci-dry-run.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
# Start mysql
|
||||
sudo systemctl start mysql
|
||||
|
||||
source "$CURRENT_PATH/ci-gen-server-conf-files.sh" $1 "etc" "bin" "root"
|
||||
|
||||
(cd ./env/dist/bin/ && timeout 5m ./$APP_NAME -dry-run)
|
||||
|
||||
# Stop mysql
|
||||
sudo systemctl stop mysql
|
||||
18
apps/ci/ci-error-check.sh
Executable file
18
apps/ci/ci-error-check.sh
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
ERRORS_FILE="./env/dist/bin/Errors.log";
|
||||
|
||||
echo "Checking Startup Errors"
|
||||
echo
|
||||
|
||||
if [[ -s ${ERRORS_FILE} ]]; then
|
||||
printf "The Errors.log file contains startup errors:\n\n";
|
||||
cat ${ERRORS_FILE};
|
||||
printf "\nPlease solve the startup errors listed above!\n";
|
||||
exit 1;
|
||||
else
|
||||
echo "> No startup errors found in Errors.log";
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "Done"
|
||||
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
APP_NAME=$1
|
||||
CONFIG_FOLDER=${2:-"etc"}
|
||||
BIN_FOLDER=${3-"bin"}
|
||||
MYSQL_ROOT_PASSWORD=${4:-""}
|
||||
|
||||
# copy dist files to conf files
|
||||
cp ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf.dist ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
|
||||
# replace login info
|
||||
sed -i "s/127.0.0.1;3306;acore;acore/localhost;3306;root;$MYSQL_ROOT_PASSWORD/" ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
|
||||
if [[ $APP_NAME == "worldserver" ]]; then
|
||||
sed -i 's/DataDir = \".\"/DataDir = \".\/data"/' ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
git clone --depth=1 --branch=master --single-branch https://github.com/ac-data/ac-data.git ./env/dist/$BIN_FOLDER/data
|
||||
fi
|
||||
107
apps/ci/ci-install-modules.sh
Executable file
107
apps/ci/ci-install-modules.sh
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "install modules"
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-1v1-arena modules/mod-1v1-arena
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-account-mounts modules/mod-account-mounts
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ah-bot modules/mod-ah-bot
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-anticheat modules/mod-anticheat
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-antifarming modules/mod-antifarming
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-arena-3v3-solo-queue modules/mod-arena-3v3-solo-queue
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-arena-replay modules/mod-arena-replay
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-auto-revive modules/mod-auto-revive
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-autobalance modules/mod-autobalance
|
||||
# NOTE: disabled because it causes DB error
|
||||
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-azerothshard.git modules/mod-azerothshard
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-better-item-reloading modules/mod-better-item-reloading
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-item-reward modules/mod-bg-item-reward
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-reward modules/mod-bg-reward
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-boss-announcer modules/mod-boss-announcer
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-breaking-news-override modules/mod-breaking-news-override
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-buff-command modules/mod-buff-command
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cfbg modules/mod-cfbg
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-character-tools modules/mod-character-tools
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-login modules/mod-chat-login
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-transmitter modules/mod-chat-transmitter
|
||||
# NOTE: disabled because it causes DB startup error
|
||||
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chromie-xp modules/mod-chromie-xp
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-congrats-on-level modules/mod-congrats-on-level
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-costumes modules/mod-costumes
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cta-switch modules/mod-cta-switch
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-custom-login modules/mod-custom-login
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-desertion-warnings modules/mod-desertion-warnings
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-detailed-logging modules/mod-detailed-logging
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-dmf-switch modules/mod-dmf-switch
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-duel-reset modules/mod-duel-reset
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-dynamic-xp modules/mod-dynamic-xp
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ale modules/mod-ale
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-emblem-transfer modules/mod-emblem-transfer
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-fireworks-on-level modules/mod-fireworks-on-level
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-global-chat modules/mod-global-chat
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guild-zone-system modules/mod-guild-zone-system
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guildhouse modules/mod-guildhouse
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-individual-xp modules/mod-individual-xp
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instance-reset modules/mod-instance-reset
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instanced-worldbosses modules/mod-instanced-worldbosses
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ip-tracker modules/mod-ip-tracker
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-item-level-up modules/mod-item-level-up
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-keep-out modules/mod-keep-out
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-highest-talent modules/mod-learn-highest-talent
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-spells modules/mod-learn-spells
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-low-level-arena modules/mod-low-level-arena
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-low-level-rbg modules/mod-low-level-rbg
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-mall-teleport modules/mod-mall-teleport
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morph-all-players modules/mod-morph-all-players
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morphsummon modules/mod-morphsummon
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-multi-client-check modules/mod-multi-client-check
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-notify-muted modules/mod-notify-muted
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-all-mounts modules/mod-npc-all-mounts
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-beastmaster modules/mod-npc-beastmaster
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-buffer modules/mod-npc-buffer
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-codebox modules/mod-npc-codebox
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-enchanter modules/mod-npc-enchanter
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-free-professions modules/mod-npc-free-professions
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-gambler modules/mod-npc-gambler
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-morph modules/mod-npc-morph
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-services modules/mod-npc-services
|
||||
# not yet on azerothcore github
|
||||
git clone --depth=1 --branch=master https://github.com/gozzim/mod-npc-spectator modules/mod-npc-spectator
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-talent-template modules/mod-npc-talent-template
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-titles-tokens modules/mod-npc-titles-tokens
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-phased-duels modules/mod-phased-duels
|
||||
# outdated
|
||||
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-playerbots modules/mod-playerbots
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pocket-portal modules/mod-pocket-portal
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-premium modules/mod-premium
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-progression-system.git modules/mod-progression-system
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-promotion-azerothcore modules/mod-promotion-azerothcore
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-quests modules/mod-pvp-quests
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-titles modules/mod-pvp-titles
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-zones modules/mod-pvp-zones
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpscript modules/mod-pvpscript
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpstats-announcer modules/mod-pvpstats-announcer
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quest-status modules/mod-quest-status
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-queue-list-cache modules/mod-queue-list-cache
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quick-teleport modules/mod-quick-teleport
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-racial-trait-swap modules/mod-racial-trait-swap
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-random-enchants modules/mod-random-enchants
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-rdf-expansion modules/mod-rdf-expansion
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-resurrection-scroll modules/mod-resurrection-scroll
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-played-time modules/mod-reward-played-time
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-shop modules/mod-reward-shop
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-server-auto-shutdown.git modules/mod-server-auto-shutdown
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-solocraft modules/mod-solocraft
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-skip-dk-starting-area modules/mod-skip-dk-starting-area
|
||||
# has core patch file
|
||||
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-spell-regulator modules/mod-spell-regulator
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-starter-guild modules/mod-starter-guild
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-system-vip modules/mod-system-vip
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-tic-tac-toe modules/mod-tic-tac-toe
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-top-arena modules/mod-top-arena
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-transmog modules/mod-transmog
|
||||
# archived / outdated
|
||||
#git clone --depth=1 --branch=master https://github.com/azerothcore/mod-war-effort modules/mod-war-effort
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-weekend-xp modules/mod-weekend-xp
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-who-logged modules/mod-who-logged
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-zone-difficulty modules/mod-zone-difficulty
|
||||
74
apps/ci/ci-install.sh
Normal file
74
apps/ci/ci-install.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CSCRIPTS=static
|
||||
CBUILD_TESTING=ON
|
||||
CSERVERS=ON
|
||||
CTOOLS=ON
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
time sudo apt-get update -y
|
||||
# time sudo apt-get upgrade -y
|
||||
time sudo apt-get install -y git lsb-release sudo
|
||||
time ./acore.sh install-deps
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
54
apps/ci/ci-pending-changelogs.ts
Normal file
54
apps/ci/ci-pending-changelogs.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import * as semver from "https://deno.land/x/semver/mod.ts";
|
||||
|
||||
// specify the needed paths here
|
||||
const CHANGELOG_PATH = "doc/changelog";
|
||||
const CHANGELOG_PENDING_PATH = `${CHANGELOG_PATH}/pendings`;
|
||||
const CHANGELOG_MASTER_FILE = `${CHANGELOG_PATH}/master.md`;
|
||||
const ACORE_JSON = "./acore.json";
|
||||
|
||||
// read the acore.json file to work with the versioning
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
const data = await Deno.readFile(ACORE_JSON);
|
||||
const acoreInfo = JSON.parse(decoder.decode(data));
|
||||
|
||||
let changelogText = await Deno.readTextFile(CHANGELOG_MASTER_FILE);
|
||||
|
||||
const currentVersion = acoreInfo.version;
|
||||
|
||||
const res=Deno.run({ cmd: [ "git", "rev-parse",
|
||||
"HEAD"],
|
||||
stdout: 'piped',
|
||||
stderr: 'piped',
|
||||
stdin: 'null' });
|
||||
await res.status();
|
||||
const gitVersion = new TextDecoder().decode(await res.output());
|
||||
|
||||
|
||||
for await (const dirEntry of Deno.readDir(CHANGELOG_PENDING_PATH)) {
|
||||
if (!dirEntry.isFile || !dirEntry.name.endsWith(".md")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Upgrade the prerelease version number (e.g. 1.0.0-dev.1 -> 1.0.0-dev.2)
|
||||
acoreInfo.version = semver.inc(acoreInfo.version, "prerelease", {
|
||||
includePrerelease: true,
|
||||
});
|
||||
|
||||
// read the pending file found and add it at the beginning of the changelog text
|
||||
const data = await Deno.readTextFile(
|
||||
`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`,
|
||||
);
|
||||
changelogText = `## ${acoreInfo.version} | Commit: [${gitVersion}](https://github.com/azerothcore/azerothcore-wotlk/commit/${gitVersion}\n\n${data}\n${changelogText}`;
|
||||
|
||||
// remove the pending file
|
||||
await Deno.remove(`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`);
|
||||
}
|
||||
|
||||
// write to acore.json and master.md only if new version is available
|
||||
if (currentVersion != acoreInfo.version) {
|
||||
console.log(`Changelog version upgraded from ${currentVersion} to ${acoreInfo.version}`)
|
||||
Deno.writeTextFile(CHANGELOG_MASTER_FILE, changelogText);
|
||||
Deno.writeTextFile(ACORE_JSON, JSON.stringify(acoreInfo, null, 2)+"\n");
|
||||
} else {
|
||||
console.log("No changelogs to add")
|
||||
}
|
||||
74
apps/ci/ci-pending-sql.sh
Normal file
74
apps/ci/ci-pending-sql.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
source "$CURRENT_PATH/../bash_shared/includes.sh"
|
||||
|
||||
UPDATES_PATH="$AC_PATH_ROOT/data/sql/updates"
|
||||
|
||||
# get_next_index "data/sql/updates/db_world/2024_10_14_22.sql"
|
||||
# => 23
|
||||
# get_next_index ""
|
||||
# => 00
|
||||
function get_next_index() {
|
||||
if [[ -n "$1" ]]; then
|
||||
# PREV_COUNT should be a non-zero padded number
|
||||
PREV_COUNT="$(
|
||||
# grabs the filename of the first argument, removes ".sql" suffix.
|
||||
basename "$1" .sql |
|
||||
# get the last number
|
||||
cut -f4 -d_ |
|
||||
# retrieve the last number, without zero padding
|
||||
grep -oE "[1-9][0-9]*$"
|
||||
)"
|
||||
|
||||
printf '%02d' "$((PREV_COUNT + 1))"
|
||||
else
|
||||
echo "00"
|
||||
fi
|
||||
}
|
||||
|
||||
# lists all SQL files in the appropriate data/sql/updates/db_$1, and then moves them to a standard format, ordered by date and how many imports have happened that day. The name should be in this format:
|
||||
#
|
||||
# /path/to/data/sql/updates/db_NAME/YYYY_MM_DD_INDEX.sql
|
||||
#
|
||||
# Where INDEX is a number with a minimum with a minimum width (0-padded) of 2
|
||||
#
|
||||
# for example, "data/sql/updates/db_world/2024_10_01_03.sql" translates to "the third update in the world database from October 01, 2024"
|
||||
|
||||
TODAY="$(date +%Y_%m_%d)"
|
||||
function import() {
|
||||
PENDING_PATH="$AC_PATH_ROOT/data/sql/updates/pending_db_$1"
|
||||
UPDATES_DIR="$UPDATES_PATH/db_$1"
|
||||
|
||||
# Get the most recent SQL file applied to this database. Used for the header comment
|
||||
LATEST_UPDATE="$(find "$UPDATES_DIR" -iname "*.sql" | sort -h | tail -n 1)"
|
||||
# Get latest SQL file applied to this database, today. This could be empty.
|
||||
LATEST_UPDATE_TODAY="$(find "$UPDATES_DIR" -iname "$TODAY*.sql" | sort -h | tail -n 1)"
|
||||
|
||||
for entry in "$PENDING_PATH"/*.sql; do
|
||||
if [[ -f "$entry" ]]; then
|
||||
INDEX="$(get_next_index "$LATEST_UPDATE_TODAY")"
|
||||
OUTPUT_FILE="${UPDATES_DIR}/${TODAY}_${INDEX}.sql"
|
||||
|
||||
# ensure a note is added as a header comment
|
||||
echo "-- DB update $(basename "$LATEST_UPDATE" .sql) -> $(basename "$OUTPUT_FILE" .sql)" >"$OUTPUT_FILE"
|
||||
# fill in the SQL contents under that
|
||||
cat "$entry" >>"$OUTPUT_FILE"
|
||||
# remove the unneeded file
|
||||
rm -f "$entry"
|
||||
# set the newest file to the file we just moved
|
||||
LATEST_UPDATE_TODAY="$OUTPUT_FILE"
|
||||
LATEST_UPDATE="$OUTPUT_FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
}
|
||||
|
||||
import "world"
|
||||
import "characters"
|
||||
import "auth"
|
||||
|
||||
echo "Done."
|
||||
3
apps/ci/ci-run-unit-tests.sh
Executable file
3
apps/ci/ci-run-unit-tests.sh
Executable file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
time var/build/obj/src/test/unit_tests
|
||||
47
apps/ci/mac/ci-compile.sh
Executable file
47
apps/ci/mac/ci-compile.sh
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export OPENSSL_ROOT_DIR=$(brew --prefix openssl@3)
|
||||
|
||||
export CCACHE_CPP2=true
|
||||
export CCACHE_MAXSIZE='500M'
|
||||
export CCACHE_COMPRESS=1
|
||||
export CCACHE_COMPRESSLEVEL=9
|
||||
ccache -s
|
||||
|
||||
cd var/build/obj
|
||||
|
||||
mysql_include_path=$(brew --prefix mysql)/include/mysql
|
||||
mysql_lib_path=$(brew --prefix mysql)/lib/libmysqlclient.dylib
|
||||
|
||||
if [ ! -d "$mysql_include_path" ]; then
|
||||
echo "Original mysql include directory doesn't exist. Lets try to use the first available folder in mysql dir."
|
||||
base_dir=$(brew --cellar mysql)/$(basename $(ls -d $(brew --cellar mysql)/*/ | head -n 1))
|
||||
echo "Trying the next mysql base dir: $base_dir"
|
||||
mysql_include_path=$base_dir/include/mysql
|
||||
mysql_lib_path=$base_dir/lib/libmysqlclient.dylib
|
||||
fi
|
||||
|
||||
time cmake ../../../ \
|
||||
-DTOOLS=1 \
|
||||
-DBUILD_TESTING=1 \
|
||||
-DSCRIPTS=static \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DMYSQL_ADD_INCLUDE_PATH=$mysql_include_path \
|
||||
-DMYSQL_LIBRARY=$mysql_lib_path \
|
||||
-DREADLINE_INCLUDE_DIR=$(brew --prefix readline)/include \
|
||||
-DREADLINE_LIBRARY=$(brew --prefix readline)/lib/libreadline.dylib \
|
||||
-DOPENSSL_INCLUDE_DIR="$OPENSSL_ROOT_DIR/include" \
|
||||
-DOPENSSL_SSL_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libssl.dylib" \
|
||||
-DOPENSSL_CRYPTO_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libcrypto.dylib" \
|
||||
-DWITH_WARNINGS=1 \
|
||||
-DCMAKE_C_FLAGS="-Werror" \
|
||||
-DCMAKE_CXX_FLAGS="-Werror" \
|
||||
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
|
||||
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
|
||||
-DUSE_SCRIPTPCH=0 \
|
||||
-DUSE_COREPCH=0 \
|
||||
;
|
||||
|
||||
time make -j $(($(sysctl -n hw.ncpu ) + 2))
|
||||
|
||||
ccache -s
|
||||
263
apps/codestyle/codestyle-cpp.py
Normal file
263
apps/codestyle/codestyle-cpp.py
Normal file
@@ -0,0 +1,263 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
# Get the src directory of the project
|
||||
src_directory = os.path.join(os.getcwd(), 'src')
|
||||
|
||||
# Global variables
|
||||
error_handler = False
|
||||
results = {
|
||||
"Multiple blank lines check": "Passed",
|
||||
"Trailing whitespace check": "Passed",
|
||||
"GetCounter() check": "Passed",
|
||||
"Misc codestyle check": "Passed",
|
||||
"GetTypeId() check": "Passed",
|
||||
"NpcFlagHelpers check": "Passed",
|
||||
"ItemFlagHelpers check": "Passed",
|
||||
"ItemTemplateFlagHelpers check": "Passed"
|
||||
}
|
||||
|
||||
# Main function to parse all the files of the project
|
||||
def parsing_file(directory: str) -> None:
|
||||
print("Starting AzerothCore CPP Codestyle check...")
|
||||
print(" ")
|
||||
print("Please read the C++ Code Standards for AzerothCore:")
|
||||
print("https://www.azerothcore.org/wiki/cpp-code-standards")
|
||||
print(" ")
|
||||
for root, _, files in os.walk(directory):
|
||||
for file in files:
|
||||
if not file.endswith('.ico'): # Skip .ico files that cannot be read
|
||||
file_path = os.path.join(root, file)
|
||||
file_name = file
|
||||
try:
|
||||
with open(file_path, 'r', encoding='utf-8') as file:
|
||||
multiple_blank_lines_check(file, file_path)
|
||||
trailing_whitespace_check(file, file_path)
|
||||
get_counter_check(file, file_path)
|
||||
if not file_name.endswith('.cmake') and file_name != 'CMakeLists.txt':
|
||||
misc_codestyle_check(file, file_path)
|
||||
if file_name != 'Object.h':
|
||||
get_typeid_check(file, file_path)
|
||||
if file_name != 'Unit.h':
|
||||
npcflags_helpers_check(file, file_path)
|
||||
if file_name != 'Item.h':
|
||||
itemflag_helpers_check(file, file_path)
|
||||
if file_name != 'ItemTemplate.h':
|
||||
itemtemplateflag_helpers_check(file, file_path)
|
||||
except UnicodeDecodeError:
|
||||
print(f"\nCould not decode file {file_path}")
|
||||
sys.exit(1)
|
||||
# Output the results
|
||||
print("")
|
||||
for check, result in results.items():
|
||||
print(f"{check} : {result}")
|
||||
if error_handler:
|
||||
print("\nPlease fix the codestyle issues above.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print(f"\nEverything looks good")
|
||||
|
||||
# Codestyle patterns checking for multiple blank lines
|
||||
def multiple_blank_lines_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
consecutive_blank_lines = 0
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if line.strip() == '':
|
||||
consecutive_blank_lines += 1
|
||||
if consecutive_blank_lines > 1:
|
||||
print(f"Multiple blank lines found in {file_path} at line {line_number - 1}")
|
||||
check_failed = True
|
||||
else:
|
||||
consecutive_blank_lines = 0
|
||||
# Additional check for the end of the file
|
||||
if consecutive_blank_lines >= 1:
|
||||
print(f"Multiple blank lines found at the end of: {file_path}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["Multiple blank lines check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for whitespace at the end of the lines
|
||||
def trailing_whitespace_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if line.endswith(' \n'):
|
||||
print(f"Trailing whitespace found: {file_path} at line {line_number}")
|
||||
if not error_handler:
|
||||
error_handler = True
|
||||
results["Trailing whitespace check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ObjectGuid::GetCounter()
|
||||
def get_counter_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'ObjectGuid::GetCounter()' in line:
|
||||
print(f"Please use ObjectGuid::ToString().c_str() instead ObjectGuid::GetCounter(): {file_path} at line {line_number}")
|
||||
if not error_handler:
|
||||
error_handler = True
|
||||
results["GetCounter() check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for GetTypeId()
|
||||
def get_typeid_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'GetTypeId() == TYPEID_ITEM' in line or 'GetTypeId() != TYPEID_ITEM' in line:
|
||||
print(f"Please use IsItem() instead of GetTypeId(): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'GetTypeId() == TYPEID_UNIT' in line or 'GetTypeId() != TYPEID_UNIT' in line:
|
||||
print(f"Please use IsCreature() instead of GetTypeId(): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'GetTypeId() == TYPEID_PLAYER' in line or 'GetTypeId() != TYPEID_PLAYER' in line:
|
||||
print(f"Please use IsPlayer() instead of GetTypeId(): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'GetTypeId() == TYPEID_GAMEOBJECT' in line or 'GetTypeId() != TYPEID_GAMEOBJECT' in line:
|
||||
print(f"Please use IsGameObject() instead of GetTypeId(): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'GetTypeId() == TYPEID_DYNOBJECT' in line or 'GetTypeId() != TYPEID_DYNOBJECT' in line:
|
||||
print(f"Please use IsDynamicObject() instead of GetTypeId(): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["GetTypeId() check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for NpcFlag helpers
|
||||
def npcflags_helpers_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'GetUInt32Value(UNIT_NPC_FLAGS)' in line:
|
||||
print(
|
||||
f"Please use GetNpcFlags() instead of GetUInt32Value(UNIT_NPC_FLAGS): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'HasFlag(UNIT_NPC_FLAGS,' in line:
|
||||
print(
|
||||
f"Please use HasNpcFlag() instead of HasFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'SetUInt32Value(UNIT_NPC_FLAGS,' in line:
|
||||
print(
|
||||
f"Please use ReplaceAllNpcFlags() instead of SetUInt32Value(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'SetFlag(UNIT_NPC_FLAGS,' in line:
|
||||
print(
|
||||
f"Please use SetNpcFlag() instead of SetFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'RemoveFlag(UNIT_NPC_FLAGS,' in line:
|
||||
print(
|
||||
f"Please use RemoveNpcFlag() instead of RemoveFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["NpcFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ItemFlag helpers
|
||||
def itemflag_helpers_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_REFUNDABLE)' in line:
|
||||
print(
|
||||
f"Please use IsRefundable() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_REFUNDABLE): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_BOP_TRADEABLE)' in line:
|
||||
print(
|
||||
f"Please use IsBOPTradable() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_BOP_TRADEABLE): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_WRAPPED)' in line:
|
||||
print(
|
||||
f"Please use IsWrapped() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_WRAPPED): {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["ItemFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for ItemTemplate helpers
|
||||
def itemtemplateflag_helpers_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'Flags & ITEM_FLAG' in line:
|
||||
print(
|
||||
f"Please use HasFlag(ItemFlag) instead of 'Flags & ITEM_FLAG_': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'Flags2 & ITEM_FLAG2' in line:
|
||||
print(
|
||||
f"Please use HasFlag2(ItemFlag2) instead of 'Flags2 & ITEM_FLAG2_': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if 'FlagsCu & ITEM_FLAGS_CU' in line:
|
||||
print(
|
||||
f"Please use HasFlagCu(ItemFlagsCustom) instead of 'FlagsCu & ITEM_FLAGS_CU_': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["ItemTemplateFlagHelpers check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for various codestyle issues
|
||||
def misc_codestyle_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
|
||||
# used to check for "if/else (...) {" "} else" ignores "if/else (...) {...}" "#define ... if/else (...) {"
|
||||
ifelse_curlyregex = r"^[^#define].*\s+(if|else)(\s*\(.*\))?\s*{[^}]*$|}\s*else(\s*{[^}]*$)"
|
||||
# used to catch double semicolons ";;" ignores "(;;)"
|
||||
double_semiregex = r"(?<!\()\s*;;(?!\))"
|
||||
# used to catch tabs
|
||||
tab_regex = r"\t"
|
||||
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if 'const auto&' in line:
|
||||
print(
|
||||
f"Please use the 'auto const&' syntax instead of 'const auto&': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if re.search(r'\bconst\s+\w+\s*\*\b', line):
|
||||
print(
|
||||
f"Please use the 'Class/ObjectType const*' syntax instead of 'const Class/ObjectType*': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if [match for match in [' if(', ' if ( '] if match in line]:
|
||||
print(
|
||||
f"Please use the 'if (XXXX)' syntax instead of 'if(XXXX)': {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if re.match(ifelse_curlyregex, line):
|
||||
print(
|
||||
f"Curly brackets are not allowed to be leading or trailing if/else statements. Place it on a new line: {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if re.search(double_semiregex, line):
|
||||
print(
|
||||
f"Double semicolon (;;) found in {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
if re.match(tab_regex, line):
|
||||
print(
|
||||
f"Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
|
||||
check_failed = True
|
||||
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["Misc codestyle check"] = "Failed"
|
||||
|
||||
# Main function
|
||||
parsing_file(src_directory)
|
||||
411
apps/codestyle/codestyle-sql.py
Normal file
411
apps/codestyle/codestyle-sql.py
Normal file
@@ -0,0 +1,411 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import glob
|
||||
import subprocess
|
||||
|
||||
base_dir = os.getcwd()
|
||||
|
||||
# Get the pending directory of the project
|
||||
pattern = os.path.join(base_dir, 'data/sql/updates/pending_db_*')
|
||||
src_directory = glob.glob(pattern)
|
||||
|
||||
# Get files from base dir
|
||||
base_pattern = os.path.join(base_dir, 'data/sql/base/db_*')
|
||||
base_directory = glob.glob(base_pattern)
|
||||
|
||||
# Get files from archive dir
|
||||
archive_pattern = os.path.join(base_dir, 'data/sql/archive/db_*')
|
||||
archive_directory = glob.glob(archive_pattern)
|
||||
|
||||
# Global variables
|
||||
error_handler = False
|
||||
results = {
|
||||
"Multiple blank lines check": "Passed",
|
||||
"Trailing whitespace check": "Passed",
|
||||
"SQL codestyle check": "Passed",
|
||||
"INSERT & DELETE safety usage check": "Passed",
|
||||
"Missing semicolon check": "Passed",
|
||||
"Backtick check": "Passed",
|
||||
"Directory check": "Passed",
|
||||
"Table engine check": "Passed"
|
||||
}
|
||||
|
||||
# Collect all files in all directories
|
||||
def collect_files_from_directories(directories: list) -> list:
|
||||
all_files = []
|
||||
for directory in directories:
|
||||
for root, _, files in os.walk(directory):
|
||||
for file in files:
|
||||
if not file.endswith('.sh'): # Skip .sh files
|
||||
all_files.append(os.path.join(root, file))
|
||||
return all_files
|
||||
|
||||
# Used to find changed or added files compared to master.
|
||||
def get_changed_files() -> list:
|
||||
subprocess.run(["git", "fetch", "origin", "master"], check=True)
|
||||
result = subprocess.run(
|
||||
["git", "diff", "--name-status", "origin/master"],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
changed_files = []
|
||||
for line in result.stdout.strip().splitlines():
|
||||
if not line:
|
||||
continue
|
||||
status, path = line.split(maxsplit=1)
|
||||
if status in ("A", "M"):
|
||||
changed_files.append(path)
|
||||
return changed_files
|
||||
|
||||
# Main function to parse all the files of the project
|
||||
def parsing_file(files: list) -> None:
|
||||
print("Starting AzerothCore SQL Codestyle check...")
|
||||
print(" ")
|
||||
print("Please read the SQL Standards for AzerothCore:")
|
||||
print("https://www.azerothcore.org/wiki/sql-standards")
|
||||
print(" ")
|
||||
|
||||
# Iterate over all files in data/sql/updates/pending_db_*
|
||||
for file_path in files:
|
||||
if "base" not in file_path and "archive" not in file_path:
|
||||
try:
|
||||
with open(file_path, 'r', encoding='utf-8') as file:
|
||||
multiple_blank_lines_check(file, file_path)
|
||||
trailing_whitespace_check(file, file_path)
|
||||
sql_check(file, file_path)
|
||||
insert_delete_safety_check(file, file_path)
|
||||
semicolon_check(file, file_path)
|
||||
backtick_check(file, file_path)
|
||||
non_innodb_engine_check(file, file_path)
|
||||
except UnicodeDecodeError:
|
||||
print(f"\n❌ Could not decode file {file_path}")
|
||||
sys.exit(1)
|
||||
|
||||
# Make sure we only check changed or added files when we work with base/archive paths
|
||||
changed_files = get_changed_files()
|
||||
# Iterate over all file paths
|
||||
for file_path in changed_files:
|
||||
if "base" in file_path or "archive" in file_path:
|
||||
try:
|
||||
with open(file_path, "r", encoding="utf-8") as f:
|
||||
directory_check(f, file_path)
|
||||
except UnicodeDecodeError:
|
||||
print(f"\n❌ Could not decode file {file_path}")
|
||||
sys.exit(1)
|
||||
|
||||
# Output the results
|
||||
print("\n ")
|
||||
for check, result in results.items():
|
||||
print(f"{check} : {result}")
|
||||
if error_handler:
|
||||
print("\n ")
|
||||
print("\n❌ Please fix the codestyle issues above.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print("\n ")
|
||||
print(f"\n✅ Everything looks good")
|
||||
|
||||
# Codestyle patterns checking for multiple blank lines
|
||||
def multiple_blank_lines_check(file: io, file_path: str) -> None:
|
||||
global error_handler, results
|
||||
file.seek(0) # Reset file pointer to the beginning
|
||||
check_failed = False
|
||||
consecutive_blank_lines = 0
|
||||
# Parse all the file
|
||||
for line_number, line in enumerate(file, start = 1):
|
||||
if line.strip() == '':
|
||||
consecutive_blank_lines += 1
|
||||
if consecutive_blank_lines > 1:
|
||||
print(f"❌ Multiple blank lines found in {file_path} at line {line_number - 1}")
|
||||
check_failed = True
|
||||
else:
|
||||
consecutive_blank_lines = 0
|
||||
# Additional check for the end of the file
|
||||
if consecutive_blank_lines >= 1:
|
||||
print(f"❌ Multiple blank lines found at the end of: {file_path}")
|
||||
check_failed = True
|
||||
# Handle the script error and update the result output
|
||||
if check_failed:
|
||||
error_handler = True
|
||||
results["Multiple blank lines check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for whitespace at the end of the lines
|
||||
def trailing_whitespace_check(file: io, file_path: str) -> None:
    """Report lines that end in whitespace (spaces or tabs) before the line break."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False

    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        # Strip only the line terminator, then see whether spaces/tabs remain at
        # the end. This also catches tab-trailing lines and a final line with no
        # newline, which the previous `endswith(' \n')` test missed.
        content = line.rstrip('\r\n')
        if content != content.rstrip(' \t'):
            print(f"❌ Trailing whitespace found: {file_path} at line {line_number}")
            check_failed = True

    # Handle the script error and update the result output
    if check_failed:
        error_handler = True
        results["Trailing whitespace check"] = "Failed"
|
||||
|
||||
# Codestyle patterns checking for various codestyle issues
|
||||
def sql_check(file: io, file_path: str) -> None:
    """Scan an SQL file for forbidden patterns (broadcast_text edits, wrong
    EntryOrGuid casing, double semicolons, tabs) and a missing final newline."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    check_failed = False
    line = None  # Track the most recent line so the final-newline check is safe on empty files

    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        # broadcast_text rows come from sniffs and must not be hand-edited
        if 'broadcast_text' in line:
            print(
                f"❌ DON'T EDIT broadcast_text TABLE UNLESS YOU KNOW WHAT YOU ARE DOING!\nThis error can safely be ignored if the changes are approved to be sniffed: {file_path} at line {line_number}")
            check_failed = True
        # Enforce lowercase entryorguid (keira-generated syntax)
        if "EntryOrGuid" in line:
            print(
                f"❌ Please use entryorguid syntax instead of EntryOrGuid in {file_path} at line {line_number}\nWe recommend to use keira to have the right syntax in auto-query generation")
            check_failed = True
        if ';;' in line:
            print(
                f"❌ Double semicolon (;;) found in {file_path} at line {line_number}")
            check_failed = True
        # A leading tab means the file is not using 4-space indentation
        if line.startswith('\t'):
            print(
                f"❌ Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
            check_failed = True

    # After the loop, `line` holds the file's final line; its last character must
    # be the newline terminator. (Skipped entirely for an empty file.)
    if line is not None and line[-1].strip():
        print(
            f"❌ The last line is not a newline. Please add a newline: {file_path}")
        check_failed = True

    # Handle the script error and update the result output
    if check_failed:
        error_handler = True
        results["SQL codestyle check"] = "Failed"
|
||||
|
||||
def insert_delete_safety_check(file: io, file_path: str) -> None:
    """Require a DELETE before every INSERT, and forbid deleting template tables."""
    global error_handler, results
    file.seek(0)  # Reset file pointer to the beginning
    # Tables whose rows must never be removed by update SQL
    protected_tables = ["creature_template", "gameobject_template", "item_template", "quest_template"]
    failed = False
    prev_line = ""

    # Parse all the file
    for line_number, line in enumerate(file, start = 1):
        # Comment lines do not take part in the DELETE/INSERT pairing
        if line.startswith("--"):
            continue

        # Every INSERT must directly follow a line containing DELETE
        if "INSERT" in line and "DELETE" not in prev_line:
            print(f"❌ No DELETE keyword found before the INSERT in {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
            failed = True
        prev_line = line

        # Reject DELETEs aimed at the protected template tables
        m = re.match(r"DELETE FROM\s+`([^`]+)`", line, re.IGNORECASE)
        if m:
            table = m.group(1)
            if table in protected_tables:
                print(
                    f"❌ Entries from {table} should not be deleted! {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
                failed = True

    # Handle the script error and update the result output
    if failed:
        error_handler = True
        results["INSERT & DELETE safety usage check"] = "Failed"
|
||||
|
||||
def semicolon_check(file: io, file_path: str) -> None:
    """Check that SQL statements are properly terminated.

    Verifies that SET statements end with ';', that rows inside a multi-line
    VALUES block end with ',' (or ';' on the last row), and that an open query
    reaching the last line of the file is terminated. Failures are printed and
    recorded in the shared `results` table.

    NOTE(review): the VALUES-block heuristic assumes each row starts with '('
    on its own line — confirm against the repo's SQL formatting conventions.
    """
    global error_handler, results

    file.seek(0)  # Reset file pointer to the start
    check_failed = False

    # Parser state: inside an unterminated query / a block comment / a VALUES list
    query_open = False
    in_block_comment = False
    inside_values_block = False

    # Read everything up front so we can look ahead past the current line
    lines = file.readlines()
    total_lines = len(lines)

    def get_next_non_blank_line(start):
        """ Get the next non-blank, non-comment line starting from `start` """
        for idx in range(start, total_lines):
            next_line = lines[idx].strip()
            if next_line and not next_line.startswith('--') and not next_line.startswith('/*'):
                return next_line
        return None

    for line_number, line in enumerate(lines, start=1):
        stripped_line = line.strip()

        # Skip single-line comments
        if stripped_line.startswith('--'):
            continue

        # Handle block comments
        if in_block_comment:
            if '*/' in stripped_line:
                in_block_comment = False
                # Keep whatever SQL follows the comment terminator
                stripped_line = stripped_line.split('*/', 1)[1].strip()
            else:
                continue
        else:
            if '/*' in stripped_line:
                query_open = False  # Reset query state at start of block comment
                in_block_comment = True
                # Keep whatever SQL precedes the comment opener
                stripped_line = stripped_line.split('/*', 1)[0].strip()

        # Skip empty lines (unless inside values block)
        if not stripped_line and not inside_values_block:
            continue

        # Remove inline comments after SQL
        stripped_line = stripped_line.split('--', 1)[0].strip()

        # SET statements are single-line and must end with a semicolon
        if stripped_line.upper().startswith("SET") and not stripped_line.endswith(";"):
            print(f"❌ Missing semicolon in {file_path} at line {line_number}")
            check_failed = True

        # Detect query start
        if not query_open and any(keyword in stripped_line.upper() for keyword in ["SELECT", "INSERT", "UPDATE", "DELETE", "REPLACE"]):
            query_open = True

        # Detect start of multi-line VALUES block
        if any(kw in stripped_line.upper() for kw in ["INSERT", "REPLACE"]) and "VALUES" in stripped_line.upper():
            inside_values_block = True
            query_open = True  # Ensure query is marked open too

        if inside_values_block:
            if not stripped_line:
                continue  # Allow blank lines inside VALUES block

            if stripped_line.startswith('('):
                # Get next non-blank line to detect if we're at the last row
                next_line = get_next_non_blank_line(line_number)

                if next_line and next_line.startswith('('):
                    # Expect comma if another row follows
                    if not stripped_line.endswith(','):
                        print(f"❌ Missing comma in {file_path} at line {line_number}")
                        check_failed = True
                else:
                    # Expect semicolon if this is the final row
                    if not stripped_line.endswith(';'):
                        print(f"❌ Missing semicolon in {file_path} at line {line_number}")
                        check_failed = True
                    inside_values_block = False
                    query_open = False
            else:
                inside_values_block = False  # Close block if semicolon was found

        elif query_open and not inside_values_block:
            # Normal query handling (outside multi-row VALUES block)
            if line_number == total_lines and not stripped_line.endswith(';'):
                print(f"❌ Missing semicolon in {file_path} at the last line {line_number}")
                check_failed = True
                query_open = False
            elif stripped_line.endswith(';'):
                query_open = False

    # Record the failure in the shared result table
    if check_failed:
        error_handler = True
        results["Missing semicolon check"] = "Failed"
|
||||
|
||||
def backtick_check(file: io, file_path: str) -> None:
    """Ensure identifiers used in SQL clauses are wrapped in backticks."""
    global error_handler, results
    file.seek(0)
    failed = False

    # Locate SQL clauses and capture the text following the clause keyword
    clause_pattern = re.compile(
        r'\b(SELECT|FROM|JOIN|WHERE|GROUP BY|ORDER BY|DELETE FROM|UPDATE|INSERT INTO|SET|REPLACE|REPLACE INTO)\s+(.*?)(?=;$|(?=\b(?:WHERE|SET|VALUES)\b)|$)',
        re.IGNORECASE | re.DOTALL
    )

    # Matches single- and double-quoted literals so they can be blanked out
    quote_pattern = re.compile(r"'(?:\\'|[^'])*'|\"(?:\\\"|[^\"])*\"")

    # MySQL keywords never need backticks
    sql_keywords = {"SELECT", "FROM", "JOIN", "WHERE", "GROUP", "BY", "ORDER",
                    "DELETE", "UPDATE", "INSERT", "INTO", "SET", "VALUES", "AND",
                    "IN", "OR", "REPLACE", "NOT", "BETWEEN",
                    "DISTINCT", "HAVING", "LIMIT", "OFFSET", "AS", "ON", "INNER",
                    "LEFT", "RIGHT", "FULL", "OUTER", "CROSS", "NATURAL",
                    "EXISTS", "LIKE", "IS", "NULL", "UNION", "ALL", "ASC", "DESC",
                    "CASE", "WHEN", "THEN", "ELSE", "END", "CREATE", "TABLE",
                    "ALTER", "DROP", "DATABASE", "INDEX", "VIEW", "TRIGGER",
                    "PROCEDURE", "FUNCTION", "PRIMARY", "KEY", "FOREIGN", "REFERENCES",
                    "CONSTRAINT", "DEFAULT", "AUTO_INCREMENT", "UNIQUE", "CHECK",
                    "SHOW", "DESCRIBE", "EXPLAIN", "USE", "GRANT", "REVOKE",
                    "BEGIN", "COMMIT", "ROLLBACK", "SAVEPOINT", "LOCK", "UNLOCK",
                    "WITH", "RECURSIVE", "COLUMN", "ENGINE", "CHARSET", "COLLATE",
                    "IF", "ELSEIF", "LOOP", "WHILE", "DO", "HANDLER", "LEAVE",
                    "ITERATE", "DECLARE", "CURSOR", "FETCH", "OPEN", "CLOSE"}

    for line_number, line in enumerate(file, start=1):
        # Ignore comments
        if line.startswith('--'):
            continue

        # Blank out quoted literals to avoid false positives inside strings
        cleaned = quote_pattern.sub('', line)

        for clause, content in clause_pattern.findall(cleaned):
            # Candidate identifiers: words not prefixed with @ (user variables)
            for word in re.findall(r'\b(?<!@)([a-zA-Z_][a-zA-Z0-9_]*)\b', content):
                if word.upper() in sql_keywords:
                    continue

                # The identifier must appear enclosed in backticks somewhere in the clause
                if not re.search(rf'`{re.escape(word)}`', content):
                    print(f"❌ Missing backticks around ({word}). {file_path} at line {line_number}")
                    failed = True

    if failed:
        error_handler = True
        results["Backtick check"] = "Failed"
|
||||
|
||||
def directory_check(file: io, file_path: str) -> None:
    """Warn when a changed/added file lives inside a 'base' or 'archive' directory."""
    global error_handler, results
    file.seek(0)
    failed = False

    # Normalize the path (handles both / and \) and split it into components
    parts = os.path.normpath(file_path).split(os.sep)

    # Flag each protected directory that appears as a path component
    for folder in ("base", "archive"):
        if folder in parts:
            print(f"❗ {file_path} is changed/added in the {folder} directory.\nIf this is intended, please notify a maintainer.")
            failed = True

    if failed:
        error_handler = True
        results["Directory check"] = "Failed"
|
||||
|
||||
def non_innodb_engine_check(file: io, file_path: str) -> None:
    """Reject CREATE/ALTER statements that select a storage engine other than InnoDB."""
    global error_handler, results
    file.seek(0)
    failed = False

    # Matches e.g. "ENGINE=MyISAM" or "ENGINE = InnoDB", case-insensitively
    engine_pattern = re.compile(r'ENGINE\s*=\s*([a-zA-Z0-9_]+)', re.IGNORECASE)

    for line_number, line in enumerate(file, start=1):
        found = engine_pattern.search(line)
        if not found:
            continue
        engine = found.group(1).lower()
        if engine == "innodb":
            continue
        print(f"❌ Non-InnoDB engine found: '{engine}' in {file_path} at line {line_number}")
        failed = True

    if failed:
        error_handler = True
        results["Table engine check"] = "Failed"
|
||||
|
||||
# Collect all files from matching directories (source, base and archive trees)
all_files = collect_files_from_directories(src_directory) + collect_files_from_directories(base_directory) + collect_files_from_directories(archive_directory)

# Main function: run every codestyle check over the collected files
parsing_file(all_files)
|
||||
2
apps/compiler/.gitignore
vendored
Normal file
2
apps/compiler/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
config.sh
|
||||
|
||||
32
apps/compiler/README.md
Normal file
32
apps/compiler/README.md
Normal file
@@ -0,0 +1,32 @@
|
||||
## How to compile:
|
||||
|
||||
First of all, if you need some custom configuration, copy
`/conf/dist/config.sh` to `/conf/config.sh` and configure it.
|
||||
|
||||
* for a "clean" compilation you must run all scripts in their order:
|
||||
|
||||
./1-clean.sh
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you add/rename/delete some sources and you need to compile it you have to run:
|
||||
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you have modified code only, you just need to run
|
||||
|
||||
./3-build.sh
|
||||
|
||||
|
||||
## compiler.sh
|
||||
|
||||
The compiler.sh script contains an interactive menu to clean/compile/build. You can also run actions directly from the command line by specifying the option.
|
||||
Ex:
|
||||
./compiler.sh 3
|
||||
|
||||
It will start the build process (it's equivalent to ./3-build.sh)
|
||||
|
||||
## Note:
|
||||
|
||||
For an optimal development process and a **really faster** compilation time, it is suggested to use clang instead of gcc.
|
||||
65
apps/compiler/compiler.sh
Executable file
65
apps/compiler/compiler.sh
Executable file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env bash

# Abort on the first failing command.
set -e

# Absolute directory of this script, regardless of the caller's CWD.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/includes/includes.sh"
source "$AC_PATH_APPS/bash_shared/menu_system.sh"

# Menu definition using the new system.
# Format: "key|short|description" — key is the case label handled by
# handle_compiler_command, short is the abbreviated CLI alias.
comp_menu_items=(
    "build|b|Configure and compile"
    "clean|cl|Clean build files"
    "configure|cfg|Run CMake"
    "compile|cmp|Compile only"
    "all|a|clean, configure and compile"
    "ccacheClean|cc|Clean ccache files, normally not needed"
    "ccacheShowStats|cs|show ccache statistics"
    "quit|q|Close this menu"
)
|
||||
|
||||
# Menu command handler - called by menu system for each command.
# $1 is the selected menu key; dispatches to the matching comp_* function.
# Returns 0 on success/quit, 1 for an unknown key.
function handle_compiler_command() {
    local key="$1"
    shift

    case "$key" in
        "build")
            comp_build
            ;;
        "clean")
            comp_clean
            ;;
        "configure")
            comp_configure
            ;;
        "compile")
            comp_compile
            ;;
        "all")
            comp_all
            ;;
        "ccacheClean")
            comp_ccacheClean
            ;;
        "ccacheShowStats")
            comp_ccacheShowStats
            ;;
        "quit")
            echo "Closing compiler menu..."
            return 0
            ;;
        *)
            echo "Invalid option. Use --help to see available commands."
            return 1
            ;;
    esac
}
|
||||
|
||||
# Hook support (preserved from original)
runHooks "ON_AFTER_OPTIONS" # you can create your custom options

# Run the menu system: shows the interactive menu, or dispatches a command
# passed directly on the command line (e.g. ./compiler.sh 3).
menu_run_with_items "ACORE COMPILER" handle_compiler_command -- "${comp_menu_items[@]}" -- "$@"
|
||||
7
apps/compiler/includes/defines.sh
Normal file
7
apps/compiler/includes/defines.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
# You can choose the build type from the first command-line argument.
# Quote "$1" so an empty/unset or space-containing argument cannot break the
# test (the old unquoted `[ ! -z $1 ]` did).
if [ -n "$1" ]
then
    CCTYPE="$1"
    CCTYPE=${CCTYPE^} # capitalize first letter if it's not yet
fi
|
||||
|
||||
205
apps/compiler/includes/functions.sh
Normal file
205
apps/compiler/includes/functions.sh
Normal file
@@ -0,0 +1,205 @@
|
||||
#!/usr/bin/env bash

# shellcheck source=../../../deps/acore/bash-lib/src/common/boolean.sh
source "$AC_BASH_LIB_PATH/common/boolean.sh"

# Command prefix used for privileged install steps; empty by default.
SUDO=""

# Whether install/permission steps may escalate privileges (0 = never).
IS_SUDO_ENABLED=${AC_ENABLE_ROOT_CMAKE_INSTALL:-0}

# Allow callers to opt-out from privilege escalation during install/perms adjustments
if [[ $IS_SUDO_ENABLED == 1 ]]; then
    # Use sudo only when not already running as root.
    SUDO=$([ "$EUID" -ne 0 ] && echo "sudo" || echo "")
fi
|
||||
|
||||
# Remove all generated build artifacts under $BUILDPATH (the directory itself
# is kept). Safe to call when the directory does not exist.
function comp_clean() {
    DIRTOCLEAN=${BUILDPATH:-var/build/obj}
    PATTERN="$DIRTOCLEAN/*"

    echo "Cleaning build files in $DIRTOCLEAN"

    # Quote the directory (spaces-safe) while leaving the trailing glob
    # unquoted so it still expands; use an if-block instead of `&&` so the
    # function returns 0 when the directory is absent (callers run with set -e).
    if [ -d "$DIRTOCLEAN" ]; then
        rm -rf "$DIRTOCLEAN"/*
    fi
}
|
||||
|
||||
# Export the ccache tuning environment and add the CMake launcher flags.
# No-op unless AC_CCACHE=true. Existing CCACHE_* values are preserved
# (every export uses the ${VAR:-default} form).
function comp_ccacheEnable() {
    [ "$AC_CCACHE" != true ] && return

    export CCACHE_MAXSIZE=${CCACHE_MAXSIZE:-'1000MB'}
    #export CCACHE_DEPEND=true
    export CCACHE_SLOPPINESS=${CCACHE_SLOPPINESS:-pch_defines,time_macros,include_file_mtime}
    export CCACHE_CPP2=${CCACHE_CPP2:-true} # optimization for clang
    export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
    export CCACHE_COMPRESSLEVEL=${CCACHE_COMPRESSLEVEL:-9}
    export CCACHE_COMPILERCHECK=${CCACHE_COMPILERCHECK:-content}
    export CCACHE_LOGFILE=${CCACHE_LOGFILE:-"$CCACHE_DIR/cache.debug"}
    #export CCACHE_NODIRECT=true

    # Route both C and C++ compilations through ccache.
    export CCUSTOMOPTIONS="$CCUSTOMOPTIONS -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
}
|
||||
|
||||
# Wipe the ccache store and print the resulting statistics.
function comp_ccacheClean() {
    # Nothing to do unless ccache support is turned on.
    if [ "$AC_CCACHE" != true ]; then
        echo "ccache is disabled"
        return
    fi

    echo "Cleaning ccache"
    ccache -C
    ccache -s
}
|
||||
|
||||
# Zero the ccache hit/miss counters (silently skipped when ccache is disabled).
function comp_ccacheResetStats() {
    if [ "$AC_CCACHE" != true ]; then
        return
    fi

    ccache -zc
}
|
||||
|
||||
# Print ccache statistics (silently skipped when ccache is disabled).
function comp_ccacheShowStats() {
    if [ "$AC_CCACHE" != true ]; then
        return
    fi

    ccache -s
}
|
||||
|
||||
# Run the CMake configuration step inside $BUILDPATH.
# Reads: BUILDPATH, SRCPATH, BINPATH, CONFDIR, CTYPE, CDEBUG, AC_CCACHE, the
# C* feature toggles, and OSTYPE for per-platform library paths.
# Fires the ON_AFTER_CONFIG hook when done. Many variables are intentionally
# unquoted so multi-flag strings word-split into separate cmake arguments.
function comp_configure() {
    CWD=$(pwd)

    cd $BUILDPATH

    echo "Build path: $BUILDPATH"
    echo "DEBUG info: $CDEBUG"
    echo "Compilation type: $CTYPE"
    echo "CCache: $AC_CCACHE"
    # -DCMAKE_BUILD_TYPE=$CCTYPE disable optimization "slow and huge amount of ram"
    # -DWITH_COREDEBUG=$CDEBUG compiled with debug information

    #-DSCRIPTS_COMMANDS=$CSCRIPTS -DSCRIPTS_CUSTOM=$CSCRIPTS -DSCRIPTS_EASTERNKINGDOMS=$CSCRIPTS -DSCRIPTS_EVENTS=$CSCRIPTS -DSCRIPTS_KALIMDOR=$CSCRIPTS \
    #-DSCRIPTS_NORTHREND=$CSCRIPTS -DSCRIPTS_OUTDOORPVP=$CSCRIPTS -DSCRIPTS_OUTLAND=$CSCRIPTS -DSCRIPTS_PET=$CSCRIPTS -DSCRIPTS_SPELLS=$CSCRIPTS -DSCRIPTS_WORLD=$CSCRIPTS \
    #-DAC_WITH_UNIT_TEST=$CAC_UNIT_TEST -DAC_WITH_PLUGINS=$CAC_PLG \

    # Optional custom configuration directory.
    local DCONF=""
    if [ ! -z "$CONFDIR" ]; then
        DCONF="-DCONF_DIR=$CONFDIR"
    fi

    comp_ccacheEnable

    OSOPTIONS=""

    # Per-platform library/include hints.
    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
        darwin*)
            OSOPTIONS=" -DMYSQL_ADD_INCLUDE_PATH=/usr/local/include -DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib -DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include -DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl@3/include -DOPENSSL_SSL_LIBRARIES=/usr/local/opt/openssl@3/lib/libssl.dylib -DOPENSSL_CRYPTO_LIBRARIES=/usr/local/opt/openssl@3/lib/libcrypto.dylib "
            ;;
        msys*)
            OSOPTIONS=" -DMYSQL_INCLUDE_DIR=C:\tools\mysql\current\include -DMYSQL_LIBRARY=C:\tools\mysql\current\lib\mysqlclient.lib "
            ;;
    esac

    cmake $SRCPATH -DCMAKE_INSTALL_PREFIX=$BINPATH $DCONF \
    -DAPPS_BUILD=$CAPPS_BUILD \
    -DTOOLS_BUILD=$CTOOLS_BUILD \
    -DSCRIPTS=$CSCRIPTS \
    -DMODULES=$CMODULES \
    -DBUILD_TESTING=$CBUILD_TESTING \
    -DUSE_SCRIPTPCH=$CSCRIPTPCH \
    -DUSE_COREPCH=$CCOREPCH \
    -DCMAKE_BUILD_TYPE=$CTYPE \
    -DWITH_WARNINGS=$CWARNINGS \
    -DCMAKE_C_COMPILER=$CCOMPILERC \
    -DCMAKE_CXX_COMPILER=$CCOMPILERCXX \
    $CBUILD_APPS_LIST $CBUILD_TOOLS_LIST $OSOPTIONS $CCUSTOMOPTIONS

    cd $CWD

    runHooks "ON_AFTER_CONFIG"
}
|
||||
|
||||
# Build the configured project and install it, with per-platform install and
# permission handling. Reads MTHREADS, BUILDPATH, CTYPE, AC_BINPATH_FULL,
# CONFDIR, SUDO/IS_SUDO_ENABLED and AC_ENABLE_CONF_COPY_ON_INSTALL.
# Fires the ON_AFTER_BUILD hook when done.
function comp_compile() {
    # MTHREADS=0 means auto-detect: CPU count + 2.
    [ $MTHREADS == 0 ] && MTHREADS=$(grep -c ^processor /proc/cpuinfo) && MTHREADS=$(($MTHREADS + 2))

    echo "Using $MTHREADS threads"

    pushd "$BUILDPATH" >> /dev/null || exit 1

    comp_ccacheEnable

    comp_ccacheResetStats

    time cmake --build . --config $CTYPE -j $MTHREADS

    comp_ccacheShowStats

    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
    msys*)
        # Windows: plain install, no permission handling needed.
        cmake --install . --config $CTYPE

        popd >> /dev/null || exit 1

        echo "Done"
        ;;
    linux*|darwin*)
        local confDir
        confDir=${CONFDIR:-"$AC_BINPATH_FULL/../etc"}

        # create the folders before installing to
        # set the current user and permissions
        echo "Creating $AC_BINPATH_FULL..."
        mkdir -p "$AC_BINPATH_FULL"
        echo "Creating $confDir..."
        mkdir -p "$confDir"
        mkdir -p "$confDir/modules"

        confDir=$(realpath "$confDir")

        echo "Cmake install..."
        $SUDO cmake --install . --config $CTYPE

        popd >> /dev/null || exit 1

        # set all applications' SUID bit (opt-in via IS_SUDO_ENABLED)
        if [[ $IS_SUDO_ENABLED == 0 ]]; then
            echo "Skipping root ownership and SUID changes (IS_SUDO_ENABLED=0)"
        else
            echo "Setting permissions on binary files"
            find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec $SUDO chown root:root -- {} +
            find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec $SUDO chmod u+s -- {} +
            # Allow the servers to raise their scheduling priority without root.
            $SUDO setcap cap_sys_nice=eip "$AC_BINPATH_FULL/worldserver"
            $SUDO setcap cap_sys_nice=eip "$AC_BINPATH_FULL/authserver"
        fi


        # Copy .dist templates to live config files, never overwriting existing ones.
        if ( isTrue "$AC_ENABLE_CONF_COPY_ON_INSTALL" ) then
            echo "Copying default configuration files to $confDir ..."
            [[ -f "$confDir/worldserver.conf.dist" && ! -f "$confDir/worldserver.conf" ]] && \
                cp -v "$confDir/worldserver.conf.dist" "$confDir/worldserver.conf"
            [[ -f "$confDir/authserver.conf.dist" && ! -f "$confDir/authserver.conf" ]] && \
                cp -v "$confDir/authserver.conf.dist" "$confDir/authserver.conf"
            [[ -f "$confDir/dbimport.conf.dist" && ! -f "$confDir/dbimport.conf" ]] && \
                cp -v "$confDir/dbimport.conf.dist" "$confDir/dbimport.conf"

            for f in "$confDir/modules/"*.dist
            do
                [[ -e $f ]] || break # handle the case of no *.dist files
                if [[ ! -f "${f%.dist}" ]]; then
                    echo "Copying module config $(basename "${f%.dist}")"
                    cp -v "$f" "${f%.dist}";
                fi
            done
        fi

        echo "Done"
        ;;
    esac

    runHooks "ON_AFTER_BUILD"
}
|
||||
|
||||
# Configure and then compile (a full build without cleaning first).
function comp_build() {
    comp_configure
    comp_compile
}
|
||||
|
||||
# Full rebuild: clean previous artifacts, then configure and compile.
function comp_all() {
    comp_clean
    comp_build
}
|
||||
23
apps/compiler/includes/includes.sh
Normal file
23
apps/compiler/includes/includes.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
# Absolute directory of this script, regardless of the caller's CWD.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Shared helpers used by all apps.
source "$CURRENT_PATH/../../bash_shared/includes.sh"

AC_PATH_COMPILER="$AC_PATH_APPS/compiler"

# Optional user configuration overrides the defaults sourced above.
if [ -f "$AC_PATH_COMPILER/config.sh" ]; then
    source "$AC_PATH_COMPILER/config.sh" # should overwrite previous
fi

# After a successful build, copy the startup scripts next to the binaries.
function ac_on_after_build() {
    # move the run engine
    cp -rvf "$AC_PATH_APPS/startup-scripts/src/"* "$BINPATH"
}

registerHooks "ON_AFTER_BUILD" ac_on_after_build

source "$AC_PATH_COMPILER/includes/defines.sh"

source "$AC_PATH_COMPILER/includes/functions.sh"

# Ensure the build and install directories exist before any step runs.
mkdir -p $BUILDPATH

mkdir -p $BINPATH
|
||||
17
apps/compiler/test/bats.conf
Normal file
17
apps/compiler/test/bats.conf
Normal file
@@ -0,0 +1,17 @@
|
||||
# BATS Test Configuration for Compiler App
|
||||
|
||||
# Set test timeout (in seconds)
|
||||
export BATS_TEST_TIMEOUT=60
|
||||
|
||||
# Enable verbose output for debugging
|
||||
export BATS_VERBOSE_RUN=1
|
||||
|
||||
# Test output format
|
||||
export BATS_FORMATTER=pretty
|
||||
|
||||
# Enable colored output
|
||||
export BATS_NO_PARALLELIZE_ACROSS_FILES=1
|
||||
export BATS_NO_PARALLELIZE_WITHIN_FILE=1
|
||||
|
||||
# Compiler specific test configuration
|
||||
export COMPILER_TEST_SKIP_HEAVY=1
|
||||
309
apps/compiler/test/test_compiler.bats
Executable file
309
apps/compiler/test/test_compiler.bats
Executable file
@@ -0,0 +1,309 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# Require minimum BATS version when supported (older distro packages lack this)
|
||||
if type -t bats_require_minimum_version >/dev/null 2>&1; then
|
||||
bats_require_minimum_version 1.5.0
|
||||
fi
|
||||
|
||||
# AzerothCore Compiler Scripts Test Suite
|
||||
# Tests the functionality of the compiler scripts using the unified test framework
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
export COMPILER_SCRIPT="$SCRIPT_DIR/compiler.sh"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== COMPILER SCRIPT TESTS =====
|
||||
|
||||
@test "compiler: should show help with --help argument" {
|
||||
run bash -c "echo '' | timeout 5s $COMPILER_SCRIPT --help"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Available commands:" ]]
|
||||
}
|
||||
|
||||
@test "compiler: should show help with empty input" {
|
||||
run bash -c "echo '' | timeout 5s $COMPILER_SCRIPT 2>&1 || true"
|
||||
# The script might exit with timeout (124) or success (0), both are acceptable for this test
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
# Check if output contains expected content - looking for menu options (old or new format)
|
||||
[[ "$output" =~ "build:" ]] || [[ "$output" =~ "clean:" ]] || [[ "$output" =~ "Please enter your choice" ]] || [[ "$output" =~ "build (b):" ]] || [[ "$output" =~ "ACORE COMPILER" ]] || [[ -z "$output" ]]
|
||||
}
|
||||
|
||||
@test "compiler: should accept option numbers" {
|
||||
# Test option 7 (ccacheShowStats) which should be safe to run
|
||||
run bash -c "echo '7' | timeout 10s $COMPILER_SCRIPT 2>/dev/null || true"
|
||||
# The script might exit with timeout (124) or success (0), both are acceptable
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
}
|
||||
|
||||
@test "compiler: should accept option by name" {
|
||||
run timeout 10s "$COMPILER_SCRIPT" ccacheShowStats
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "compiler: should handle invalid option gracefully" {
|
||||
run timeout 5s "$COMPILER_SCRIPT" invalidOption
|
||||
# Should exit with error code for invalid option
|
||||
[ "$status" -eq 1 ]
|
||||
# Output check is optional as error message might be buffered
|
||||
}
|
||||
|
||||
@test "compiler: should handle invalid number gracefully" {
|
||||
run bash -c "echo '999' | timeout 5s $COMPILER_SCRIPT 2>&1 || true"
|
||||
# The script might exit with timeout (124) or success (0) for interactive mode
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 124 ]]
|
||||
# In interactive mode, the script should continue asking for input or timeout
|
||||
}
|
||||
|
||||
@test "compiler: should quit with quit option" {
|
||||
run timeout 5s "$COMPILER_SCRIPT" quit
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
# ===== FUNCTION TESTS =====
|
||||
|
||||
@test "functions: comp_clean should handle non-existent build directory" {
|
||||
# Source the functions with a non-existent build path
|
||||
run bash -c "
|
||||
export BUILDPATH='/tmp/non_existent_build_dir_$RANDOM'
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_clean
|
||||
"
|
||||
# Accept either success or failure - the important thing is the function runs
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 1 ]]
|
||||
[[ "$output" =~ "Cleaning build files" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_clean should remove build files when directory exists" {
|
||||
# Create a temporary build directory with test files
|
||||
local test_build_dir="/tmp/test_build_$RANDOM"
|
||||
mkdir -p "$test_build_dir/subdir"
|
||||
touch "$test_build_dir/test_file.txt"
|
||||
touch "$test_build_dir/subdir/nested_file.txt"
|
||||
|
||||
# Run the clean function
|
||||
run bash -c "
|
||||
export BUILDPATH='$test_build_dir'
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_clean
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Cleaning build files" ]]
|
||||
# Directory should still exist but be empty
|
||||
[ -d "$test_build_dir" ]
|
||||
[ ! -f "$test_build_dir/test_file.txt" ]
|
||||
[ ! -f "$test_build_dir/subdir/nested_file.txt" ]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$test_build_dir"
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheShowStats should run without errors when ccache enabled" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheShowStats
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheShowStats should do nothing when ccache disabled" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheShowStats
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
# Should produce no output when ccache is disabled
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheClean should handle disabled ccache" {
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheClean
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "ccache is disabled" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheClean should run when ccache enabled" {
|
||||
# Only run if ccache is actually available
|
||||
if command -v ccache >/dev/null 2>&1; then
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheClean
|
||||
"
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Cleaning ccache" ]]
|
||||
else
|
||||
skip "ccache not available on system"
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheEnable should set environment variables" {
|
||||
# Call the function in a subshell to capture environment changes
|
||||
run bash -c "
|
||||
export AC_CCACHE=true
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheEnable
|
||||
env | grep CCACHE | head -5
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCACHE_MAXSIZE" ]] || [[ "$output" =~ "CCACHE_COMPRESS" ]]
|
||||
}
|
||||
|
||||
@test "functions: comp_ccacheEnable should not set variables when ccache disabled" {
|
||||
# Call the function and verify it returns early when ccache is disabled
|
||||
run bash -c "
|
||||
export AC_CCACHE=false
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_ccacheEnable
|
||||
# The function should return early, so we check if it completed successfully
|
||||
echo 'Function completed without setting CCACHE vars'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Function completed" ]]
|
||||
}
|
||||
|
||||
# Mock tests for build functions (these would normally require a full setup)
|
||||
@test "functions: comp_configure should detect platform" {
|
||||
# Mock cmake command to avoid actual configuration
|
||||
run -127 bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE called with args: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Set required variables
|
||||
export BUILDPATH='/tmp'
|
||||
export SRCPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
export CTYPE='Release'
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run configure in the /tmp directory
|
||||
cd /tmp && comp_configure
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "Platform:" ]] || [[ "$output" =~ "CMAKE called with args:" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_compile should detect thread count" {
|
||||
# Mock cmake command to avoid actual compilation
|
||||
run -127 bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE called with args: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Mock other commands
|
||||
function pushd() { echo 'pushd $*'; }
|
||||
function popd() { echo 'popd $*'; }
|
||||
function time() { shift; \"\$@\"; }
|
||||
export -f pushd popd time
|
||||
|
||||
# Set required variables
|
||||
export BUILDPATH='/tmp'
|
||||
export MTHREADS=0
|
||||
export CTYPE='Release'
|
||||
export AC_BINPATH_FULL='/tmp'
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run compile in the /tmp directory
|
||||
cd /tmp && comp_compile
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "pushd" ]] || [[ "$output" =~ "CMAKE called with args:" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_build should call configure and compile" {
|
||||
# Mock the comp_configure and comp_compile functions
|
||||
run -127 bash -c "
|
||||
function comp_configure() {
|
||||
echo 'comp_configure called'
|
||||
return 0
|
||||
}
|
||||
|
||||
function comp_compile() {
|
||||
echo 'comp_compile called'
|
||||
return 0
|
||||
}
|
||||
|
||||
export -f comp_configure comp_compile
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run build
|
||||
comp_build
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "comp_configure called" ]] && [[ "$output" =~ "comp_compile called" ]]
|
||||
fi
|
||||
}
|
||||
|
||||
@test "functions: comp_all should call clean and build" {
|
||||
# Mock the comp_clean and comp_build functions
|
||||
run -127 bash -c "
|
||||
function comp_clean() {
|
||||
echo 'comp_clean called'
|
||||
return 0
|
||||
}
|
||||
|
||||
function comp_build() {
|
||||
echo 'comp_build called'
|
||||
return 0
|
||||
}
|
||||
|
||||
export -f comp_clean comp_build
|
||||
|
||||
# Source the functions
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Run all
|
||||
comp_all
|
||||
"
|
||||
|
||||
# Accept command not found as this might indicate missing dependencies
|
||||
[[ "$status" -eq 0 ]] || [[ "$status" -eq 127 ]]
|
||||
# If successful, check for expected output
|
||||
if [ "$status" -eq 0 ]; then
|
||||
[[ "$output" =~ "comp_clean called" ]] && [[ "$output" =~ "comp_build called" ]]
|
||||
fi
|
||||
}
|
||||
211
apps/compiler/test/test_compiler_config.bats
Executable file
211
apps/compiler/test/test_compiler_config.bats
Executable file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# AzerothCore Compiler Configuration Test Suite
|
||||
# Tests the configuration and support scripts for the compiler module
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== DEFINES SCRIPT TESTS =====
|
||||
|
||||
@test "defines: should accept CCTYPE from argument" {
|
||||
# Test the defines script with a release argument
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' release; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=Release" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle uppercase CCTYPE" {
|
||||
# Test the defines script with an uppercase argument
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' DEBUG; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=DEBUG" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle lowercase input" {
|
||||
# Test the defines script with lowercase input
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' debug; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=Debug" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle mixed case input" {
|
||||
# Test the defines script with mixed case input
|
||||
run bash -c "unset CCTYPE; source '$SCRIPT_DIR/includes/defines.sh' rElEaSe; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=RElEaSe" ]]
|
||||
}
|
||||
|
||||
@test "defines: should handle no argument" {
|
||||
# Test the defines script with no argument
|
||||
run bash -c "CCTYPE='original'; source '$SCRIPT_DIR/includes/defines.sh'; echo \"CCTYPE=\$CCTYPE\""
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CCTYPE=original" ]]
|
||||
}
|
||||
|
||||
# ===== INCLUDES SCRIPT TESTS =====
|
||||
|
||||
@test "includes: should create necessary directories" {
|
||||
# Create a temporary test environment
|
||||
local temp_dir="/tmp/compiler_test_$RANDOM"
|
||||
local build_path="$temp_dir/build"
|
||||
local bin_path="$temp_dir/bin"
|
||||
|
||||
# Remove directories to test creation
|
||||
rm -rf "$temp_dir"
|
||||
|
||||
# Source the includes script with custom paths - use a simpler approach
|
||||
run bash -c "
|
||||
export BUILDPATH='$build_path'
|
||||
export BINPATH='$bin_path'
|
||||
export AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
|
||||
# Create directories manually since includes.sh does this
|
||||
mkdir -p \"\$BUILDPATH\"
|
||||
mkdir -p \"\$BINPATH\"
|
||||
|
||||
echo 'Directories created'
|
||||
[ -d '$build_path' ] && echo 'BUILD_EXISTS'
|
||||
[ -d '$bin_path' ] && echo 'BIN_EXISTS'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "BUILD_EXISTS" ]]
|
||||
[[ "$output" =~ "BIN_EXISTS" ]]
|
||||
|
||||
# Cleanup
|
||||
rm -rf "$temp_dir"
|
||||
}
|
||||
|
||||
@test "includes: should source required files" {
|
||||
# Test that all required files are sourced without errors
|
||||
run bash -c "
|
||||
# Set minimal required environment
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
echo 'All files sourced successfully'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "All files sourced successfully" ]]
|
||||
}
|
||||
|
||||
@test "includes: should set AC_PATH_COMPILER variable" {
|
||||
# Test that AC_PATH_COMPILER is set correctly
|
||||
run bash -c "
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
echo \"AC_PATH_COMPILER=\$AC_PATH_COMPILER\"
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "AC_PATH_COMPILER=" ]]
|
||||
[[ "$output" =~ "/compiler" ]]
|
||||
}
|
||||
|
||||
@test "includes: should register ON_AFTER_BUILD hook" {
|
||||
# Test that the hook is registered
|
||||
run bash -c "
|
||||
AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
BUILDPATH='/tmp'
|
||||
BINPATH='/tmp'
|
||||
source '$SCRIPT_DIR/includes/includes.sh'
|
||||
# Check if the function exists
|
||||
type ac_on_after_build > /dev/null && echo 'HOOK_FUNCTION_EXISTS'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "HOOK_FUNCTION_EXISTS" ]]
|
||||
}
|
||||
|
||||
# ===== CONFIGURATION TESTS =====
|
||||
|
||||
@test "config: should handle missing config file gracefully" {
|
||||
# Test behavior when config.sh doesn't exist
|
||||
run bash -c "
|
||||
export AC_PATH_APPS='$SCRIPT_DIR/..'
|
||||
export AC_PATH_COMPILER='$SCRIPT_DIR'
|
||||
export BUILDPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
|
||||
# Test that missing config doesn't break sourcing
|
||||
[ ! -f '$SCRIPT_DIR/config.sh' ] && echo 'NO_CONFIG_FILE'
|
||||
echo 'Config handled successfully'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Config handled successfully" ]]
|
||||
}
|
||||
|
||||
# ===== ENVIRONMENT VARIABLE TESTS =====
|
||||
|
||||
@test "environment: should handle platform detection" {
|
||||
# Test that OSTYPE is properly handled
|
||||
run bash -c "
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
echo \"Platform detected: \$OSTYPE\"
|
||||
case \"\$OSTYPE\" in
|
||||
linux*) echo 'LINUX_DETECTED' ;;
|
||||
darwin*) echo 'DARWIN_DETECTED' ;;
|
||||
msys*) echo 'MSYS_DETECTED' ;;
|
||||
*) echo 'UNKNOWN_PLATFORM' ;;
|
||||
esac
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Platform detected:" ]]
|
||||
# Should detect at least one known platform
|
||||
[[ "$output" =~ "LINUX_DETECTED" ]] || [[ "$output" =~ "DARWIN_DETECTED" ]] || [[ "$output" =~ "MSYS_DETECTED" ]] || [[ "$output" =~ "UNKNOWN_PLATFORM" ]]
|
||||
}
|
||||
|
||||
@test "environment: should handle missing environment variables gracefully" {
|
||||
# Test behavior with minimal environment
|
||||
run bash -c "
|
||||
unset BUILDPATH BINPATH SRCPATH MTHREADS
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
echo 'Functions loaded with minimal environment'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Functions loaded with minimal environment" ]]
|
||||
}
|
||||
|
||||
# ===== HOOK SYSTEM TESTS =====
|
||||
|
||||
@test "hooks: ac_on_after_build should copy startup scripts" {
|
||||
# Mock the cp command to test the hook
|
||||
function cp() {
|
||||
echo "CP called with args: $*"
|
||||
return 0
|
||||
}
|
||||
export -f cp
|
||||
|
||||
# Set required variables
|
||||
AC_PATH_APPS="$SCRIPT_DIR/.."
|
||||
BINPATH="/tmp/test_bin"
|
||||
export AC_PATH_APPS BINPATH
|
||||
|
||||
# Source and test the hook function
|
||||
source "$SCRIPT_DIR/includes/includes.sh"
|
||||
run ac_on_after_build
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CP called with args:" ]]
|
||||
[[ "$output" =~ "startup-scripts" ]]
|
||||
}
|
||||
254
apps/compiler/test/test_compiler_integration.bats
Executable file
254
apps/compiler/test/test_compiler_integration.bats
Executable file
@@ -0,0 +1,254 @@
|
||||
#!/usr/bin/env bats
|
||||
|
||||
# AzerothCore Compiler Integration Test Suite
|
||||
# Tests edge cases and integration scenarios for the compiler module
|
||||
|
||||
# Load the AzerothCore test framework
|
||||
load '../../test-framework/bats_libs/acore-support'
|
||||
load '../../test-framework/bats_libs/acore-assert'
|
||||
|
||||
# Setup that runs before each test
|
||||
setup() {
|
||||
compiler_setup
|
||||
export SCRIPT_DIR="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)"
|
||||
}
|
||||
|
||||
# Cleanup that runs after each test
|
||||
teardown() {
|
||||
acore_test_teardown
|
||||
}
|
||||
|
||||
# ===== INTEGRATION TESTS =====
|
||||
|
||||
@test "integration: should handle full compiler.sh workflow" {
|
||||
# Test the complete workflow with safe options
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
echo '7' | timeout 15s ./compiler.sh
|
||||
echo 'First command completed'
|
||||
echo 'quit' | timeout 10s ./compiler.sh
|
||||
echo 'Quit command completed'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "First command completed" ]]
|
||||
[[ "$output" =~ "Quit command completed" ]]
|
||||
}
|
||||
|
||||
@test "integration: should handle multiple consecutive commands" {
|
||||
# Test running multiple safe commands in sequence
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
timeout 10s ./compiler.sh ccacheShowStats
|
||||
echo 'Command 1 done'
|
||||
timeout 10s ./compiler.sh quit
|
||||
echo 'Command 2 done'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Command 1 done" ]]
|
||||
[[ "$output" =~ "Command 2 done" ]]
|
||||
}
|
||||
|
||||
@test "integration: should preserve working directory" {
|
||||
# Test that the script doesn't change the working directory unexpectedly
|
||||
local original_pwd="$(pwd)"
|
||||
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
original_dir=\$(pwd)
|
||||
timeout 10s ./compiler.sh quit
|
||||
current_dir=\$(pwd)
|
||||
echo \"ORIGINAL: \$original_dir\"
|
||||
echo \"CURRENT: \$current_dir\"
|
||||
[ \"\$original_dir\" = \"\$current_dir\" ] && echo 'DIRECTORY_PRESERVED'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "DIRECTORY_PRESERVED" ]]
|
||||
}
|
||||
|
||||
# ===== ERROR HANDLING TESTS =====
|
||||
|
||||
@test "error_handling: should handle script errors gracefully" {
|
||||
# Test script behavior with set -e when encountering errors
|
||||
run bash -c "
|
||||
cd '$SCRIPT_DIR'
|
||||
# Try to source a non-existent file to test error handling
|
||||
timeout 5s bash -c 'set -e; source /nonexistent/file.sh' || echo 'ERROR_HANDLED'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "ERROR_HANDLED" ]]
|
||||
}
|
||||
|
||||
@test "error_handling: should validate function availability" {
|
||||
# Test that required functions are available after sourcing
|
||||
run bash -c "
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Check for key functions
|
||||
type comp_clean > /dev/null && echo 'COMP_CLEAN_AVAILABLE'
|
||||
type comp_configure > /dev/null && echo 'COMP_CONFIGURE_AVAILABLE'
|
||||
type comp_compile > /dev/null && echo 'COMP_COMPILE_AVAILABLE'
|
||||
type comp_build > /dev/null && echo 'COMP_BUILD_AVAILABLE'
|
||||
type comp_all > /dev/null && echo 'COMP_ALL_AVAILABLE'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "COMP_CLEAN_AVAILABLE" ]]
|
||||
[[ "$output" =~ "COMP_CONFIGURE_AVAILABLE" ]]
|
||||
[[ "$output" =~ "COMP_COMPILE_AVAILABLE" ]]
|
||||
[[ "$output" =~ "COMP_BUILD_AVAILABLE" ]]
|
||||
[[ "$output" =~ "COMP_ALL_AVAILABLE" ]]
|
||||
}
|
||||
|
||||
# ===== PERMISSION TESTS =====
|
||||
|
||||
@test "permissions: should handle permission requirements" {
|
||||
# Test script behavior with different permission scenarios
|
||||
run bash -c "
|
||||
# Test SUDO variable detection
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
echo \"SUDO variable: '\$SUDO'\"
|
||||
[ -n \"\$SUDO\" ] && echo 'SUDO_SET' || echo 'SUDO_EMPTY'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
# Should set SUDO appropriately based on EUID
|
||||
[[ "$output" =~ "SUDO_SET" ]] || [[ "$output" =~ "SUDO_EMPTY" ]]
|
||||
}
|
||||
|
||||
# ===== CLEANUP TESTS =====
|
||||
|
||||
@test "cleanup: comp_clean should handle various file types" {
|
||||
# Create a comprehensive test directory structure
|
||||
local test_dir="/tmp/compiler_cleanup_test_$RANDOM"
|
||||
mkdir -p "$test_dir/subdir1/subdir2"
|
||||
|
||||
# Create various file types
|
||||
touch "$test_dir/regular_file.txt"
|
||||
touch "$test_dir/executable_file.sh"
|
||||
touch "$test_dir/.hidden_file"
|
||||
touch "$test_dir/subdir1/nested_file.obj"
|
||||
touch "$test_dir/subdir1/subdir2/deep_file.a"
|
||||
ln -s "$test_dir/regular_file.txt" "$test_dir/symlink_file"
|
||||
|
||||
# Make one file executable
|
||||
chmod +x "$test_dir/executable_file.sh"
|
||||
|
||||
# Test cleanup
|
||||
run bash -c "
|
||||
export BUILDPATH='$test_dir'
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
comp_clean
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Cleaning build files" ]]
|
||||
|
||||
# Verify cleanup (directory should exist but files should be cleaned)
|
||||
[ -d "$test_dir" ]
|
||||
|
||||
# The cleanup might not remove all files depending on the implementation
|
||||
# Let's check if at least some cleanup occurred
|
||||
local remaining_files=$(find "$test_dir" -type f | wc -l)
|
||||
# Either all files are gone, or at least some cleanup happened
|
||||
[[ "$remaining_files" -eq 0 ]] || [[ "$remaining_files" -lt 6 ]]
|
||||
|
||||
# Cleanup test directory
|
||||
rm -rf "$test_dir"
|
||||
}
|
||||
|
||||
# ===== THREAD DETECTION TESTS =====
|
||||
|
||||
@test "threading: should detect available CPU cores" {
|
||||
# Test thread count detection logic
|
||||
run bash -c "
|
||||
# Simulate the thread detection logic from the actual function
|
||||
MTHREADS=0
|
||||
if [ \$MTHREADS == 0 ]; then
|
||||
# Use nproc if available, otherwise simulate 4 cores
|
||||
if command -v nproc >/dev/null 2>&1; then
|
||||
MTHREADS=\$(nproc)
|
||||
else
|
||||
MTHREADS=4
|
||||
fi
|
||||
MTHREADS=\$((MTHREADS + 2))
|
||||
fi
|
||||
echo \"Detected threads: \$MTHREADS\"
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "Detected threads:" ]]
|
||||
# Should be at least 3 (1 core + 2)
|
||||
local thread_count=$(echo "$output" | grep -o '[0-9]\+')
|
||||
[ "$thread_count" -ge 3 ]
|
||||
}
|
||||
|
||||
# ===== CMAKE OPTION TESTS =====
|
||||
|
||||
@test "cmake: should build correct cmake command" {
|
||||
# Mock cmake to capture command line arguments
|
||||
run bash -c "
|
||||
function cmake() {
|
||||
echo 'CMAKE_COMMAND: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
# Set comprehensive test environment
|
||||
export SRCPATH='/test/src'
|
||||
export BUILDPATH='/test/build'
|
||||
export BINPATH='/test/bin'
|
||||
export CTYPE='Release'
|
||||
export CAPPS_BUILD='ON'
|
||||
export CTOOLS_BUILD='ON'
|
||||
export CSCRIPTS='ON'
|
||||
export CMODULES='ON'
|
||||
export CBUILD_TESTING='OFF'
|
||||
export CSCRIPTPCH='ON'
|
||||
export CCOREPCH='ON'
|
||||
export CWARNINGS='ON'
|
||||
export CCOMPILERC='gcc'
|
||||
export CCOMPILERCXX='g++'
|
||||
export CCUSTOMOPTIONS='-DCUSTOM_OPTION=1'
|
||||
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Change to buildpath and run configure
|
||||
cd /test || cd /tmp
|
||||
comp_configure 2>/dev/null || echo 'Configure completed with warnings'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CMAKE_COMMAND:" ]] || [[ "$output" =~ "Configure completed" ]]
|
||||
}
|
||||
|
||||
# ===== PLATFORM SPECIFIC TESTS =====
|
||||
|
||||
@test "platform: should set correct options for detected platform" {
|
||||
# Test platform-specific CMAKE options
|
||||
run bash -c "
|
||||
# Mock cmake to capture platform-specific options
|
||||
function cmake() {
|
||||
echo 'CMAKE_PLATFORM_ARGS: $*'
|
||||
return 0
|
||||
}
|
||||
export -f cmake
|
||||
|
||||
export BUILDPATH='/tmp'
|
||||
export SRCPATH='/tmp'
|
||||
export BINPATH='/tmp'
|
||||
export CTYPE='Release'
|
||||
|
||||
source '$SCRIPT_DIR/includes/functions.sh'
|
||||
|
||||
# Change to buildpath and run configure
|
||||
cd /tmp
|
||||
comp_configure 2>/dev/null || echo 'Configure completed with warnings'
|
||||
"
|
||||
|
||||
[ "$status" -eq 0 ]
|
||||
[[ "$output" =~ "CMAKE_PLATFORM_ARGS:" ]] || [[ "$output" =~ "Configure completed" ]]
|
||||
}
|
||||
66
apps/config-merger/README.md
Normal file
66
apps/config-merger/README.md
Normal file
@@ -0,0 +1,66 @@
|
||||
# AzerothCore Config Merger
|
||||
|
||||
This directory contains configuration file merger tools to help update your AzerothCore server and module configurations with new options from distribution files.
|
||||
|
||||
**Available Options:** PHP and Python versions (**Python recommended for new users**)
|
||||
|
||||
## Purpose
|
||||
|
||||
The config merger tools help you update your existing configuration files (`.conf`) to include new options that have been added to the distribution files (`.conf.dist`). Distribution files always contain the most recent configuration changes and new options, while your personal config files may be missing these updates. These tools will:
|
||||
|
||||
- Compare your existing config files with the latest distribution files
|
||||
- Show you new configuration options that are missing from your files
|
||||
- Allow you to selectively add new options to your configs
|
||||
- Create automatic backups before making any changes
|
||||
- Support authserver.conf, worldserver.conf, and all module configs
|
||||
|
||||
## Available Versions
|
||||
|
||||
### PHP Version
|
||||
|
||||
**Requirements:**
|
||||
- PHP 5.6 or higher
|
||||
- **Requires a web server** (Apache, Nginx, IIS, etc.) to function
|
||||
- No additional libraries required (uses built-in PHP functions only)
|
||||
|
||||
**Features:**
|
||||
- Web-based interface
|
||||
- Configuration file parsing and merging
|
||||
- Browser-accessible configuration management
|
||||
|
||||
**Usage:**
|
||||
- Deploy to web server with PHP support (can be local - XAMPP, WAMP, or built-in PHP server)
|
||||
- Access via web browser
|
||||
- Follow web interface instructions
|
||||
|
||||
### Python Version (Recommended)
|
||||
|
||||
**Requirements:**
|
||||
- Python 3.6 or higher
|
||||
- No additional setup required beyond installing Python
|
||||
- No additional libraries required (uses built-in modules only)
|
||||
|
||||
**Features:**
|
||||
- Interactive menu-driven interface
|
||||
- Support for server configs (authserver.conf, worldserver.conf)
|
||||
- Support for module configs with bulk or selective updates
|
||||
- Automatic backup creation with timestamps
|
||||
- Cross-platform compatibility (Windows, Linux, macOS, and others)
|
||||
- Can be run via command line or by double-clicking the .py file
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
# Via command line
|
||||
cd /path/to/configs
|
||||
python config_merger.py
|
||||
|
||||
# Or double-click config_merger.py to open in terminal
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
When building AzerothCore with the `TOOL_CONFIG_MERGER` CMake option enabled, **only the Python version** will be automatically copied to your configs directory during the build process. The PHP version must be manually deployed to a web server.
|
||||
|
||||
## Support
|
||||
|
||||
Both versions provide the same core functionality for merging configuration files. Choose the version that best fits your environment and preferences. Python is recommended for most users due to its simplicity and no web server requirement.
|
||||
22
apps/config-merger/php/README.md
Normal file
22
apps/config-merger/php/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# ==== PHP merger (index.php + merge.php) ====
|
||||
|
||||
This is a PHP script for merging a new .dist file with your existing .conf file (worldserver.conf.dist and authserver.conf.dist)
|
||||
|
||||
It uses sessions so it is multi user safe, it adds any options that are removed to the bottom of the file commented out, just in case it removes something it shouldn't.
|
||||
If you add your custom patch configs below "# Custom" they will be copied exactly as they are.
|
||||
|
||||
Your new config will be found under $basedir/session_id/newconfig.conf.merge
|
||||
|
||||
If you do not run a PHP server on your machine you can read this guide on ["How to execute PHP code using command line?"](https://www.geeksforgeeks.org/how-to-execute-php-code-using-command-line/) on geeksforgeeks.org.
|
||||
|
||||
```
|
||||
php -S localhost:port -t E:\Azerothcore-wotlk\apps\config-merger\php\
|
||||
```
|
||||
|
||||
Change port to an available port to use. i.e 8000
|
||||
|
||||
Then go to your browser and type:
|
||||
|
||||
```
|
||||
localhost:8000/index.php
|
||||
```
|
||||
44
apps/config-merger/php/index.php
Normal file
44
apps/config-merger/php/index.php
Normal file
@@ -0,0 +1,44 @@
|
||||
<?php
|
||||
/*
|
||||
* Project Name: Config File Merge For Mangos/Trinity/AzerothCore Server
|
||||
* Date: 01.01.2010 inital version (0.0.1a)
|
||||
* Author: Paradox
|
||||
* Copyright: Paradox
|
||||
* Email: iamparadox@netscape.net (paypal email)
|
||||
* License: GNU General Public License v2(GPL)
|
||||
*/
|
||||
?>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251">
|
||||
<FORM enctype="multipart/form-data" ACTION="merge.php" METHOD="POST">
|
||||
Dist File (.conf.dist)
|
||||
<br />
|
||||
<INPUT name="File1" TYPE="file">
|
||||
<br />
|
||||
<br />
|
||||
Current Conf File (.conf)
|
||||
<br />
|
||||
<INPUT name="File2" TYPE="file">
|
||||
<br />
|
||||
<br />
|
||||
<INPUT TYPE=RADIO NAME="eol" VALUE="0" CHECKED >Windows -
|
||||
<INPUT TYPE=RADIO NAME="eol" VALUE="1" >UNIX/Linux
|
||||
<br />
|
||||
<br />
|
||||
<INPUT TYPE="submit" VALUE="Submit">
|
||||
<br />
|
||||
<br />
|
||||
If you have any custom settings, such as from patches,
|
||||
<br />
|
||||
make sure they are at the bottom of the file following
|
||||
<br />
|
||||
this block (add it if it's not there)
|
||||
<br />
|
||||
###############################################################################
|
||||
<br />
|
||||
# Custom
|
||||
<br />
|
||||
###############################################################################
|
||||
<br />
|
||||
<br />
|
||||
|
||||
</FORM>
|
||||
179
apps/config-merger/php/merge.php
Normal file
179
apps/config-merger/php/merge.php
Normal file
@@ -0,0 +1,179 @@
|
||||
<?php
|
||||
/*
|
||||
* Project Name: Config File Merge For Mangos/Trinity Server
|
||||
* Date: 01.01.2010 inital version (0.0.1a)
|
||||
* Author: Paradox
|
||||
* Copyright: Paradox
|
||||
* Email: iamparadox@netscape.net (paypal email)
|
||||
* License: GNU General Public License v2(GPL)
|
||||
*/
|
||||
|
||||
error_reporting(0);
|
||||
|
||||
if (!empty($_FILES['File1']) && !empty($_FILES['File2']))
|
||||
{
|
||||
session_id();
|
||||
session_start();
|
||||
$basedir = "merge";
|
||||
$eol = "\r\n";
|
||||
if ($_POST['eol'])
|
||||
$eol = "\n";
|
||||
else
|
||||
$eol = "\r\n";
|
||||
if (!file_exists($basedir))
|
||||
mkdir($basedir);
|
||||
if (!file_exists($basedir."/".session_id()))
|
||||
mkdir($basedir."/".session_id());
|
||||
$upload1 = $basedir."/".session_id()."/".basename($_FILES['File1']['name']);
|
||||
$upload2 = $basedir."/".session_id()."/".basename($_FILES['File2']['name']);
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/worldserver.conf.merge";
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/authserver.conf.merge";
|
||||
else
|
||||
$newconfig = $basedir."/".session_id()."/UnkownConfigFile.conf.merge";
|
||||
|
||||
$out_file = fopen($newconfig, "w");
|
||||
$success = false;
|
||||
if (move_uploaded_file($_FILES['File1']['tmp_name'], $upload1))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
if (move_uploaded_file($_FILES['File2']['tmp_name'], $upload2))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
|
||||
if ($success)
|
||||
{
|
||||
$custom_found = false;
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$in_file2 = fopen($upload2,"r");
|
||||
$array1 = array();
|
||||
$array2 = array();
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if ((substr($line,0,1) != '#' && substr($line,0,1) != ''))
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array1[$key] = $val;
|
||||
}
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2) && !$custom_found)
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array2[$key] = $val;
|
||||
}
|
||||
if (strtolower($line) == "# custom")
|
||||
$custom_found = true;
|
||||
else
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
fclose($in_file1);
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
{
|
||||
$array1[$k] = $v;
|
||||
unset($array2[$k]);
|
||||
}
|
||||
}
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
$array = array();
|
||||
while (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array[$key] = $val;
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
foreach($array as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
fwrite($out_file, $k."=".$array1[$k].$eol);
|
||||
else
|
||||
continue;
|
||||
}
|
||||
unset($array);
|
||||
if (!feof($in_file1))
|
||||
fwrite($out_file, $line.$eol);
|
||||
}
|
||||
else
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
if ($custom_found)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# Custom".$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2))
|
||||
{
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
}
|
||||
$first = true;
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if ($first)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# The Following values were removed from the config.".$eol);
|
||||
$first = false;
|
||||
}
|
||||
fwrite($out_file, "# ".$k."=".$v.$eol);
|
||||
}
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
|
||||
unset($array1);
|
||||
unset($array2);
|
||||
fclose($in_file1);
|
||||
fclose($in_file2);
|
||||
fclose($out_file);
|
||||
unlink($upload1);
|
||||
unlink($upload2);
|
||||
|
||||
echo "Process done";
|
||||
echo "<br /><a href=".$newconfig.">Click here to retrieve your merged conf</a>";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
echo "An error has occurred";
|
||||
}
|
||||
?>
|
||||
150
apps/config-merger/python/README.md
Normal file
150
apps/config-merger/python/README.md
Normal file
@@ -0,0 +1,150 @@
|
||||
# AzerothCore Config Updater/Merger - Python Version
|
||||
|
||||
A command-line tool to update your AzerothCore configuration files with new options from distribution files.
|
||||
|
||||
> [!NOTE]
|
||||
> Based on and modified from [@BoiseComputer](https://github.com/BoiseComputer) (Brian Aldridge)'s [update_module_confs](https://github.com/Brian-Aldridge/update_module_confs) project to meet AzerothCore's needs
|
||||
|
||||
## Overview
|
||||
|
||||
This tool compares your existing configuration files (`.conf`) with the latest distribution files (`.conf.dist`) and helps you add new configuration options that may have been introduced in updates. It ensures your configs stay up-to-date while preserving your custom settings.
|
||||
|
||||
## Features
|
||||
|
||||
- **Interactive Menu System** - Easy-to-use numbered menu options
|
||||
- **Server Config Support** - Update authserver.conf and worldserver.conf
|
||||
- **Module Config Support** - Update all or selected module configurations
|
||||
- **Automatic Backups** - If you choose a valid option and there are changes, a timestamped backup is created before any changes are made (e.g. `filename(d11_m12_y2025_14h_30m_45s).bak`)
|
||||
- **Selective Updates** - Choose which new config options to add (y/n prompts)
|
||||
- **Safe Operation** - Only creates backups and makes changes when new options are found
|
||||
|
||||
## How to Use
|
||||
|
||||
There are two ways to use this. You can either copy this file directly to your `/configs` folder, or enable `TOOL_CONFIG_MERGER` in CMake. Upon compiling your core, the file will be generated in the same location as your `/configs` folder.
|
||||
|
||||
### Interactive Mode (Default)
|
||||
|
||||
1. **Run the script** in your configs directory:
|
||||
```bash
|
||||
python config_merger.py
|
||||
```
|
||||
Or simply **double-click** the `config_merger.py` file to run it directly.
|
||||
|
||||
2. **Specify configs path** (or press Enter for current directory):
|
||||
```
|
||||
Enter the path to your configs folder (default: .) which means current folder:
|
||||
```
|
||||
|
||||
3. **Choose from the menu**:
|
||||
```
|
||||
AzerothCore Config Updater/Merger (v. 1)
|
||||
--------------------------
|
||||
1 - Update Auth Config
|
||||
2 - Update World Config
|
||||
3 - Update Auth and World Configs
|
||||
4 - Update All Modules Configs
|
||||
5 - Update Modules (Selection) Configs
|
||||
0 - Quit
|
||||
```
|
||||
|
||||
### Command Line Interface (CLI)
|
||||
|
||||
For automation and scripting, you can use CLI mode:
|
||||
|
||||
```bash
|
||||
python config_merger.py [config_dir] [target] [options]
|
||||
```
|
||||
|
||||
**Arguments:**
|
||||
- `config_dir` (optional): Path to configs directory (default: current directory)
|
||||
- `target` (optional): What to update:
|
||||
- `auth` - Update authserver.conf only
|
||||
- `world` - Update worldserver.conf only
|
||||
- `both` - Update both server configs
|
||||
- `modules` - Update all module configs
|
||||
- `modules-select` - Interactive module selection
|
||||
|
||||
**Options:**
|
||||
- `-y, --yes`: Skip prompts and auto-add all new config options (default: prompt for each option)
|
||||
- `--version`: Show version information
|
||||
|
||||
**Examples:**
|
||||
```bash
|
||||
# Interactive mode (default)
|
||||
python config_merger.py
|
||||
|
||||
# Update auth config with prompts
|
||||
python config_merger.py . auth
|
||||
|
||||
# Update both configs automatically (no prompts)
|
||||
python config_merger.py /path/to/configs both -y
|
||||
|
||||
# Update all modules with confirmation
|
||||
python config_merger.py . modules
|
||||
```
|
||||
|
||||
## Menu Options Explained
|
||||
|
||||
- **Option 1**: Updates only `authserver.conf` from `authserver.conf.dist`
|
||||
- **Option 2**: Updates only `worldserver.conf` from `worldserver.conf.dist`
|
||||
- **Option 3**: Updates both server config files
|
||||
- **Option 4**: Automatically processes all module config files in the `modules/` folder
|
||||
- **Option 5**: Shows you a list of available modules and lets you select specific ones to update
|
||||
- **Option 0**: Exit the program
|
||||
|
||||
## Interactive Process
|
||||
|
||||
For each missing configuration option found, the tool will:
|
||||
|
||||
1. **Show you the option** with its comments and default value
|
||||
2. **Ask for confirmation**: `Add [option_name] to config? (y/n):`
|
||||
3. **Add or skip** based on your choice
|
||||
4. **Create backup** (before any changes are made) only if you choose a valid option and there are changes (format: `filename(d11_m12_y2025_14h_30m_45s).bak`)
|
||||
|
||||
## Example Session
|
||||
|
||||
```
|
||||
Processing worldserver.conf ...
|
||||
Backup created: worldserver.conf(d11_m12_y2025_14h_30m_45s).bak
|
||||
|
||||
# New feature for XP rates
|
||||
XP.Rate = 1
|
||||
Add XP.Rate to config? (y/n): y
|
||||
Added XP.Rate.
|
||||
|
||||
# Database connection pool size
|
||||
Database.PoolSize = 5
|
||||
Add Database.PoolSize to config? (y/n): n
|
||||
Skipped Database.PoolSize.
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- Python 3.6 or higher
|
||||
- No additional libraries needed (uses built-in modules only)
|
||||
|
||||
## File Structure Expected
|
||||
|
||||
```
|
||||
configs/
|
||||
├── config_merger.py (this script)
|
||||
├── authserver.conf.dist
|
||||
├── authserver.conf
|
||||
├── worldserver.conf.dist
|
||||
├── worldserver.conf
|
||||
└── modules/
|
||||
├── mod_example.conf.dist
|
||||
├── mod_example.conf
|
||||
└── ...
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This file is part of the AzerothCore Project. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
**Note:** Original code portions were licensed under the MIT License by Brian Aldridge (https://github.com/BoiseComputer)
|
||||
Original project: https://github.com/Brian-Aldridge/update_module_confs
|
||||
276
apps/config-merger/python/config_merger.py
Normal file
276
apps/config-merger/python/config_merger.py
Normal file
@@ -0,0 +1,276 @@
|
||||
# Version 1
|
||||
# Based and modified from: https://github.com/Brian-Aldridge/update_module_confs
|
||||
#
|
||||
# This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
|
||||
# more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
# Original code portions licensed under MIT License by Brian Aldridge (https://github.com/BoiseComputer)
|
||||
# Original project: https://github.com/Brian-Aldridge/update_module_confs
|
||||
|
||||
VERSION = "1"
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import argparse
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
def find_modules(folder):
|
||||
dist_files = []
|
||||
try:
|
||||
files = os.listdir(folder)
|
||||
except (OSError, IOError) as e:
|
||||
print(f"[ERROR] Could not list directory '{folder}': {e}")
|
||||
return []
|
||||
for file in files:
|
||||
if file.endswith('.conf.dist'):
|
||||
dist_files.append(file)
|
||||
return sorted(dist_files)
|
||||
|
||||
def prompt_module_selection(dist_files):
|
||||
print("Found the following modules:")
|
||||
for idx, fname in enumerate(dist_files, 1):
|
||||
print(f" {idx}. {fname}")
|
||||
nums = input("Enter numbers of modules to update (comma-separated): ").strip()
|
||||
raw_inputs = [x.strip() for x in nums.split(",") if x.strip()]
|
||||
indices = []
|
||||
invalid = []
|
||||
for x in raw_inputs:
|
||||
if not x.isdigit():
|
||||
invalid.append(f"'{x}' (not a number)")
|
||||
continue
|
||||
idx = int(x)
|
||||
if 0 < idx <= len(dist_files):
|
||||
indices.append(idx-1)
|
||||
else:
|
||||
invalid.append(f"'{x}' (out of range, must be 1-{len(dist_files)})")
|
||||
if invalid:
|
||||
print("Invalid input:")
|
||||
for msg in invalid:
|
||||
print(f" {msg}")
|
||||
if not indices:
|
||||
print("No valid module numbers were entered.")
|
||||
return []
|
||||
selected = [dist_files[i] for i in indices]
|
||||
return selected
|
||||
|
||||
def backup_file(filepath):
|
||||
timestamp = datetime.now().strftime("d%d_m%m_y%Y_%Hh_%Mm_%Ss")
|
||||
bakpath = f"{filepath}({timestamp}).bak"
|
||||
try:
|
||||
shutil.copy2(filepath, bakpath)
|
||||
print(f" Backup created: {bakpath}")
|
||||
except (OSError, IOError) as e:
|
||||
print(f"[ERROR] Failed to create backup '{bakpath}': {e}")
|
||||
return False
|
||||
return True
|
||||
|
||||
def parse_conf(filepath):
|
||||
# Returns a dict of key: (line, [preceding_comments])
|
||||
try:
|
||||
with open(filepath, encoding="utf-8") as f:
|
||||
lines = f.readlines()
|
||||
except (OSError, IOError) as e:
|
||||
print(f"[ERROR] Failed to read config file '{filepath}': {e}")
|
||||
return None
|
||||
conf = {}
|
||||
comments = []
|
||||
for line in lines:
|
||||
stripped = line.strip()
|
||||
if not stripped or stripped.startswith("#"):
|
||||
comments.append(line)
|
||||
continue
|
||||
if stripped.startswith("[") and stripped.endswith("]"):
|
||||
# Ignore [headers of configs]
|
||||
comments.clear()
|
||||
continue
|
||||
if stripped.count("=") == 1:
|
||||
key, value = [s.strip() for s in stripped.split("=", 1)]
|
||||
if '#' in value:
|
||||
value = value.split('#', 1)[0].rstrip()
|
||||
if key:
|
||||
conf[key] = (f"{key} = {value}\n", comments.copy())
|
||||
comments.clear()
|
||||
continue
|
||||
return conf
|
||||
|
||||
def find_missing_keys(dist_conf, user_conf):
|
||||
missing = {}
|
||||
for key, (line, comments) in dist_conf.items():
|
||||
if key not in user_conf:
|
||||
missing[key] = (line, comments)
|
||||
return missing
|
||||
|
||||
def update_conf(dist_path, conf_path, skip_prompts=False):
|
||||
if not os.path.exists(conf_path):
|
||||
print(f" User config {conf_path} does not exist, skipping.")
|
||||
return False
|
||||
dist_conf = parse_conf(dist_path)
|
||||
user_conf = parse_conf(conf_path)
|
||||
missing = find_missing_keys(dist_conf, user_conf)
|
||||
if not missing:
|
||||
print(" No new config options to add.")
|
||||
return False
|
||||
updated = False
|
||||
lines_to_add = []
|
||||
for key, (line, comments) in missing.items():
|
||||
if skip_prompts:
|
||||
lines_to_add.append((comments, line, key))
|
||||
else:
|
||||
print("\n" + "".join(comments if comments else []) + line, end="")
|
||||
add = input(f" Add {key} to config? (y/n): ").strip().lower()
|
||||
if add in ("", "y", "yes"):
|
||||
lines_to_add.append((comments, line, key))
|
||||
else:
|
||||
print(f" Skipped {key}.")
|
||||
if lines_to_add:
|
||||
backup_file(conf_path)
|
||||
# Write using system's default line ending to avoid mixing CRLF and LF in the config file
|
||||
newline = os.linesep.encode('utf-8')
|
||||
with open(conf_path, "ab") as f:
|
||||
for comments, line, key in lines_to_add:
|
||||
if comments:
|
||||
for c in comments:
|
||||
f.write(c.rstrip('\r\n').encode('utf-8') + newline)
|
||||
f.write(line.rstrip('\r\n').encode('utf-8') + newline)
|
||||
print(f" Added {key}.")
|
||||
updated = True
|
||||
return updated
|
||||
|
||||
def update_server_config(config_name, config_dir, skip_prompts=False):
|
||||
dist_path = os.path.join(config_dir, f"{config_name}.conf.dist")
|
||||
conf_path = os.path.join(config_dir, f"{config_name}.conf")
|
||||
|
||||
if not os.path.exists(dist_path):
|
||||
print(f" Distribution config {dist_path} does not exist, skipping.")
|
||||
return False
|
||||
|
||||
print(f"\nProcessing {config_name}.conf ...")
|
||||
return update_conf(dist_path, conf_path, skip_prompts)
|
||||
|
||||
def update_modules(config_dir, selected_only=False, skip_prompts=False):
|
||||
modules_dir = os.path.join(config_dir, "modules")
|
||||
if not os.path.exists(modules_dir):
|
||||
print(f" Modules directory {modules_dir} does not exist, skipping.")
|
||||
return
|
||||
|
||||
dist_files = find_modules(modules_dir)
|
||||
if not dist_files:
|
||||
print(" No .conf.dist files found in modules folder.")
|
||||
return
|
||||
|
||||
if selected_only:
|
||||
selected = prompt_module_selection(dist_files)
|
||||
if not selected:
|
||||
print(" No modules selected.")
|
||||
return
|
||||
else:
|
||||
selected = dist_files
|
||||
|
||||
for dist_fname in selected:
|
||||
module = dist_fname[:-5] # Removes ".dist"
|
||||
conf_fname = module # e.g., mod_x.conf
|
||||
dist_path = os.path.join(modules_dir, dist_fname)
|
||||
conf_path = os.path.join(modules_dir, conf_fname)
|
||||
print(f"\nProcessing {conf_fname} ...")
|
||||
update_conf(dist_path, conf_path, skip_prompts)
|
||||
|
||||
def show_main_menu():
|
||||
print(f"\nAzerothCore Config Updater/Merger (v. {VERSION})")
|
||||
print("--------------------------")
|
||||
print("1 - Update Auth Config")
|
||||
print("2 - Update World Config")
|
||||
print("3 - Update Auth and World Configs")
|
||||
print("4 - Update All Modules Configs")
|
||||
print("5 - Update Modules (Selection) Configs")
|
||||
print("0 - Quit")
|
||||
return input("Select an option: ").strip()
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(description='AzerothCore Config Updater/Merger')
|
||||
parser.add_argument('config_dir', nargs='?', default='.',
|
||||
help='Path to configs directory (default: current directory)')
|
||||
parser.add_argument('target', nargs='?',
|
||||
choices=['auth', 'world', 'both', 'modules', 'modules-select'],
|
||||
help='What to update: auth, world, both, modules, modules-select')
|
||||
parser.add_argument('-y', '--yes', action='store_true',
|
||||
help='Automatically answer yes to all prompts')
|
||||
parser.add_argument('--version', action='version', version=f'%(prog)s {VERSION}')
|
||||
return parser.parse_args()
|
||||
|
||||
def main():
|
||||
args = parse_args()
|
||||
|
||||
# If no target specified, run interactive mode
|
||||
if args.target is None:
|
||||
print(f"AzerothCore Config Updater/Merger (v. {VERSION})")
|
||||
print("==========================")
|
||||
config_dir = input("Enter the path to your configs folder (Default / Empty will use the folder where this script is located): ").strip()
|
||||
if not config_dir:
|
||||
config_dir = "."
|
||||
|
||||
if not os.path.isdir(config_dir):
|
||||
print("Provided path is not a valid directory.")
|
||||
return
|
||||
|
||||
while True:
|
||||
choice = show_main_menu()
|
||||
|
||||
if choice == "1":
|
||||
update_server_config("authserver", config_dir)
|
||||
elif choice == "2":
|
||||
update_server_config("worldserver", config_dir)
|
||||
elif choice == "3":
|
||||
update_server_config("authserver", config_dir)
|
||||
update_server_config("worldserver", config_dir)
|
||||
elif choice == "4":
|
||||
update_modules(config_dir, selected_only=False)
|
||||
elif choice == "5":
|
||||
update_modules(config_dir, selected_only=True)
|
||||
elif choice == "0":
|
||||
print("Goodbye!")
|
||||
break
|
||||
else:
|
||||
print("Invalid selection. Please try again.")
|
||||
else:
|
||||
# CLI mode
|
||||
config_dir = args.config_dir
|
||||
|
||||
if not os.path.isdir(config_dir):
|
||||
print(f"Error: Directory '{config_dir}' does not exist.")
|
||||
sys.exit(1)
|
||||
|
||||
print(f"AzerothCore Config Updater/Merger (v. {VERSION}) - CLI Mode")
|
||||
print(f"Config directory: {os.path.abspath(config_dir)}")
|
||||
print(f"Target: {args.target}")
|
||||
if args.yes:
|
||||
print("Skip prompts: Yes")
|
||||
|
||||
if args.target == 'auth':
|
||||
update_server_config("authserver", config_dir, args.yes)
|
||||
elif args.target == 'world':
|
||||
update_server_config("worldserver", config_dir, args.yes)
|
||||
elif args.target == 'both':
|
||||
update_server_config("authserver", config_dir, args.yes)
|
||||
update_server_config("worldserver", config_dir, args.yes)
|
||||
elif args.target == 'modules':
|
||||
update_modules(config_dir, selected_only=False, skip_prompts=args.yes)
|
||||
elif args.target == 'modules-select':
|
||||
if args.yes:
|
||||
print("Warning: --yes flag ignored for modules-select (requires interactive selection)")
|
||||
update_modules(config_dir, selected_only=True, skip_prompts=False)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
261
apps/docker/Dockerfile
Normal file
261
apps/docker/Dockerfile
Normal file
@@ -0,0 +1,261 @@
|
||||
ARG UBUNTU_VERSION=22.04 # lts
|
||||
|
||||
# This target lays out the general directory skeleton for AzerothCore,
|
||||
# This target isn't intended to be directly used
|
||||
FROM ubuntu:$UBUNTU_VERSION AS skeleton
|
||||
|
||||
# Note: ARG instructions defined after FROM are available in this build stage.
|
||||
# Placing ARG TZ here (after FROM) ensures it is accessible for configuring the timezone below.
|
||||
ARG TZ=Etc/UTC
|
||||
ARG DOCKER=1
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
ENV AC_FORCE_CREATE_DB=1
|
||||
|
||||
RUN mkdir -pv \
|
||||
/azerothcore/bin \
|
||||
/azerothcore/data \
|
||||
/azerothcore/deps \
|
||||
/azerothcore/env/dist/bin \
|
||||
/azerothcore/env/dist/data/Cameras \
|
||||
/azerothcore/env/dist/data/dbc \
|
||||
/azerothcore/env/dist/data/maps \
|
||||
/azerothcore/env/dist/data/mmaps \
|
||||
/azerothcore/env/dist/data/vmaps \
|
||||
/azerothcore/env/dist/logs \
|
||||
/azerothcore/env/dist/temp \
|
||||
/azerothcore/env/dist/etc \
|
||||
/azerothcore/modules \
|
||||
/azerothcore/src \
|
||||
/azerothcore/build
|
||||
|
||||
# Configure Timezone
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends tzdata ca-certificates \
|
||||
&& ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime \
|
||||
&& echo "$TZ" > /etc/timezone \
|
||||
&& dpkg-reconfigure --frontend noninteractive tzdata \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /azerothcore
|
||||
|
||||
# This target builds the docker image
|
||||
# This target can be useful to inspect the explicit outputs from the build,
|
||||
FROM skeleton AS build
|
||||
|
||||
ARG CTOOLS_BUILD="all"
|
||||
ARG CTYPE="RelWithDebInfo"
|
||||
ARG CCACHE_CPP2="true"
|
||||
ARG CSCRIPTPCH="OFF"
|
||||
ARG CSCRIPTS="static"
|
||||
ARG CMODULES="static"
|
||||
ARG CSCRIPTS_DEFAULT_LINKAGE="static"
|
||||
ARG CWITH_WARNINGS="ON"
|
||||
ARG CMAKE_EXTRA_OPTIONS=""
|
||||
ARG GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
|
||||
ARG CCACHE_DIR="/ccache"
|
||||
ARG CCACHE_MAXSIZE="1000MB"
|
||||
ARG CCACHE_SLOPPINESS="pch_defines,time_macros,include_file_mtime"
|
||||
ARG CCACHE_COMPRESS=""
|
||||
ARG CCACHE_COMPRESSLEVEL="9"
|
||||
ARG CCACHE_COMPILERCHECK="content"
|
||||
ARG CCACHE_LOGFILE=""
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
build-essential ccache libtool cmake-data make cmake clang \
|
||||
git lsb-base curl unzip default-mysql-client openssl \
|
||||
default-libmysqlclient-dev libboost-all-dev libssl-dev libmysql++-dev \
|
||||
libreadline-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY CMakeLists.txt /azerothcore/CMakeLists.txt
|
||||
COPY conf /azerothcore/conf
|
||||
COPY deps /azerothcore/deps
|
||||
COPY src /azerothcore/src
|
||||
COPY modules /azerothcore/modules
|
||||
|
||||
ARG CACHEBUST=1
|
||||
|
||||
WORKDIR /azerothcore/build
|
||||
|
||||
RUN --mount=type=cache,target=/ccache,sharing=locked \
|
||||
# This may seem silly (and it is), but AzerothCore wants the git repo at
|
||||
# build time. The git repo is _huge_ and it's not something that really
|
||||
# makes sense to mount into the container, but this way we can let the build
|
||||
# have the information it needs without including the hundreds of megabytes
|
||||
# of git repo into the container.
|
||||
--mount=type=bind,target=/azerothcore/.git,source=.git \
|
||||
git config --global --add safe.directory /azerothcore \
|
||||
&& cmake /azerothcore \
|
||||
-DCMAKE_INSTALL_PREFIX="/azerothcore/env/dist" \
|
||||
-DAPPS_BUILD="all" \
|
||||
-DTOOLS_BUILD="$CTOOLS_BUILD" \
|
||||
-DSCRIPTS="$CSCRIPTS" \
|
||||
-DMODULES="$CMODULES" \
|
||||
-DWITH_WARNINGS="$CWITH_WARNINGS" \
|
||||
-DCMAKE_BUILD_TYPE="$CTYPE" \
|
||||
-DCMAKE_CXX_COMPILER="clang++" \
|
||||
-DCMAKE_C_COMPILER="clang" \
|
||||
-DCMAKE_CXX_COMPILER_LAUNCHER="ccache" \
|
||||
-DCMAKE_C_COMPILER_LAUNCHER="ccache" \
|
||||
-DBoost_USE_STATIC_LIBS="ON" \
|
||||
&& cmake --build . --config "$CTYPE" -j $(($(nproc) + 1)) \
|
||||
&& cmake --install . --config "$CTYPE"
|
||||
|
||||
#############################
|
||||
# Base runtime for services #
|
||||
#############################
|
||||
|
||||
FROM skeleton AS runtime
|
||||
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
ENV ACORE_COMPONENT=undefined
|
||||
|
||||
# Install base dependencies for azerothcore
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmysqlclient21 libreadline8 \
|
||||
gettext-base default-mysql-client && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=build /azerothcore/env/dist/etc/ /azerothcore/env/ref/etc
|
||||
|
||||
VOLUME /azerothcore/env/dist/etc
|
||||
|
||||
ENV PATH="/azerothcore/env/dist/bin:$PATH"
|
||||
|
||||
RUN groupadd --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
useradd -d /azerothcore --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
chown -R "$DOCKER_USER:$DOCKER_USER" /azerothcore
|
||||
|
||||
COPY --chown=$USER_ID:$GROUP_ID \
|
||||
--chmod=755 \
|
||||
apps/docker/entrypoint.sh /azerothcore/entrypoint.sh
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
ENTRYPOINT ["/usr/bin/env", "bash", "/azerothcore/entrypoint.sh"]
|
||||
|
||||
###############
|
||||
# Auth Server #
|
||||
###############
|
||||
|
||||
FROM runtime AS authserver
|
||||
LABEL description="AzerothCore Auth Server"
|
||||
|
||||
ENV ACORE_COMPONENT=authserver
|
||||
# Don't run database migrations. We can leave that up to the db-import container
|
||||
ENV AC_UPDATES_ENABLE_DATABASES=0
|
||||
# This disables user prompts. The console is still active, however
|
||||
ENV AC_DISABLE_INTERACTIVE=1
|
||||
ENV AC_CLOSE_IDLE_CONNECTIONS=0
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER \
|
||||
--from=build \
|
||||
/azerothcore/env/dist/bin/authserver /azerothcore/env/dist/bin/authserver
|
||||
|
||||
|
||||
CMD ["authserver"]
|
||||
|
||||
################
|
||||
# World Server #
|
||||
################
|
||||
|
||||
FROM runtime AS worldserver
|
||||
|
||||
LABEL description="AzerothCore World Server"
|
||||
|
||||
ENV ACORE_COMPONENT=worldserver
|
||||
# Don't run database migrations. We can leave that up to the db-import container
|
||||
ENV AC_UPDATES_ENABLE_DATABASES=0
|
||||
# This disables user prompts. The console is still active, however
|
||||
ENV AC_DISABLE_INTERACTIVE=1
|
||||
ENV AC_CLOSE_IDLE_CONNECTIONS=0
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER \
|
||||
--from=build \
|
||||
/azerothcore/env/dist/bin/worldserver /azerothcore/env/dist/bin/worldserver
|
||||
|
||||
VOLUME /azerothcore/env/dist/etc
|
||||
|
||||
CMD ["worldserver"]
|
||||
|
||||
#############
|
||||
# DB Import #
|
||||
#############
|
||||
|
||||
FROM runtime AS db-import
|
||||
|
||||
LABEL description="AzerothCore Database Import tool"
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
ENV ACORE_COMPONENT=dbimport
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER \
|
||||
data data
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER \
|
||||
modules modules
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER\
|
||||
--from=build \
|
||||
/azerothcore/env/dist/bin/dbimport /azerothcore/env/dist/bin/dbimport
|
||||
|
||||
CMD [ "/azerothcore/env/dist/bin/dbimport" ]
|
||||
|
||||
###############
|
||||
# Client Data #
|
||||
###############
|
||||
|
||||
FROM skeleton AS client-data
|
||||
|
||||
LABEL description="AzerothCore client-data"
|
||||
|
||||
ENV DATAPATH=/azerothcore/env/dist/data
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y curl unzip && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER apps apps
|
||||
|
||||
VOLUME /azerothcore/env/dist/data
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
CMD ["bash", "-c", "source /azerothcore/apps/installer/includes/functions.sh && inst_download_client_data" ]
|
||||
|
||||
##################
|
||||
# Map Extractors #
|
||||
##################
|
||||
|
||||
FROM runtime AS tools
|
||||
|
||||
LABEL description="AzerothCore Tools"
|
||||
|
||||
WORKDIR /azerothcore/env/dist/
|
||||
|
||||
RUN mkdir -pv /azerothcore/env/dist/Cameras \
|
||||
/azerothcore/env/dist/dbc \
|
||||
/azerothcore/env/dist/maps \
|
||||
/azerothcore/env/dist/mmaps \
|
||||
/azerothcore/env/dist/vmaps
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
|
||||
/azerothcore/env/dist/bin/map_extractor /azerothcore/env/dist/bin/map_extractor
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
|
||||
/azerothcore/env/dist/bin/mmaps_generator /azerothcore/env/dist/bin/mmaps_generator
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
|
||||
/azerothcore/env/dist/bin/vmap4_assembler /azerothcore/env/dist/bin/vmap4_assembler
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
|
||||
/azerothcore/env/dist/bin/vmap4_extractor /azerothcore/env/dist/bin/vmap4_extractor
|
||||
108
apps/docker/Dockerfile.dev-server
Normal file
108
apps/docker/Dockerfile.dev-server
Normal file
@@ -0,0 +1,108 @@
|
||||
#syntax=docker/dockerfile:1.2
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# DEV: Stage used for the development environment
|
||||
# and the locally built services
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:24.04 as dev
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
ARG TZ=Etc/UTC
|
||||
|
||||
LABEL description="AC base image for dev containers"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
ENV DOCKER=1
|
||||
|
||||
# Ensure ac-dev-server can properly pull versions
|
||||
ENV GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=$TZ
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
# Classic install
|
||||
git \
|
||||
clang lldb lld clang-format clang-tidy \
|
||||
make cmake \
|
||||
gcc g++ \
|
||||
libmysqlclient-dev \
|
||||
libssl-dev \
|
||||
libbz2-dev \
|
||||
libreadline-dev \
|
||||
libncurses-dev \
|
||||
mysql-server \
|
||||
libboost-all-dev \
|
||||
# Other
|
||||
curl \
|
||||
unzip \
|
||||
sudo \
|
||||
gdb gdbserver \
|
||||
libtool \
|
||||
build-essential \
|
||||
cmake-data \
|
||||
openssl \
|
||||
google-perftools libgoogle-perftools-dev \
|
||||
libmysql++-dev \
|
||||
ccache \
|
||||
tzdata \
|
||||
# Utility for column command used by dashboard
|
||||
util-linux \
|
||||
# Certificates for downloading client data
|
||||
ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Ensure git will work with the AzerothCore source directory
|
||||
RUN git config --global --add safe.directory /azerothcore
|
||||
|
||||
# change timezone in container
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime \
|
||||
&& echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
|
||||
|
||||
# Create a non-root user
|
||||
RUN userdel --remove ubuntu \
|
||||
&& addgroup --gid "$GROUP_ID" "$DOCKER_USER" \
|
||||
&& adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" \
|
||||
&& passwd -d "$DOCKER_USER" \
|
||||
&& echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# must be created to set the correct permissions on them
|
||||
RUN mkdir -p \
|
||||
/azerothcore/env/dist/bin \
|
||||
/azerothcore/env/dist/data/Cameras \
|
||||
/azerothcore/env/dist/data/dbc \
|
||||
/azerothcore/env/dist/data/maps \
|
||||
/azerothcore/env/dist/data/mmaps \
|
||||
/azerothcore/env/dist/data/vmaps \
|
||||
/azerothcore/env/dist/logs \
|
||||
/azerothcore/env/dist/temp \
|
||||
/azerothcore/env/dist/etc \
|
||||
/azerothcore/var/build/obj
|
||||
|
||||
# Correct permissions for non-root operations
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore /run /opt /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# copy only necessary files for the acore dashboard
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER apps /azerothcore/apps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER bin /azerothcore/bin
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER conf /azerothcore/conf
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER data /azerothcore/data
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER deps /azerothcore/deps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.json /azerothcore/acore.json
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.sh /azerothcore/acore.sh
|
||||
|
||||
# Download deno and make sure the dashboard works
|
||||
RUN bash /azerothcore/acore.sh quit
|
||||
|
||||
WORKDIR /azerothcore
|
||||
41
apps/docker/README.md
Normal file
41
apps/docker/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Docker
|
||||
|
||||
Full documentation is [on our wiki](https://www.azerothcore.org/wiki/install-with-docker#installation)
|
||||
|
||||
## Building
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Ensure that you have docker, docker compose (v2), and the docker buildx command
|
||||
installed.
|
||||
|
||||
It's all bundled with [Docker Desktop](https://docs.docker.com/get-docker/),
|
||||
though if you're using Linux you can install them through your distribution's
|
||||
package manage or by using the [documentation from docker](https://docs.docker.com/engine/install/)
|
||||
|
||||
### Running the Build
|
||||
|
||||
1. Build containers with command
|
||||
|
||||
```console
|
||||
$ docker compose build
|
||||
```
|
||||
|
||||
1. Note that the initial build will take a long time, though subsequent builds should be faster
|
||||
|
||||
2. Start containers with command
|
||||
|
||||
```console
|
||||
$ docker compose up -d
|
||||
# Skip the build step
|
||||
$ docker compose up -d --build
|
||||
```
|
||||
|
||||
1. Note that this command may take a while the first time, for the database import
|
||||
|
||||
3. (on first install) You'll need to attach to the worldserver and create an Admin account
|
||||
|
||||
```console
|
||||
$ docker compose attach ac-worldserver
|
||||
AC> account create admin password 3 -1
|
||||
```
|
||||
216
apps/docker/docker-cmd.sh
Normal file
216
apps/docker/docker-cmd.sh
Normal file
@@ -0,0 +1,216 @@
|
||||
#!/bin/bash
|
||||
|
||||
# TODO(michaeldelago) decide if we need a wrapper like this around docker
|
||||
# commands.
|
||||
#
|
||||
# Running the docker commands should be simple and familiar.
|
||||
# Introducting extra steps through the dashboard can cause issues with people
|
||||
# getting started, especially if they already know docker.
|
||||
#
|
||||
# If a new user knows docker, they will feel (pretty close) to right at home.
|
||||
# If a new user doesn't know docker, it's easy to learn and the knowledge
|
||||
# applies to much more than azerothcore
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
COMPOSE_DOCKER_CLI_BUILD="1"
|
||||
DOCKER_BUILDKIT="1"
|
||||
# BUILDKIT_INLINE_CACHE="1"
|
||||
|
||||
function usage () {
|
||||
cat <<EOF
|
||||
Wrapper for shell scripts around docker
|
||||
|
||||
usage: $(basename $0) ACTION [ ACTION... ] [ ACTION_ARG... ]
|
||||
|
||||
actions:
|
||||
EOF
|
||||
# the `-s` will remove the "#" and properly space the action and description
|
||||
cat <<EOF | column -t -l2 -s'#'
|
||||
> start:app # Start the development worldserver and authserver
|
||||
> start:app:d # Start the development worldserver and authserver in detached mode
|
||||
> build # build the development worldserver and authserver
|
||||
> pull # pull the development worldserver and authserver
|
||||
> build:nocache # build the development worldserver and authserver without cache
|
||||
> clean:build # clean build artifacts from the dev server
|
||||
> client-data # download client data in the dev server
|
||||
> dev:up start # the dev server
|
||||
> dev:build # compile azerothcore using the dev server
|
||||
> dev:dash # execute the dashboard in the dev server container
|
||||
> dev:shell [ ARGS... ] # open a bash shell in the dev server
|
||||
> prod:build # Build the service containers used by acore-docker
|
||||
> prod:pull # Pull the containers used by acore-docker
|
||||
> prod:up # Start the services used by acore-docker
|
||||
> prod:up:d # start the services used by acore-docker in the background
|
||||
> attach SERVICE # attach to a service currently running in docker compose
|
||||
EOF
|
||||
}
|
||||
|
||||
# If no args, just spit usage and exit
|
||||
[[ $# -eq 0 ]] && usage && exit
|
||||
|
||||
# loop through commands passed
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
start:app)
|
||||
set -x
|
||||
docker compose up
|
||||
set +x
|
||||
# pop the head off of the queue of args
|
||||
# After this, the value of $1 is the value of $2
|
||||
shift
|
||||
;;
|
||||
|
||||
start:app:d)
|
||||
set -x
|
||||
docker compose up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build)
|
||||
set -x
|
||||
docker compose build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull)
|
||||
set -x
|
||||
docker compose pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:nocache)
|
||||
set -x
|
||||
docker compose build --no-cache
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
clean:build)
|
||||
set -x
|
||||
# Don't run 'docker buildx prune' since it may "escape" our bubble
|
||||
# and affect other projects on the user's workstation/server
|
||||
cat <<EOF
|
||||
This command has been deprecated, and at the moment does not do anything.
|
||||
If you'd like to build without cache, use the command './acore.sh docker build:nocache' or look into the 'docker buildx prune command'
|
||||
|
||||
> https://docs.docker.com/engine/reference/commandline/buildx_prune/
|
||||
EOF
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
client-data)
|
||||
set -x
|
||||
docker compose up ac-client-data-init
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:up)
|
||||
set -x
|
||||
docker compose --profile dev up ac-dev-server -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:build)
|
||||
set -x
|
||||
docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh compiler build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:dash)
|
||||
set -x
|
||||
docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:shell)
|
||||
set -x
|
||||
docker compose --profile dev up -d ac-dev-server
|
||||
docker compose --profile dev exec ac-dev-server bash ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:prod|prod:build)
|
||||
cat <<EOF
|
||||
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:
|
||||
|
||||
./acore.sh docker build
|
||||
|
||||
The build will continue in 3 seconds
|
||||
EOF
|
||||
sleep 3
|
||||
set -x
|
||||
docker compose build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull:prod|prod:pull)
|
||||
cat <<EOF
|
||||
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:
|
||||
|
||||
./acore.sh docker pull
|
||||
|
||||
The image pull will continue in 3 seconds
|
||||
EOF
|
||||
sleep 3
|
||||
set -x
|
||||
docker compose pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up|start:prod)
|
||||
cat <<EOF
|
||||
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:
|
||||
|
||||
./acore.sh docker start:app
|
||||
|
||||
The containers will start in 3 seconds
|
||||
EOF
|
||||
sleep 3
|
||||
set -x
|
||||
docker compose up
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up:d|start:prod:d)
|
||||
cat <<EOF
|
||||
This command is deprecated and is scheduled to be removed. Please update any scripts or automation accordingly to use the other command:
|
||||
|
||||
./acore.sh docker start:app:d
|
||||
|
||||
The containers will start in 3 seconds
|
||||
EOF
|
||||
sleep 3
|
||||
set -x
|
||||
docker compose up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
attach)
|
||||
SERVICE="$2"
|
||||
set -x
|
||||
docker compose attach "$SERVICE"
|
||||
set +x
|
||||
shift
|
||||
shift # Second to pass the argument
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Unknown or empty arg"
|
||||
usage
|
||||
exit 1
|
||||
esac
|
||||
done
|
||||
54
apps/docker/entrypoint.sh
Normal file
54
apps/docker/entrypoint.sh
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
CONF_DIR="${CONF_DIR:-/azerothcore/env/dist/etc}"
|
||||
LOGS_DIR="${LOGS_DIR:-/azerothcore/env/dist/logs}"
|
||||
|
||||
if ! touch "$CONF_DIR/.write-test" || ! touch "$LOGS_DIR/.write-test"; then
|
||||
cat <<EOF
|
||||
===== WARNING =====
|
||||
The current user doesn't have write permissions for
|
||||
the configuration dir ($CONF_DIR) or logs dir ($LOGS_DIR).
|
||||
It's likely that services will fail due to this.
|
||||
|
||||
This is usually caused by cloning the repository as root,
|
||||
so the files are owned by root (uid 0).
|
||||
|
||||
To resolve this, you can set the ownership of the
|
||||
configuration directory with the command on the host machine.
|
||||
Note that if the files are owned as root, the ownership must
|
||||
be changed as root (hence sudo).
|
||||
|
||||
$ sudo chown -R $(id -u):$(id -g) /path/to$CONF_DIR /path/to$LOGS_DIR
|
||||
|
||||
Alternatively, you can set the DOCKER_USER environment
|
||||
variable (on the host machine) to "root", though this
|
||||
isn't recommended.
|
||||
|
||||
$ DOCKER_USER=root docker-compose up -d
|
||||
====================
|
||||
EOF
|
||||
fi
|
||||
|
||||
[[ -f "$CONF_DIR/.write-test" ]] && rm -f "$CONF_DIR/.write-test"
|
||||
[[ -f "$LOGS_DIR/.write-test" ]] && rm -f "$LOGS_DIR/.write-test"
|
||||
|
||||
# Copy all default config files to env/dist/etc if they don't already exist
|
||||
# -r == recursive
|
||||
# -n == no clobber (don't overwrite)
|
||||
# -v == be verbose
|
||||
cp -rnv /azerothcore/env/ref/etc/* "$CONF_DIR"
|
||||
|
||||
CONF="$CONF_DIR/$ACORE_COMPONENT.conf"
|
||||
CONF_DIST="$CONF_DIR/$ACORE_COMPONENT.conf.dist"
|
||||
|
||||
# Copy the "dist" file to the "conf" if the conf doesn't already exist
|
||||
if [[ -f "$CONF_DIST" ]]; then
|
||||
cp -vn "$CONF_DIST" "$CONF"
|
||||
else
|
||||
touch "$CONF"
|
||||
fi
|
||||
|
||||
echo "Starting $ACORE_COMPONENT..."
|
||||
|
||||
exec "$@"
|
||||
83
apps/extractor/extractor.bat
Normal file
83
apps/extractor/extractor.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO WARNING! when extracting the vmaps extractor will
|
||||
ECHO output the text below, it's intended and not an error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Press 1, 2, 3 or 4 to start extracting or 5 to exit.
|
||||
ECHO 1 - Extract base files (NEEDED) and cameras.
|
||||
ECHO 2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)
|
||||
ECHO 3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)
|
||||
ECHO 4 - Extract all (may take hours)
|
||||
ECHO 5 - EXIT
|
||||
ECHO.
|
||||
SET /P M=Type 1, 2, 3, 4 or 5 then press ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
85
apps/extractor/extractor.sh
Executable file
85
apps/extractor/extractor.sh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/bin/bash
|
||||
function Base {
|
||||
echo "Extract Base"
|
||||
rm -rf dbc maps Cameras
|
||||
./map_extractor
|
||||
Menu
|
||||
}
|
||||
|
||||
function VMaps {
|
||||
echo "Extract VMaps"
|
||||
mkdir -p Buildings vmaps
|
||||
rm -rf Buildings/* vmaps/*
|
||||
./vmap4_extractor
|
||||
./vmap4_assembler Buildings vmaps
|
||||
rmdir -rf Buildings
|
||||
Menu
|
||||
}
|
||||
|
||||
function MMaps {
|
||||
echo "This may take a few hours to complete. Please be patient."
|
||||
mkdir -p mmaps
|
||||
rm -rf mmaps/*
|
||||
./mmaps_generator
|
||||
Menu
|
||||
}
|
||||
|
||||
function All {
|
||||
echo "This may take a few hours to complete. Please be patient."
|
||||
rm -rf dbc maps Cameras
|
||||
mkdir -p Buildings vmaps mmaps
|
||||
rm -rf Buildings/* vmaps/* mmaps/*
|
||||
./map_extractor
|
||||
./vmap4_extractor
|
||||
./vmap4_assembler Buildings vmaps
|
||||
rmdir -rf Buildings
|
||||
./mmaps_generator
|
||||
Menu
|
||||
}
|
||||
|
||||
function Menu {
|
||||
echo ""
|
||||
echo "..............................................."
|
||||
echo "AzerothCore dbc, maps, vmaps, mmaps extractor"
|
||||
echo "..............................................."
|
||||
echo "PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT."
|
||||
echo "..............................................."
|
||||
echo ""
|
||||
echo "WARNING! when extracting the vmaps extractor will"
|
||||
echo "output the text below, it's intended and not an error:"
|
||||
echo ".........................................."
|
||||
echo "Extracting World\Wmo\Band\Final_Stage.wmo"
|
||||
echo "No such file."
|
||||
echo "Couldn't open RootWmo!!!"
|
||||
echo "Done!"
|
||||
echo " .........................................."
|
||||
echo ""
|
||||
echo "Press 1, 2, 3 or 4 to start extracting or 5 to exit."
|
||||
echo "1 - Extract base files (NEEDED) and cameras."
|
||||
echo "2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)"
|
||||
echo "3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)"
|
||||
echo "4 - Extract all (may take hours)"
|
||||
echo "5 - EXIT"
|
||||
echo ""
|
||||
|
||||
read -rp "Type 1, 2, 3, 4 or 5 then press ENTER: " choice
|
||||
|
||||
case $choice in
|
||||
1) Base ;;
|
||||
2) VMaps ;;
|
||||
3) MMaps ;;
|
||||
4) All ;;
|
||||
5) exit 0;;
|
||||
*) echo "Invalid choice."; read -rp "Type 1, 2, 3, 4 or 5 then press ENTER: " choice ;;
|
||||
esac
|
||||
}
|
||||
|
||||
if [ -d "./Data" ] && [ -f "map_extractor" ] && [ -f "vmap4_extractor" ] && [ -f "vmap4_assembler" ] && [ -f "mmaps_generator" ]; then
|
||||
echo "The required files and folder exist in the current directory."
|
||||
chmod +x map_extractor vmap4_extractor vmap4_assembler mmaps_generator
|
||||
Menu
|
||||
else
|
||||
echo "One or more of the required files or folder is missing from the current directory."
|
||||
echo "Place map_extractor vmap4_extractor vmap4_assembler mmaps_generator"
|
||||
echo "In your WoW folder with WoW.exe"
|
||||
fi
|
||||
83
apps/extractor/extractor_es.bat
Normal file
83
apps/extractor/extractor_es.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO ADVERTENCIA: al extraer los vmaps del extractor
|
||||
ECHO la salida del texto de abajo, es intencional y no un error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Pulse 1, 2, 3 o 4 para iniciar la extraccion o 5 para salir.
|
||||
ECHO 1 - Extraer los archivos base (NECESARIOS) y las cámaras.
|
||||
ECHO 2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)
|
||||
ECHO 3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)
|
||||
ECHO 4 - Extraer todo (puede llevar varias horas)
|
||||
ECHO 5 - SALIR
|
||||
ECHO.
|
||||
SET /P M=Escriba 1, 2, 3, 4 o 5 y pulse ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user