commit 4db62b5520
2 .eslintignore Normal file
@@ -0,0 +1,2 @@
dist/
node_modules/
11 .eslintrc.cjs Normal file
@@ -0,0 +1,11 @@
module.exports = {
    env: {
        node: true,
        commonjs: true,
        es2017: true,
    },
    parserOptions: {
        sourceType: 'module',
        ecmaVersion: 2021,
    },
};
1 .github/CODEOWNERS vendored Normal file
@@ -0,0 +1 @@
* @tabarra
50 .github/ISSUE_TEMPLATE/FEATURE_REQUEST.yml vendored Normal file
@@ -0,0 +1,50 @@
---
name: Feature Request
description: Request a feature or enhancement
title: "[FEATURE]: type here a short description"
labels: [enhancement, feature]

body:
  - type: markdown
    attributes:
      value: |
        ## Thank you for filling out this feature/enhancement request
        Please fill out this short template to the best of your ability.
  - type: dropdown
    id: scope
    attributes:
      label: Scope
      description: What part of txAdmin is this feature targeting?
      multiple: true
      options:
        - Web
        - In-Game Menu
        - Developer API
    validations:
      required: true
  - type: textarea
    id: feat-description
    attributes:
      label: Feature Description
      description: Please provide a short and concise description of the feature you are suggesting.
    validations:
      required: true
  - type: textarea
    id: use-case
    attributes:
      label: Use Case
      description: |
        What use-case (a scenario where you want to do "x", but are limited) do
        you have for requesting this feature?
      placeholder: Ex. I wanted to do `x` but was unable to...
  - type: textarea
    id: solution
    attributes:
      label: Proposed Solution
      description: Do you have an idea for a proposed solution? If so, please describe it.
  - type: textarea
    attributes:
      label: Additional Info
      description: |
        Is there any additional information you would like to provide?
      placeholder: Screenshots, logs, etc
30 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG] type here a short description"
labels: ''
assignees: ''

---

**txAdmin/FXServer versions:**
You can see that at the bottom of the txAdmin page, or in the terminal as soon as you start fxserver.

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Additional context**
Add any other context about the problem here, for example whether it's a server issue and which OS fxserver is hosted on.
5 .github/ISSUE_TEMPLATE/config.yml vendored Normal file
@@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: General Support
    url: https://discord.gg/NsXGTszYjK
    about: Please ask general support questions on our Discord!
6 .github/copilot-instructions.md vendored Normal file
@@ -0,0 +1,6 @@
The `!NC` comments in code are tags used by a git pre-commit hook to prevent committing the lines containing them, and are generally used to mark TO-DOs that must be done before committing the changes.
Import `suite, it, expect` from vitest for writing tests, with each method having one `suite()` and its tests defined using `it()`.
Prefer implicit over explicit function return types.
Except for React components, prefer arrow functions.
Prefer using for..of instead of forEach.
Prefer single quotes over double quotes.
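As a hedged illustration of the testing convention described above (not part of this commit), a minimal vitest file following that pattern could look like the sketch below; the `add()` helper is a hypothetical function under test.

```ts
// Illustrative sketch only: one suite() per method, tests defined with it().
import { suite, it, expect } from 'vitest';

const add = (a: number, b: number) => a + b; // hypothetical helper under test

suite('add', () => {
    it('sums two positive numbers', () => {
        expect(add(2, 3)).toBe(5);
    });

    it('handles negative numbers', () => {
        expect(add(-2, 3)).toBe(1);
    });
});
```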
45 .github/workflows/locale-pull-request.yml vendored Normal file
@@ -0,0 +1,45 @@
name: Check Locale PR

on:
  pull_request:
    paths:
      - 'locale/**'

jobs:
  check-locale-pr:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - name: Enforce base branch
        uses: actions/github-script@v7
        with:
          script: |
            // Get the pull request
            const pull_request = await github.rest.pulls.get({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.payload.pull_request.number
            });

            // Check if the base branch is 'main' or 'master'
            if (pull_request.data.base.ref === 'main' || pull_request.data.base.ref === 'master') {
              console.error('Pull request is targeting the main branch. Please target the develop branch instead.');
              process.exit(1);
            }

      - name: Use Node.js 22
        uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install dependencies
        run: npm ci

      - name: Run locale:check
        id: locale-check
        run: npm run locale:check
60 .github/workflows/publish-tagged.yml vendored Normal file
@@ -0,0 +1,60 @@
name: Publish Tagged Build

on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+*"

jobs:
  build:
    name: "Build Changelog & Release Prod"
    runs-on: ubuntu-latest
    permissions:
      contents: write
      id-token: write
      attestations: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Use Node.js 22
        uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Download all modules
        run: npm ci

      # Not truly necessary for the build, but for now the vite config requires it
      - name: Create .env file
        run: |
          echo TXDEV_FXSERVER_PATH=$(pwd)/fxserver > .env
          echo TXDEV_VITE_URL='http://localhost:40122' >> .env

      - name: Build project
        run: |
          npm run build
          cat .github/.cienv >> $GITHUB_ENV

      - name: Compress build output with zip
        run: |
          cd dist
          zip -r ../monitor.zip .
          cd ..
          sha256sum monitor.zip

      - name: Attest build provenance
        id: attest_build_provenance
        uses: actions/attest-build-provenance@v1
        with:
          subject-path: monitor.zip

      - name: Create and Upload Release
        uses: "marvinpinto/action-automatic-releases@v1.2.1"
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          prerelease: ${{ env.TX_IS_PRERELEASE }}
          files: monitor.zip
38 .github/workflows/run-tests.yml vendored Normal file
@@ -0,0 +1,38 @@
name: Run Tests for Workspaces

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - "**"

jobs:
  run-tests:
    name: "Run Unit Testing"
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Use Node.js 22
        uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Download all modules
        run: npm ci

      - name: Create .env file
        run: |
          echo TXDEV_FXSERVER_PATH=$(pwd)/fxserver > .env
          echo TXDEV_VITE_URL='http://localhost:40122' >> .env

      - name: Run Tests
        env:
          CI: true
        run: npm run test --workspaces
82 .gitignore vendored Normal file
@@ -0,0 +1,82 @@
## Custom:
db/*
cache/*
dist/*
.reports/*
license_report/*
.tsc/*
*.ignore.*
/start_*.bat
monitor-*.zip
bundle_size_report.html
.github/.cienv
scripts/**/*.local.*
.runtime/

# IDE Specific
.idea

## Github's default node gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn
yarn.lock
.yarn.installed
.yarn-integrity

# dotenv environment variables file
.env

# next.js build output
.next
1 .husky/.gitignore vendored Normal file
@@ -0,0 +1 @@
_
4 .husky/commit-msg Normal file
@@ -0,0 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

npx --no-install commitlint --edit ""
15 .husky/pre-commit Normal file
@@ -0,0 +1,15 @@
#!/bin/sh

# Rejects commits when the !NC flag is present in the changes
# The !NC flag is used to mark code that should not be committed to the repository
# It's useful to avoid committing debug code, test code, etc.

# Check if the !NC flag is present in the changes
if git diff --staged --unified=0 --no-color | grep '^+' | grep -q '!NC'; then
    echo -e "COMMIT REJECTED: Found the !NC flag in your changes.\nMake sure you didn't accidentally stage something you shouldn't!"
    echo "Flags found:"
    git diff --staged --unified=0 --no-color | grep -C 2 '!NC'
    exit 1
fi

exit 0
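To illustrate the hook above (not part of this commit): any staged addition whose text contains `!NC` is rejected, so temporary debug code is typically tagged like the hypothetical snippet below and must be removed before the commit can go through.

```ts
// Illustrative only: a temporary debug line tagged with !NC so the pre-commit
// hook rejects the commit until the line is deleted.
const queue: string[] = ['alice', 'bob'];
console.log('debug queue state:', queue); // !NC remove before committing
```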
11 .license-reportrc Normal file
@@ -0,0 +1,11 @@
{
    "output": "html",
    "fields": [
        "name",
        "licenseType",
        "link",
        "installedVersion",
        "definedVersion",
        "author"
    ]
}
8 .npm-upgrade.json Normal file
@@ -0,0 +1,8 @@
{
    "ignore": {
        "@types/node": {
            "versions": ">16.9.1",
            "reason": "fixed to fxserver's node version"
        }
    }
}
21 LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019-2025 André Tabarra <maintainer@txadmin.gg>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
87 README.md Normal file
@@ -0,0 +1,87 @@
<p align="center">
	<p align="center">
		<img src="docs/banner.png">
	</p>
	<p align="center">
		In 2019 <b>txAdmin</b> was created with the objective of making FiveM server management accessible to everyone – no matter their skill level! <br/>
		Today, <b>txAdmin</b> is <i>the</i> <b>full-featured</b> web panel & in-game menu to Manage & Monitor your FiveM/RedM Server, in use by over <strong>29,000</strong> servers worldwide at any given time!
	</p>
	<p align="center">
		Join our Discord Server: <a href="https://discord.gg/AFAAXzq"><img src="https://discordapp.com/api/guilds/577993482761928734/widget.png?style=shield"></img></a>
	</p>
	<p align="center">
		<a href="https://zap-hosting.com/txadmin4" target="_blank" rel="noopener">
			<img src="docs/zaphosting.png" alt="zap-hosting"></img>
		</a>
	</p>
</p>

## Main Features
- Recipe-based Server Deployer: create a server in under 60 seconds! ([docs/recipe.md](docs/recipe.md))
- Start/Stop/Restart your server instance or resources
- Full-featured in-game admin menu:
  - Player Mode: NoClip, God, SuperJump
  - Teleport: waypoint, coords and back
  - Vehicle: Spawn, Fix, Delete, Boost
  - Heal: yourself, everyone
  - Send Announcements
  - Reset World Area
  - Show player IDs
  - Player search/sort by distance, ID, name
  - Player interactions: Go To, Bring, Spectate, Freeze
  - Player troll: make drunk, set fire, wild attack
  - Player ban/warn/dm
- Access control:
  - Login via Cfx.re or password
  - Admin permission system ([docs/permissions.md](docs/permissions.md))
  - Action logging
- Discord Integration:
  - Server configurable, persistent, auto-updated status embed
  - Command to whitelist players
  - Command to display player info
- Monitoring:
  - Auto Restart FXServer on crash or hang
  - Server's CPU/RAM consumption
  - Live Console (with log file, command history and search)
  - Server threads performance chart with player count
  - Server Activity Log (connections/disconnections, kills, chat, explosions and [custom commands](docs/custom-server-log.md))
- Player Manager:
  - [Warning system](https://www.youtube.com/watch?v=DeE0-5vtZ4E) & Ban system
  - Whitelist system (Discord member, Discord Role, Approved License, Admin-only)
  - Take notes about players
  - Keep track of players' play and session time
  - Self-contained player database (no MySQL required!)
  - Clean/Optimize the database by removing old players, or bans/warns/whitelists
- Real-time playerlist
- Scheduled restarts with warning announcements and custom events ([docs/events.md](docs/events.md))
- Translated into over 30 languages ([docs/translation.md](docs/translation.md))
- FiveM's Server CFG editor & validator
- Responsive web interface with Dark Mode 😎
- And much more...

Also, check our [Feature Graveyard](docs/feature-graveyard.md) for the features that are no longer among us (😔 RIP).

## Running txAdmin
- Since early 2020, **txAdmin is a component of FXServer, so there is no need to download or install anything**.
- To start txAdmin, simply run FXServer **without** any `+exec server.cfg` launch argument, and FXServer will automatically start txAdmin.
- On first boot, a `txData` directory will be created to store txAdmin files, and you will need to open the URL provided in the console to configure your account and server.

## Configuration & Integrations
- Most configuration can be done inside the txAdmin settings page, but some configs (such as TCP interface & port) are only available through environment variables; please see [docs/env-config.md](docs/env-config.md).
- You can listen to server events broadcasted by txAdmin to allow for custom behavior in your resources; please see [docs/events.md](docs/events.md) (a minimal sketch follows this file).

## Contributing & Development
- All PRs should be based on the develop branch, including translation PRs.
- Before putting effort into any significant PR, make sure to join our Discord and talk to us, since the change you want to make might not have been made for a reason, or there might be some required context.
- If you want to build it or run from source, please check [docs/development.md](docs/development.md).

## License, Credits and Thanks
- This project is licensed under the [MIT License](https://github.com/tabarra/txAdmin/blob/master/LICENSE);
- ["Kick" button icons](https://www.flaticon.com/free-icon/users-avatar_8188385) made by __SeyfDesigner__ from [www.flaticon.com](https://www.flaticon.com);
- Warning Sounds ([1](https://freesound.org/people/Ultranova105/sounds/136756/)/[2](https://freesound.org/people/Ultranova105/sounds/136754/)) made by __Ultranova105__ are licensed under [CC 3.0 BY](http://creativecommons.org/licenses/by/3.0/);
- [Announcement Sound](https://freesound.org/people/IENBA/sounds/545495/) made by __IENBA__ is licensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/);
- [Message Sound](https://freesound.org/people/Divinux/sounds/198414/) made by __Divinux__ is licensed under [CC0 1.0](https://creativecommons.org/publicdomain/zero/1.0/);
- Special thanks to everyone who contributed to this project, especially the very fine Discord folks who provide support for others;
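As a hedged illustration of the events integration mentioned in the README above (not part of this commit): a server-side resource could subscribe to a txAdmin-broadcasted event roughly as sketched below. `AddEventHandler` is the FXServer global; the event name and payload field are assumptions that should be verified against docs/events.md.

```ts
// Sketch only: reacting to a txAdmin server event inside a resource script.
// The event name and 'secondsRemaining' field are assumed; check docs/events.md.
declare function AddEventHandler(eventName: string, handler: (...args: any[]) => void): void;

AddEventHandler('txAdmin:events:scheduledRestart', (data: { secondsRemaining: number }) => {
    if (data.secondsRemaining <= 60) {
        console.log('Restart imminent, saving state...'); // custom behavior goes here
    }
});
```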
25 commitlint.config.cjs Normal file
@@ -0,0 +1,25 @@
const types = [
    'build',
    'chore',
    'ci',
    'docs',
    'feat',
    'fix',
    'perf',
    'refactor',
    'revert',
    'style',
    'test',

    //custom
    'tweak',
    'wip',
    'locale',
];

module.exports = {
    extends: ['@commitlint/config-conventional'],
    rules: {
        'type-enum': [2, 'always', types],
    },
};
83 core/.eslintrc.cjs Normal file
@@ -0,0 +1,83 @@
module.exports = {
    env: {
        node: true,
        commonjs: true,
        es2017: true,
    },
    globals: {
        GlobalData: 'writable',
        ExecuteCommand: 'readonly',
        GetConvar: 'readonly',
        GetCurrentResourceName: 'readonly',
        GetPasswordHash: 'readonly',
        GetResourceMetadata: 'readonly',
        GetResourcePath: 'readonly',
        IsDuplicityVersion: 'readonly',
        VerifyPasswordHash: 'readonly',
    },
    extends: [],
    ignorePatterns: [
        '*.ignore.*',
    ],
    rules: {
        //Review these
        'no-control-regex': 'off',
        'no-empty': ['error', { allowEmptyCatch: true }],
        'no-prototype-builtins': 'off',
        'no-unused-vars': ['warn', {
            varsIgnorePattern: '^_\\w*',
            vars: 'all',
            args: 'none', //diff
            ignoreRestSiblings: true,
        }],

        //From Airbnb, fixed them already
        'keyword-spacing': ['error', {
            before: true,
            after: true,
            overrides: {
                return: { after: true },
                throw: { after: true },
                case: { after: true },
            },
        }],
        'space-before-blocks': 'error',
        quotes: ['error', 'single', { allowTemplateLiterals: true }],
        semi: ['error', 'always'],
        'no-trailing-spaces': ['error', {
            skipBlankLines: false,
            ignoreComments: false,
        }],
        'space-infix-ops': 'error',
        'comma-dangle': ['error', {
            arrays: 'always-multiline',
            objects: 'always-multiline',
            imports: 'always-multiline',
            exports: 'always-multiline',
            functions: 'always-multiline',
        }],
        'padded-blocks': ['error', {
            blocks: 'never',
            classes: 'never',
            switches: 'never',
        }, {
            allowSingleLineBlocks: true,
        }],
        'comma-spacing': ['error', { before: false, after: true }],
        'arrow-spacing': ['error', { before: true, after: true }],
        'arrow-parens': ['error', 'always'],
        'operator-linebreak': ['error', 'before', { overrides: { '=': 'none' } }],

        // Custom
        indent: ['error', 4],

        // FIXME: re-enable it somewhen
        'linebreak-style': 'off',
        'spaced-comment': 'off',
        'object-curly-spacing': 'off', //maybe keep this disabled?
        'arrow-body-style': 'off', //maybe keep this disabled?

        // Check it:
        // 'object-curly-newline': ['error', 'never'],
    },
};
48 core/.npm-upgrade.json Normal file
@@ -0,0 +1,48 @@
{
    "ignore": {
        "open": {
            "versions": ">7.1.0",
            "reason": "Doesn't work when powershell is not in PATH or something like that."
        },
        "fs-extra": {
            "versions": ">9",
            "reason": "recipe errors on some OSs"
        },
        "execa": {
            "versions": "^5",
            "reason": "following @sindresorhus/windows-release"
        },
        "windows-release": {
            "versions": "^5.1.1",
            "reason": "inlined to transform it into async"
        },
        "nanoid": {
            "versions": ">4",
            "reason": "dropped support for node 16"
        },
        "got": {
            "versions": ">13",
            "reason": "dropped support for node 16"
        },
        "jose": {
            "versions": ">4",
            "reason": "dropped support for node 16"
        },
        "lowdb": {
            "versions": ">6",
            "reason": "dropped support for node 16"
        },
        "slug": {
            "versions": ">8",
            "reason": "dropped support for node 16"
        },
        "boxen": {
            "versions": ">7",
            "reason": "dropped support for node 16"
        },
        "discord.js": {
            "versions": ">14.11.0",
            "reason": "undici sub-dependency dropped support for node 16"
        }
    }
}
48 core/boot/checkPreRelease.ts Normal file
@@ -0,0 +1,48 @@
import consoleFactory from '@lib/console';
import fatalError from '@lib/fatalError';
import { chalkInversePad, msToDuration } from '@lib/misc';
const console = consoleFactory('ATTENTION');


//@ts-ignore esbuild will replace TX_PRERELEASE_EXPIRATION with a string
const PRERELEASE_EXPIRATION = parseInt(TX_PRERELEASE_EXPIRATION);

const expiredError = [
    'This pre-release version has expired, please update your txAdmin.',
    'Bye bye 👋',
];

const printExpirationBanner = (timeUntilExpiration: number) => {
    const timeLeft = msToDuration(timeUntilExpiration);
    console.error('This is a pre-release version of txAdmin!');
    console.error('This build is meant to be used by txAdmin beta testers.');
    console.error('txAdmin will automatically shut down when this pre-release expires.');
    console.error(`Time until expiration: ${chalkInversePad(timeLeft)}.`);
    console.error('For more information: https://discord.gg/txAdmin.');
};

const cronCheckExpiration = () => {
    if (isNaN(PRERELEASE_EXPIRATION) || PRERELEASE_EXPIRATION === 0) return;

    const timeUntilExpiration = PRERELEASE_EXPIRATION - Date.now();
    if (timeUntilExpiration < 0) {
        fatalError.Boot(11, expiredError);
    } else if (timeUntilExpiration < 24 * 60 * 60 * 1000) {
        printExpirationBanner(timeUntilExpiration);
    }
};

export default () => {
    if (isNaN(PRERELEASE_EXPIRATION) || PRERELEASE_EXPIRATION === 0) return;

    const timeUntilExpiration = PRERELEASE_EXPIRATION - Date.now();
    if (timeUntilExpiration < 0) {
        fatalError.Boot(10, expiredError);
    }

    //First warning
    printExpirationBanner(timeUntilExpiration);

    //Check every 15 minutes
    setInterval(cronCheckExpiration, 15 * 60 * 1000);
};
116 core/boot/getHostVars.ts Normal file
@@ -0,0 +1,116 @@
import path from 'node:path';
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
import fatalError from '@lib/fatalError';
import consts from '@shared/consts';


/**
 * Schemas for the TXHOST_ env variables
 */
export const hostEnvVarSchemas = {
    //General
    API_TOKEN: z.union([
        z.literal('disabled'),
        z.string().regex(
            /^[A-Za-z0-9_-]{16,48}$/,
            'Token must be alphanumeric, underscores or dashes only, and between 16 and 48 characters long.'
        ),
    ]),
    DATA_PATH: z.string().min(1).refine(
        (val) => path.isAbsolute(val),
        'DATA_PATH must be an absolute path'
    ),
    GAME_NAME: z.enum(
        ['fivem', 'redm'],
        { message: 'GAME_NAME must be either "fivem", "redm", or undefined' }
    ),
    MAX_SLOTS: z.coerce.number().int().positive(),
    QUIET_MODE: z.preprocess((val) => val === 'true', z.boolean()),

    //Networking
    TXA_URL: z.string().url(),
    TXA_PORT: z.coerce.number().int().positive().refine(
        (val) => val !== 30120,
        'TXA_PORT cannot be 30120'
    ),
    FXS_PORT: z.coerce.number().int().positive().refine(
        (val) => val < 40120 || val > 40150,
        'FXS_PORT cannot be between 40120 and 40150'
    ),
    INTERFACE: z.string().ip({ version: "v4" }),

    //Provider
    PROVIDER_NAME: z.string()
        .regex(
            /^[a-zA-Z0-9_.\- ]{2,16}$/,
            'Provider name must be between 2 and 16 characters long and can only contain letters, numbers, underscores, periods, hyphens and spaces.'
        )
        .refine(
            x => !/[_.\- ]{2,}/.test(x),
            'Provider name cannot contain consecutive special characters.'
        )
        .refine(
            x => /^[a-zA-Z0-9].*[a-zA-Z0-9]$/.test(x),
            'Provider name must start and end with a letter or number.'
        ),
    PROVIDER_LOGO: z.string().url(),

    //Defaults (no reason to coerce or check, except the cfxkey)
    DEFAULT_DBHOST: z.string(),
    DEFAULT_DBPORT: z.string(),
    DEFAULT_DBUSER: z.string(),
    DEFAULT_DBPASS: z.string(),
    DEFAULT_DBNAME: z.string(),
    DEFAULT_ACCOUNT: z.string().refine(
        (val) => {
            const parts = val.split(':').length;
            return parts === 2 || parts === 3;
        },
        'The account needs to be in the username:fivemId or username:fivemId:bcrypt format',
    ),
    DEFAULT_CFXKEY: z.string().refine(
        //apparently zap still uses the old format?
        (val) => consts.regexSvLicenseNew.test(val) || consts.regexSvLicenseOld.test(val),
        'The key needs to be in the cfxk_xxxxxxxxxxxxxxxxxxxx_yyyyy format'
    ),
} as const;

export type HostEnvVars = {
    [K in keyof typeof hostEnvVarSchemas]: z.infer<typeof hostEnvVarSchemas[K]> | undefined;
}


/**
 * Parses the TXHOST_ env variables
 */
export const getHostVars = () => {
    const txHostEnv: any = {};
    for (const partialKey of Object.keys(hostEnvVarSchemas)) {
        const keySchema = hostEnvVarSchemas[partialKey as keyof HostEnvVars];
        const fullKey = `TXHOST_` + partialKey;
        const value = process.env[fullKey];
        if (value === undefined || value === '') continue;
        if (/^['"]|['"]$/.test(value)) {
            fatalError.GlobalData(20, [
                'Invalid value for a TXHOST environment variable.',
                'The value is surrounded by quotes (" or \'), and you must remove them.',
                'This is likely a mistake in how you declared this env var.',
                ['Key', fullKey],
                ['Value', String(value)],
                'For more information: https://aka.cfx.re/txadmin-env-config',
            ]);
        }
        const res = keySchema.safeParse(value);
        if (!res.success) {
            fatalError.GlobalData(20, [
                'Invalid value for a TXHOST environment variable.',
                ['Key', fullKey],
                ['Value', String(value)],
                'For more information: https://aka.cfx.re/txadmin-env-config',
            ], fromZodError(res.error, { prefix: null }));
        }
        txHostEnv[partialKey] = res.data;
    }
    return txHostEnv as HostEnvVars;
}
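As a hedged, self-contained sketch of the coercion pattern used in the schemas above (not part of the commit): environment variables always arrive as strings, so `z.coerce` turns them into validated numbers before they reach the rest of the boot code. The variable name below mirrors `MAX_SLOTS` purely for illustration.

```ts
// Minimal sketch, assuming only that zod is installed.
import { z } from 'zod';

const maxSlotsSchema = z.coerce.number().int().positive();

process.env.TXHOST_MAX_SLOTS = '48';                  // env vars are always strings
const ok = maxSlotsSchema.safeParse(process.env.TXHOST_MAX_SLOTS);
console.log(ok.success && ok.data);                   // 48 (coerced to a number)

const bad = maxSlotsSchema.safeParse('not-a-number');
console.log(bad.success);                             // false -> would trigger the fatalError path above
```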
85 core/boot/getNativeVars.ts Normal file
@@ -0,0 +1,85 @@
import path from 'node:path';


//Helper function to get convars WITHOUT a fallback value
const undefinedKey = 'UNDEFINED:CONVAR:' + Math.random().toString(36).substring(2, 15);
const getConvarString = (convarName: string) => {
    const cvar = GetConvar(convarName, undefinedKey);
    return (cvar !== undefinedKey) ? cvar.trim() : undefined;
};

//Helper to clean up the resource native responses which apparently might be 'null'
const cleanNativeResp = (resp: any) => {
    return (typeof resp === 'string' && resp !== 'null' && resp.length) ? resp : undefined;
};

//Warning for convar usage
let anyWarnSent = false;
const replacedConvarWarning = (convarName: string, newName: string) => {
    console.warn(`WARNING: The '${convarName}' ConVar is deprecated and will be removed in the next update.`);
    console.warn(`         Please use the '${newName}' environment variable instead.`);
    anyWarnSent = true;
};


/**
 * Native variables that are required for the boot process.
 * This file is not supposed to validate or default any of the values.
 */
export const getNativeVars = (ignoreDeprecatedConfigs: boolean) => {
    //FXServer
    const fxsVersion = getConvarString('version');
    const fxsCitizenRoot = getConvarString('citizen_root');

    //Resource
    const resourceName = cleanNativeResp(GetCurrentResourceName());
    if (!resourceName) throw new Error('GetCurrentResourceName() failed');
    const txaResourceVersion = cleanNativeResp(GetResourceMetadata(resourceName, 'version', 0));
    const txaResourcePath = cleanNativeResp(GetResourcePath(resourceName));

    //Profile Convar - with warning
    const txAdminProfile = getConvarString('serverProfile');
    if (txAdminProfile) {
        console.warn(`WARNING: The 'serverProfile' ConVar is deprecated and will be removed in a future update.`);
        console.warn(`         To create multiple servers, set up a different TXHOST_DATA_PATH instead.`);
        anyWarnSent = true;
    }

    //Convars replaced by TXHOST_* env vars
    let txDataPath, txAdminPort, txAdminInterface;
    if (!ignoreDeprecatedConfigs) {
        txDataPath = getConvarString('txDataPath');
        if (txDataPath) {
            replacedConvarWarning('txDataPath', 'TXHOST_DATA_PATH');
            //As it used to support relative paths, we need to resolve it
            if (!path.isAbsolute(txDataPath)) {
                txDataPath = path.resolve(txDataPath);
                console.error(`WARNING: The 'txDataPath' ConVar is not an absolute path, please update it to:`);
                console.error(`         TXHOST_DATA_PATH=${txDataPath}`);
            }
        }
        txAdminPort = getConvarString('txAdminPort');
        if (txAdminPort) replacedConvarWarning('txAdminPort', 'TXHOST_TXA_PORT');
        txAdminInterface = getConvarString('txAdminInterface');
        if (txAdminInterface) replacedConvarWarning('txAdminInterface', 'TXHOST_INTERFACE');
    }

    if (anyWarnSent) {
        console.warn(`WARNING: For more information: https://aka.cfx.re/txadmin-env-config`);
    }

    //Final object
    return {
        fxsVersion,
        fxsCitizenRoot,
        resourceName,
        txaResourceVersion,
        txaResourcePath,

        //custom vars
        txAdminProfile,
        txDataPath,
        txAdminPort,
        txAdminInterface,
    };
};
73 core/boot/getZapVars.ts Normal file
@@ -0,0 +1,73 @@
import fs from 'node:fs';


//Keeping the typo mostly so I can remember the old usage types
type ZapConfigVars = {
    providerName: string;
    forceInterface: string | undefined;
    forceFXServerPort: number | undefined;
    txAdminPort: number | undefined;
    loginPageLogo: string | undefined;
    defaultMasterAccount?: {
        name: string,
        password_hash: string
    };
    deployerDefaults: {
        license?: string,
        maxClients?: number,
        mysqlHost?: string,
        mysqlPort?: string,
        mysqlUser?: string,
        mysqlPassword?: string,
        mysqlDatabase?: string,
    };
}

const allowType = (type: 'string' | 'number', value: any) => typeof value === type ? value : undefined;


/**
 * Gets & parses the txAdminZapConfig.json variables
 */
export const getZapVars = (zapCfgFilePath: string): ZapConfigVars | undefined => {
    if (!fs.existsSync(zapCfgFilePath)) return;
    console.warn(`WARNING: The 'txAdminZapConfig.json' file has been deprecated and this feature will be removed in the next update.`);
    console.warn(`         Please use the 'TXHOST_' environment variables instead.`);
    console.warn(`         For more information: https://aka.cfx.re/txadmin-env-config.`);
    const cfgFileData = JSON.parse(fs.readFileSync(zapCfgFilePath, 'utf8'));

    const zapVars: ZapConfigVars = {
        providerName: 'ZAP-Hosting',

        forceInterface: allowType('string', cfgFileData.interface),
        forceFXServerPort: allowType('number', cfgFileData.fxServerPort),
        txAdminPort: allowType('number', cfgFileData.txAdminPort),
        loginPageLogo: allowType('string', cfgFileData.loginPageLogo),

        deployerDefaults: {
            license: allowType('string', cfgFileData.defaults.license),
            maxClients: allowType('number', cfgFileData.defaults.maxClients),
            mysqlHost: allowType('string', cfgFileData.defaults.mysqlHost),
            mysqlUser: allowType('string', cfgFileData.defaults.mysqlUser),
            mysqlPassword: allowType('string', cfgFileData.defaults.mysqlPassword),
            mysqlDatabase: allowType('string', cfgFileData.defaults.mysqlDatabase),
        },
    }

    //Port is a special case because the cfg is likely int, but we want string
    if (typeof cfgFileData.defaults.mysqlPort === 'string') {
        zapVars.deployerDefaults.mysqlPort = cfgFileData.defaults.mysqlPort;
    } else if (typeof cfgFileData.defaults.mysqlPort === 'number') {
        zapVars.deployerDefaults.mysqlPort = String(cfgFileData.defaults.mysqlPort);
    }

    //Validation is done in the globalData file
    if (cfgFileData.customer) {
        zapVars.defaultMasterAccount = {
            name: allowType('string', cfgFileData.customer.name),
            password_hash: allowType('string', cfgFileData.customer.password_hash),
        };
    }

    return zapVars;
}
52 core/boot/globalPlaceholder.ts Normal file
@@ -0,0 +1,52 @@
import { txDevEnv } from "@core/globalData";
import consoleFactory from "@lib/console";
import fatalError from "@lib/fatalError";
const console = consoleFactory('GlobalPlaceholder');

//Messages
const MSG_VIOLATION = 'Global Proxy Access Violation!';
const MSG_BOOT_FAIL = 'Failed to boot due to Module Race Condition.';
const MSG_CONTACT_DEV = 'This error should never happen, please report it to the developers.';
const MSG_ERR_PARTIAL = 'Attempted to access txCore before it was initialized!';


/**
 * Returns a Proxy that will throw a fatalError when accessing an uninitialized property
 */
export const getCoreProxy = (refSrc: any) => {
    return new Proxy(refSrc, {
        get: function (target, prop) {
            // if (!txDevEnv.ENABLED && Reflect.has(target, prop)) {
            //     if (console.isVerbose) {
            //         console.majorMultilineError([
            //             MSG_VIOLATION,
            //             MSG_CONTACT_DEV,
            //             `Getter for ${String(prop)}`,
            //         ]);
            //     }
            //     return Reflect.get(target, prop).deref();
            // }
            fatalError.Boot(
                22,
                [
                    MSG_BOOT_FAIL,
                    MSG_CONTACT_DEV,
                    ['Getter for', String(prop)],
                ],
                new Error(MSG_ERR_PARTIAL)
            );
        },
        set: function (target, prop, value) {
            fatalError.Boot(
                23,
                [
                    MSG_BOOT_FAIL,
                    MSG_CONTACT_DEV,
                    ['Setter for', String(prop)],
                ],
                new Error(MSG_ERR_PARTIAL)
            );
            return true;
        }
    });
}
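The proxy above guards the global txCore reference during boot; as a hedged, generic sketch of the same "placeholder proxy" pattern (standalone names, not txAdmin's actual wiring or error codes):

```ts
// Minimal sketch: any access before the real object replaces the placeholder fails loudly.
type Core = { webServer?: { isListening: boolean } };

const makePlaceholder = <T extends object>(name: string): T =>
    new Proxy({} as T, {
        get(_target, prop) {
            throw new Error(`${name}.${String(prop)} accessed before initialization`);
        },
        set(_target, prop) {
            throw new Error(`${name}.${String(prop)} assigned before initialization`);
        },
    });

let core: Core = makePlaceholder<Core>('core');
// core.webServer; // would throw: accessed before initialization
core = { webServer: { isListening: true } }; // the real object is swapped in at boot
console.log(core.webServer?.isListening); // true
```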
67 core/boot/setup.ts Normal file
@@ -0,0 +1,67 @@
import path from 'node:path';
import fs from 'node:fs';

import fatalError from '@lib/fatalError';
import { txEnv } from '@core/globalData';
import ConfigStore from '@modules/ConfigStore';
import { chalkInversePad } from '@lib/misc';


/**
 * Ensure the profile subfolders exist
 */
export const ensureProfileStructure = () => {
    const dataPath = path.join(txEnv.profilePath, 'data');
    if (!fs.existsSync(dataPath)) {
        fs.mkdirSync(dataPath);
    }

    const logsPath = path.join(txEnv.profilePath, 'logs');
    if (!fs.existsSync(logsPath)) {
        fs.mkdirSync(logsPath);
    }
}


/**
 * Setup the profile folder structure
 */
export const setupProfile = () => {
    //Create new profile folder
    try {
        fs.mkdirSync(txEnv.profilePath);
        const configStructure = ConfigStore.getEmptyConfigFile();
        fs.writeFileSync(
            path.join(txEnv.profilePath, 'config.json'),
            JSON.stringify(configStructure, null, 2)
        );
        ensureProfileStructure();
    } catch (error) {
        fatalError.Boot(4, [
            'Failed to set up data folder structure.',
            ['Path', txEnv.profilePath],
        ], error);
    }
    console.log(`Server data will be saved in ${chalkInversePad(txEnv.profilePath)}`);

    //Saving start.bat (yes, I also wish this didn't exist)
    if (txEnv.isWindows && txEnv.profileName !== 'default') {
        const batFilename = `start_${txEnv.fxsVersion}_${txEnv.profileName}.bat`;
        try {
            const fxsPath = path.join(txEnv.fxsPath, 'FXServer.exe');
            const batLines = [
                //TODO: add note to not add any server convars in here
                `@echo off`,
                `"${fxsPath}" +set serverProfile "${txEnv.profileName}"`,
                `pause`
            ];
            const batFolder = path.resolve(txEnv.fxsPath, '..');
            const batPath = path.join(batFolder, batFilename);
            fs.writeFileSync(batPath, batLines.join('\r\n'));
            console.ok(`You can use ${chalkInversePad(batPath)} to start this profile.`);
        } catch (error) {
            console.warn(`Failed to create '${batFilename}' with error:`);
            console.dir(error);
        }
    }
};
37 core/boot/setupProcessHandlers.ts Normal file
@@ -0,0 +1,37 @@
import { txDevEnv } from "@core/globalData";
import consoleFactory from "@lib/console";
const console = consoleFactory('ProcessHandlers');


export default function setupProcessHandlers() {
    //Handle any stdio error
    process.stdin.on('error', (data) => { });
    process.stdout.on('error', (data) => { });
    process.stderr.on('error', (data) => { });

    //Handling warnings (ignoring some)
    Error.stackTraceLimit = 25;
    process.removeAllListeners('warning'); //FIXME: this causes errors in Bun
    process.on('warning', (warning) => {
        //totally ignoring the warning, we know this is bad and shouldn't happen
        if (warning.name === 'UnhandledPromiseRejectionWarning') return;

        if (warning.name !== 'ExperimentalWarning' || txDevEnv.ENABLED) {
            console.verbose.dir(warning, { multilineError: true });
        }
    });

    //Handle "the unexpected"
    process.on('unhandledRejection', (err: Error) => {
        //We are handling this inside the DiscordBot component
        if (err.message === 'Used disallowed intents') return;

        console.error('Ohh nooooo - unhandledRejection');
        console.dir(err);
    });
    process.on('uncaughtException', function (err: Error) {
        console.error('Ohh nooooo - uncaughtException');
        console.error(err.message);
        console.dir(err.stack);
    });
}
193 core/boot/startReadyWatcher.ts Normal file
@@ -0,0 +1,193 @@
import boxen, { type Options as BoxenOptions } from 'boxen';
import chalk from 'chalk';
import open from 'open';
import { shuffle } from 'd3-array';
import { z } from 'zod';

import got from '@lib/got';
import getOsDistro from '@lib/host/getOsDistro.js';
import { txDevEnv, txEnv, txHostConfig } from '@core/globalData';
import consoleFactory from '@lib/console';
import { addLocalIpAddress } from '@lib/host/isIpAddressLocal';
import { chalkInversePad } from '@lib/misc';
const console = consoleFactory();


const getPublicIp = async () => {
    const zIpValidator = z.string().ip();
    const reqOptions = {
        timeout: { request: 2000 },
    };
    const httpGetter = async (url: string, jsonPath: string) => {
        const res = await got(url, reqOptions).json();
        return zIpValidator.parse((res as any)[jsonPath]);
    };

    const allApis = shuffle([
        ['https://api.ipify.org?format=json', 'ip'],
        ['https://api.myip.com', 'ip'],
        ['https://ipv4.jsonip.com/', 'ip'],
        ['https://api.my-ip.io/v2/ip.json', 'ip'],
        ['https://www.l2.io/ip.json', 'ip'],
    ]);
    for await (const [url, jsonPath] of allApis) {
        try {
            return await httpGetter(url, jsonPath);
        } catch (error) { }
    }
    return false;
};

const getOSMessage = async () => {
    const serverMessage = [
        `To be able to access txAdmin from the internet open port ${txHostConfig.txaPort}`,
        'on your OS Firewall as well as in the hosting company.',
    ];
    const winWorkstationMessage = [
        '[!] Home-hosting fxserver is not recommended [!]',
        'You need to open the fxserver port (usually 30120) on Windows Firewall',
        'and set up port forwarding on your router so other players can access it.',
    ];
    if (txEnv.displayAds) {
        winWorkstationMessage.push('We recommend renting a server from ' + chalk.inverse(' https://zap-hosting.com/txAdmin ') + '.');
    }

    //FIXME: use si.osInfo() instead
    const distro = await getOsDistro();
    return (distro && distro.includes('Linux') || distro.includes('Server'))
        ? serverMessage
        : winWorkstationMessage;
};

const awaitHttp = new Promise((resolve, reject) => {
    const tickLimit = 100; //if over 15 seconds
    let counter = 0;
    let interval: NodeJS.Timeout;
    const check = () => {
        counter++;
        if (txCore.webServer && txCore.webServer.isListening && txCore.webServer.isServing) {
            clearInterval(interval);
            resolve(true);
        } else if (counter == tickLimit) {
            clearInterval(interval);
            interval = setInterval(check, 2500);
        } else if (counter > tickLimit) {
            console.warn('The WebServer is taking too long to start:', {
                module: !!txCore.webServer,
                listening: txCore?.webServer?.isListening,
                serving: txCore?.webServer?.isServing,
            });
        }
    };
    interval = setInterval(check, 150);
});

const awaitMasterPin = new Promise((resolve, reject) => {
    const tickLimit = 100; //if over 15 seconds
    let counter = 0;
    let interval: NodeJS.Timeout;
    const check = () => {
        counter++;
        if (txCore.adminStore && txCore.adminStore.admins !== null) {
            clearInterval(interval);
            const pin = (txCore.adminStore.admins === false) ? txCore.adminStore.addMasterPin : false;
            resolve(pin);
        } else if (counter == tickLimit) {
            clearInterval(interval);
            interval = setInterval(check, 2500);
        } else if (counter > tickLimit) {
            console.warn('The AdminStore is taking too long to start:', {
                module: !!txCore.adminStore,
                admins: txCore?.adminStore?.admins === null ? 'null' : 'not null',
            });
        }
    };
    interval = setInterval(check, 150);
});

const awaitDatabase = new Promise((resolve, reject) => {
    const tickLimit = 100; //if over 15 seconds
    let counter = 0;
    let interval: NodeJS.Timeout;
    const check = () => {
        counter++;
        if (txCore.database && txCore.database.isReady) {
            clearInterval(interval);
            resolve(true);
        } else if (counter == tickLimit) {
            clearInterval(interval);
            interval = setInterval(check, 2500);
        } else if (counter > tickLimit) {
            console.warn('The Database is taking too long to start:', {
                module: !!txCore.database,
                ready: !!txCore?.database?.isReady,
            });
        }
    };
    interval = setInterval(check, 150);
});


export const startReadyWatcher = async (cb: () => void) => {
    const [publicIpResp, msgRes, adminPinRes] = await Promise.allSettled([
        getPublicIp(),
        getOSMessage(),
        awaitMasterPin as Promise<undefined | string | false>,
        awaitHttp,
        awaitDatabase,
    ]);

    //Addresses
    let detectedUrls;
    if (txHostConfig.netInterface && txHostConfig.netInterface !== '0.0.0.0') {
        detectedUrls = [txHostConfig.netInterface];
    } else {
        detectedUrls = [
            (txEnv.isWindows) ? 'localhost' : 'your-public-ip',
        ];
        if ('value' in publicIpResp && publicIpResp.value) {
            detectedUrls.push(publicIpResp.value);
            addLocalIpAddress(publicIpResp.value);
        }
    }
    const bannerUrls = txHostConfig.txaUrl
        ? [txHostConfig.txaUrl]
        : detectedUrls.map((addr) => `http://${addr}:${txHostConfig.txaPort}/`);

    //Admin PIN
    const adminMasterPin = 'value' in adminPinRes && adminPinRes.value ? adminPinRes.value : false;
    const adminPinLines = !adminMasterPin ? [] : [
        '',
        'Use the PIN below to register:',
        chalk.inverse(` ${adminMasterPin} `),
    ];

    //Printing stuff
    const boxOptions = {
        padding: 1,
        margin: 1,
        align: 'center',
        borderStyle: 'bold',
        borderColor: 'cyan',
    } satisfies BoxenOptions;
    const boxLines = [
        'All ready! Please access:',
        ...bannerUrls.map(chalkInversePad),
        ...adminPinLines,
    ];
    console.multiline(boxen(boxLines.join('\n'), boxOptions), chalk.bgGreen);
    if (!txDevEnv.ENABLED && !txHostConfig.netInterface && 'value' in msgRes && msgRes.value) {
        console.multiline(msgRes.value, chalk.bgBlue);
    }

    //Opening page
    if (txEnv.isWindows && adminMasterPin && bannerUrls[0]) {
        const linkUrl = new URL(bannerUrls[0]);
        linkUrl.pathname = '/addMaster/pin';
        linkUrl.hash = adminMasterPin;
        open(linkUrl.href);
    }

    //Callback
    cb();
};
216
core/deployer/index.js
Normal file
216
core/deployer/index.js
Normal file
|
@ -0,0 +1,216 @@
|
||||||
|
const modulename = 'Deployer';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import fse from 'fs-extra';
|
||||||
|
import open from 'open';
|
||||||
|
import getOsDistro from '@lib/host/getOsDistro.js';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import recipeEngine from './recipeEngine.js';
|
||||||
|
import consoleFactory from '@lib/console.js';
|
||||||
|
import recipeParser from './recipeParser.js';
|
||||||
|
import { getTimeHms } from '@lib/misc.js';
|
||||||
|
import { makeTemplateRecipe } from './utils.js';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Constants
|
||||||
|
export const RECIPE_DEPLOYER_VERSION = 3;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The deployer class is responsible for running the recipe and handling status and errors
|
||||||
|
*/
|
||||||
|
export class Deployer {
|
||||||
|
/**
|
||||||
|
* @param {string|false} originalRecipe
|
||||||
|
* @param {string} deployPath
|
||||||
|
* @param {boolean} isTrustedSource
|
||||||
|
* @param {object} customMetaData
|
||||||
|
*/
|
||||||
|
constructor(originalRecipe, deploymentID, deployPath, isTrustedSource, customMetaData = {}) {
|
||||||
|
console.log('Deployer instance ready.');
|
||||||
|
|
||||||
|
//Setup variables
|
||||||
|
this.step = 'review'; //FIXME: transform into an enum
|
||||||
|
this.deployFailed = false;
|
||||||
|
this.deployPath = deployPath;
|
||||||
|
this.isTrustedSource = isTrustedSource;
|
||||||
|
this.originalRecipe = originalRecipe;
|
||||||
|
this.deploymentID = deploymentID;
|
||||||
|
this.progress = 0;
|
||||||
|
this.serverName = customMetaData.serverName || txConfig.general.serverName || '';
|
||||||
|
this.logLines = [];
|
||||||
|
|
||||||
|
//Load recipe
|
||||||
|
const impRecipe = (originalRecipe !== false)
|
||||||
|
? originalRecipe
|
||||||
|
: makeTemplateRecipe(customMetaData.serverName, customMetaData.author);
|
||||||
|
try {
|
||||||
|
this.recipe = recipeParser(impRecipe);
|
||||||
|
} catch (error) {
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw new Error(`Recipe Error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Dumb helpers - don't care enough to make this less bad
|
||||||
|
customLog(str) {
|
||||||
|
this.logLines.push(`[${getTimeHms()}] ${str}`);
|
||||||
|
console.log(str);
|
||||||
|
}
|
||||||
|
customLogError(str) {
|
||||||
|
this.logLines.push(`[${getTimeHms()}] ${str}`);
|
||||||
|
console.error(str);
|
||||||
|
}
|
||||||
|
getDeployerLog() {
|
||||||
|
return this.logLines.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Confirms the recipe and goes to the input stage
|
||||||
|
* @param {string} userRecipe
|
||||||
|
*/
|
||||||
|
async confirmRecipe(userRecipe) {
|
||||||
|
if (this.step !== 'review') throw new Error('expected review step');
|
||||||
|
|
||||||
|
//Parse/set recipe
|
||||||
|
try {
|
||||||
|
this.recipe = recipeParser(userRecipe);
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Cannot start() deployer due to a Recipe Error: ${error.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Ensure deployment path
|
||||||
|
try {
|
||||||
|
await fse.ensureDir(this.deployPath);
|
||||||
|
} catch (error) {
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw new Error(`Failed to create ${this.deployPath} with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.step = 'input';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the recipe variables for the deployer run step
|
||||||
|
*/
|
||||||
|
getRecipeVars() {
|
||||||
|
if (this.step !== 'input') throw new Error('expected input step');
|
||||||
|
return cloneDeep(this.recipe.variables);
|
||||||
|
//TODO: maybe use Object.keys to build { varname: {type: 'string'} }?
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Starts the deployment process
|
||||||
|
* @param {string} userInputs
|
||||||
|
*/
|
||||||
|
start(userInputs) {
|
||||||
|
if (this.step !== 'input') throw new Error('expected input step');
|
||||||
|
Object.assign(this.recipe.variables, userInputs);
|
||||||
|
this.logLines = [];
|
||||||
|
this.customLog(`Starting deployment of ${this.recipe.name}.`);
|
||||||
|
this.deployFailed = false;
|
||||||
|
this.progress = 0;
|
||||||
|
this.step = 'run';
|
||||||
|
this.runTasks();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Marks the deploy as failed
|
||||||
|
*/
|
||||||
|
async markFailedDeploy() {
|
||||||
|
this.deployFailed = true;
|
||||||
|
try {
|
||||||
|
const filePath = path.join(this.deployPath, '_DEPLOY_FAILED_DO_NOT_USE');
|
||||||
|
await fse.outputFile(filePath, 'This deploy has failed, please do not use these files.');
|
||||||
|
} catch (error) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* (Private) Run the tasks in a sequential way.
|
||||||
|
*/
|
||||||
|
async runTasks() {
|
||||||
|
if (this.step !== 'run') throw new Error('expected run step');
|
||||||
|
const contextVariables = cloneDeep(this.recipe.variables);
|
||||||
|
contextVariables.deploymentID = this.deploymentID;
|
||||||
|
contextVariables.serverName = this.serverName;
|
||||||
|
contextVariables.recipeName = this.recipe.name;
|
||||||
|
contextVariables.recipeAuthor = this.recipe.author;
|
||||||
|
contextVariables.recipeDescription = this.recipe.description;
|
||||||
|
|
||||||
|
//Run all the tasks
|
||||||
|
for (let index = 0; index < this.recipe.tasks.length; index++) {
|
||||||
|
this.progress = Math.round((index / this.recipe.tasks.length) * 100);
|
||||||
|
const task = this.recipe.tasks[index];
|
||||||
|
const taskID = `[task${index + 1}:${task.action}]`;
|
||||||
|
this.customLog(`Running ${taskID}...`);
|
||||||
|
const taskTimeoutSeconds = task.timeoutSeconds ?? recipeEngine[task.action].timeoutSeconds;
|
||||||
|
|
||||||
|
try {
|
||||||
|
contextVariables.$step = `loading task ${task.action}`;
|
||||||
|
await Promise.race([
|
||||||
|
recipeEngine[task.action].run(task, this.deployPath, contextVariables),
|
||||||
|
new Promise((resolve, reject) => {
|
||||||
|
setTimeout(() => {
|
||||||
|
reject(new Error(`timed out after ${taskTimeoutSeconds}s.`));
|
||||||
|
}, taskTimeoutSeconds * 1000);
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
this.logLines[this.logLines.length - 1] += ' ✔️';
|
||||||
|
} catch (error) {
|
||||||
|
this.logLines[this.logLines.length - 1] += ' ❌';
|
||||||
|
let msg = `Task Failed: ${error.message}\n`
|
||||||
|
+ 'Options: \n'
|
||||||
|
+ JSON.stringify(task, null, 2);
|
||||||
|
if (contextVariables.$step) {
|
||||||
|
msg += '\nDebug/Status: '
|
||||||
|
+ JSON.stringify([
|
||||||
|
txEnv.txaVersion,
|
||||||
|
await getOsDistro(),
|
||||||
|
contextVariables.$step
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
this.customLogError(msg);
|
||||||
|
return await this.markFailedDeploy();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Set progress
|
||||||
|
this.progress = 100;
|
||||||
|
this.customLog('All tasks completed.');
|
||||||
|
|
||||||
|
//Check deploy folder validity (resources + server.cfg)
|
||||||
|
try {
|
||||||
|
if (!fse.existsSync(path.join(this.deployPath, 'resources'))) {
|
||||||
|
throw new Error('this recipe didn\'t create a \'resources\' folder.');
|
||||||
|
} else if (!fse.existsSync(path.join(this.deployPath, 'server.cfg'))) {
|
||||||
|
throw new Error('this recipe didn\'t create a \'server.cfg\' file.');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.customLogError(`Deploy validation error: ${error.message}`);
|
||||||
|
return await this.markFailedDeploy();
|
||||||
|
}
|
||||||
|
|
||||||
|
//Replace all vars in the server.cfg
|
||||||
|
try {
|
||||||
|
const task = {
|
||||||
|
mode: 'all_vars',
|
||||||
|
file: './server.cfg',
|
||||||
|
};
|
||||||
|
await recipeEngine['replace_string'].run(task, this.deployPath, contextVariables);
|
||||||
|
this.customLog('Replacing all vars in server.cfg... ✔️');
|
||||||
|
} catch (error) {
|
||||||
|
this.customLogError(`Failed to replace all vars in server.cfg: ${error.message}`);
|
||||||
|
return await this.markFailedDeploy();
|
||||||
|
}
|
||||||
|
|
||||||
|
//Else: success :)
|
||||||
|
this.customLog('Deploy finished and folder validated. All done!');
|
||||||
|
this.step = 'configure';
|
||||||
|
if (txEnv.isWindows) {
|
||||||
|
try {
|
||||||
|
await open(path.normalize(this.deployPath), { app: 'explorer' });
|
||||||
|
} catch (error) { }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
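A minimal usage sketch of the class above, assuming the web routes drive it roughly like this (the recipe string, deployment ID and deploy path are placeholders, not part of this commit):
const deployer = new Deployer(recipeYaml, 'deploy_abc123', '/srv/txData/mysv.base', true);
await deployer.confirmRecipe(recipeYaml);    // 'review' -> 'input', also ensures the deploy path exists
const vars = deployer.getRecipeVars();       // presented to the user as a form
deployer.start({ ...vars, maxClients: 48 }); // 'input' -> 'run'; runTasks() keeps going in the background
// when runTasks() finishes, deployer.step === 'configure', or deployer.deployFailed === true on error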
|
565
core/deployer/recipeEngine.js
Normal file
|
@@ -0,0 +1,565 @@
|
||||||
|
const modulename = 'RecipeEngine';
|
||||||
|
import { promisify } from 'node:util';
|
||||||
|
import fse from 'fs-extra';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import path from 'node:path';
|
||||||
|
import stream from 'node:stream';
|
||||||
|
import StreamZip from 'node-stream-zip';
|
||||||
|
import { cloneDeep, escapeRegExp } from 'lodash-es';
|
||||||
|
import mysql from 'mysql2/promise';
|
||||||
|
import got from '@lib/got';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Helper functions
|
||||||
|
const safePath = (base, suffix) => {
|
||||||
|
const safeSuffix = path.normalize(suffix).replace(/^(\.\.(\/|\\|$))+/, '');
|
||||||
|
return path.join(base, safeSuffix);
|
||||||
|
};
|
||||||
|
const isPathLinear = (pathInput) => {
|
||||||
|
return pathInput.match(/(\.\.(\/|\\|$))+/g) === null;
|
||||||
|
};
|
||||||
|
const isPathRoot = (pathInput) => {
|
||||||
|
return /^\.[/\\]*$/.test(pathInput);
|
||||||
|
};
|
||||||
|
const pathCleanTrail = (pathInput) => {
|
||||||
|
return pathInput.replace(/[/\\]+$/, '');
|
||||||
|
};
|
||||||
|
const isPathValid = (pathInput, acceptRoot = true) => {
|
||||||
|
return (
|
||||||
|
typeof pathInput == 'string'
|
||||||
|
&& pathInput.length
|
||||||
|
&& isPathLinear(pathInput)
|
||||||
|
&& (acceptRoot || !isPathRoot(pathInput))
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const replaceVars = (inputString, deployerCtx) => {
|
||||||
|
const allVars = Object.keys(deployerCtx);
|
||||||
|
for (const varName of allVars) {
|
||||||
|
const varNameReplacer = new RegExp(escapeRegExp(`{{${varName}}}`), 'g');
|
||||||
|
inputString = inputString.replace(varNameReplacer, deployerCtx[varName].toString());
|
||||||
|
}
|
||||||
|
return inputString;
|
||||||
|
};
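For illustration only, given the two helpers above (paths and variable names are made up):
const cfgLine = replaceVars('sv_hostname "{{serverName}}"', { serverName: 'My Server' });
// cfgLine === 'sv_hostname "My Server"'
const dest = safePath('/srv/txData/deploy', '../../etc/passwd');
// dest === '/srv/txData/deploy/etc/passwd' — the leading "../" segments are stripped, keeping writes inside the base path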
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Downloads a file to a target path using streams
|
||||||
|
*/
|
||||||
|
const validatorDownloadFile = (options) => {
|
||||||
|
return (
|
||||||
|
typeof options.url == 'string'
|
||||||
|
&& isPathValid(options.path)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskDownloadFile = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorDownloadFile(options)) throw new Error('invalid options');
|
||||||
|
if (options.path.endsWith('/')) throw new Error('target filename not specified'); //FIXME: this should be on the validator
|
||||||
|
|
||||||
|
//Process and create target file/path
|
||||||
|
const destPath = safePath(basePath, options.path);
|
||||||
|
await fse.outputFile(destPath, 'file save attempt, please ignore or remove');
|
||||||
|
|
||||||
|
//Start file download and create write stream
|
||||||
|
deployerCtx.$step = 'before stream';
|
||||||
|
const gotOptions = {
|
||||||
|
timeout: { request: 150e3 },
|
||||||
|
retry: { limit: 5 },
|
||||||
|
};
|
||||||
|
const gotStream = got.stream(options.url, gotOptions);
|
||||||
|
gotStream.on('downloadProgress', (progress) => {
|
||||||
|
deployerCtx.$step = `downloading ${Math.round(progress.percent * 100)}%`;
|
||||||
|
});
|
||||||
|
const pipeline = promisify(stream.pipeline);
|
||||||
|
await pipeline(
|
||||||
|
gotStream,
|
||||||
|
fse.createWriteStream(destPath),
|
||||||
|
);
|
||||||
|
deployerCtx.$step = 'after stream';
|
||||||
|
};
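A hypothetical download_file task as it would appear in a recipe (URL and path are placeholders):
const exampleDownloadTask = {
    action: 'download_file',
    url: 'https://example.com/some-resource.zip',
    path: './tmp/some-resource.zip', // must end in the target filename, not a trailing slash
};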
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Downloads a github repository with an optional reference (branch, tag, commit hash) or subpath.
|
||||||
|
* If the directory structure does not exist, it is created.
|
||||||
|
*/
|
||||||
|
const githubRepoSourceRegex = /^((https?:\/\/github\.com\/)?|@)?([\w.\-_]+)\/([\w.\-_]+).*$/;
|
||||||
|
const validatorDownloadGithub = (options) => {
|
||||||
|
return (
|
||||||
|
typeof options.src == 'string'
|
||||||
|
&& isPathValid(options.dest, false)
|
||||||
|
&& (typeof options.ref == 'string' || typeof options.ref == 'undefined')
|
||||||
|
&& (typeof options.subpath == 'string' || typeof options.subpath == 'undefined')
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskDownloadGithub = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorDownloadGithub(options)) throw new Error('invalid options');
|
||||||
|
//FIXME: if the error is EPERM, try creating a file in the folder and check if it works
|
||||||
|
|
||||||
|
//Parsing source
|
||||||
|
deployerCtx.$step = 'task start';
|
||||||
|
const srcMatch = options.src.match(githubRepoSourceRegex);
|
||||||
|
if (!srcMatch || !srcMatch[3] || !srcMatch[4]) throw new Error('invalid repository');
|
||||||
|
const repoOwner = srcMatch[3];
|
||||||
|
const repoName = srcMatch[4];
|
||||||
|
|
||||||
|
//Setting git ref
|
||||||
|
let reference;
|
||||||
|
if (options.ref) {
|
||||||
|
reference = options.ref;
|
||||||
|
} else {
|
||||||
|
const data = await got.get(
|
||||||
|
`https://api.github.com/repos/${repoOwner}/${repoName}`,
|
||||||
|
{
|
||||||
|
timeout: { request: 15e3 }
|
||||||
|
}
|
||||||
|
).json();
|
||||||
|
if (typeof data !== 'object' || !data.default_branch) {
|
||||||
|
throw new Error('reference not set, and was not able to detect it using github\'s api');
|
||||||
|
}
|
||||||
|
reference = data.default_branch;
|
||||||
|
}
|
||||||
|
deployerCtx.$step = 'ref set';
|
||||||
|
|
||||||
|
//Preparing vars
|
||||||
|
const downURL = `https://api.github.com/repos/${repoOwner}/${repoName}/zipball/${reference}`;
|
||||||
|
const tmpFilePath = path.join(basePath, `.${(Date.now() % 100000000).toString(36)}.download`);
|
||||||
|
const destPath = safePath(basePath, options.dest);
|
||||||
|
|
||||||
|
//Downloading file
|
||||||
|
deployerCtx.$step = 'before stream';
|
||||||
|
const gotOptions = {
|
||||||
|
timeout: { request: 150e3 },
|
||||||
|
retry: { limit: 5 },
|
||||||
|
};
|
||||||
|
const gotStream = got.stream(downURL, gotOptions);
|
||||||
|
gotStream.on('downloadProgress', (progress) => {
|
||||||
|
deployerCtx.$step = `downloading ${Math.round(progress.percent * 100)}%`;
|
||||||
|
});
|
||||||
|
const pipeline = promisify(stream.pipeline);
|
||||||
|
await pipeline(
|
||||||
|
gotStream,
|
||||||
|
fse.createWriteStream(tmpFilePath),
|
||||||
|
);
|
||||||
|
deployerCtx.$step = 'after stream';
|
||||||
|
|
||||||
|
//Extracting files
|
||||||
|
const zip = new StreamZip.async({ file: tmpFilePath });
|
||||||
|
const entries = Object.values(await zip.entries());
|
||||||
|
if (!entries.length || !entries[0].isDirectory) throw new Error('unexpected zip structure');
|
||||||
|
const zipSubPath = path.posix.join(entries[0].name, options.subpath || '');
|
||||||
|
deployerCtx.$step = 'zip parsed';
|
||||||
|
await fsp.mkdir(destPath, { recursive: true });
|
||||||
|
deployerCtx.$step = 'dest path created';
|
||||||
|
await zip.extract(zipSubPath, destPath);
|
||||||
|
deployerCtx.$step = 'zip extracted';
|
||||||
|
await zip.close();
|
||||||
|
deployerCtx.$step = 'zip closed';
|
||||||
|
|
||||||
|
//Removing temp path
|
||||||
|
await fse.remove(tmpFilePath);
|
||||||
|
deployerCtx.$step = 'task finished';
|
||||||
|
};
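A hypothetical download_github task accepted by the validator above (repository, ref and paths are placeholders):
const exampleGithubTask = {
    action: 'download_github',
    src: 'someuser/some-repo', // a full https://github.com/... URL also matches githubRepoSourceRegex
    ref: 'main',               // optional; when omitted, the default branch is looked up via the GitHub API
    subpath: 'resources',      // optional folder inside the repository zip
    dest: './resources/[local]',
};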
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes a file or directory. The directory can have contents. If the path does not exist, silently does nothing.
|
||||||
|
*/
|
||||||
|
const validatorRemovePath = (options) => {
|
||||||
|
return (
|
||||||
|
isPathValid(options.path, false)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskRemovePath = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorRemovePath(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
//Process and create target file/path
|
||||||
|
const targetPath = safePath(basePath, options.path);
|
||||||
|
|
||||||
|
//NOTE: being extra safe about not deleting itself
|
||||||
|
const cleanBasePath = pathCleanTrail(path.normalize(basePath));
|
||||||
|
if (cleanBasePath == targetPath) throw new Error('cannot remove base folder');
|
||||||
|
await fse.remove(targetPath);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ensures that the directory exists. If the directory structure does not exist, it is created.
|
||||||
|
*/
|
||||||
|
const validatorEnsureDir = (options) => {
|
||||||
|
return (
|
||||||
|
isPathValid(options.path, false)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskEnsureDir = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorEnsureDir(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
//Process and create target file/path
|
||||||
|
const destPath = safePath(basePath, options.path);
|
||||||
|
await fse.ensureDir(destPath);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts a ZIP file to a target folder.
|
||||||
|
* NOTE: wow that was not easy to pick a library!
|
||||||
|
* - tar: no zip files
|
||||||
|
* - minizlib: terrible docs, probably too low level
|
||||||
|
* - yauzl: deprecation warning, slow
|
||||||
|
* - extract-zip: deprecation warning, slow due to yauzl
|
||||||
|
* - jszip: it's more a browser thing than node, doesn't appear to have an extract option
|
||||||
|
* - archiver: no extract
|
||||||
|
* - zip-stream: no extract
|
||||||
|
* - adm-zip: 50ms, the previously used one, not great
|
||||||
|
* - node-stream-zip: 180ms, acceptable
|
||||||
|
* - unzip: last update 7 years ago
|
||||||
|
* - unzipper: haven't tested
|
||||||
|
* - fflate: haven't tested
|
||||||
|
* - decompress-zip: haven't tested
|
||||||
|
*/
|
||||||
|
const validatorUnzip = (options) => {
|
||||||
|
return (
|
||||||
|
isPathValid(options.src, false)
|
||||||
|
&& isPathValid(options.dest)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskUnzip = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorUnzip(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const srcPath = safePath(basePath, options.src);
|
||||||
|
const destPath = safePath(basePath, options.dest);
|
||||||
|
await fsp.mkdir(destPath, { recursive: true });
|
||||||
|
|
||||||
|
const zip = new StreamZip.async({ file: srcPath });
|
||||||
|
const count = await zip.extract(null, destPath);
|
||||||
|
console.log(`Extracted ${count} entries`);
|
||||||
|
await zip.close();
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Moves a file or directory. The directory can have contents.
|
||||||
|
*/
|
||||||
|
const validatorMovePath = (options) => {
|
||||||
|
return (
|
||||||
|
isPathValid(options.src, false)
|
||||||
|
&& isPathValid(options.dest, false)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskMovePath = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorMovePath(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const srcPath = safePath(basePath, options.src);
|
||||||
|
const destPath = safePath(basePath, options.dest);
|
||||||
|
await fse.move(srcPath, destPath, {
|
||||||
|
overwrite: (options.overwrite === 'true' || options.overwrite === true),
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy a file or directory. The directory can have contents.
|
||||||
|
* TODO: add a filter property and use a glob lib in the fse.copy filter function
|
||||||
|
*/
|
||||||
|
const validatorCopyPath = (options) => {
|
||||||
|
return (
|
||||||
|
isPathValid(options.src)
|
||||||
|
&& isPathValid(options.dest)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskCopyPath = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorCopyPath(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const srcPath = safePath(basePath, options.src);
|
||||||
|
const destPath = safePath(basePath, options.dest);
|
||||||
|
await fse.copy(srcPath, destPath, {
|
||||||
|
overwrite: (typeof options.overwrite !== 'undefined' && (options.overwrite === 'true' || options.overwrite === true)),
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes or appends data to a file. If not in append mode, the file will be overwritten and the directory structure will be created if it doesn't exist.
|
||||||
|
*/
|
||||||
|
const validatorWriteFile = (options) => {
|
||||||
|
return (
|
||||||
|
typeof options.data == 'string'
|
||||||
|
&& options.data.length
|
||||||
|
&& isPathValid(options.file, false)
|
||||||
|
);
|
||||||
|
};
|
||||||
|
const taskWriteFile = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorWriteFile(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const filePath = safePath(basePath, options.file);
|
||||||
|
if (options.append === 'true' || options.append === true) {
|
||||||
|
await fse.appendFile(filePath, options.data);
|
||||||
|
} else {
|
||||||
|
await fse.outputFile(filePath, options.data);
|
||||||
|
}
|
||||||
|
};
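A hypothetical write_file task using the append mode described above (file and data are placeholders):
const exampleWriteTask = {
    action: 'write_file',
    file: './server.cfg',
    append: true, // the string 'true' is accepted as well
    data: 'ensure example_resource\n',
};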
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces a string in the target file or files array based on a search string.
|
||||||
|
* Modes:
|
||||||
|
* - template: (default) target string will be processed for vars
|
||||||
|
* - literal: normal string search/replace without any vars
|
||||||
|
* - all_vars: all vars.toString() will be replaced. The search option will be ignored
|
||||||
|
*/
|
||||||
|
const validatorReplaceString = (options) => {
|
||||||
|
//Validate file
|
||||||
|
const fileList = (Array.isArray(options.file)) ? options.file : [options.file];
|
||||||
|
if (fileList.some((s) => !isPathValid(s, false))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Validate mode
|
||||||
|
if (
|
||||||
|
typeof options.mode == 'undefined'
|
||||||
|
|| options.mode == 'template'
|
||||||
|
|| options.mode == 'literal'
|
||||||
|
) {
|
||||||
|
return (
|
||||||
|
typeof options.search == 'string'
|
||||||
|
&& options.search.length
|
||||||
|
&& typeof options.replace == 'string'
|
||||||
|
);
|
||||||
|
} else if (options.mode == 'all_vars') {
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
const taskReplaceString = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorReplaceString(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const fileList = (Array.isArray(options.file)) ? options.file : [options.file];
|
||||||
|
for (let i = 0; i < fileList.length; i++) {
|
||||||
|
const filePath = safePath(basePath, fileList[i]);
|
||||||
|
const original = await fse.readFile(filePath, 'utf8');
|
||||||
|
let changed;
|
||||||
|
if (typeof options.mode == 'undefined' || options.mode == 'template') {
|
||||||
|
changed = original.replace(new RegExp(options.search, 'g'), replaceVars(options.replace, deployerCtx));
|
||||||
|
} else if (options.mode == 'all_vars') {
|
||||||
|
changed = replaceVars(original, deployerCtx);
|
||||||
|
} else if (options.mode == 'literal') {
|
||||||
|
changed = original.replace(new RegExp(options.search, 'g'), options.replace);
|
||||||
|
}
|
||||||
|
await fse.writeFile(filePath, changed);
|
||||||
|
}
|
||||||
|
};
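Hypothetical replace_string tasks, one per mode documented above (files, search strings and values are placeholders):
const templateTask = { action: 'replace_string', file: './server.cfg', search: 'HOSTNAME_HERE', replace: '{{serverName}}' }; // replace is expanded with recipe variables
const literalTask = { action: 'replace_string', mode: 'literal', file: './server.cfg', search: 'old_value', replace: 'new_value' };
const allVarsTask = { action: 'replace_string', mode: 'all_vars', file: ['./server.cfg', './myresource/config.lua'] }; // search/replace ignored, every {{var}} gets substituted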
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Connects to a MySQL/MariaDB server and creates the database if it doesn't exist yet.
|
||||||
|
*/
|
||||||
|
const validatorConnectDatabase = (options) => {
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
const taskConnectDatabase = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorConnectDatabase(options)) throw new Error('invalid options');
|
||||||
|
if (typeof deployerCtx.dbHost !== 'string') throw new Error('invalid dbHost');
|
||||||
|
if (typeof deployerCtx.dbPort !== 'number') throw new Error('invalid dbPort, should be number');
|
||||||
|
if (typeof deployerCtx.dbUsername !== 'string') throw new Error('invalid dbUsername');
|
||||||
|
if (typeof deployerCtx.dbPassword !== 'string') throw new Error('dbPassword should be a string');
|
||||||
|
if (typeof deployerCtx.dbName !== 'string') throw new Error('dbName should be a string');
|
||||||
|
if (typeof deployerCtx.dbDelete !== 'boolean') throw new Error('dbDelete should be a boolean');
|
||||||
|
//Connect to the database
|
||||||
|
const mysqlOptions = {
|
||||||
|
host: deployerCtx.dbHost,
|
||||||
|
port: deployerCtx.dbPort,
|
||||||
|
user: deployerCtx.dbUsername,
|
||||||
|
password: deployerCtx.dbPassword,
|
||||||
|
multipleStatements: true,
|
||||||
|
};
|
||||||
|
deployerCtx.dbConnection = await mysql.createConnection(mysqlOptions);
|
||||||
|
const escapedDBName = mysql.escapeId(deployerCtx.dbName);
|
||||||
|
if (deployerCtx.dbDelete) {
|
||||||
|
await deployerCtx.dbConnection.query(`DROP DATABASE IF EXISTS ${escapedDBName}`);
|
||||||
|
}
|
||||||
|
await deployerCtx.dbConnection.query(`CREATE DATABASE IF NOT EXISTS ${escapedDBName} CHARACTER SET utf8 COLLATE utf8_general_ci`);
|
||||||
|
await deployerCtx.dbConnection.query(`USE ${escapedDBName}`);
|
||||||
|
};
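The context variables taskConnectDatabase expects to find in deployerCtx, with illustrative values (host, credentials and database name are placeholders):
const exampleDbCtx = {
    dbHost: '127.0.0.1',
    dbPort: 3306,
    dbUsername: 'root',
    dbPassword: '',
    dbName: 'example_deploy_db',
    dbDelete: false, // when true, the database is dropped before being recreated
};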
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs a SQL query in the previously connected database. This query can be a file path or a string.
|
||||||
|
*/
|
||||||
|
const validatorQueryDatabase = (options) => {
|
||||||
|
if (typeof options.file !== 'undefined' && typeof options.query !== 'undefined') return false;
|
||||||
|
if (typeof options.file == 'string') return isPathValid(options.file, false);
|
||||||
|
if (typeof options.query == 'string') return options.query.length;
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
const taskQueryDatabase = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorQueryDatabase(options)) throw new Error('invalid options');
|
||||||
|
if (!deployerCtx.dbConnection) throw new Error('Database connection not found. Run connect_database before query_database');
|
||||||
|
|
||||||
|
let sql;
|
||||||
|
if (options.file) {
|
||||||
|
const filePath = safePath(basePath, options.file);
|
||||||
|
sql = await fse.readFile(filePath, 'utf8');
|
||||||
|
} else {
|
||||||
|
sql = options.query;
|
||||||
|
}
|
||||||
|
await deployerCtx.dbConnection.query(sql);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Loads variables from a json file to the context.
|
||||||
|
*/
|
||||||
|
const validatorLoadVars = (options) => {
|
||||||
|
return isPathValid(options.src, false);
|
||||||
|
};
|
||||||
|
const taskLoadVars = async (options, basePath, deployerCtx) => {
|
||||||
|
if (!validatorLoadVars(options)) throw new Error('invalid options');
|
||||||
|
|
||||||
|
const srcPath = safePath(basePath, options.src);
|
||||||
|
const rawData = await fse.readFile(srcPath, 'utf8');
|
||||||
|
const inData = JSON.parse(rawData);
|
||||||
|
inData.dbConnection = undefined;
|
||||||
|
Object.assign(deployerCtx, inData);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DEBUG Just wastes time /shrug
|
||||||
|
*/
|
||||||
|
const validatorWasteTime = (options) => {
|
||||||
|
return (typeof options.seconds == 'number');
|
||||||
|
};
|
||||||
|
const taskWasteTime = (options, basePath, deployerCtx) => {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
setTimeout(() => {
|
||||||
|
resolve(true);
|
||||||
|
}, options.seconds * 1000);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DEBUG Fail fail fail :o
|
||||||
|
*/
|
||||||
|
const taskFailTest = async (options, basePath, deployerCtx) => {
|
||||||
|
throw new Error('test error :p');
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DEBUG logs all ctx vars
|
||||||
|
*/
|
||||||
|
const taskDumpVars = async (options, basePath, deployerCtx) => {
|
||||||
|
const toDump = cloneDeep(deployerCtx);
|
||||||
|
toDump.dbConnection = toDump?.dbConnection?.constructor?.name;
|
||||||
|
console.dir(toDump);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
DONE:
|
||||||
|
- download_file
|
||||||
|
- remove_path (file or folder)
|
||||||
|
- ensure_dir
|
||||||
|
- unzip
|
||||||
|
- move_path (file or folder)
|
||||||
|
- copy_path (file or folder)
|
||||||
|
- write_file (with option to append only)
|
||||||
|
- replace_string (single or array)
|
||||||
|
- connect_database (connects to mysql, creates db if not set)
|
||||||
|
- query_database (file or string)
|
||||||
|
- download_github (with ref and subpath)
|
||||||
|
- load_vars
|
||||||
|
|
||||||
|
DEBUG:
|
||||||
|
- waste_time
|
||||||
|
- fail_test
|
||||||
|
- dump_vars
|
||||||
|
|
||||||
|
TODO:
|
||||||
|
- ??????
|
||||||
|
*/
|
||||||
|
|
||||||
|
|
||||||
|
//Exports
|
||||||
|
export default {
|
||||||
|
download_file: {
|
||||||
|
validate: validatorDownloadFile,
|
||||||
|
run: taskDownloadFile,
|
||||||
|
timeoutSeconds: 180,
|
||||||
|
},
|
||||||
|
download_github: {
|
||||||
|
validate: validatorDownloadGithub,
|
||||||
|
run: taskDownloadGithub,
|
||||||
|
timeoutSeconds: 180,
|
||||||
|
},
|
||||||
|
remove_path: {
|
||||||
|
validate: validatorRemovePath,
|
||||||
|
run: taskRemovePath,
|
||||||
|
timeoutSeconds: 15,
|
||||||
|
},
|
||||||
|
ensure_dir: {
|
||||||
|
validate: validatorEnsureDir,
|
||||||
|
run: taskEnsureDir,
|
||||||
|
timeoutSeconds: 15,
|
||||||
|
},
|
||||||
|
unzip: {
|
||||||
|
validate: validatorUnzip,
|
||||||
|
run: taskUnzip,
|
||||||
|
timeoutSeconds: 180,
|
||||||
|
},
|
||||||
|
move_path: {
|
||||||
|
validate: validatorMovePath,
|
||||||
|
run: taskMovePath,
|
||||||
|
timeoutSeconds: 180,
|
||||||
|
},
|
||||||
|
copy_path: {
|
||||||
|
validate: validatorCopyPath,
|
||||||
|
run: taskCopyPath,
|
||||||
|
timeoutSeconds: 180,
|
||||||
|
},
|
||||||
|
write_file: {
|
||||||
|
validate: validatorWriteFile,
|
||||||
|
run: taskWriteFile,
|
||||||
|
timeoutSeconds: 15,
|
||||||
|
},
|
||||||
|
replace_string: {
|
||||||
|
validate: validatorReplaceString,
|
||||||
|
run: taskReplaceString,
|
||||||
|
timeoutSeconds: 15,
|
||||||
|
},
|
||||||
|
connect_database: {
|
||||||
|
validate: validatorConnectDatabase,
|
||||||
|
run: taskConnectDatabase,
|
||||||
|
timeoutSeconds: 30,
|
||||||
|
},
|
||||||
|
query_database: {
|
||||||
|
validate: validatorQueryDatabase,
|
||||||
|
run: taskQueryDatabase,
|
||||||
|
timeoutSeconds: 90,
|
||||||
|
},
|
||||||
|
load_vars: {
|
||||||
|
validate: validatorLoadVars,
|
||||||
|
run: taskLoadVars,
|
||||||
|
timeoutSeconds: 5,
|
||||||
|
},
|
||||||
|
|
||||||
|
//DEBUG only
|
||||||
|
waste_time: {
|
||||||
|
validate: validatorWasteTime,
|
||||||
|
run: taskWasteTime,
|
||||||
|
timeoutSeconds: 300,
|
||||||
|
},
|
||||||
|
fail_test: {
|
||||||
|
validate: (() => true),
|
||||||
|
run: taskFailTest,
|
||||||
|
timeoutSeconds: 300,
|
||||||
|
},
|
||||||
|
dump_vars: {
|
||||||
|
validate: (() => true),
|
||||||
|
run: taskDumpVars,
|
||||||
|
timeoutSeconds: 5,
|
||||||
|
},
|
||||||
|
};
|
126
core/deployer/recipeParser.ts
Normal file
|
@@ -0,0 +1,126 @@
|
||||||
|
const modulename = 'Deployer';
|
||||||
|
import YAML from 'js-yaml';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import { default as untypedRecipeEngine } from './recipeEngine.js';
|
||||||
|
import consoleFactory from '@lib/console.js';
|
||||||
|
import { RECIPE_DEPLOYER_VERSION } from './index.js'; //FIXME: circular_dependency
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Types
|
||||||
|
type YamlRecipeTaskType = {
|
||||||
|
action: string;
|
||||||
|
[key: string]: any;
|
||||||
|
}
|
||||||
|
type YamlRecipeType = Partial<{
|
||||||
|
$engine: number;
|
||||||
|
$minFxVersion: number;
|
||||||
|
$onesync: string;
|
||||||
|
$steamRequired: boolean;
|
||||||
|
|
||||||
|
name: string;
|
||||||
|
author: string;
|
||||||
|
description: string;
|
||||||
|
|
||||||
|
variables: Record<string, any>;
|
||||||
|
tasks: YamlRecipeTaskType[];
|
||||||
|
}>;
|
||||||
|
type ParsedRecipeType = {
|
||||||
|
raw: string;
|
||||||
|
name: string;
|
||||||
|
author: string;
|
||||||
|
description: string;
|
||||||
|
variables: Record<string, any>; //TODO: define this
|
||||||
|
tasks: YamlRecipeTaskType[];
|
||||||
|
onesync?: string;
|
||||||
|
fxserverMinVersion?: number;
|
||||||
|
recipeEngineVersion?: number;
|
||||||
|
steamRequired?: boolean;
|
||||||
|
requireDBConfig: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//FIXME: move to the recipeEngine.js file after typescript migration
|
||||||
|
type RecipeEngineTask = {
|
||||||
|
validate: (task: YamlRecipeTaskType) => boolean;
|
||||||
|
run: (options: YamlRecipeTaskType, basePath: string, deployerCtx: unknown) => Promise<void>;
|
||||||
|
timeoutSeconds: number;
|
||||||
|
};
|
||||||
|
type RecipeEngine = Record<string, RecipeEngineTask>;
|
||||||
|
const recipeEngine = untypedRecipeEngine as RecipeEngine;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates a Recipe file
|
||||||
|
* FIXME: use Zod for schema validation
|
||||||
|
*/
|
||||||
|
const recipeParser = (rawRecipe: string) => {
|
||||||
|
if (typeof rawRecipe !== 'string') throw new Error('not a string');
|
||||||
|
|
||||||
|
//Loads YAML
|
||||||
|
let recipe: YamlRecipeType;
|
||||||
|
try {
|
||||||
|
recipe = YAML.load(rawRecipe, { schema: YAML.JSON_SCHEMA }) as YamlRecipeType;
|
||||||
|
} catch (error) {
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw new Error('invalid yaml');
|
||||||
|
}
|
||||||
|
|
||||||
|
//Basic validation
|
||||||
|
if (typeof recipe !== 'object') throw new Error('invalid YAML, couldn\'t resolve to object');
|
||||||
|
if (!Array.isArray(recipe.tasks)) throw new Error('no tasks array found');
|
||||||
|
|
||||||
|
//Preparing output
|
||||||
|
const outRecipe: ParsedRecipeType = {
|
||||||
|
raw: rawRecipe.trim(),
|
||||||
|
name: (recipe.name ?? 'unnamed').trim(),
|
||||||
|
author: (recipe.author ?? 'unknown').trim(),
|
||||||
|
description: (recipe.description ?? '').trim(),
|
||||||
|
variables: {},
|
||||||
|
tasks: [],
|
||||||
|
requireDBConfig: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
//Checking/parsing meta tag requirements
|
||||||
|
if (typeof recipe.$onesync == 'string') {
|
||||||
|
const onesync = recipe.$onesync.trim();
|
||||||
|
if (!['off', 'legacy', 'on'].includes(onesync)) throw new Error(`the onesync option required by this recipe ("${onesync}") is not supported by this FXServer version.`);
|
||||||
|
outRecipe.onesync = onesync;
|
||||||
|
}
|
||||||
|
if (typeof recipe.$minFxVersion == 'number') {
|
||||||
|
if (recipe.$minFxVersion > txEnv.fxsVersion) throw new Error(`this recipe requires FXServer v${recipe.$minFxVersion} or above`);
|
||||||
|
outRecipe.fxserverMinVersion = recipe.$minFxVersion; //NOTE: currently no downstream use
|
||||||
|
}
|
||||||
|
if (typeof recipe.$engine == 'number') {
|
||||||
|
if (recipe.$engine < RECIPE_DEPLOYER_VERSION) throw new Error(`unsupported '$engine' version ${recipe.$engine}`);
|
||||||
|
outRecipe.recipeEngineVersion = recipe.$engine; //NOTE: currently no downstream use
|
||||||
|
}
|
||||||
|
if (recipe.$steamRequired === true) {
|
||||||
|
outRecipe.steamRequired = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Validate tasks
|
||||||
|
if (!Array.isArray(recipe.tasks)) throw new Error('no tasks array found');
|
||||||
|
recipe.tasks.forEach((task, index) => {
|
||||||
|
if (typeof task.action !== 'string') throw new Error(`[task${index + 1}] no action specified`);
|
||||||
|
if (typeof recipeEngine[task.action] === 'undefined') throw new Error(`[task${index + 1}] unknown action '${task.action}'`);
|
||||||
|
if (!recipeEngine[task.action].validate(task)) throw new Error(`[task${index + 1}:${task.action}] invalid parameters`);
|
||||||
|
outRecipe.tasks.push(task);
|
||||||
|
});
|
||||||
|
|
||||||
|
//Process inputs
|
||||||
|
outRecipe.requireDBConfig = recipe.tasks.some((t) => t.action.includes('database'));
|
||||||
|
const protectedVarNames = ['licenseKey', 'dbHost', 'dbUsername', 'dbPassword', 'dbName', 'dbConnection', 'dbPort'];
|
||||||
|
if (typeof recipe.variables == 'object' && recipe.variables !== null) {
|
||||||
|
const varNames = Object.keys(recipe.variables);
|
||||||
|
if (varNames.some((n) => protectedVarNames.includes(n))) {
|
||||||
|
throw new Error('One or more of the variables declared in the recipe are not allowed.');
|
||||||
|
}
|
||||||
|
Object.assign(outRecipe.variables, recipe.variables);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Output
|
||||||
|
return outRecipe;
|
||||||
|
};
|
||||||
|
|
||||||
|
export default recipeParser;
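For illustration, a minimal recipe string that recipeParser accepts (name, author, variables and tasks are placeholders):
const exampleRecipe = [
    '$engine: 3',
    'name: Example Recipe',
    'author: someone',
    'variables:',
    '    maxClients: 48',
    'tasks:',
    '    - action: ensure_dir',
    '      path: ./resources',
    '    - action: waste_time',
    '      seconds: 1',
].join('\n');
const parsed = recipeParser(exampleRecipe);
// parsed.name === 'Example Recipe', parsed.tasks.length === 2, parsed.requireDBConfig === false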
|
40
core/deployer/utils.ts
Normal file
|
@@ -0,0 +1,40 @@
|
||||||
|
import { canWriteToPath, getPathFiles } from '@lib/fs';
|
||||||
|
|
||||||
|
//File created up to v7.3.2
|
||||||
|
const EMPTY_FILE_NAME = '.empty';
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform deployer local target path permission/emptiness checking.
|
||||||
|
*/
|
||||||
|
export const validateTargetPath = async (deployPath: string) => {
|
||||||
|
const canCreateFolder = await canWriteToPath(deployPath);
|
||||||
|
if (!canCreateFolder) {
|
||||||
|
throw new Error('Path is not writable due to missing permissions or invalid path.');
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const pathFiles = await getPathFiles(deployPath);
|
||||||
|
if (pathFiles.some((x) => x.name !== EMPTY_FILE_NAME)) {
|
||||||
|
throw new Error('This folder already exists and is not empty!');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as any).code !== 'ENOENT') throw error;
|
||||||
|
}
|
||||||
|
return true as const;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a template recipe file
|
||||||
|
*/
|
||||||
|
export const makeTemplateRecipe = (serverName: string, author: string) => [
|
||||||
|
`name: ${serverName}`,
|
||||||
|
`author: ${author}`,
|
||||||
|
'',
|
||||||
|
'# This is just a placeholder, please don\'t use it!',
|
||||||
|
'tasks: ',
|
||||||
|
' - action: waste_time',
|
||||||
|
' seconds: 5',
|
||||||
|
' - action: waste_time',
|
||||||
|
' seconds: 5',
|
||||||
|
].join('\n');
|
66
core/global.d.ts
vendored
Normal file
|
@@ -0,0 +1,66 @@
|
||||||
|
//NOTE: don't import anything at the root of this file or it breaks the type definitions
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: txAdmin stuff
|
||||||
|
*/
|
||||||
|
type RefreshConfigFunc = import('@modules/ConfigStore/').RefreshConfigFunc;
|
||||||
|
interface GenericTxModuleInstance {
|
||||||
|
handleConfigUpdate?: RefreshConfigFunc;
|
||||||
|
handleShutdown?: () => void;
|
||||||
|
timers?: NodeJS.Timer[];
|
||||||
|
// public measureMemory?: () => { [key: string]: number };
|
||||||
|
}
|
||||||
|
declare interface GenericTxModule<T> {
|
||||||
|
new(): InstanceType<T> & GenericTxModuleInstance;
|
||||||
|
static readonly configKeysWatched?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
declare type TxConfigs = import('@modules/ConfigStore/schema').TxConfigs
|
||||||
|
declare const txConfig: TxConfigs;
|
||||||
|
|
||||||
|
declare type TxCoreType = import('./txAdmin').TxCoreType;
|
||||||
|
declare const txCore: TxCoreType;
|
||||||
|
|
||||||
|
declare type TxManagerType = import('./txManager').TxManagerType;
|
||||||
|
declare const txManager: TxManagerType;
|
||||||
|
|
||||||
|
declare type TxConsole = import('./lib/console').TxConsole;
|
||||||
|
declare namespace globalThis {
|
||||||
|
interface Console extends TxConsole { }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Natives
|
||||||
|
* Natives extracted from https://www.npmjs.com/package/@citizenfx/server
|
||||||
|
* I prefer extracting over importing the whole package because it's
|
||||||
|
* easier to keep track of what natives are being used.
|
||||||
|
*
|
||||||
|
* To use the package, add the following line to the top of the file:
|
||||||
|
* /// <reference types="@citizenfx/server" />
|
||||||
|
*/
|
||||||
|
declare function ExecuteCommand(commandString: string): void;
|
||||||
|
declare function GetConvar(varName: string, default_: string): string;
|
||||||
|
declare function GetCurrentResourceName(): string;
|
||||||
|
declare function GetPasswordHash(password: string): string;
|
||||||
|
declare function GetResourceMetadata(resourceName: string, metadataKey: string, index: number): string;
|
||||||
|
declare function GetResourcePath(resourceName: string): string;
|
||||||
|
declare function IsDuplicityVersion(): boolean;
|
||||||
|
declare function PrintStructuredTrace(payload: string): void;
|
||||||
|
declare function RegisterCommand(commandName: string, handler: Function, restricted: boolean): void;
|
||||||
|
declare function ScanResourceRoot(rootPath: string, callback: (data: object) => void): boolean;
|
||||||
|
declare function VerifyPasswordHash(password: string, hash: string): boolean;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Fixes
|
||||||
|
*/
|
||||||
|
declare module 'unicode-emoji-json/data-ordered-emoji' {
|
||||||
|
const emojis: string[];
|
||||||
|
export = emojis;
|
||||||
|
}
|
||||||
|
|
||||||
|
//FIXME: check if this is still needed
|
||||||
|
// interface ProcessEnv {
|
||||||
|
// [x: string]: string | undefined;
|
||||||
|
// }
|
578
core/globalData.ts
Normal file
|
@@ -0,0 +1,578 @@
|
||||||
|
import os from 'node:os';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import path from 'node:path';
|
||||||
|
import slash from 'slash';
|
||||||
|
|
||||||
|
import consoleFactory, { setConsoleEnvData } from '@lib/console';
|
||||||
|
import { addLocalIpAddress } from '@lib/host/isIpAddressLocal';
|
||||||
|
import { parseFxserverVersion } from '@lib/fxserver/fxsVersionParser';
|
||||||
|
import { parseTxDevEnv, TxDevEnvType } from '@shared/txDevEnv';
|
||||||
|
import { Overwrite } from 'utility-types';
|
||||||
|
import fatalError from '@lib/fatalError';
|
||||||
|
import { getNativeVars } from './boot/getNativeVars';
|
||||||
|
import { getHostVars, hostEnvVarSchemas } from './boot/getHostVars';
|
||||||
|
import { getZapVars } from './boot/getZapVars';
|
||||||
|
import { z, ZodSchema } from 'zod';
|
||||||
|
import { fromZodError } from 'zod-validation-error';
|
||||||
|
import defaultAds from '../dynamicAds2.json';
|
||||||
|
import consts from '@shared/consts';
|
||||||
|
const console = consoleFactory();
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: GETTING VARIABLES
|
||||||
|
*/
|
||||||
|
//Get OSType
|
||||||
|
const osTypeVar = os.type();
|
||||||
|
let isWindows;
|
||||||
|
if (osTypeVar === 'Windows_NT') {
|
||||||
|
isWindows = true;
|
||||||
|
} else if (osTypeVar === 'Linux') {
|
||||||
|
isWindows = false;
|
||||||
|
} else {
|
||||||
|
fatalError.GlobalData(0, `OS type not supported: ${osTypeVar}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Simple env vars
|
||||||
|
const ignoreDeprecatedConfigs = process.env?.TXHOST_IGNORE_DEPRECATED_CONFIGS === 'true';
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: HELPERS
|
||||||
|
*/
|
||||||
|
const cleanPath = (x: string) => slash(path.normalize(x));
|
||||||
|
const handleMultiVar = <T extends ZodSchema>(
|
||||||
|
name: string,
|
||||||
|
schema: T,
|
||||||
|
procenv: z.infer<T> | undefined,
|
||||||
|
zapcfg: string | number | undefined,
|
||||||
|
convar: any,
|
||||||
|
): z.infer<T> | undefined => {
|
||||||
|
const alt = zapcfg ?? convar;
|
||||||
|
if (alt === undefined) {
|
||||||
|
return procenv;
|
||||||
|
}
|
||||||
|
const whichAlt = zapcfg !== undefined ? 'txAdminZapConfig.json' : 'ConVar';
|
||||||
|
if (procenv !== undefined) {
|
||||||
|
console.warn(`WARNING: Both the environment variable 'TXHOST_${name}' and the ${whichAlt} equivalent are set. The environment variable will be prioritized.`);
|
||||||
|
return procenv;
|
||||||
|
}
|
||||||
|
const parsed = schema.safeParse(alt);
|
||||||
|
if (!parsed.success) {
|
||||||
|
fatalError.GlobalData(20, [
|
||||||
|
`Invalid value for the TXHOST_${name}-equivalent config in ${whichAlt}.`,
|
||||||
|
['Value', alt],
|
||||||
|
'For more information: https://aka.cfx.re/txadmin-env-config',
|
||||||
|
], fromZodError(parsed.error, { prefix: null }));
|
||||||
|
}
|
||||||
|
return parsed.data;
|
||||||
|
}
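Illustration of the precedence implemented above, with made-up values: the TXHOST_* environment variable always wins, and the ZAP/ConVar fallback is validated against the same schema.
const examplePort = handleMultiVar('TXA_PORT', hostEnvVarSchemas.TXA_PORT, 40125, 40130, undefined);
// examplePort === 40125, after warning that both TXHOST_TXA_PORT and the txAdminZapConfig.json value are set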
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: DEV ENV
|
||||||
|
*/
|
||||||
|
type TxDevEnvEnabledType = Overwrite<TxDevEnvType, {
|
||||||
|
ENABLED: true;
|
||||||
|
SRC_PATH: string, //required in core/webserver, core/getReactIndex.ts
|
||||||
|
VITE_URL: string, //required in core/getReactIndex.ts
|
||||||
|
}>;
|
||||||
|
type TxDevEnvDisabledType = Overwrite<TxDevEnvType, {
|
||||||
|
ENABLED: false;
|
||||||
|
SRC_PATH: undefined;
|
||||||
|
VITE_URL: undefined;
|
||||||
|
}>;
|
||||||
|
let _txDevEnv: TxDevEnvEnabledType | TxDevEnvDisabledType;
|
||||||
|
const devVars = parseTxDevEnv();
|
||||||
|
if (devVars.ENABLED) {
|
||||||
|
console.debug('Starting txAdmin in DEV mode.');
|
||||||
|
if (!devVars.SRC_PATH || !devVars.VITE_URL) {
|
||||||
|
fatalError.GlobalData(8, 'Missing TXDEV_VITE_URL and/or TXDEV_SRC_PATH env variables.');
|
||||||
|
}
|
||||||
|
_txDevEnv = devVars as TxDevEnvEnabledType;
|
||||||
|
} else {
|
||||||
|
_txDevEnv = {
|
||||||
|
...devVars,
|
||||||
|
SRC_PATH: undefined,
|
||||||
|
VITE_URL: undefined,
|
||||||
|
} as TxDevEnvDisabledType;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: CHECK HOST VARS
|
||||||
|
*/
|
||||||
|
const nativeVars = getNativeVars(ignoreDeprecatedConfigs);
|
||||||
|
|
||||||
|
//Getting fxserver version
|
||||||
|
//4380 = GetVehicleType was exposed server-side
|
||||||
|
//4548 = more or less when node v16 was added
|
||||||
|
//4574 = add missing PRINT_STRUCTURED_TRACE declaration
|
||||||
|
//4574 = add resource field to PRINT_STRUCTURED_TRACE
|
||||||
|
//5894 = CREATE_VEHICLE_SERVER_SETTER
|
||||||
|
//6185 = added ScanResourceRoot (not yet in use)
|
||||||
|
//6508 = unhandledRejection is now handlable, we need this due to discord.js's bug
|
||||||
|
//8495 = changed prometheus::Histogram::BucketBoundaries
|
||||||
|
//9423 = feat(server): add more infos to playerDropped event
|
||||||
|
//9655 = Fixed ScanResourceRoot + latent events
|
||||||
|
const minFxsVersion = 5894;
|
||||||
|
const fxsVerParsed = parseFxserverVersion(nativeVars.fxsVersion);
|
||||||
|
const fxsVersion = fxsVerParsed.valid ? fxsVerParsed.build : 99999;
|
||||||
|
if (!fxsVerParsed.valid) {
|
||||||
|
console.error('It looks like you are running a custom build of fxserver.');
|
||||||
|
console.error('And because of that, there is no guarantee that txAdmin will work properly.');
|
||||||
|
console.error(`Convar: ${nativeVars.fxsVersion}`);
|
||||||
|
console.error(`Parsed Build: ${fxsVerParsed.build}`);
|
||||||
|
console.error(`Parsed Branch: ${fxsVerParsed.branch}`);
|
||||||
|
console.error(`Parsed Platform: ${fxsVerParsed.platform}`);
|
||||||
|
} else if (fxsVerParsed.build < minFxsVersion) {
|
||||||
|
fatalError.GlobalData(2, [
|
||||||
|
'This version of FXServer is too outdated and NOT compatible with txAdmin',
|
||||||
|
['Current FXServer version', fxsVerParsed.build.toString()],
|
||||||
|
['Minimum required version', minFxsVersion.toString()],
|
||||||
|
'Please update your FXServer to a newer version.',
|
||||||
|
]);
|
||||||
|
} else if (fxsVerParsed.branch !== 'master') {
|
||||||
|
console.warn(`You are running a custom branch of FXServer: ${fxsVerParsed.branch}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Getting txAdmin version
|
||||||
|
if (!nativeVars.txaResourceVersion) {
|
||||||
|
fatalError.GlobalData(3, [
|
||||||
|
'txAdmin version not set or in the wrong format.',
|
||||||
|
['Detected version', nativeVars.txaResourceVersion],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
const txaVersion = nativeVars.txaResourceVersion;
|
||||||
|
|
||||||
|
//Get txAdmin Resource Path
|
||||||
|
if (!nativeVars.txaResourcePath) {
|
||||||
|
fatalError.GlobalData(4, [
|
||||||
|
'Could not resolve txAdmin resource path.',
|
||||||
|
['Convar', nativeVars.txaResourcePath],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
const txaPath = cleanPath(nativeVars.txaResourcePath);
|
||||||
|
|
||||||
|
//Get citizen Root
|
||||||
|
if (!nativeVars.fxsCitizenRoot) {
|
||||||
|
fatalError.GlobalData(5, [
|
||||||
|
'citizen_root convar not set',
|
||||||
|
['Convar', nativeVars.fxsCitizenRoot],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
const fxsPath = cleanPath(nativeVars.fxsCitizenRoot as string);
|
||||||
|
|
||||||
|
//Check if server is inside WinRar's temp folder
|
||||||
|
if (isWindows && /Temp[\\/]+Rar\$/i.test(fxsPath)) {
|
||||||
|
fatalError.GlobalData(12, [
|
||||||
|
'It looks like you ran FXServer inside WinRAR without extracting it first.',
|
||||||
|
'Please extract the server files to a proper folder before running it.',
|
||||||
|
['Server path', fxsPath.replace(/\\/g, '/').replace(/\/$/, '')],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//Setting the variables in console without it having to importing from here (circular dependency)
|
||||||
|
setConsoleEnvData(
|
||||||
|
txaVersion,
|
||||||
|
txaPath,
|
||||||
|
_txDevEnv.ENABLED,
|
||||||
|
_txDevEnv.VERBOSE
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: TXDATA & PROFILE
|
||||||
|
*/
|
||||||
|
const hostVars = getHostVars();
|
||||||
|
//Setting data path
|
||||||
|
let hasCustomDataPath = false;
|
||||||
|
let dataPath = cleanPath(path.join(
|
||||||
|
fxsPath,
|
||||||
|
isWindows ? '..' : '../../../',
|
||||||
|
'txData'
|
||||||
|
));
|
||||||
|
const dataPathVar = handleMultiVar(
|
||||||
|
'DATA_PATH',
|
||||||
|
hostEnvVarSchemas.DATA_PATH,
|
||||||
|
hostVars.DATA_PATH,
|
||||||
|
undefined,
|
||||||
|
nativeVars.txDataPath,
|
||||||
|
);
|
||||||
|
if (dataPathVar) {
|
||||||
|
hasCustomDataPath = true;
|
||||||
|
dataPath = cleanPath(dataPathVar);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Check paths for non-ASCII characters
|
||||||
|
//NOTE: Non-ASCII in one of those paths (don't know which) will make NodeJS crash due to a bug in v8 (or something)
|
||||||
|
// when running localization methods like Date.toLocaleString().
|
||||||
|
// There was also an issue with the slash() lib and with the +exec on FXServer
|
||||||
|
const nonASCIIRegex = /[^\x00-\x80]+/;
|
||||||
|
if (nonASCIIRegex.test(fxsPath) || nonASCIIRegex.test(dataPath)) {
|
||||||
|
fatalError.GlobalData(7, [
|
||||||
|
'Due to environmental restrictions, your paths CANNOT contain non-ASCII characters.',
|
||||||
|
'Example of non-ASCII characters: çâýå, ρέθ, ñäé, ēļæ, глж, เซิร์, 警告.',
|
||||||
|
'Please make sure FXServer is not in a path containing those characters.',
|
||||||
|
`If on Windows, we suggest moving the artifact to "C:/fivemserver/${fxsVersion}/".`,
|
||||||
|
['FXServer path', fxsPath],
|
||||||
|
['txData path', dataPath],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Profile - not available as env var
|
||||||
|
let profileVar = nativeVars.txAdminProfile;
|
||||||
|
if (profileVar) {
|
||||||
|
profileVar = profileVar.replace(/[^a-z0-9._-]/gi, '');
|
||||||
|
if (profileVar.endsWith('.base')) {
|
||||||
|
fatalError.GlobalData(13, [
|
||||||
|
['Invalid server profile name', profileVar],
|
||||||
|
'Profile names cannot end with ".base".',
|
||||||
|
'It looks like you are trying to point to a server folder instead of a profile.',
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
if (!profileVar.length) {
|
||||||
|
fatalError.GlobalData(14, [
|
||||||
|
'Invalid server profile name.',
|
||||||
|
'If you are using Google Translate on the instructions page,',
|
||||||
|
'make sure there are no additional spaces in your command.',
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const profileName = profileVar ?? 'default';
|
||||||
|
const profilePath = cleanPath(path.join(dataPath, profileName));
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: ZAP & NETWORKING
|
||||||
|
*/
|
||||||
|
let zapVars: ReturnType<typeof getZapVars> | undefined;
|
||||||
|
if (!ignoreDeprecatedConfigs) {
|
||||||
|
//FIXME: ZAP doesn't need this anymore, remove ASAP
|
||||||
|
const zapCfgFilePath = path.join(dataPath, 'txAdminZapConfig.json');
|
||||||
|
try {
|
||||||
|
zapVars = getZapVars(zapCfgFilePath);
|
||||||
|
if (!_txDevEnv.ENABLED) fsp.unlink(zapCfgFilePath).catch(() => { });
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.GlobalData(9, 'Failed to load the ZAP-Hosting configuration.', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//No default, no convar/zap cfg
|
||||||
|
const txaUrl = hostVars.TXA_URL;
|
||||||
|
|
||||||
|
//txAdmin port
|
||||||
|
const txaPort = handleMultiVar(
|
||||||
|
'TXA_PORT',
|
||||||
|
hostEnvVarSchemas.TXA_PORT,
|
||||||
|
hostVars.TXA_PORT,
|
||||||
|
zapVars?.txAdminPort,
|
||||||
|
nativeVars.txAdminPort,
|
||||||
|
) ?? 40120;
|
||||||
|
|
||||||
|
//fxserver port
|
||||||
|
const fxsPort = handleMultiVar(
|
||||||
|
'FXS_PORT',
|
||||||
|
hostEnvVarSchemas.FXS_PORT,
|
||||||
|
hostVars.FXS_PORT,
|
||||||
|
zapVars?.forceFXServerPort,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
//Forced interface
|
||||||
|
const netInterface = handleMultiVar(
|
||||||
|
'INTERFACE',
|
||||||
|
hostEnvVarSchemas.INTERFACE,
|
||||||
|
hostVars.INTERFACE,
|
||||||
|
zapVars?.forceInterface,
|
||||||
|
nativeVars.txAdminInterface,
|
||||||
|
);
|
||||||
|
if (netInterface) {
|
||||||
|
addLocalIpAddress(netInterface);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: GENERAL
|
||||||
|
*/
|
||||||
|
const forceGameName = hostVars.GAME_NAME;
|
||||||
|
const hostApiToken = hostVars.API_TOKEN;
|
||||||
|
|
||||||
|
const forceMaxClients = handleMultiVar(
|
||||||
|
'MAX_SLOTS',
|
||||||
|
hostEnvVarSchemas.MAX_SLOTS,
|
||||||
|
hostVars.MAX_SLOTS,
|
||||||
|
zapVars?.deployerDefaults?.maxClients,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
const forceQuietMode = handleMultiVar(
|
||||||
|
'QUIET_MODE',
|
||||||
|
hostEnvVarSchemas.QUIET_MODE,
|
||||||
|
hostVars.QUIET_MODE,
|
||||||
|
zapVars?.deployerDefaults?.maxClients, //FIXME: looks like a copy-paste from the MAX_SLOTS block above
|
||||||
|
undefined,
|
||||||
|
) ?? false;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: PROVIDER
|
||||||
|
*/
|
||||||
|
const providerName = handleMultiVar(
|
||||||
|
'PROVIDER_NAME',
|
||||||
|
hostEnvVarSchemas.PROVIDER_NAME,
|
||||||
|
hostVars.PROVIDER_NAME,
|
||||||
|
zapVars?.providerName,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
const providerLogo = handleMultiVar(
|
||||||
|
'PROVIDER_LOGO',
|
||||||
|
hostEnvVarSchemas.PROVIDER_LOGO,
|
||||||
|
hostVars.PROVIDER_LOGO,
|
||||||
|
zapVars?.loginPageLogo,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: DEFAULTS
|
||||||
|
*/
|
||||||
|
const defaultDbHost = handleMultiVar(
|
||||||
|
'DEFAULT_DBHOST',
|
||||||
|
hostEnvVarSchemas.DEFAULT_DBHOST,
|
||||||
|
hostVars.DEFAULT_DBHOST,
|
||||||
|
zapVars?.deployerDefaults?.mysqlHost,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
const defaultDbPort = handleMultiVar(
|
||||||
|
'DEFAULT_DBPORT',
|
||||||
|
hostEnvVarSchemas.DEFAULT_DBPORT,
|
||||||
|
hostVars.DEFAULT_DBPORT,
|
||||||
|
zapVars?.deployerDefaults?.mysqlPort,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
const defaultDbUser = handleMultiVar(
|
||||||
|
'DEFAULT_DBUSER',
|
||||||
|
hostEnvVarSchemas.DEFAULT_DBUSER,
|
||||||
|
hostVars.DEFAULT_DBUSER,
|
||||||
|
zapVars?.deployerDefaults?.mysqlUser,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
const defaultDbPass = handleMultiVar(
|
||||||
|
'DEFAULT_DBPASS',
|
||||||
|
hostEnvVarSchemas.DEFAULT_DBPASS,
|
||||||
|
hostVars.DEFAULT_DBPASS,
|
||||||
|
zapVars?.deployerDefaults?.mysqlPassword,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
const defaultDbName = handleMultiVar(
|
||||||
|
'DEFAULT_DBNAME',
|
||||||
|
hostEnvVarSchemas.DEFAULT_DBNAME,
|
||||||
|
hostVars.DEFAULT_DBNAME,
|
||||||
|
zapVars?.deployerDefaults?.mysqlDatabase,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
//Default Master Account
|
||||||
|
type DefaultMasterAccount = {
|
||||||
|
username: string;
|
||||||
|
fivemId?: string;
|
||||||
|
password?: string;
|
||||||
|
} | {
|
||||||
|
username: string;
|
||||||
|
password: string;
|
||||||
|
} | undefined;
|
||||||
|
let defaultMasterAccount: DefaultMasterAccount;
|
||||||
|
const bcryptRegex = /^\$2[aby]\$[0-9]{2}\$[A-Za-z0-9./]{53}$/;
|
||||||
|
if (hostVars.DEFAULT_ACCOUNT) {
|
||||||
|
let [username, fivemId, password] = hostVars.DEFAULT_ACCOUNT.split(':') as (string | undefined)[];
|
||||||
|
if (username === '') username = undefined;
|
||||||
|
if (fivemId === '') fivemId = undefined;
|
||||||
|
if (password === '') password = undefined;
|
||||||
|
|
||||||
|
const errArr: [string, any][] = [
|
||||||
|
['Username', username],
|
||||||
|
['FiveM ID', fivemId],
|
||||||
|
['Password', password],
|
||||||
|
];
|
||||||
|
if (!username || !consts.regexValidFivemUsername.test(username)) {
|
||||||
|
fatalError.GlobalData(21, [
|
||||||
|
'Invalid default account username.',
|
||||||
|
'It should be a valid FiveM username.',
|
||||||
|
...errArr,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
if (fivemId && !consts.validIdentifierParts.fivem.test(fivemId)) {
|
||||||
|
fatalError.GlobalData(22, [
|
||||||
|
'Invalid default account FiveM ID.',
|
||||||
|
'It should match the number in the fivem:0000000 game identifier.',
|
||||||
|
...errArr,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
if (password && !bcryptRegex.test(password)) {
|
||||||
|
fatalError.GlobalData(23, [
|
||||||
|
'Invalid default account password.',
|
||||||
|
'Expected bcrypt hash.',
|
||||||
|
...errArr,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
if (!fivemId && !password) {
|
||||||
|
fatalError.GlobalData(24, [
|
||||||
|
'Invalid default account.',
|
||||||
|
'Expected at least the FiveM ID or password to be present.',
|
||||||
|
...errArr,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
defaultMasterAccount = {
|
||||||
|
username,
|
||||||
|
fivemId,
|
||||||
|
password,
|
||||||
|
};
|
||||||
|
} else if (zapVars?.defaultMasterAccount) {
|
||||||
|
const username = zapVars.defaultMasterAccount?.name;
|
||||||
|
const password = zapVars.defaultMasterAccount?.password_hash;
|
||||||
|
if (!consts.regexValidFivemUsername.test(username)) {
|
||||||
|
fatalError.GlobalData(25, [
|
||||||
|
'Invalid default account username.',
|
||||||
|
'It should be a valid FiveM username.',
|
||||||
|
['Username', username],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
if (!bcryptRegex.test(password)) {
|
||||||
|
fatalError.GlobalData(26, [
|
||||||
|
'Invalid default account password.',
|
||||||
|
'Expected bcrypt hash.',
|
||||||
|
['Hash', password],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
defaultMasterAccount = {
|
||||||
|
username: username,
|
||||||
|
password: password,
|
||||||
|
};
|
||||||
|
}
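For reference, a minimal sketch of the colon-separated format that the default account string above is parsed from; the `parseDefaultAccount` helper is hypothetical and only mirrors the inline logic:

```ts
//Hypothetical helper mirroring the inline DEFAULT_ACCOUNT parsing above (illustrative only).
//Format: "username:fivemId:password" - fivemId and password may be empty,
//but at least one of the two must be present, and the password must be a bcrypt hash.
const parseDefaultAccount = (raw: string) => {
    const [username, fivemId, password] = raw.split(':')
        .map((part) => part === '' ? undefined : part);
    if (!username) throw new Error('Missing username.');
    if (!fivemId && !password) throw new Error('Expected at least the FiveM ID or password.');
    return { username, fivemId, password };
};

//Example: password-only default account ('$2y$10$...' stands in for a real bcrypt hash)
//parseDefaultAccount('admin::$2y$10$abcdefghijklmnopqrstuv');
```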
|
||||||
|
|
||||||
|
//Default cfx key
|
||||||
|
const defaultCfxKey = handleMultiVar(
|
||||||
|
'DEFAULT_CFXKEY',
|
||||||
|
hostEnvVarSchemas.DEFAULT_CFXKEY,
|
||||||
|
hostVars.DEFAULT_CFXKEY,
|
||||||
|
zapVars?.deployerDefaults?.license,
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: FINAL SETUP
|
||||||
|
*/
|
||||||
|
if (ignoreDeprecatedConfigs) {
|
||||||
|
console.verbose.debug('TXHOST_IGNORE_DEPRECATED_CONFIGS is set to true. Ignoring deprecated configs.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const isPterodactyl = !isWindows && process.env?.TXADMIN_ENABLE === '1';
|
||||||
|
const isZapHosting = providerName === 'ZAP-Hosting';
|
||||||
|
|
||||||
|
//Quick config to disable ads
|
||||||
|
const displayAds = process.env?.TXHOST_TMP_HIDE_ADS !== 'true' || isPterodactyl || isZapHosting;
|
||||||
|
const adSchema = z.object({
|
||||||
|
img: z.string(),
|
||||||
|
url: z.string(),
|
||||||
|
}).nullable();
|
||||||
|
const adsDataSchema = z.object({
|
||||||
|
login: adSchema,
|
||||||
|
main: adSchema,
|
||||||
|
});
|
||||||
|
let adsData: z.infer<typeof adsDataSchema> = {
|
||||||
|
login: null,
|
||||||
|
main: null,
|
||||||
|
};
|
||||||
|
if (displayAds) {
|
||||||
|
try {
|
||||||
|
adsData = adsDataSchema.parse(defaultAds);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to load ads data.', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//FXServer Display Version
|
||||||
|
let fxsVersionTag = fxsVersion.toString();
|
||||||
|
if (fxsVerParsed.branch && fxsVerParsed.branch !== 'master') {
|
||||||
|
fxsVersionTag += '-ft';
|
||||||
|
}
|
||||||
|
if (isZapHosting) {
|
||||||
|
fxsVersionTag += '/ZAP';
|
||||||
|
} else if (isPterodactyl) {
|
||||||
|
fxsVersionTag += '/Ptero';
|
||||||
|
} else if (isWindows && fxsVerParsed.platform === 'windows') {
|
||||||
|
fxsVersionTag += '/Win';
|
||||||
|
} else if (!isWindows && fxsVerParsed.platform === 'linux') {
|
||||||
|
fxsVersionTag += '/Lin';
|
||||||
|
} else {
|
||||||
|
fxsVersionTag += '/Unk';
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Exports
|
||||||
|
*/
|
||||||
|
export const txDevEnv = Object.freeze(_txDevEnv);
|
||||||
|
|
||||||
|
export const txEnv = Object.freeze({
|
||||||
|
//Calculated
|
||||||
|
isWindows,
|
||||||
|
isPterodactyl, //TODO: remove, used only in HB Data
|
||||||
|
isZapHosting, //TODO: remove, used only in HB Data and authLogic to disable src check
|
||||||
|
displayAds,
|
||||||
|
adsData,
|
||||||
|
|
||||||
|
//Natives
|
||||||
|
fxsVersionTag,
|
||||||
|
fxsVersion,
|
||||||
|
txaVersion,
|
||||||
|
txaPath,
|
||||||
|
fxsPath,
|
||||||
|
|
||||||
|
//ConVar
|
||||||
|
profileName,
|
||||||
|
profilePath, //FIXME: replace by profileSubPath in most places
|
||||||
|
profileSubPath: (...parts: string[]) => path.join(profilePath, ...parts),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const txHostConfig = Object.freeze({
|
||||||
|
//General
|
||||||
|
dataPath,
|
||||||
|
dataSubPath: (...parts: string[]) => path.join(dataPath, ...parts),
|
||||||
|
hasCustomDataPath,
|
||||||
|
forceGameName,
|
||||||
|
forceMaxClients,
|
||||||
|
forceQuietMode,
|
||||||
|
hostApiToken,
|
||||||
|
|
||||||
|
//Networking
|
||||||
|
txaUrl,
|
||||||
|
txaPort,
|
||||||
|
fxsPort,
|
||||||
|
netInterface,
|
||||||
|
|
||||||
|
//Provider
|
||||||
|
providerName,
|
||||||
|
providerLogo,
|
||||||
|
sourceName: providerName ?? 'Host Config',
|
||||||
|
|
||||||
|
//Defaults
|
||||||
|
defaults: {
|
||||||
|
account: defaultMasterAccount,
|
||||||
|
cfxKey: defaultCfxKey,
|
||||||
|
dbHost: defaultDbHost,
|
||||||
|
dbPort: defaultDbPort,
|
||||||
|
dbUser: defaultDbUser,
|
||||||
|
dbPass: defaultDbPass,
|
||||||
|
dbName: defaultDbName,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
//DEBUG
|
||||||
|
// console.dir(txEnv, { compact: true });
|
||||||
|
// console.dir(txDevEnv, { compact: true });
|
||||||
|
// console.dir(txHostConfig, { compact: true });
|
83
core/index.ts
Normal file
|
@ -0,0 +1,83 @@
|
||||||
|
//NOTE: must be imported first to setup the environment
|
||||||
|
import { txEnv, txHostConfig } from './globalData';
|
||||||
|
import consoleFactory, { setTTYTitle } from '@lib/console';
|
||||||
|
|
||||||
|
//Can be imported after
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import checkPreRelease from './boot/checkPreRelease';
|
||||||
|
import fatalError from '@lib/fatalError';
|
||||||
|
import { ensureProfileStructure, setupProfile } from './boot/setup';
|
||||||
|
import setupProcessHandlers from './boot/setupProcessHandlers';
|
||||||
|
import bootTxAdmin from './txAdmin';
|
||||||
|
const console = consoleFactory();
|
||||||
|
|
||||||
|
|
||||||
|
//Early process stuff
|
||||||
|
try {
|
||||||
|
process.title = 'txAdmin'; //doesn't work for now
|
||||||
|
setupProcessHandlers();
|
||||||
|
setTTYTitle();
|
||||||
|
checkPreRelease();
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.Boot(0, 'Failed early process setup.', error);
|
||||||
|
}
|
||||||
|
console.log(`Starting txAdmin v${txEnv.txaVersion}/b${txEnv.fxsVersionTag}...`);
|
||||||
|
|
||||||
|
|
||||||
|
//Setting up txData & Profile
|
||||||
|
try {
|
||||||
|
if (!fs.existsSync(txHostConfig.dataPath)) {
|
||||||
|
fs.mkdirSync(txHostConfig.dataPath);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.Boot(1, [
|
||||||
|
`Failed to check or create the data folder.`,
|
||||||
|
['Path', txHostConfig.dataPath],
|
||||||
|
], error);
|
||||||
|
}
|
||||||
|
let isNewProfile = false;
|
||||||
|
try {
|
||||||
|
if (fs.existsSync(txEnv.profilePath)) {
|
||||||
|
ensureProfileStructure();
|
||||||
|
} else {
|
||||||
|
setupProfile();
|
||||||
|
isNewProfile = true;
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.Boot(2, [
|
||||||
|
`Failed to check or create the txAdmin profile folder.`,
|
||||||
|
['Data Path', txHostConfig.dataPath],
|
||||||
|
['Profile Name', txEnv.profileName],
|
||||||
|
['Profile Path', txEnv.profilePath],
|
||||||
|
], error);
|
||||||
|
}
|
||||||
|
if (isNewProfile && txEnv.profileName !== 'default') {
|
||||||
|
console.log(`Profile path: ${txEnv.profilePath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//Start txAdmin (have fun 😀)
|
||||||
|
try {
|
||||||
|
bootTxAdmin();
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.Boot(3, 'Failed to start txAdmin.', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//Freeze detector - starts after 15 seconds due to the initial bootup lag
|
||||||
|
const bootGracePeriod = 15_000;
|
||||||
|
const loopInterval = 500;
|
||||||
|
const loopElapsedLimit = 2_000;
|
||||||
|
setTimeout(() => {
|
||||||
|
let hdTimer = Date.now();
|
||||||
|
setInterval(() => {
|
||||||
|
const now = Date.now();
|
||||||
|
if (now - hdTimer > loopElapsedLimit) {
|
||||||
|
console.majorMultilineError([
|
||||||
|
'Major VPS freeze/lag detected!',
|
||||||
|
'THIS IS NOT AN ERROR CAUSED BY TXADMIN!',
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
hdTimer = now;
|
||||||
|
}, loopInterval);
|
||||||
|
}, bootGracePeriod);
|
47
core/lib/MemCache.ts
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
|
||||||
|
export default class MemCache<T = any> {
|
||||||
|
public readonly ttl: number;
|
||||||
|
public dataTimestamp: number | undefined;
|
||||||
|
private data: T | undefined;
|
||||||
|
|
||||||
|
constructor(ttlSeconds = 60) {
|
||||||
|
this.ttl = ttlSeconds * 1000; //converting to ms
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if the data is still valid or wipes it
|
||||||
|
*/
|
||||||
|
isValid() {
|
||||||
|
if (this.dataTimestamp === undefined) return false;
|
||||||
|
if (this.dataTimestamp < Date.now() - this.ttl) {
|
||||||
|
this.dataTimestamp = undefined;
|
||||||
|
this.data = undefined;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the cache
|
||||||
|
*/
|
||||||
|
set(data: T) {
|
||||||
|
this.dataTimestamp = Date.now();
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the cache if valid, or undefined
|
||||||
|
*/
|
||||||
|
get() {
|
||||||
|
if (this.dataTimestamp === undefined || this.data === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.isValid()) {
|
||||||
|
return cloneDeep<T>(this.data);
|
||||||
|
} else {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
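A brief usage sketch of the class above; the cached shape, TTL, and fetcher are illustrative and not part of this commit:

```ts
//Illustrative usage of MemCache (not part of this commit)
const statsCache = new MemCache<{ players: number }>(30); //30s TTL

const getServerStats = async () => {
    const cached = statsCache.get(); //returns a deep clone, or undefined if expired
    if (cached !== undefined) return cached;
    const fresh = await fetchServerStatsSomehow(); //hypothetical fetcher
    statsCache.set(fresh);
    return fresh;
};
```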
|
48
core/lib/console.test.ts
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
import { suite, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { processStdioWriteRaw, processStdioEnsureEol } from './console';
|
||||||
|
|
||||||
|
|
||||||
|
suite('processStdioWriteRaw & processStdioEnsureEol', () => {
|
||||||
|
let writeSpy: ReturnType<typeof vi.spyOn>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
writeSpy = vi.spyOn(process.stdout as any, 'write').mockImplementation(() => true);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
writeSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should write a non-newline string and then add a newline', () => {
|
||||||
|
processStdioWriteRaw("Hello");
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith("Hello");
|
||||||
|
processStdioEnsureEol();
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith('\n');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should write a string ending in newline without adding an extra one', () => {
|
||||||
|
processStdioWriteRaw("Hello\n");
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith("Hello\n");
|
||||||
|
writeSpy.mockClear();
|
||||||
|
processStdioEnsureEol();
|
||||||
|
expect(writeSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should write Uint8Array without trailing newline and then add one', () => {
|
||||||
|
const buffer = new Uint8Array([72, 101, 108, 108, 111]); // "Hello"
|
||||||
|
processStdioWriteRaw(buffer);
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith(buffer);
|
||||||
|
processStdioEnsureEol();
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith('\n');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should write Uint8Array with trailing newline and not add an extra one', () => {
|
||||||
|
const newline = 10;
|
||||||
|
const buffer = new Uint8Array([72, 101, 108, 108, 111, newline]); // "Hello\n"
|
||||||
|
processStdioWriteRaw(buffer);
|
||||||
|
expect(writeSpy).toHaveBeenCalledWith(buffer);
|
||||||
|
writeSpy.mockClear();
|
||||||
|
processStdioEnsureEol();
|
||||||
|
expect(writeSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
388
core/lib/console.ts
Normal file
|
@ -0,0 +1,388 @@
|
||||||
|
//NOTE: due to the monkey patching of the console, this should be imported before anything else
|
||||||
|
// which means in this file you cannot import anything from inside txAdmin to prevent cyclical dependencies
|
||||||
|
import { Console } from 'node:console';
|
||||||
|
import { InspectOptions } from 'node:util';
|
||||||
|
import { Writable } from 'node:stream';
|
||||||
|
import path from 'node:path';
|
||||||
|
import chalk, { ChalkInstance } from 'chalk';
|
||||||
|
import slash from 'slash';
|
||||||
|
import ErrorStackParser from 'error-stack-parser';
|
||||||
|
import sourceMapSupport from 'source-map-support';
|
||||||
|
|
||||||
|
|
||||||
|
//Buffer handler
|
||||||
|
//NOTE: the buffer will take between 64~72kb
|
||||||
|
const headBufferLimit = 8 * 1024; //8kb
|
||||||
|
const bodyBufferLimit = 64 * 1024; //64kb
|
||||||
|
const bodyTrimSliceSize = 8 * 1024;
|
||||||
|
const BUFFER_CUT_WARNING = chalk.bgRgb(255, 69, 0)('[!] The log body was sliced to prevent memory exhaustion. [!]');
|
||||||
|
const DEBUG_COLOR = chalk.bgHex('#FF45FF');
|
||||||
|
let headBuffer = '';
|
||||||
|
let bodyBuffer = '';
|
||||||
|
|
||||||
|
const writeToBuffer = (chunk: string) => {
|
||||||
|
//if head not full yet
|
||||||
|
if (headBuffer.length + chunk.length < headBufferLimit) {
|
||||||
|
headBuffer += chunk;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//write to body and trim if needed
|
||||||
|
bodyBuffer += chunk;
|
||||||
|
if (bodyBuffer.length > bodyBufferLimit) {
|
||||||
|
let trimmedBody = bodyBuffer.slice(bodyTrimSliceSize - bodyBufferLimit);
|
||||||
|
trimmedBody = trimmedBody.substring(trimmedBody.indexOf('\n'));
|
||||||
|
bodyBuffer = `\n${BUFFER_CUT_WARNING}\n${trimmedBody}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getLogBuffer = () => headBuffer + bodyBuffer;
|
||||||
|
|
||||||
|
|
||||||
|
//Variables
|
||||||
|
const header = 'tx';
|
||||||
|
let stackPathAlias: { path: string, alias: string } | undefined;
|
||||||
|
let _txAdminVersion: string | undefined;
|
||||||
|
let _verboseFlag = false;
|
||||||
|
|
||||||
|
export const setConsoleEnvData = (
|
||||||
|
txAdminVersion: string,
|
||||||
|
txAdminResourcePath: string,
|
||||||
|
isDevMode: boolean,
|
||||||
|
isVerbose: boolean,
|
||||||
|
) => {
|
||||||
|
_txAdminVersion = txAdminVersion;
|
||||||
|
_verboseFlag = isVerbose;
|
||||||
|
if (isDevMode) {
|
||||||
|
sourceMapSupport.install();
|
||||||
|
//for some reason when using sourcemap it ends up with core/core/
|
||||||
|
stackPathAlias = {
|
||||||
|
path: txAdminResourcePath + '/core',
|
||||||
|
alias: '@monitor',
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
stackPathAlias = {
|
||||||
|
path: txAdminResourcePath,
|
||||||
|
alias: '@monitor',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* STDOUT EOL helper
|
||||||
|
*/
|
||||||
|
let stdioEolPending = false;
|
||||||
|
export const processStdioWriteRaw = (buffer: Uint8Array | string) => {
|
||||||
|
if (!buffer.length) return;
|
||||||
|
const comparator = typeof buffer === 'string' ? '\n' : 10;
|
||||||
|
stdioEolPending = buffer[buffer.length - 1] !== comparator;
|
||||||
|
process.stdout.write(buffer);
|
||||||
|
}
|
||||||
|
export const processStdioEnsureEol = () => {
|
||||||
|
if (stdioEolPending) {
|
||||||
|
process.stdout.write('\n');
|
||||||
|
stdioEolPending = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* New console and streams
|
||||||
|
*/
|
||||||
|
const defaultStream = new Writable({
|
||||||
|
decodeStrings: true,
|
||||||
|
defaultEncoding: 'utf8',
|
||||||
|
highWaterMark: 64 * 1024,
|
||||||
|
write(chunk, encoding, callback) {
|
||||||
|
writeToBuffer(chunk)
|
||||||
|
process.stdout.write(chunk);
|
||||||
|
callback();
|
||||||
|
},
|
||||||
|
});
|
||||||
|
const verboseStream = new Writable({
|
||||||
|
decodeStrings: true,
|
||||||
|
defaultEncoding: 'utf8',
|
||||||
|
highWaterMark: 64 * 1024,
|
||||||
|
write(chunk, encoding, callback) {
|
||||||
|
writeToBuffer(chunk)
|
||||||
|
if (_verboseFlag) process.stdout.write(chunk);
|
||||||
|
callback();
|
||||||
|
},
|
||||||
|
});
|
||||||
|
const defaultConsole = new Console({
|
||||||
|
//@ts-ignore some weird change from node v16 to v22, check after update
|
||||||
|
stdout: defaultStream,
|
||||||
|
stderr: defaultStream,
|
||||||
|
colorMode: true,
|
||||||
|
});
|
||||||
|
const verboseConsole = new Console({
|
||||||
|
//@ts-ignore some weird change from node v16 to v22, check after update
|
||||||
|
stdout: verboseStream,
|
||||||
|
stderr: verboseStream,
|
||||||
|
colorMode: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns current ts in h23 format
|
||||||
|
* FIXME: same thing as utils/misc.ts getTimeHms
|
||||||
|
*/
|
||||||
|
export const getTimestamp = () => (new Date).toLocaleString(
|
||||||
|
undefined,
|
||||||
|
{ timeStyle: 'medium', hourCycle: 'h23' }
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates the colored log prefix (ts+tags)
|
||||||
|
*/
|
||||||
|
export const genLogPrefix = (currContext: string, color: ChalkInstance) => {
|
||||||
|
return color.black(`[${getTimestamp()}][${currContext}]`);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
//Dir helpers
|
||||||
|
const cleanPath = (x: string) => slash(path.normalize(x));
|
||||||
|
const ERR_STACK_PREFIX = chalk.redBright(' => ');
|
||||||
|
const DIVIDER_SIZE = 60;
|
||||||
|
const DIVIDER_CHAR = '=';
|
||||||
|
const DIVIDER = DIVIDER_CHAR.repeat(DIVIDER_SIZE);
|
||||||
|
const DIR_DIVIDER = chalk.cyan(DIVIDER);
|
||||||
|
const specialsColor = chalk.rgb(255, 228, 181).italic;
|
||||||
|
const lawngreenColor = chalk.rgb(124, 252, 0);
|
||||||
|
const orangeredColor = chalk.rgb(255, 69, 0);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses an error and returns string with prettified error and stack
|
||||||
|
* The stack filters out node modules and aliases monitor folder
|
||||||
|
*/
|
||||||
|
const getPrettyError = (error: Error, multilineError?: boolean) => {
|
||||||
|
const out: string[] = [];
|
||||||
|
const prefixStr = `[${getTimestamp()}][tx]`;
|
||||||
|
let prefixColor = chalk.redBright;
|
||||||
|
let nameColor = chalk.redBright;
|
||||||
|
if (error.name === 'ExperimentalWarning') {
|
||||||
|
prefixColor = chalk.bgYellow.black;
|
||||||
|
nameColor = chalk.yellowBright;
|
||||||
|
} else if (multilineError) {
|
||||||
|
prefixColor = chalk.bgRed.black;
|
||||||
|
}
|
||||||
|
const prefix = prefixColor(prefixStr) + ' ';
|
||||||
|
|
||||||
|
//banner
|
||||||
|
out.push(prefix + nameColor(`${error.name}: `) + error.message);
|
||||||
|
if ('type' in error) out.push(prefix + nameColor('Type:') + ` ${error.type}`);
|
||||||
|
if ('code' in error) out.push(prefix + nameColor('Code:') + ` ${error.code}`);
|
||||||
|
|
||||||
|
//stack
|
||||||
|
if (typeof error.stack === 'string') {
|
||||||
|
const stackPrefix = multilineError ? prefix : ERR_STACK_PREFIX;
|
||||||
|
try {
|
||||||
|
for (const line of ErrorStackParser.parse(error)) {
|
||||||
|
if (line.fileName && line.fileName.startsWith('node:')) continue;
|
||||||
|
let outPath = cleanPath(line.fileName ?? 'unknown');
|
||||||
|
if(stackPathAlias){
|
||||||
|
outPath = outPath.replace(stackPathAlias.path, stackPathAlias.alias);
|
||||||
|
}
|
||||||
|
const outPos = chalk.blueBright(`${line.lineNumber}:${line.columnNumber}`);
|
||||||
|
const outName = chalk.yellowBright(line.functionName || '<unknown>');
|
||||||
|
if (!outPath.startsWith('@monitor/core')) {
|
||||||
|
out.push(chalk.dim(`${stackPrefix}${outPath} > ${outPos} > ${outName}`));
|
||||||
|
} else {
|
||||||
|
out.push(`${stackPrefix}${outPath} > ${outPos} > ${outName}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
out.push(`${prefix} Unable to parse error stack.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
out.push(`${prefix} Error stack not available.`);
|
||||||
|
}
|
||||||
|
return out.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop-in replacement for console.dir
|
||||||
|
*/
|
||||||
|
const dirHandler = (data: any, options?: TxInspectOptions, consoleInstance?: Console) => {
|
||||||
|
if (!consoleInstance) consoleInstance = defaultConsole;
|
||||||
|
|
||||||
|
if (data instanceof Error) {
|
||||||
|
consoleInstance.log(getPrettyError(data, options?.multilineError));
|
||||||
|
if (!options?.multilineError) consoleInstance.log();
|
||||||
|
} else {
|
||||||
|
consoleInstance.log(DIR_DIVIDER);
|
||||||
|
if (data === undefined) {
|
||||||
|
consoleInstance.log(specialsColor('> undefined'));
|
||||||
|
} else if (data === null) {
|
||||||
|
consoleInstance.log(specialsColor('> null'));
|
||||||
|
} else if (data instanceof Promise) {
|
||||||
|
consoleInstance.log(specialsColor('> Promise'));
|
||||||
|
} else if (typeof data === 'boolean') {
|
||||||
|
consoleInstance.log(data ? lawngreenColor('true') : orangeredColor('false'));
|
||||||
|
} else {
|
||||||
|
consoleInstance.dir(data, options);
|
||||||
|
}
|
||||||
|
consoleInstance.log(DIR_DIVIDER);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type TxInspectOptions = InspectOptions & {
|
||||||
|
multilineError?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleans the terminal
|
||||||
|
*/
|
||||||
|
export const cleanTerminal = () => {
|
||||||
|
process.stdout.write('.\n'.repeat(80) + '\x1B[2J\x1B[H');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets terminal title
|
||||||
|
*/
|
||||||
|
export const setTTYTitle = (title?: string) => {
|
||||||
|
const txVers = _txAdminVersion ? `txAdmin v${_txAdminVersion}` : 'txAdmin';
|
||||||
|
const out = title ? `${title} - txAdmin` : txVers;
|
||||||
|
process.stdout.write(`\x1B]0;${out}\x07`);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a custom log function with custom context and specific Console
|
||||||
|
*/
|
||||||
|
const getLogFunc = (
|
||||||
|
currContext: string,
|
||||||
|
color: ChalkInstance,
|
||||||
|
consoleInstance?: Console,
|
||||||
|
): LogFunction => {
|
||||||
|
return (message?: any, ...optParams: any) => {
|
||||||
|
if (!consoleInstance) consoleInstance = defaultConsole;
|
||||||
|
const prefix = genLogPrefix(currContext, color);
|
||||||
|
if (typeof message === 'string') {
|
||||||
|
return consoleInstance.log.call(null, `${prefix} ${message}`, ...optParams);
|
||||||
|
} else {
|
||||||
|
return consoleInstance.log.call(null, prefix, message, ...optParams);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Reused types
|
||||||
|
type LogFunction = typeof Console.prototype.log;
|
||||||
|
type DirFunction = (data: any, options?: TxInspectOptions) => void;
|
||||||
|
interface TxBaseLogTypes {
|
||||||
|
debug: LogFunction;
|
||||||
|
log: LogFunction;
|
||||||
|
ok: LogFunction;
|
||||||
|
warn: LogFunction;
|
||||||
|
error: LogFunction;
|
||||||
|
dir: DirFunction;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Factory for console.log drop-ins
|
||||||
|
*/
|
||||||
|
const consoleFactory = (ctx?: string, subCtx?: string): CombinedConsole => {
|
||||||
|
const currContext = [header, ctx, subCtx].filter(x => x).join(':');
|
||||||
|
const baseLogs: TxBaseLogTypes = {
|
||||||
|
debug: getLogFunc(currContext, DEBUG_COLOR),
|
||||||
|
log: getLogFunc(currContext, chalk.bgBlue),
|
||||||
|
ok: getLogFunc(currContext, chalk.bgGreen),
|
||||||
|
warn: getLogFunc(currContext, chalk.bgYellow),
|
||||||
|
error: getLogFunc(currContext, chalk.bgRed),
|
||||||
|
dir: (data: any, options?: TxInspectOptions & {}) => dirHandler.call(null, data, options),
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...defaultConsole,
|
||||||
|
...baseLogs,
|
||||||
|
tag: (subCtx: string) => consoleFactory(ctx, subCtx),
|
||||||
|
multiline: (text: string | string[], color: ChalkInstance) => {
|
||||||
|
if (!Array.isArray(text)) text = text.split('\n');
|
||||||
|
const prefix = genLogPrefix(currContext, color);
|
||||||
|
for (const line of text) {
|
||||||
|
defaultConsole.log(prefix, line);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prints a multiline error message with a red background
|
||||||
|
* @param text
|
||||||
|
*/
|
||||||
|
majorMultilineError: (text: string | (string | null)[]) => {
|
||||||
|
if (!Array.isArray(text)) text = text.split('\n');
|
||||||
|
const prefix = genLogPrefix(currContext, chalk.bgRed);
|
||||||
|
defaultConsole.log(prefix, DIVIDER);
|
||||||
|
for (const line of text) {
|
||||||
|
if (line) {
|
||||||
|
defaultConsole.log(prefix, line);
|
||||||
|
} else {
|
||||||
|
defaultConsole.log(prefix, DIVIDER);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defaultConsole.log(prefix, DIVIDER);
|
||||||
|
},
|
||||||
|
|
||||||
|
//Returns a set of log functions that will be executed after a delay
|
||||||
|
defer: (ms = 250) => ({
|
||||||
|
debug: (...args) => setTimeout(() => baseLogs.debug(...args), ms),
|
||||||
|
log: (...args) => setTimeout(() => baseLogs.log(...args), ms),
|
||||||
|
ok: (...args) => setTimeout(() => baseLogs.ok(...args), ms),
|
||||||
|
warn: (...args) => setTimeout(() => baseLogs.warn(...args), ms),
|
||||||
|
error: (...args) => setTimeout(() => baseLogs.error(...args), ms),
|
||||||
|
dir: (...args) => setTimeout(() => baseLogs.dir(...args), ms),
|
||||||
|
}),
|
||||||
|
|
||||||
|
//Log functions that will output to the verbose stream
|
||||||
|
verbose: {
|
||||||
|
debug: getLogFunc(currContext, DEBUG_COLOR, verboseConsole),
|
||||||
|
log: getLogFunc(currContext, chalk.bgBlue, verboseConsole),
|
||||||
|
ok: getLogFunc(currContext, chalk.bgGreen, verboseConsole),
|
||||||
|
warn: getLogFunc(currContext, chalk.bgYellow, verboseConsole),
|
||||||
|
error: getLogFunc(currContext, chalk.bgRed, verboseConsole),
|
||||||
|
dir: (data, options) => dirHandler.call(null, data, options, verboseConsole)
|
||||||
|
},
|
||||||
|
|
||||||
|
//Verbosity getter and explicit setter
|
||||||
|
get isVerbose() {
|
||||||
|
return _verboseFlag
|
||||||
|
},
|
||||||
|
setVerbose: (state: boolean) => {
|
||||||
|
_verboseFlag = !!state;
|
||||||
|
},
|
||||||
|
|
||||||
|
//Consts used by the fatalError util
|
||||||
|
DIVIDER,
|
||||||
|
DIVIDER_CHAR,
|
||||||
|
DIVIDER_SIZE,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
export default consoleFactory;
|
||||||
|
|
||||||
|
interface CombinedConsole extends TxConsole, Console {
|
||||||
|
dir: DirFunction;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TxConsole extends TxBaseLogTypes {
|
||||||
|
tag: (subCtx: string) => TxConsole;
|
||||||
|
multiline: (text: string | string[], color: ChalkInstance) => void;
|
||||||
|
majorMultilineError: (text: string | (string | null)[]) => void;
|
||||||
|
defer: (ms?: number) => TxBaseLogTypes;
|
||||||
|
verbose: TxBaseLogTypes;
|
||||||
|
readonly isVerbose: boolean;
|
||||||
|
setVerbose: (state: boolean) => void;
|
||||||
|
DIVIDER: string;
|
||||||
|
DIVIDER_CHAR: string;
|
||||||
|
DIVIDER_SIZE: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Replaces the global console with the new one
|
||||||
|
*/
|
||||||
|
global.console = consoleFactory();
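A short sketch of how a module would typically consume the factory above; the context name and messages are illustrative:

```ts
//Illustrative consumer of consoleFactory (not part of this commit)
const console = consoleFactory('WebServer');

console.ok('Webserver started.');                    //prefixed with [<time>][tx:WebServer]
console.tag('Router').warn('Route not found.');      //prefixed with [<time>][tx:WebServer:Router]
console.verbose.debug('Only printed when verbose.'); //always buffered, printed only if verbosity is on
console.defer(500).error('Printed ~500ms later.');   //scheduled via setTimeout
```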
|
331
core/lib/diagnostics.ts
Normal file
|
@ -0,0 +1,331 @@
|
||||||
|
const modulename = 'WebServer:DiagnosticsFuncs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import humanizeDuration, { HumanizerOptions } from 'humanize-duration';
|
||||||
|
import got from '@lib/got';
|
||||||
|
import getOsDistro from '@lib/host/getOsDistro.js';
|
||||||
|
import getHostUsage from '@lib/host/getHostUsage';
|
||||||
|
import pidUsageTree from '@lib/host/pidUsageTree.js';
|
||||||
|
import { txEnv, txHostConfig } from '@core/globalData';
|
||||||
|
import si from 'systeminformation';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { parseFxserverVersion } from '@lib/fxserver/fxsVersionParser';
|
||||||
|
import { getHeapStatistics } from 'node:v8';
|
||||||
|
import bytes from 'bytes';
|
||||||
|
import { msToDuration } from './misc';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Helpers
|
||||||
|
const MEGABYTE = 1024 * 1024;
|
||||||
|
type HostStaticDataType = {
|
||||||
|
nodeVersion: string,
|
||||||
|
username: string,
|
||||||
|
osDistro: string,
|
||||||
|
cpu: {
|
||||||
|
manufacturer: string;
|
||||||
|
brand: string;
|
||||||
|
speedMin: number;
|
||||||
|
speedMax: number;
|
||||||
|
physicalCores: number;
|
||||||
|
cores: number;
|
||||||
|
clockWarning: string;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
type HostDynamicDataType = {
|
||||||
|
cpuUsage: number;
|
||||||
|
memory: {
|
||||||
|
usage: number;
|
||||||
|
used: number;
|
||||||
|
total: number;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
type HostDataReturnType = {
|
||||||
|
static: HostStaticDataType,
|
||||||
|
dynamic?: HostDynamicDataType
|
||||||
|
} | { error: string };
|
||||||
|
let _hostStaticDataCache: HostStaticDataType;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the Processes Data.
|
||||||
|
* FIXME: migrate to use gwmi on windows by default
|
||||||
|
*/
|
||||||
|
export const getProcessesData = async () => {
|
||||||
|
type ProcDataType = {
|
||||||
|
pid: number;
|
||||||
|
ppid: number | string;
|
||||||
|
name: string;
|
||||||
|
cpu: number;
|
||||||
|
memory: number;
|
||||||
|
order: number;
|
||||||
|
}
|
||||||
|
const procList: ProcDataType[] = [];
|
||||||
|
try {
|
||||||
|
const txProcessId = process.pid;
|
||||||
|
const processes = await pidUsageTree(txProcessId);
|
||||||
|
|
||||||
|
//NOTE: Cleaning invalid processes that might show up in Linux
|
||||||
|
Object.keys(processes).forEach((pid) => {
|
||||||
|
if (processes[pid] === null) delete processes[pid];
|
||||||
|
});
|
||||||
|
|
||||||
|
//Foreach PID
|
||||||
|
Object.keys(processes).forEach((pid) => {
|
||||||
|
const curr = processes[pid];
|
||||||
|
const currPidInt = parseInt(pid);
|
||||||
|
|
||||||
|
//Define name and order
|
||||||
|
let procName;
|
||||||
|
let order = curr.timestamp || 1;
|
||||||
|
if (currPidInt === txProcessId) {
|
||||||
|
procName = 'txAdmin (inside FXserver)';
|
||||||
|
order = 0; //forcing order because all processes can start at the same second
|
||||||
|
} else if (curr.memory <= 10 * MEGABYTE) {
|
||||||
|
procName = 'FXServer MiniDump';
|
||||||
|
} else {
|
||||||
|
procName = 'FXServer';
|
||||||
|
}
|
||||||
|
|
||||||
|
procList.push({
|
||||||
|
pid: currPidInt,
|
||||||
|
ppid: (curr.ppid === txProcessId) ? `${txProcessId} (txAdmin)` : curr.ppid,
|
||||||
|
name: procName,
|
||||||
|
cpu: curr.cpu,
|
||||||
|
memory: curr.memory / MEGABYTE,
|
||||||
|
order: order,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as any).code === 'ENOENT') {
|
||||||
|
console.error('Failed to get processes tree usage data.');
|
||||||
|
if (txEnv.isWindows) {
|
||||||
|
console.error('This is probably because the `wmic` command is not available in your system.');
|
||||||
|
console.error('If you are on Windows 11 or Windows Server 2025, you can enable it in the "Windows Features" settings.');
|
||||||
|
} else {
|
||||||
|
console.error('This is probably because the `ps` command is not available in your system.');
|
||||||
|
console.error('This command is part of the `procps` package in most Linux distributions.');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
console.error('Error getting processes tree usage data.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Sort procList array
|
||||||
|
procList.sort((a, b) => a.order - b.order);
|
||||||
|
|
||||||
|
return procList;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the FXServer Data.
|
||||||
|
*/
|
||||||
|
export const getFXServerData = async () => {
|
||||||
|
//Check runner child state
|
||||||
|
const childState = txCore.fxRunner.child;
|
||||||
|
if (!childState?.isAlive) {
|
||||||
|
return { error: 'Server Offline' };
|
||||||
|
}
|
||||||
|
if (!childState?.netEndpoint) {
|
||||||
|
return { error: 'Server has no network endpoint' };
|
||||||
|
}
|
||||||
|
|
||||||
|
//Preparing request
|
||||||
|
const requestOptions = {
|
||||||
|
url: `http://${childState.netEndpoint}/info.json`,
|
||||||
|
maxRedirects: 0,
|
||||||
|
timeout: { request: 1500 },
|
||||||
|
retry: { limit: 0 },
|
||||||
|
};
|
||||||
|
|
||||||
|
//Making HTTP Request
|
||||||
|
let infoData: Record<string, any>;
|
||||||
|
try {
|
||||||
|
infoData = await got.get(requestOptions).json();
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to get FXServer information.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
return { error: 'Failed to retrieve FXServer data. <br>The server must be online for this operation. <br>Check the terminal for more information (if verbosity is enabled)' };
|
||||||
|
}
|
||||||
|
|
||||||
|
//Processing result
|
||||||
|
try {
|
||||||
|
const ver = parseFxserverVersion(infoData.server);
|
||||||
|
return {
|
||||||
|
error: false,
|
||||||
|
statusColor: 'success',
|
||||||
|
status: ' ONLINE ',
|
||||||
|
version: ver.valid ? `${ver.platform}:${ver.branch}:${ver.build}` : `${ver.platform ?? 'unknown'}:INVALID`,
|
||||||
|
versionMismatch: (ver.build !== txEnv.fxsVersion),
|
||||||
|
resources: infoData.resources.length,
|
||||||
|
onesync: (infoData.vars && infoData.vars.onesync_enabled === 'true') ? 'enabled' : 'disabled',
|
||||||
|
maxClients: (infoData.vars && infoData.vars.sv_maxClients) ? infoData.vars.sv_maxClients : '--',
|
||||||
|
txAdminVersion: (infoData.vars && infoData.vars['txAdmin-version']) ? infoData.vars['txAdmin-version'] : '--',
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to process FXServer information.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
return { error: 'Failed to process FXServer data. <br>Check the terminal for more information (if verbosity is enabled)' };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the Host Data.
|
||||||
|
*/
|
||||||
|
export const getHostData = async (): Promise<HostDataReturnType> => {
|
||||||
|
//Get and cache static information
|
||||||
|
if (!_hostStaticDataCache) {
|
||||||
|
//This errors out on pterodactyl egg
|
||||||
|
let osUsername = 'unknown';
|
||||||
|
try {
|
||||||
|
const userInfo = os.userInfo();
|
||||||
|
osUsername = userInfo.username;
|
||||||
|
} catch (error) { }
|
||||||
|
|
||||||
|
try {
|
||||||
|
const cpuStats = await si.cpu();
|
||||||
|
const cpuSpeed = cpuStats.speedMin ?? cpuStats.speed;
|
||||||
|
|
||||||
|
//TODO: move this to frontend
|
||||||
|
let clockWarning = '';
|
||||||
|
if (cpuStats.cores < 8) {
|
||||||
|
if (cpuSpeed <= 2.4) {
|
||||||
|
clockWarning = '<span class="badge badge-danger"> VERY SLOW! </span>';
|
||||||
|
} else if (cpuSpeed < 3.0) {
|
||||||
|
clockWarning = '<span class="badge badge-warning"> SLOW </span>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_hostStaticDataCache = {
|
||||||
|
nodeVersion: process.version,
|
||||||
|
username: osUsername,
|
||||||
|
osDistro: await getOsDistro(),
|
||||||
|
cpu: {
|
||||||
|
manufacturer: cpuStats.manufacturer,
|
||||||
|
brand: cpuStats.brand,
|
||||||
|
speedMin: cpuSpeed,
|
||||||
|
speedMax: cpuStats.speedMax,
|
||||||
|
physicalCores: cpuStats.physicalCores,
|
||||||
|
cores: cpuStats.cores,
|
||||||
|
clockWarning,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting Host static data.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
return { error: 'Failed to retrieve host static data. <br>Check the terminal for more information (if verbosity is enabled)' };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Get dynamic info (mem/cpu usage) and prepare output
|
||||||
|
try {
|
||||||
|
const stats = await Promise.race([
|
||||||
|
getHostUsage(),
|
||||||
|
new Promise<null>((_, reject) => setTimeout(() => reject(new Error('Timeout')), 2500))
|
||||||
|
]);
|
||||||
|
if (stats) {
|
||||||
|
return {
|
||||||
|
static: _hostStaticDataCache,
|
||||||
|
dynamic: {
|
||||||
|
cpuUsage: stats.cpu.usage,
|
||||||
|
memory: {
|
||||||
|
usage: stats.memory.usage,
|
||||||
|
used: stats.memory.used,
|
||||||
|
total: stats.memory.total,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
static: _hostStaticDataCache,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting Host dynamic data.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
return { error: 'Failed to retrieve host dynamic data. <br>Check the terminal for more information (if verbosity is enabled)' };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the Host Static Data from cache.
|
||||||
|
*/
|
||||||
|
export const getHostStaticData = (): HostStaticDataType => {
|
||||||
|
if (!_hostStaticDataCache) {
|
||||||
|
throw new Error(`hostStaticDataCache not yet ready`);
|
||||||
|
}
|
||||||
|
return _hostStaticDataCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets txAdmin Data
|
||||||
|
*/
|
||||||
|
export const getTxAdminData = async () => {
|
||||||
|
const stats = txCore.metrics.txRuntime; //shortcut
|
||||||
|
const memoryUsage = getHeapStatistics();
|
||||||
|
|
||||||
|
let hostApiTokenState = 'not configured';
|
||||||
|
if (txHostConfig.hostApiToken === 'disabled') {
|
||||||
|
hostApiTokenState = 'disabled';
|
||||||
|
} else if (txHostConfig.hostApiToken) {
|
||||||
|
hostApiTokenState = 'configured';
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultFlags = Object.entries(txHostConfig.defaults).filter(([k, v]) => Boolean(v)).map(([k, v]) => k);
|
||||||
|
return {
|
||||||
|
//Stats
|
||||||
|
uptime: msToDuration(process.uptime() * 1000),
|
||||||
|
databaseFileSize: bytes(txCore.database.fileSize),
|
||||||
|
txHostConfig: {
|
||||||
|
...txHostConfig,
|
||||||
|
dataSubPath: undefined,
|
||||||
|
hostApiToken: hostApiTokenState,
|
||||||
|
defaults: defaultFlags,
|
||||||
|
},
|
||||||
|
txEnv: {
|
||||||
|
...txEnv,
|
||||||
|
adsData: undefined,
|
||||||
|
},
|
||||||
|
monitor: {
|
||||||
|
hbFails: {
|
||||||
|
http: stats.monitorStats.healthIssues.http,
|
||||||
|
fd3: stats.monitorStats.healthIssues.fd3,
|
||||||
|
},
|
||||||
|
restarts: {
|
||||||
|
bootTimeout: stats.monitorStats.restartReasons.bootTimeout,
|
||||||
|
close: stats.monitorStats.restartReasons.close,
|
||||||
|
heartBeat: stats.monitorStats.restartReasons.heartBeat,
|
||||||
|
healthCheck: stats.monitorStats.restartReasons.healthCheck,
|
||||||
|
both: stats.monitorStats.restartReasons.both,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
performance: {
|
||||||
|
banCheck: stats.banCheckTime.resultSummary('ms').summary,
|
||||||
|
whitelistCheck: stats.whitelistCheckTime.resultSummary('ms').summary,
|
||||||
|
playersTableSearch: stats.playersTableSearchTime.resultSummary('ms').summary,
|
||||||
|
historyTableSearch: stats.historyTableSearchTime.resultSummary('ms').summary,
|
||||||
|
databaseSave: stats.databaseSaveTime.resultSummary('ms').summary,
|
||||||
|
perfCollection: stats.perfCollectionTime.resultSummary('ms').summary,
|
||||||
|
},
|
||||||
|
logger: {
|
||||||
|
storageSize: (await txCore.logger.getStorageSize()).total,
|
||||||
|
statusAdmin: txCore.logger.admin.getUsageStats(),
|
||||||
|
statusFXServer: txCore.logger.fxserver.getUsageStats(),
|
||||||
|
statusServer: txCore.logger.server.getUsageStats(),
|
||||||
|
},
|
||||||
|
memoryUsage: {
|
||||||
|
heap_used: bytes(memoryUsage.used_heap_size),
|
||||||
|
heap_limit: bytes(memoryUsage.heap_size_limit),
|
||||||
|
heap_pct: (memoryUsage.heap_size_limit > 0)
|
||||||
|
? (memoryUsage.used_heap_size / memoryUsage.heap_size_limit * 100).toFixed(2)
|
||||||
|
: 0,
|
||||||
|
physical: bytes(memoryUsage.total_physical_size),
|
||||||
|
peak_malloced: bytes(memoryUsage.peak_malloced_memory),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
90
core/lib/fatalError.ts
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
|
||||||
|
import chalk from "chalk";
|
||||||
|
import consoleFactory from "./console";
|
||||||
|
import quitProcess from "./quitProcess";
|
||||||
|
const console = consoleFactory();
|
||||||
|
|
||||||
|
type ErrorLineSkipType = null | undefined | false;
|
||||||
|
type ErrorLineType = string | [desc: string, value: any] | ErrorLineSkipType;
|
||||||
|
type ErrorMsgType = ErrorLineType | ErrorLineType[];
|
||||||
|
|
||||||
|
const padStartEnd = (str: string): string => {
|
||||||
|
str = ` ${str} `;
|
||||||
|
const padStart = Math.ceil((console.DIVIDER_SIZE + str.length) / 2);
|
||||||
|
return str.padStart(padStart, '-').padEnd(console.DIVIDER_SIZE, '-');
|
||||||
|
}
|
||||||
|
|
||||||
|
const printSingleLine = (line: ErrorLineType): void => {
|
||||||
|
if (Array.isArray(line)) {
|
||||||
|
if (line.length === 2 && typeof line[0] === 'string') {
|
||||||
|
let value = typeof line[1] === 'string' ? line[1] : String(line[1]);
|
||||||
|
console.error(`${line[0]}: ${chalk.dim(value)}`);
|
||||||
|
} else {
|
||||||
|
console.error(JSON.stringify(line));
|
||||||
|
}
|
||||||
|
} else if (typeof line === 'string') {
|
||||||
|
console.error(line);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function fatalError(code: number, msg: ErrorMsgType, err?: any): never {
|
||||||
|
console.error(console.DIVIDER);
|
||||||
|
console.error(chalk.inverse(
|
||||||
|
padStartEnd(`FATAL ERROR: E${code}`)
|
||||||
|
));
|
||||||
|
console.error(console.DIVIDER);
|
||||||
|
if (Array.isArray(msg)) {
|
||||||
|
for (const line of msg) {
|
||||||
|
printSingleLine(line);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
printSingleLine(msg);
|
||||||
|
}
|
||||||
|
if (err) {
|
||||||
|
console.error('-'.repeat(console.DIVIDER_SIZE));
|
||||||
|
console.dir(err, { multilineError: true });
|
||||||
|
}
|
||||||
|
console.error(console.DIVIDER);
|
||||||
|
console.error(chalk.inverse(
|
||||||
|
padStartEnd('For support: https://discord.gg/txAdmin')
|
||||||
|
));
|
||||||
|
console.error(console.DIVIDER);
|
||||||
|
quitProcess(code);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
NOTE: Going above 1000 to avoid collision with default nodejs error codes
|
||||||
|
ref: https://nodejs.org/docs/latest-v22.x/api/process.html#exit-codes
|
||||||
|
|
||||||
|
1000 - global data
|
||||||
|
2000 - boot
|
||||||
|
2001 - txdata
|
||||||
|
2002 - setup profile throw
|
||||||
|
2003 - boot throw
|
||||||
|
2010 - expired
|
||||||
|
2011 - expired cron
|
||||||
|
2022 - txCore placeholder getter error
|
||||||
|
2023 - txCore placeholder setter error
|
||||||
|
|
||||||
|
5100 - config store
|
||||||
|
5300 - admin store
|
||||||
|
5400 - fxrunner
|
||||||
|
5600 - database
|
||||||
|
5700 - stats txruntime
|
||||||
|
5800 - webserver
|
||||||
|
*/
|
||||||
|
|
||||||
|
|
||||||
|
fatalError.GlobalData = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(1000 + code, msg, err);
|
||||||
|
fatalError.Boot = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(2000 + code, msg, err);
|
||||||
|
|
||||||
|
fatalError.ConfigStore = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5100 + code, msg, err);
|
||||||
|
fatalError.Translator = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5200 + code, msg, err);
|
||||||
|
fatalError.AdminStore = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5300 + code, msg, err);
|
||||||
|
// fatalError.FxRunner = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5400 + code, msg, err);
|
||||||
|
fatalError.Database = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5600 + code, msg, err);
|
||||||
|
fatalError.StatsTxRuntime = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5700 + code, msg, err);
|
||||||
|
fatalError.WebServer = (code: number, msg: ErrorMsgType, err?: any): never => fatalError(5800 + code, msg, err);
|
||||||
|
|
||||||
|
export default fatalError;
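A minimal sketch of how the namespaced helpers above are meant to be called, mirroring the call sites in globalData.ts and index.ts from this same commit:

```ts
//Illustrative call sites (this snippet assumes the same imports as core/index.ts)
//Exits the process with code 1021 (1000 + 21):
fatalError.GlobalData(21, [
    'Invalid default account username.',
    ['Username', 'not a valid username'],
]);

//Exits with code 2001, printing the caught error after the message lines:
try {
    fs.mkdirSync(txHostConfig.dataPath);
} catch (error) {
    fatalError.Boot(1, [
        'Failed to check or create the data folder.',
        ['Path', txHostConfig.dataPath],
    ], error);
}
```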
|
71
core/lib/fs.ts
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import type { Dirent } from 'node:fs';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if its possible to create a file in a folder
|
||||||
|
*/
|
||||||
|
export const canWriteToPath = async (targetPath: string) => {
|
||||||
|
try {
|
||||||
|
await fsp.access(path.dirname(targetPath), fs.constants.W_OK);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array of directory entries (files and directories) from the specified root path.
|
||||||
|
*/
|
||||||
|
export const getPathContent = async (root: string, filter?: (entry: Dirent) => boolean) => {
|
||||||
|
const stats = await fsp.stat(root);
|
||||||
|
if (!stats.isDirectory()) {
|
||||||
|
throw new Error(`Path '${root}' is not a directory`);
|
||||||
|
}
|
||||||
|
const allEntries = await fsp.readdir(root, { withFileTypes: true });
|
||||||
|
return allEntries.filter((entry) => (
|
||||||
|
(entry.isFile() || entry.isDirectory())
|
||||||
|
&& (filter ? filter(entry) : true)
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array of file entries from the specified root path.
|
||||||
|
*/
|
||||||
|
export const getPathFiles = async (root: string, filter?: (entry: Dirent) => boolean) => {
|
||||||
|
const entries = await getPathContent(root, filter);
|
||||||
|
return entries.filter((entry) => entry.isFile());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array of subdirectory entries from the specified root path.
|
||||||
|
*/
|
||||||
|
export const getPathSubdirs = async (root: string, filter?: (entry: Dirent) => boolean) => {
|
||||||
|
const entries = await getPathContent(root, filter);
|
||||||
|
return entries.filter((entry) => entry.isDirectory());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a user-friendly markdown error message for filesystem operations.
|
||||||
|
* Handles common cases like Windows paths on Linux and permission issues.
|
||||||
|
*/
|
||||||
|
export const getFsErrorMdMessage = (error: any, targetPath: string) => {
|
||||||
|
if(typeof error.message !== 'string') return 'unknown error';
|
||||||
|
|
||||||
|
if (!txEnv.isWindows && /^[a-zA-Z]:[\\/]/.test(targetPath)) {
|
||||||
|
return `Looks like you're using a Windows path on a Linux server.\nThis likely means you are attempting to use a path from your computer on a remote server.\nIf you want to use your local files, you will first need to upload them to the server.`;
|
||||||
|
} else if (error.message?.includes('ENOENT')) {
|
||||||
|
return `The path provided does not exist:\n\`${targetPath}\``;
|
||||||
|
} else if (error.message?.includes('EACCES') || error.message?.includes('EPERM')) {
|
||||||
|
return `The path provided is not accessible:\n\`${targetPath}\``;
|
||||||
|
}
|
||||||
|
|
||||||
|
return error.message as string;
|
||||||
|
}
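A short sketch composing the helpers above; the paths and filters are illustrative:

```ts
//Illustrative usage of the fs helpers (not part of this commit)
const serverDataPath = '/srv/serverdata'; //hypothetical path

//List resource folders, skipping category folders like "[cfx-default]"
const resourceDirs = await getPathSubdirs(
    path.join(serverDataPath, 'resources'),
    (entry) => !entry.name.startsWith('['),
);

//Check if a file could be created in the server data folder before attempting it
if (!await canWriteToPath(path.join(serverDataPath, 'server.cfg'))) {
    console.error('Cannot write to the server data folder.');
}
```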
|
776
core/lib/fxserver/fxsConfigHelper.ts
Normal file
|
@ -0,0 +1,776 @@
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import path from 'node:path';
|
||||||
|
import isLocalhost from 'is-localhost-ip';
|
||||||
|
import { txHostConfig } from '@core/globalData';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
const console = consoleFactory();
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detect the dominant newline character of a string.
|
||||||
|
* Extracted from https://www.npmjs.com/package/detect-newline
|
||||||
|
*/
|
||||||
|
const detectNewline = (str: string) => {
|
||||||
|
if (typeof str !== 'string') {
|
||||||
|
throw new TypeError('Expected a string');
|
||||||
|
}
|
||||||
|
|
||||||
|
const newlines = str.match(/(?:\r?\n)/g) || [];
|
||||||
|
|
||||||
|
if (newlines.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const crlf = newlines.filter((newline) => newline === '\r\n').length;
|
||||||
|
const lf = newlines.length - crlf;
|
||||||
|
|
||||||
|
return crlf > lf ? '\r\n' : '\n';
|
||||||
|
};
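A couple of illustrative calls to the helper above:

```ts
//Illustrative only
detectNewline('a\r\nb\r\nc\n'); //returns '\r\n' (CRLF is dominant)
detectNewline('a\nb\nc');       //returns '\n'
detectNewline('single line');   //returns undefined
```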
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to store commands
|
||||||
|
*/
|
||||||
|
class Command {
|
||||||
|
readonly command: string;
|
||||||
|
readonly args: string[];
|
||||||
|
readonly file: string;
|
||||||
|
readonly line: number;
|
||||||
|
|
||||||
|
constructor(tokens: string[], filePath: string, fileLine: number) {
|
||||||
|
if (!Array.isArray(tokens) || tokens.length < 1) {
|
||||||
|
throw new Error('Invalid command format');
|
||||||
|
}
|
||||||
|
if (typeof tokens[0] === 'string' && tokens[0].length) {
|
||||||
|
this.command = tokens[0].toLocaleLowerCase();
|
||||||
|
} else {
|
||||||
|
this.command = 'invalid_empty_command';
|
||||||
|
}
|
||||||
|
this.args = tokens.slice(1);
|
||||||
|
this.file = filePath;
|
||||||
|
this.line = fileLine;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Kinda confusing name, but it returns the value of a set if it's for that one var
|
||||||
|
isConvarSetterFor(varname: string) {
|
||||||
|
if (
|
||||||
|
['set', 'sets', 'setr'].includes(this.command)
|
||||||
|
&& this.args.length === 2
|
||||||
|
&& this.args[0].toLowerCase() === varname.toLowerCase()
|
||||||
|
) {
|
||||||
|
return this.args[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
this.command === varname.toLowerCase()
|
||||||
|
&& this.args.length === 1
|
||||||
|
) {
|
||||||
|
return this.args[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
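A tiny sketch of what the setter-detection method above matches; the tokens are illustrative:

```ts
//Illustrative only - both forms below are detected as setting the 'onesync' convar
const viaSet = new Command(['set', 'onesync', 'legacy'], 'server.cfg', 10);
const direct = new Command(['onesync', 'legacy'], 'server.cfg', 11);
viaSet.isConvarSetterFor('onesync');  //returns 'legacy'
direct.isConvarSetterFor('onesync');  //returns 'legacy'
new Command(['sv_hostname', 'My Server'], 'server.cfg', 12).isConvarSetterFor('onesync'); //returns false
```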
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper function to store exec errors
|
||||||
|
*/
|
||||||
|
class ExecRecursionError {
|
||||||
|
constructor(readonly file: string, readonly message: string, readonly line: number) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class to store file TODOs, errors and warnings
|
||||||
|
*/
|
||||||
|
class FilesInfoList {
|
||||||
|
readonly store: Record<string, [number | false, string][]> = {};
|
||||||
|
|
||||||
|
add(file: string, line: number | false, msg: string) {
|
||||||
|
if (Array.isArray(this.store[file])) {
|
||||||
|
this.store[file].push([line, msg]);
|
||||||
|
} else {
|
||||||
|
this.store[file] = [[line, msg]];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
count() {
|
||||||
|
return Object.keys(this.store).length;
|
||||||
|
}
|
||||||
|
toJSON() {
|
||||||
|
return this.store;
|
||||||
|
}
|
||||||
|
toMarkdown(hasHostConfig = false) {
|
||||||
|
const files = Object.keys(this.store);
|
||||||
|
if (!files.length) return null;
|
||||||
|
|
||||||
|
const msgLines = [];
|
||||||
|
for (const file of files) {
|
||||||
|
const fileInfos = this.store[file];
|
||||||
|
msgLines.push(`\`${file}\`:`);
|
||||||
|
for (const [line, msg] of fileInfos) {
|
||||||
|
const linePrefix = line ? `Line ${line}: ` : '';
|
||||||
|
const indentedMsg = msg.replaceAll(/\n\t/gm, '\n\t- ');
|
||||||
|
msgLines.push(`- ${linePrefix}${indentedMsg}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (hasHostConfig) {
|
||||||
|
msgLines.push(''); //blank line so the warning doesn't join the list
|
||||||
|
msgLines.push(`**Some of the configuration above is controlled by ${txHostConfig.sourceName}.**`);
|
||||||
|
}
|
||||||
|
return msgLines.join('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the first likely server.cfg given a server data path, or false
|
||||||
|
*/
|
||||||
|
export const findLikelyCFGPath = (serverDataPath: string) => {
|
||||||
|
const commonCfgFileNames = [
|
||||||
|
'server.cfg',
|
||||||
|
'server.cfg.txt',
|
||||||
|
'server.cfg.cfg',
|
||||||
|
'server.txt',
|
||||||
|
'server',
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const cfgFileName of commonCfgFileNames) {
|
||||||
|
const absoluteCfgPath = path.join(serverDataPath, cfgFileName);
|
||||||
|
try {
|
||||||
|
if (fs.lstatSync(absoluteCfgPath).isFile()) {
|
||||||
|
return cfgFileName;
|
||||||
|
}
|
||||||
|
} catch (error) { }
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the absolute path of the given CFG Path
|
||||||
|
*/
|
||||||
|
export const resolveCFGFilePath = (cfgPath: string, dataPath: string) => {
|
||||||
|
return (path.isAbsolute(cfgPath)) ? path.normalize(cfgPath) : path.resolve(dataPath, cfgPath);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reads CFG Path and return the file contents, or throw error if:
|
||||||
|
* - the path is not valid (must be absolute)
|
||||||
|
* - cannot read the file data
|
||||||
|
*/
|
||||||
|
export const readRawCFGFile = async (cfgPath: string) => {
|
||||||
|
//Validating if the path is absolute
|
||||||
|
if (!path.isAbsolute(cfgPath)) {
|
||||||
|
throw new Error('File path must be absolute.');
|
||||||
|
}
|
||||||
|
|
||||||
|
//Validating file existence
|
||||||
|
if (!fs.existsSync(cfgPath)) {
|
||||||
|
throw new Error("File doesn't exist or its unreadable.");
|
||||||
|
}
|
||||||
|
|
||||||
|
//Validating if its actually a file
|
||||||
|
if (!fs.lstatSync(cfgPath).isFile()) {
|
||||||
|
throw new Error("File doesn't exist or its unreadable. Make sure to include the CFG file in the path, and not just the directory that contains it.");
|
||||||
|
}
|
||||||
|
|
||||||
|
//Reading file
|
||||||
|
try {
|
||||||
|
return await fsp.readFile(cfgPath, 'utf8');
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error('Cannot read CFG file.');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a cfg/console line and return an array of commands with tokens.
|
||||||
|
* Notable difference: we don't handle inline block comment
|
||||||
|
* Original Line Parser:
|
||||||
|
* fivem/code/client/citicore/console/Console.cpp > ProgramArguments Tokenize
|
||||||
|
*/
|
||||||
|
export const readLineCommands = (input: string) => {
|
||||||
|
let inQuote = false;
|
||||||
|
let inEscape = false;
|
||||||
|
const prevCommands = [];
|
||||||
|
let currCommand = [];
|
||||||
|
let currToken = '';
|
||||||
|
for (let i = 0; i < input.length; i++) {
|
||||||
|
if (inEscape) {
|
||||||
|
if (input[i] === '"' || input[i] === '\\') {
|
||||||
|
currToken += input[i];
|
||||||
|
}
|
||||||
|
inEscape = false;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!currToken.length) {
|
||||||
|
if (
|
||||||
|
input.slice(i, i + 2) === '//'
|
||||||
|
|| input[i] === '#'
|
||||||
|
) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!inQuote && input.charCodeAt(i) <= 32) {
|
||||||
|
if (currToken.length) {
|
||||||
|
currCommand.push(currToken);
|
||||||
|
currToken = '';
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (input[i] === '"') {
|
||||||
|
if (inQuote) {
|
||||||
|
currCommand.push(currToken);
|
||||||
|
currToken = '';
|
||||||
|
inQuote = false;
|
||||||
|
} else {
|
||||||
|
inQuote = true;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (input[i] === '\\') {
|
||||||
|
inEscape = true;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!inQuote && input[i] === ';') {
|
||||||
|
if (currToken.length) {
|
||||||
|
currCommand.push(currToken);
|
||||||
|
currToken = '';
|
||||||
|
}
|
||||||
|
prevCommands.push(currCommand);
|
||||||
|
currCommand = [];
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
|
currToken += input[i];
|
||||||
|
}
|
||||||
|
if (currToken.length) {
|
||||||
|
currCommand.push(currToken);
|
||||||
|
}
|
||||||
|
prevCommands.push(currCommand);
|
||||||
|
|
||||||
|
return prevCommands;
|
||||||
|
};
|
||||||
|
//NOTE: tests for the parser above
|
||||||
|
// import chalk from 'chalk';
|
||||||
|
// const testCommands = [
|
||||||
|
// ' \x1B ONE_ARG_WITH_SPACE "part1 part2"',
|
||||||
|
// 'TWO_ARGS arg1 arg2',
|
||||||
|
// 'ONE_ARG_WITH_SPACE_SEMICOLON "arg mid;cut"',
|
||||||
|
// 'ESCAPED_QUOTE "aa\\"bb"',
|
||||||
|
// 'ESCAPED_ESCAPE "aa\\\\bb"',
|
||||||
|
// 'ESCAPED_X "aa\\xbb"',
|
||||||
|
// // 'NO_CLOSING_QUOTE "aa',
|
||||||
|
// // 'SHOW_AB_C aaa#bbb ccc',
|
||||||
|
// // 'COMMENT //anything noshow',
|
||||||
|
// 'COMMENT #anything noshow',
|
||||||
|
// 'noshow2',
|
||||||
|
// ];
|
||||||
|
// const parsed = readLineCommands(testCommands.join(';'));
|
||||||
|
// for (const commandTokens of parsed) {
|
||||||
|
// console.log(`${commandTokens[0]}:`);
|
||||||
|
// commandTokens.slice(1).forEach((token) => {
|
||||||
|
// console.log(chalk.inverse(token));
|
||||||
|
// });
|
||||||
|
// console.log('\n');
|
||||||
|
// }
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursively parse server.cfg files loosely based on the FXServer original parser.
|
||||||
|
* Notable differences: we have a recursion depth limit, and no JSON parsing
|
||||||
|
* Original CFG (console) parser:
|
||||||
|
* fivem/code/client/citicore/console/Console.cpp > Context::ExecuteBuffer
|
||||||
|
*
|
||||||
|
* FIXME: support `@resource/whatever.cfg` syntax
|
||||||
|
*/
|
||||||
|
export const parseRecursiveConfig = async (
|
||||||
|
cfgInputString: string | null, //cfg string, or null to load from file
|
||||||
|
cfgAbsolutePath: string,
|
||||||
|
serverDataPath: string,
|
||||||
|
stack?: string[]
|
||||||
|
) => {
|
||||||
|
if (typeof cfgInputString !== 'string' && cfgInputString !== null) {
|
||||||
|
throw new Error('cfgInputString expected to be string or null');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure safe stack
|
||||||
|
const MAX_DEPTH = 5;
|
||||||
|
if (!Array.isArray(stack)) {
|
||||||
|
stack = [];
|
||||||
|
} else if (stack.length >= MAX_DEPTH) {
|
||||||
|
throw new Error(`cfg 'exec' command depth above ${MAX_DEPTH}`);
|
||||||
|
} else if (stack.includes(cfgAbsolutePath)) {
|
||||||
|
throw new Error(`cfg cyclical 'exec' command detected to file ${cfgAbsolutePath}`); //should block
|
||||||
|
}
|
||||||
|
stack.push(cfgAbsolutePath);
|
||||||
|
|
||||||
|
// Read raw config and split lines
|
||||||
|
const cfgData = cfgInputString ?? await readRawCFGFile(cfgAbsolutePath);
|
||||||
|
const cfgLines = cfgData.split('\n');
|
||||||
|
|
||||||
|
// Parse CFG lines
|
||||||
|
const parsedCommands: (Command | ExecRecursionError)[] = [];
|
||||||
|
for (let i = 0; i < cfgLines.length; i++) {
|
||||||
|
const lineString = cfgLines[i].trim();
|
||||||
|
const lineNumber = i + 1;
|
||||||
|
const lineCommands = readLineCommands(lineString);
|
||||||
|
|
||||||
|
// For each command in that line
|
||||||
|
for (const cmdTokens of lineCommands) {
|
||||||
|
if (!cmdTokens.length) continue;
|
||||||
|
const cmdObject = new Command(cmdTokens, cfgAbsolutePath, lineNumber);
|
||||||
|
parsedCommands.push(cmdObject);
|
||||||
|
|
||||||
|
// If exec command, process recursively then flatten the output
|
||||||
|
if (cmdObject.command === 'exec' && typeof cmdObject.args[0] === 'string') {
|
||||||
|
//FIXME: temporarily disable resoure references
|
||||||
|
if (!cmdObject.args[0].startsWith('@')) {
|
||||||
|
const recursiveCfgAbsolutePath = resolveCFGFilePath(cmdObject.args[0], serverDataPath);
|
||||||
|
try {
|
||||||
|
const extractedCommands = await parseRecursiveConfig(null, recursiveCfgAbsolutePath, serverDataPath, stack);
|
||||||
|
parsedCommands.push(...extractedCommands);
|
||||||
|
} catch (error) {
|
||||||
|
parsedCommands.push(new ExecRecursionError(cfgAbsolutePath, (error as Error).message, lineNumber));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
stack.pop();
|
||||||
|
return parsedCommands;
|
||||||
|
};
|
||||||
|
|
||||||
|
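//NOTE: hypothetical usage sketch for the recursive parser above — the paths below are
//examples only, not values taken from this repo:
// const commands = await parseRecursiveConfig(null, 'E:/serverData/server.cfg', 'E:/serverData');
// for (const cmd of commands) {
//     if (cmd instanceof ExecRecursionError) continue;
//     console.log(cmd.command, cmd.args);
// }
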
type EndpointsObjectType = Record<string, { tcp?: true; udp?: true; }>

/**
 * Validates a list of parsed commands to return endpoints, errors, warnings and lines to comment out
 */
const validateCommands = async (parsedCommands: (ExecRecursionError | Command)[]) => {
    const checkedInterfaces = new Map();
    let detectedGameName: string | undefined;
    const requiredGameName = txHostConfig.forceGameName
        ? txHostConfig.forceGameName === 'fivem' ? 'gta5' : 'rdr3'
        : undefined;

    //To return
    let hasHostConfigMessage = false;
    let hasEndpointCommand = false;
    const endpoints: EndpointsObjectType = {};
    const errors = new FilesInfoList();
    const warnings = new FilesInfoList();
    const toCommentOut = new FilesInfoList();

    for (const cmd of parsedCommands) {
        //In case of error
        if (cmd instanceof ExecRecursionError) {
            warnings.add(cmd.file, cmd.line, cmd.message);
            continue;
        }

        //Check for +set
        if (['+set', '+setr', '+sets'].includes(cmd.command)) {
            const msg = `Line ${cmd.line}: remove the '+' from '${cmd.command}', as this is not a launch parameter.`;
            warnings.add(cmd.file, cmd.line, msg);
            continue;
        }

        //Check for start/stop/ensure txAdmin/txAdminClient/monitor
        if (
            ['start', 'stop', 'ensure'].includes(cmd.command)
            && cmd.args.length >= 1
            && ['txadmin', 'txadminclient', 'monitor'].includes(cmd.args[0].toLowerCase())
        ) {
            toCommentOut.add(
                cmd.file,
                cmd.line,
                'you MUST NOT start/stop/ensure txadmin resources.',
            );
            continue;
        }

        //Check sv_maxClients against TXHOST config
        const isMaxClientsString = cmd.isConvarSetterFor('sv_maxclients');
        if (
            txHostConfig.forceMaxClients
            && isMaxClientsString
        ) {
            const maxClients = parseInt(isMaxClientsString);
            if (maxClients > txHostConfig.forceMaxClients) {
                hasHostConfigMessage = true;
                errors.add(
                    cmd.file,
                    cmd.line,
                    `your 'sv_maxclients' MUST be <= ${txHostConfig.forceMaxClients}.`
                );
                continue;
            }
        }

        //Check gamename against TXHOST config
        const isGameNameString = cmd.isConvarSetterFor('gamename');
        if (isGameNameString && detectedGameName) {
            errors.add(
                cmd.file,
                cmd.line,
                `you already set the 'gamename' to '${detectedGameName}', please remove this line.`
            );
            continue;
        }
        if (
            txHostConfig.forceGameName
            && isGameNameString
        ) {
            detectedGameName = isGameNameString;
            if (isGameNameString !== requiredGameName) {
                hasHostConfigMessage = true;
                errors.add(
                    cmd.file,
                    cmd.line,
                    `your 'gamename' MUST be '${requiredGameName}'.`
                );
                continue;
            }
        }

        //Comment out any onesync sets
        if (cmd.isConvarSetterFor('onesync')) {
            toCommentOut.add(
                cmd.file,
                cmd.line,
                'onesync MUST only be set in the txAdmin settings page.',
            );
            continue;
        }

        //FIXME: add isConvarSetterFor for all "Settings page only" convars

        //Extract & process endpoint validity
        if (cmd.command === 'endpoint_add_tcp' || cmd.command === 'endpoint_add_udp') {
            hasEndpointCommand = true;

            //Validating args length
            if (cmd.args.length !== 1) {
                warnings.add(
                    cmd.file,
                    cmd.line,
                    `the \`endpoint_add_*\` commands MUST have exactly 1 argument (received ${cmd.args.length})`
                );
                continue;
            }

            //Extracting parts & validating format
            const endpointsRegex = /^\[?(([0-9.]{7,15})|([a-z0-9:]{2,29}))\]?:(\d{1,5})$/gi;
            const matches = [...cmd.args[0].matchAll(endpointsRegex)];
            if (!Array.isArray(matches) || !matches.length) {
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.args[0]}\` is not in a valid \`ip:port\` format.`
                );
                continue;
            }
            const [_matchedString, iface, ipv4, ipv6, portString] = matches[0];

            //Checking if that interface is available for binding
            let canBind = checkedInterfaces.get(iface);
            if (typeof canBind === 'undefined') {
                canBind = await isLocalhost(iface, true);
                checkedInterfaces.set(iface, canBind);
            }
            if (canBind === false) {
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.command}\` interface \`${iface}\` is not available for this host.`
                );
                continue;
            }
            if (txHostConfig.netInterface && iface !== txHostConfig.netInterface) {
                hasHostConfigMessage = true;
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.command}\` interface MUST be \`${txHostConfig.netInterface}\`.`
                );
                continue;
            }

            //Validating port
            const port = parseInt(portString);
            if (port >= 40120 && port <= 40150) {
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.command}\` port \`${port}\` is dedicated for txAdmin and CAN NOT be used for FXServer.`
                );
                continue;
            }
            if (port === txHostConfig.txaPort) {
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.command}\` port \`${port}\` is being used by txAdmin and CAN NOT be used for FXServer at the same time.`
                );
                continue;
            }
            if (txHostConfig.fxsPort && port !== txHostConfig.fxsPort) {
                hasHostConfigMessage = true;
                errors.add(
                    cmd.file,
                    cmd.line,
                    `the \`${cmd.command}\` port MUST be \`${txHostConfig.fxsPort}\`.`
                );
                continue;
            }

            //Add to the endpoint list and check duplicity
            const endpoint = (ipv4) ? `${ipv4}:${port}` : `[${ipv6}]:${port}`;
            const protocol = (cmd.command === 'endpoint_add_tcp') ? 'tcp' : 'udp';
            if (typeof endpoints[endpoint] === 'undefined') {
                endpoints[endpoint] = {};
            }
            if (endpoints[endpoint][protocol]) {
                errors.add(
                    cmd.file,
                    cmd.line,
                    `you CANNOT execute \`${cmd.command}\` twice for the interface \`${endpoint}\`.`
                );
                continue;
            } else {
                endpoints[endpoint][protocol] = true;
            }
        }
    }

    //Since gta5 is the default, we need to check TXHOST for redm
    if (txHostConfig.forceGameName === 'redm' && detectedGameName !== 'rdr3') {
        const initFile = parsedCommands[0]?.file ?? 'unknown';
        hasHostConfigMessage = true;
        errors.add(
            initFile,
            false,
            `your config MUST have a 'gamename' set to '${requiredGameName}'.`
        );
    }

    return {
        endpoints,
        hasEndpointCommand,
        hasHostConfigMessage,
        errors,
        warnings,
        toCommentOut,
    };
};

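//NOTE: illustrative sketch (not from the source) of the endpoints object built above,
//for a cfg containing both `endpoint_add_tcp "0.0.0.0:30120"` and `endpoint_add_udp "0.0.0.0:30120"`:
// const exampleEndpoints: EndpointsObjectType = {
//     '0.0.0.0:30120': { tcp: true, udp: true },
// };
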
/**
 * Processes the endpoints object, checks validity, and returns a connection string
 */
const getConnectEndpoint = (endpoints: EndpointsObjectType, hasEndpointCommand: boolean) => {
    if (!Object.keys(endpoints).length) {
        const instruction = hasEndpointCommand
            ? 'Please delete all \`endpoint_add_*\` lines and'
            : 'Please';
        const suggestedPort = txHostConfig.fxsPort ?? 30120;
        const suggestedInterface = txHostConfig.netInterface ?? '0.0.0.0';
        const desiredEndpoint = `${suggestedInterface}:${suggestedPort}`;
        const msg = [
            `Your config file does not specify a valid endpoint for FXServer to use. ${instruction} add the following to the start of the file:`,
            `\t\`endpoint_add_tcp "${desiredEndpoint}"\``,
            `\t\`endpoint_add_udp "${desiredEndpoint}"\``,
        ].join('\n');
        throw new Error(msg);
    }
    const tcpudpEndpoint = Object.keys(endpoints).find((ep) => {
        return endpoints[ep].tcp && endpoints[ep].udp;
    });
    if (!tcpudpEndpoint) {
        throw new Error('Your config file does not contain an ip:port used in both `endpoint_add_tcp` and `endpoint_add_udp` commands. Players would not be able to connect.');
    }

    return tcpudpEndpoint.replace(/(0\.0\.0\.0|\[::\])/, '127.0.0.1');
};

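//NOTE: illustrative examples (not from the source) of the helper above:
// getConnectEndpoint({ '0.0.0.0:30120': { tcp: true, udp: true } }, true) === '127.0.0.1:30120'
// getConnectEndpoint({}, false) // throws, with instructions to add endpoint_add_* lines
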
/**
 * Validates & ensures correctness in FXServer config file recursively.
 * Used when trying to start server, or validate the server.cfg.
 * Returns errors, warnings and connectEndpoint
 */
export const validateFixServerConfig = async (cfgPath: string, serverDataPath: string) => {
    //Parsing FXServer config & going through each command
    const cfgAbsolutePath = resolveCFGFilePath(cfgPath, serverDataPath);
    const parsedCommands = await parseRecursiveConfig(null, cfgAbsolutePath, serverDataPath);
    const {
        endpoints,
        hasEndpointCommand,
        hasHostConfigMessage,
        errors,
        warnings,
        toCommentOut
    } = await validateCommands(parsedCommands);

    //Validating if a valid endpoint was detected
    let connectEndpoint: string | null = null;
    try {
        connectEndpoint = getConnectEndpoint(endpoints, hasEndpointCommand);
    } catch (error) {
        errors.add(cfgAbsolutePath, false, (error as Error).message);
    }

    //Commenting out lines or registering them as warnings
    for (const targetCfgPath in toCommentOut.store) {
        const actions = toCommentOut.store[targetCfgPath];
        try {
            const cfgRaw = await fsp.readFile(targetCfgPath, 'utf8');

            //modify the cfg lines
            const fileEOL = detectNewline(cfgRaw);
            const cfgLines = cfgRaw.split(/\r?\n/);
            for (const [ln, reason] of actions) {
                if (ln === false) continue;
                if (typeof cfgLines[ln - 1] !== 'string') {
                    throw new Error(`Line ${ln} not found.`);
                }
                cfgLines[ln - 1] = `## [txAdmin CFG validator]: ${reason}${fileEOL}# ${cfgLines[ln - 1]}`;
                warnings.add(targetCfgPath, ln, `Commented out: ${reason}`);
            }

            //Saving modified lines
            const newCfg = cfgLines.join(fileEOL);
            console.warn(`Saving modified file '${targetCfgPath}'`);
            await fsp.writeFile(targetCfgPath, newCfg, 'utf8');
        } catch (error) {
            console.verbose.error(error);
            for (const [ln, reason] of actions) {
                errors.add(targetCfgPath, ln, `Please comment out this line: ${reason}`);
            }
        }
    }

    //Prepare response
    return {
        connectEndpoint,
        errors: errors.toMarkdown(hasHostConfigMessage),
        warnings: warnings.toMarkdown(hasHostConfigMessage),
        // errors: errors.store,
        // warnings: warnings.store,
        // endpoints, //Not being used
    };
};

/**
 * Validating config contents + saving file and backup.
 * In case of any errors, it does not save the contents.
 * Does not comment out (fix) bad lines.
 * Used whenever a user wants to modify server.cfg.
 * Returns if saved, and warnings
 */
export const validateModifyServerConfig = async (
    cfgInputString: string,
    cfgPath: string,
    serverDataPath: string
) => {
    if (typeof cfgInputString !== 'string') {
        throw new Error('cfgInputString expected to be string.');
    }

    //Parsing FXServer config & going through each command
    const cfgAbsolutePath = resolveCFGFilePath(cfgPath, serverDataPath);
    const parsedCommands = await parseRecursiveConfig(cfgInputString, cfgAbsolutePath, serverDataPath);
    const {
        endpoints,
        hasEndpointCommand,
        hasHostConfigMessage,
        errors,
        warnings,
        toCommentOut
    } = await validateCommands(parsedCommands);

    //Validating if a valid endpoint was detected
    try {
        const _connectEndpoint = getConnectEndpoint(endpoints, hasEndpointCommand);
    } catch (error) {
        errors.add(cfgAbsolutePath, false, (error as Error).message);
    }

    //If there are any errors
    if (errors.count()) {
        return {
            success: false,
            errors: errors.toMarkdown(hasHostConfigMessage),
            warnings: warnings.toMarkdown(hasHostConfigMessage),
        };
    }

    //Save file + backup
    try {
        console.warn(`Saving modified file '${cfgAbsolutePath}'`);
        await fsp.copyFile(cfgAbsolutePath, `${cfgAbsolutePath}.bkp`);
        await fsp.writeFile(cfgAbsolutePath, cfgInputString, 'utf8');
    } catch (error) {
        throw new Error(`Failed to edit 'server.cfg' with error: ${(error as Error).message}`);
    }

    return {
        success: true,
        warnings: warnings.toMarkdown(),
    };
};

/*
    fxrunner spawnServer: recursive validate file, get endpoint
    settings handleFXServer: recursive validate file
    setup handleValidateCFGFile: recursive validate file
    setup handleSaveLocal: recursive validate file

    cfgEditor CFGEditorSave: validate string, save
    deployer handleSaveConfig: validate string, save *
*/

/*
    # Endpoints test cases:
    /"\[?(([0-9.]{7,15})|([a-z0-9:]{2,29}))\]?:(\d{1,5})"/gmi

    # default
    "0.0.0.0:30120"
    "[0.0.0.0]:30120"
    "0.0.0.0"
    "[0.0.0.0]"

    # ipv6/ipv4
    "[::]:30120"
    ":::30120"
    "[::]"

    # ipv6 only
    "fe80::4cec:1264:187e:ce2b:30120"
    "[fe80::4cec:1264:187e:ce2b]:30120"
    "::1:30120"
    "[::1]:30120"
    "[fe80::4cec:1264:187e:ce2b]"
    "::1"
    "[::1]"

    # FXServer doesn't accept
    "::1.30120"
    "::1 port 30120"
    "::1p30120"
    "::1#30120"
    "::"

    # FXServer misreads last part as a port
    "fe80::4cec:1264:187e:ce2b"
*/
70
core/lib/fxserver/fxsVersionParser.test.ts
Normal file
@@ -0,0 +1,70 @@
//@ts-nocheck
import { test, expect } from 'vitest';
import { parseFxserverVersion } from './fxsVersionParser';
const p = parseFxserverVersion;


test('normal versions', () => {
    expect(p('FXServer-master SERVER v1.0.0.7290 win32')).toEqual({
        build: 7290,
        platform: 'windows',
        branch: 'master',
        valid: true,
    });
    expect(p('FXServer-master SERVER v1.0.0.10048 win32')).toEqual({
        build: 10048,
        platform: 'windows',
        branch: 'master',
        valid: true,
    });
    expect(p('FXServer-master v1.0.0.9956 linux')).toEqual({
        build: 9956,
        platform: 'linux',
        branch: 'master',
        valid: true,
    });
});

test('feat branch versions', () => {
    expect(p('FXServer-feature/improve_player_dropped_event SERVER v1.0.0.20240707 win32')).toEqual({
        build: 20240707,
        platform: 'windows',
        branch: 'feature/improve_player_dropped_event',
        valid: true,
    });
    expect(p('FXServer-abcdef SERVER v1.0.0.20240707 win32')).toEqual({
        build: 20240707,
        platform: 'windows',
        branch: 'abcdef',
        valid: true,
    });
});

test('invalids', () => {
    expect(() => p(1111 as any)).toThrow('expected');
    expect(p('FXServer-no-version (didn\'t run build tools?)')).toEqual({
        valid: false,
        build: null,
        branch: null,
        platform: null,
    });
    expect(p('Invalid server (internal validation failed)')).toEqual({
        valid: false,
        build: null,
        branch: null,
        platform: null,
    });
    //attempt to salvage platform
    expect(p('xxxxxxxx win32')).toEqual({
        valid: false,
        build: null,
        branch: null,
        platform: 'windows',
    });
    expect(p('xxxxxxxx linux')).toEqual({
        valid: false,
        build: null,
        branch: null,
        platform: 'linux',
    });
});
25
core/lib/fxserver/fxsVersionParser.ts
Normal file
@@ -0,0 +1,25 @@
/**
 * Parses the FXServer version convar string into its build number, branch and platform.
 * NOTE: the body below is a sketch reconstructed to satisfy fxsVersionParser.test.ts;
 * the exact upstream implementation may differ.
 */
export const parseFxserverVersion = (version: any): ParseFxserverVersionResult => {
    if (typeof version !== 'string') throw new Error(`expected string`);

    //Attempt to salvage the platform even if the rest is unparsable
    let fallbackPlatform: 'windows' | 'linux' | null = null;
    if (/\bwin32\b/.test(version)) fallbackPlatform = 'windows';
    else if (/\blinux\b/.test(version)) fallbackPlatform = 'linux';

    //Example input: FXServer-master SERVER v1.0.0.7290 win32
    const match = version.match(/FXServer-(?<branch>\S+)(?: SERVER)? v1\.0\.0\.(?<build>\d+) (?<platform>win32|linux)/);
    if (!match || !match.groups) {
        return {
            valid: false,
            branch: null,
            build: null,
            platform: fallbackPlatform,
        };
    }
    return {
        valid: true,
        branch: match.groups.branch,
        build: parseInt(match.groups.build),
        platform: match.groups.platform === 'win32' ? 'windows' : 'linux',
    };
};

type ParseFxserverVersionResult = {
    valid: true;
    branch: string;
    build: number;
    platform: string;
} | {
    valid: false;
    branch: null;
    build: null;
    platform: 'windows' | 'linux' | null;
};
43
core/lib/fxserver/runtimeFiles.ts
Normal file
@@ -0,0 +1,43 @@
import path from 'node:path';
import fsp from 'node:fs/promises';
import { txEnv } from "@core/globalData";


/**
 * Creates or removes a monitor/.runtime/ file
 */
export const setRuntimeFile = async (fileName: string, fileData: string | Buffer | null) => {
    const destRuntimePath = path.resolve(txEnv.txaPath, '.runtime');
    const destFilePath = path.resolve(destRuntimePath, fileName);

    //Ensure the /.runtime/ folder exists
    try {
        await fsp.mkdir(destRuntimePath, { recursive: true });
    } catch (error) {
        console.error(`Failed to create .runtime folder: ${(error as any).message}`);
        return false;
    }

    //If deleting the file, just unlink it
    if (fileData === null) {
        try {
            await fsp.unlink(destFilePath);
        } catch (error) {
            const msg = (error as Error).message ?? 'Unknown error';
            if (!msg.includes('ENOENT')) {
                console.error(`Failed to delete runtime file: ${msg}`);
                return false;
            }
        }
        return true;
    }

    //Write the file
    try {
        await fsp.writeFile(destFilePath, fileData);
        return true;
    } catch (error) {
        console.error(`Failed to write runtime file: ${(error as any).message}`);
    }
    return false;
}
107
core/lib/fxserver/scanMonitorFiles.ts
Normal file
@@ -0,0 +1,107 @@
//FIXME: after refactor, move to the correct path
import fs from 'node:fs/promises';
import path from 'node:path';
import { createHash } from 'node:crypto';
import { txEnv } from '../../globalData';

//Hash test
const hashFile = async (filePath: string) => {
    const rawFile = await fs.readFile(filePath, 'utf8');
    const normalized = rawFile.normalize('NFKC')
        .replace(/\r\n/g, '\n')
        .replace(/^\uFEFF/, '');
    return createHash('sha1').update(normalized).digest('hex');
}

// Limits
const MAX_FILES = 300;
const MAX_TOTAL_SIZE = 52_428_800; // 50MB
const MAX_FILE_SIZE = 20_971_520; // 20MB
const MAX_DEPTH = 10;
const MAX_EXECUTION_TIME = 30 * 1000;
const IGNORED_FOLDERS = [
    'db',
    'cache',
    'dist',
    '.reports',
    'license_report',
    'tmp_core_tsc',
    'node_modules',
    'txData',
];


type ContentFileType = {
    path: string;
    size: number;
    hash: string;
}

export default async function scanMonitorFiles() {
    const rootPath = txEnv.txaPath;
    const allFiles: ContentFileType[] = [];
    let totalFiles = 0;
    let totalSize = 0;

    try {
        const tsStart = Date.now();
        const scanDir = async (dir: string, depth: number = 0) => {
            if (depth > MAX_DEPTH) {
                throw new Error('MAX_DEPTH');
            }

            let filesFound = 0;
            const entries = await fs.readdir(dir, { withFileTypes: true });
            for (const entry of entries) {
                if (totalFiles >= MAX_FILES) {
                    throw new Error('MAX_FILES');
                } else if (totalSize >= MAX_TOTAL_SIZE) {
                    throw new Error('MAX_TOTAL_SIZE');
                } else if (Date.now() - tsStart > MAX_EXECUTION_TIME) {
                    throw new Error('MAX_EXECUTION_TIME');
                }

                const entryPath = path.join(dir, entry.name);
                let relativeEntryPath = path.relative(rootPath, entryPath);
                relativeEntryPath = './' + relativeEntryPath.split(path.sep).join(path.posix.sep);

                if (entry.isDirectory()) {
                    if (IGNORED_FOLDERS.includes(entry.name)) {
                        continue;
                    }
                    await scanDir(entryPath, depth + 1);
                } else if (entry.isFile()) {
                    const stats = await fs.stat(entryPath);
                    if (stats.size > MAX_FILE_SIZE) {
                        throw new Error('MAX_SIZE');
                    }

                    allFiles.push({
                        path: relativeEntryPath,
                        size: stats.size,
                        hash: await hashFile(entryPath),
                    });
                    filesFound++;
                    totalFiles++;
                    totalSize += stats.size;
                }
            }
            return filesFound;
        };
        await scanDir(rootPath);
        allFiles.sort((a, b) => a.path.localeCompare(b.path));
        return {
            totalFiles,
            totalSize,
            allFiles,
        };
    } catch (error) {
        //At least saving the progress
        return {
            error: (error as any).message,
            totalFiles,
            totalSize,
            allFiles,
        };
    }
}
236
core/lib/fxserver/serverData.ts
Normal file
@@ -0,0 +1,236 @@
import { txEnv } from '@core/globalData';
import { getFsErrorMdMessage, getPathSubdirs } from '@lib/fs';
import * as fsp from 'node:fs/promises';
import * as path from 'node:path';

const IGNORED_DIRS = ['cache', 'db', 'node_modules', '.git', '.idea', '.vscode'];
const MANIFEST_FILES = ['fxmanifest.lua', '__resource.lua'];
const RES_CATEGORIES_LIMIT = 250; //Some servers go over 100
const CFG_SIZE_LIMIT = 32 * 1024; //32kb


//Types
export type ServerDataContentType = [string, number | boolean][];
export type ServerDataConfigsType = [string, string][];


/**
 * Scans a server data folder and lists all files, up to the first level of each resource.
 * Behavior reference: fivem/code/components/citizen-server-impl/src/ServerResources.cpp
 *
 * NOTE: this would probably be better
 *
 * TODO: the current sorting is not right, changing it back to recursive (depth-first) would
 * probably solve it, but right now it's not critical.
 * A better behavior would be to set "MAX_DEPTH" and do that for all folders, ignoring "resources"/[categories]
 */
export const getServerDataContent = async (serverDataPath: string): Promise<ServerDataContentType> => {
    //Runtime vars
    let resourcesInRoot = false;
    const content: ServerDataContentType = []; //relative paths
    let resourceCategories = 0;

    //Scan root path
    const rootEntries = await fsp.readdir(serverDataPath, { withFileTypes: true });
    for (const entry of rootEntries) {
        if (entry.isDirectory()) {
            content.push([entry.name, false]);
            if (entry.name === 'resources') {
                resourcesInRoot = true;
            }
        } else if (entry.isFile()) {
            const stat = await fsp.stat(path.join(serverDataPath, entry.name));
            content.push([entry.name, stat.size]);
        }
    }
    //no resources, early return
    if (!resourcesInRoot) return content;


    //Scan categories
    const categoriesToScan = [path.join(serverDataPath, 'resources')];
    while (categoriesToScan.length) {
        if (resourceCategories >= RES_CATEGORIES_LIMIT) {
            throw new Error(`Scanning above the limit of ${RES_CATEGORIES_LIMIT} resource categories.`);
        }
        resourceCategories++;
        const currCategory = categoriesToScan.shift()!;
        const currCatDirEntries = await fsp.readdir(currCategory, { withFileTypes: true });

        for (const catDirEntry of currCatDirEntries) {
            const catDirEntryFullPath = path.join(currCategory, catDirEntry.name);
            const catDirEntryRelativePath = path.relative(serverDataPath, catDirEntryFullPath);

            if (catDirEntry.isDirectory()) {
                content.push([path.relative(serverDataPath, catDirEntryFullPath), false]);
                if (!catDirEntry.name.length || IGNORED_DIRS.includes(catDirEntry.name)) continue;

                if (catDirEntry.name[0] === '[' && catDirEntry.name[catDirEntry.name.length - 1] === ']') {
                    //It's a category
                    categoriesToScan.push(catDirEntryFullPath);

                } else {
                    //It's a resource
                    const resourceFullPath = catDirEntryFullPath;
                    const resourceRelativePath = catDirEntryRelativePath;
                    let resourceHasManifest = false;
                    const resDirEntries = await fsp.readdir(resourceFullPath, { withFileTypes: true });

                    //for every file/folder in resources folder
                    for (const resDirEntry of resDirEntries) {
                        const resEntryFullPath = path.join(resourceFullPath, resDirEntry.name);
                        const resEntryRelativePath = path.join(resourceRelativePath, resDirEntry.name);
                        if (resDirEntry.isDirectory()) {
                            content.push([resEntryRelativePath, false]);

                        } else if (resDirEntry.isFile()) {
                            const stat = await fsp.stat(resEntryFullPath);
                            content.push([resEntryRelativePath, stat.size]);
                            if (!resourceHasManifest && MANIFEST_FILES.includes(resDirEntry.name)) {
                                resourceHasManifest = true;
                            }
                        }
                    }
                }

            } else if (catDirEntry.isFile()) {
                const stat = await fsp.stat(catDirEntryFullPath);
                content.push([catDirEntryRelativePath, stat.size]);
            }
        }
    }//while categories

    // Sorting content (folders first, then utf8)
    content.sort(([aName, aSize], [bName, bSize]) => {
        const aDir = path.parse(aName).dir;
        const bDir = path.parse(bName).dir;
        if (aDir !== bDir) {
            return aName.localeCompare(bName);
        } else if (aSize === false && bSize !== false) {
            return -1;
        } else if (aSize !== false && bSize === false) {
            return 1;
        } else {
            return aName.localeCompare(bName);
        }
    });

    return content;
}


/**
 * Returns the content of all .cfg files based on a server data content scan.
 */
export const getServerDataConfigs = async (serverDataPath: string, serverDataContent: ServerDataContentType): Promise<ServerDataConfigsType> => {
    const configs: ServerDataConfigsType = [];
    for (const [entryPath, entrySize] of serverDataContent) {
        if (typeof entrySize !== 'number' || !entryPath.endsWith('.cfg')) continue;
        if (entrySize > CFG_SIZE_LIMIT) {
            configs.push([entryPath, 'file is too big']);
            continue;
        }

        try {
            const rawData = await fsp.readFile(path.join(serverDataPath, entryPath), 'utf8');
            configs.push([entryPath, rawData]);
        } catch (error) {
            configs.push([entryPath, (error as Error).message]);
        }
    }

    return configs;
}


/**
 * Validates a server data path.
 */
export const isValidServerDataPath = async (dataPath: string) => {
    //Check if root folder is valid
    try {
        const rootEntries = await getPathSubdirs(dataPath);
        if (!rootEntries.some(e => e.name === 'resources')) {
            throw new Error('The provided directory does not contain a \`resources\` subdirectory.');
        }
    } catch (err) {
        const error = err as Error;
        let msg = getFsErrorMdMessage(error, dataPath);
        if (dataPath.includes('resources')) {
            msg = `Looks like this path is the \`resources\` folder, but the server data path must be the folder that contains the resources folder instead of the resources folder itself.\n**Try removing the \`resources\` part at the end of the path.**`;
        }
        throw new Error(msg);
    }

    //Check if resources folder is valid
    try {
        const resourceEntries = await getPathSubdirs(path.join(dataPath, 'resources'));
        if (!resourceEntries.length) {
            throw new Error('The \`resources\` directory is empty.');
        }
    } catch (err) {
        const error = err as Error;
        let msg = error.message;
        if (error.message?.includes('ENOENT')) {
            msg = `The \`resources\` directory does not exist inside the provided Server Data Folder:\n\`${dataPath}\``;
        } else if (error.message?.includes('EACCES') || error.message?.includes('EPERM')) {
            msg = `The \`resources\` directory is not accessible inside the provided Server Data Folder:\n\`${dataPath}\``;
        }
        throw new Error(msg);
    }
    return true;
};


/**
 * Look for a potential server data folder in/around the provided path.
 * Forgiving behavior:
 * - Ignore trailing slashes, as well as fix backslashes
 * - Check if it's the parent folder
 * - Check if it's a sibling folder
 * - Check if it's a child folder
 * - Check if current path is a resource folder deep inside a server data folder
 */
export const findPotentialServerDataPaths = async (initialPath: string) => {
    const checkTarget = async (target: string) => {
        try {
            return await isValidServerDataPath(target);
        } catch (error) {
            return false;
        }
    };

    //Recovery if parent folder
    const parentPath = path.join(initialPath, '..');
    const isParentPath = await checkTarget(parentPath);
    if (isParentPath) return parentPath;

    //Recovery if sibling folder
    try {
        const siblingPaths = await getPathSubdirs(parentPath);
        for (const sibling of siblingPaths) {
            const siblingPath = path.join(parentPath, sibling.name);
            if (siblingPath === initialPath) continue;
            if (await checkTarget(siblingPath)) return siblingPath;
        }
    } catch (error) { }

    //Recovery if children folder
    try {
        const childPaths = await getPathSubdirs(initialPath);
        for (const child of childPaths) {
            const childPath = path.join(initialPath, child.name);
            if (await checkTarget(childPath)) return childPath;
        }
    } catch (error) { }

    //Recovery if current path is a resources folder
    const resourceSplitAttempt = initialPath.split(/[/\\]resources(?:[/\\]?|$)/, 2);
    if (resourceSplitAttempt.length === 2) {
        const potentialServerDataPath = resourceSplitAttempt[0];
        if (await checkTarget(potentialServerDataPath)) return potentialServerDataPath;
    }

    //Really couldn't find anything
    return false;
};
12
core/lib/got.ts
Normal file
@@ -0,0 +1,12 @@
import { txEnv, txHostConfig } from '@core/globalData';
import got from 'got';

export default got.extend({
    timeout: {
        request: 5000
    },
    headers: {
        'User-Agent': `txAdmin ${txEnv.txaVersion}`,
    },
    localAddress: txHostConfig.netInterface,
});
60
core/lib/host/getHostUsage.ts
Normal file
@@ -0,0 +1,60 @@
const modulename = 'GetHostUsage';
import os from 'node:os';
import si from 'systeminformation';
import { txEnv } from '@core/globalData';
import consoleFactory from '@lib/console';
const console = consoleFactory(modulename);

//Const -hopefully
const giga = 1024 * 1024 * 1024;
const cpus = os.cpus();


/**
 * Get the host's current memory and CPU usage.
 * NOTE: It was used by the hw stats on the sidebar
 * Currently only in use by diagnostics page
 */
export default async () => {
    const out = {
        memory: { usage: 0, used: 0, total: 0 },
        cpu: {
            count: cpus.length,
            usage: 0,
        },
    };

    //Getting memory usage
    try {
        let free, total, used;
        if (txEnv.isWindows) {
            free = os.freemem() / giga;
            total = os.totalmem() / giga;
            used = total - free;
        } else {
            const memoryData = await si.mem();
            free = memoryData.available / giga;
            total = memoryData.total / giga;
            used = memoryData.active / giga;
        }
        out.memory = {
            used,
            total,
            usage: Math.round((used / total) * 100),
        };
    } catch (error) {
        console.verbose.error('Failed to get memory usage.');
        console.verbose.dir(error);
    }

    //Getting CPU usage
    try {
        const loads = await si.currentLoad();
        out.cpu.usage = Math.round(loads.currentLoad);
    } catch (error) {
        console.verbose.error('Failed to get CPU usage.');
        console.verbose.dir(error);
    }

    return out;
};
97
core/lib/host/getOsDistro.js
Normal file
@@ -0,0 +1,97 @@
const modulename = 'getOsDistro';
import consoleFactory from '@lib/console';
const console = consoleFactory(modulename);

/*
    NOTE: this is straight from @sindresorhus/windows-release, but with async functions.
    I have windows-release dependency mostly just so I know when there are updates to it.
*/
import os from 'node:os';
import execa from 'execa';

// Reference: https://www.gaijin.at/en/lstwinver.php
// Windows 11 reference: https://docs.microsoft.com/en-us/windows/release-health/windows11-release-information
const names = new Map([
    ['10.0.2', '11'], // It's unclear whether future Windows 11 versions will use this version scheme: https://github.com/sindresorhus/windows-release/pull/26/files#r744945281
    ['10.0', '10'],
    ['6.3', '8.1'],
    ['6.2', '8'],
    ['6.1', '7'],
    ['6.0', 'Vista'],
    ['5.2', 'Server 2003'],
    ['5.1', 'XP'],
    ['5.0', '2000'],
    ['4.90', 'ME'],
    ['4.10', '98'],
    ['4.03', '95'],
    ['4.00', '95'],
]);

async function windowsRelease(release) {
    const version = /(\d+\.\d+)(?:\.(\d+))?/.exec(release || os.release());

    if (release && !version) {
        throw new Error('`release` argument doesn\'t match `n.n`');
    }

    let ver = version[1] || '';
    const build = version[2] || '';

    // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
    // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version
    // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
    // If `wmic` is obsolete (later versions of Windows 10), use PowerShell instead.
    // If the resulting caption contains the year 2008, 2012, 2016, 2019 or 2022, it is a server version, so return a server OS name.
    if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
        let stdout;
        try {
            const out = await execa('wmic', ['os', 'get', 'Caption']);
            stdout = out.stdout || '';
        } catch {
            //NOTE: custom code to select the powershell path
            //if systemroot/windir is not defined, just try "powershell" and hope for the best
            const systemRoot = process.env?.SYSTEMROOT ?? process.env?.WINDIR ?? false;
            const psBinary = systemRoot
                ? `${systemRoot}\\System32\\WindowsPowerShell\\v1.0\\powershell`
                : 'powershell';
            const out = await execa(psBinary, ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']);
            stdout = out.stdout || '';
        }

        const year = (stdout.match(/2008|2012|2016|2019|2022/) || [])[0];

        if (year) {
            return `Server ${year}`;
        }
    }

    // Windows 11
    if (ver === '10.0' && build.startsWith('2')) {
        ver = '10.0.2';
    }

    return names.get(ver);
}


/**
 * Cache calculated os distro
 */
let _osDistro;
export default async () => {
    if (_osDistro) return _osDistro;

    const osType = os.type();
    if (osType == 'Linux') {
        _osDistro = `${osType} ${os.release()}`;
    } else {
        try {
            const distro = await windowsRelease();
            _osDistro = `Windows ${distro}`;
        } catch (error) {
            console.warn(`Failed to detect windows version with error: ${error.message}`);
            _osDistro = `Windows Unknown`;
        }
    }
    return _osDistro;
};
29
core/lib/host/isIpAddressLocal.ts
Normal file
@@ -0,0 +1,29 @@
const modulename = 'IpChecker';
import consoleFactory from '@lib/console';
const console = consoleFactory(modulename);

const extendedAllowedLanIps: string[] = [];


/**
 * Return if the IP Address is a loopback interface, LAN, detected WAN or any other
 * IP that is registered by the user via the forceInterface convar or config file.
 *
 * This is used to secure the webpipe auth and the rate limiter.
 */
export const isIpAddressLocal = (ipAddress: string): boolean => {
    return (
        /^(127\.|192\.168\.|10\.|::1|fd00::)/.test(ipAddress)
        || extendedAllowedLanIps.includes(ipAddress)
    )
}


/**
 * Used to register a new LAN interface.
 * Added automatically from TXHOST_INTERFACE and banner.js after detecting the WAN address.
 */
export const addLocalIpAddress = (ipAddress: string): void => {
    // console.verbose.debug(`Adding local IP address: ${ipAddress}`);
    extendedAllowedLanIps.push(ipAddress);
}
7
core/lib/host/pidUsageTree.js
Normal file
@@ -0,0 +1,7 @@
import pidtree from 'pidtree';
import pidusage from 'pidusage';

export default async (pid) => {
    const pids = await pidtree(pid);
    return await pidusage([pid, ...pids]);
};
238
core/lib/misc.test.ts
Normal file
@@ -0,0 +1,238 @@
import { test, expect, suite, it } from 'vitest';
import * as misc from './misc';


suite('parseSchedule', () => {
    it('should parse a valid schedule', () => {
        const result = misc.parseSchedule(['00:00', '00:15', '1:30', '12:30']);
        expect(result.valid).toEqual([
            { string: '00:00', hours: 0, minutes: 0 },
            { string: '00:15', hours: 0, minutes: 15 },
            { string: '01:30', hours: 1, minutes: 30 },
            { string: '12:30', hours: 12, minutes: 30 },
        ]);
        expect(result.invalid).toEqual([]);
    });

    it('should let the average american type 24:00', () => {
        const result = misc.parseSchedule(['24:00']);
        expect(result.valid).toEqual([
            { string: '00:00', hours: 0, minutes: 0 },
        ]);
        expect(result.invalid).toEqual([]);
    });

    it('should handle invalid stuff', () => {
        const result = misc.parseSchedule(['12:34', 'invalid', '1030', '25:00', '1', '01', '']);
        expect(result).toBeTruthy();
        expect(result.valid).toEqual([
            { string: '12:34', hours: 12, minutes: 34 },
        ]);
        expect(result.invalid).toEqual(['invalid', '1030', '25:00', '1', '01']);
    });

    it('should remove duplicates', () => {
        const result = misc.parseSchedule(['02:00', '02:00', '05:55', '13:55']);
        expect(result.valid).toEqual([
            { string: '02:00', hours: 2, minutes: 0 },
            { string: '05:55', hours: 5, minutes: 55 },
            { string: '13:55', hours: 13, minutes: 55 },
        ]);
        expect(result.invalid).toEqual([]);
    });

    it('should sort the times', () => {
        const result = misc.parseSchedule(['00:00', '00:01', '23:59', '01:01', '01:00']);
        expect(result.valid).toEqual([
            { string: '00:00', hours: 0, minutes: 0 },
            { string: '00:01', hours: 0, minutes: 1 },
            { string: '01:00', hours: 1, minutes: 0 },
            { string: '01:01', hours: 1, minutes: 1 },
            { string: '23:59', hours: 23, minutes: 59 },
        ]);
        expect(result.invalid).toEqual([]);
    });
});

test('redactApiKeys', () => {
    expect(misc.redactApiKeys('')).toBe('')
    expect(misc.redactApiKeys('abc')).toBe('abc')

    const example = `
        sv_licenseKey cfxk_NYWn5555555500000000_2TLnnn
        sv_licenseKey "cfxk_NYWn5555555500000000_2TLnnn"
        sv_licenseKey 'cfxk_NYWn5555555500000000_2TLnnn'

        steam_webApiKey A2FAF8CF83B87E795555555500000000
        sv_tebexSecret 238a98bec4c0353fee20ac865555555500000000
        rcon_password a5555555500000000
        rcon_password "a5555555500000000"
        rcon_password 'a5555555500000000'
        mysql_connection_string "mysql://root:root@localhost:3306/txAdmin"
        https://discord.com/api/webhooks/33335555555500000000/xxxxxxxxxxxxxxxxxxxx5555555500000000`;

    const result = misc.redactApiKeys(example)
    expect(result).toContain('[REDACTED]');
    expect(result).toContain('2TLnnn');
    expect(result).not.toContain('5555555500000000');
    expect(result).not.toContain('mysql://');
})


suite('redactStartupSecrets', () => {
    const redactedString = '[REDACTED]';
    it('should return an empty array when given an empty array', () => {
        expect(misc.redactStartupSecrets([])).toEqual([]);
    });

    it('should return the same array if no redaction keys are present', () => {
        const args = ['node', 'script.js', '--help'];
        expect(misc.redactStartupSecrets(args)).toEqual(args);
    });

    it('should redact a sv_licenseKey secret correctly', () => {
        const args = ['sv_licenseKey', 'cfxk_12345_secret'];
        // The regex captures "secret" and returns "[REDACTED cfxk...secret]"
        const expected = ['sv_licenseKey', '[REDACTED cfxk...secret]'];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should not redact sv_licenseKey secret if the secret does not match the regex', () => {
        const args = ['sv_licenseKey', 'invalidsecret'];
        const expected = ['sv_licenseKey', 'invalidsecret'];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should redact steam_webApiKey secret correctly', () => {
        const validKey = 'a'.repeat(32);
        const args = ['steam_webApiKey', validKey];
        const expected = ['steam_webApiKey', redactedString];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should redact sv_tebexSecret secret correctly', () => {
        const validSecret = 'b'.repeat(40);
        const args = ['sv_tebexSecret', validSecret];
        const expected = ['sv_tebexSecret', redactedString];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should redact rcon_password secret correctly', () => {
        const args = ['rcon_password', 'mysecretpassword'];
        const expected = ['rcon_password', redactedString];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should redact mysql_connection_string secret correctly', () => {
        const args = [
            'mysql_connection_string',
            'Server=myServerAddress;Database=myDataBase;User Id=myUsername;Password=myPassword;',
        ];
        const expected = ['mysql_connection_string', redactedString];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should handle multiple redactions in a single array', () => {
        const validSteamKey = 'c'.repeat(32);
        const args = [
            'sv_licenseKey', 'cfxk_12345_abcdef',
            'someOtherArg', 'value',
            'steam_webApiKey', validSteamKey,
        ];
        const expected = [
            'sv_licenseKey', '[REDACTED cfxk...abcdef]',
            'someOtherArg', 'value',
            'steam_webApiKey', redactedString,
        ];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should handle case-insensitive key matching', () => {
        const args = ['SV_LICENSEKEY', 'cfxk_12345_SECRET'];
        const expected = ['SV_LICENSEKEY', '[REDACTED cfxk...SECRET]'];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should leave a key unchanged if it is the last element', () => {
        const args = ['sv_licenseKey'];
        const expected = ['sv_licenseKey'];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should handle rules without regex', () => {
        const args = ['rcon_password', 'whatever'];
        const expected = ['rcon_password', redactedString];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should handle a real example', () => {
        const args = [
            "+setr", "txAdmin-debugMode", "true",
            "+set", "tx2faSecret", "whatever",
            "+set", "sv_licenseKey", "cfxk_xxxxxxxxxxxxxxxxxxxx_yyyyy",
            "+set", "onesync", "enabled",
            "+set", "sv_enforceGameBuild", "2545",
        ];
        const expected = [
            "+setr", "txAdmin-debugMode", "true",
            "+set", "tx2faSecret", redactedString,
            "+set", "sv_licenseKey", "[REDACTED cfxk...yyyyy]",
            "+set", "onesync", "enabled",
            "+set", "sv_enforceGameBuild", "2545",
        ];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });

    it('should redact discord webhooks', () => {
        const args = [
            "aaa",
            "https://discord.com/api/webhooks/33335555555500000000/xxxxxxxxxxxxxxxxxxxx5555555500000000",
            "bbb",
        ];
        const expected = [
            "aaa",
            "https://discord.com/api/webhooks/[REDACTED]/[REDACTED]",
            "bbb",
        ];
        expect(misc.redactStartupSecrets(args)).toEqual(expected);
    });
});


test('now', () => {
    const result = misc.now();
    expect(typeof result).toBe('number');
    expect(result.toString().length).toBe(10);
    expect(result.toString()).not.toContain('.');
    expect(result.toString()).not.toContain('-');
});

test('anyUndefined', () => {
    expect(misc.anyUndefined(undefined, 'test')).toBe(true);
    expect(misc.anyUndefined('test', 'xxxx')).toBe(false);
    expect(misc.anyUndefined(undefined, undefined)).toBe(true);
});

test('calcExpirationFromDuration', () => {
    const currTs = misc.now();
    let result = misc.calcExpirationFromDuration('1 hour');
    expect(result?.duration).toBe(3600);
    expect(result?.expiration).toBe(currTs + 3600);

    result = misc.calcExpirationFromDuration('1 hours');
    expect(result?.duration).toBe(3600);

    result = misc.calcExpirationFromDuration('permanent');
    expect(result?.expiration).toBe(false);

    expect(() => misc.calcExpirationFromDuration('x day')).toThrowError('duration number');
    expect(() => misc.calcExpirationFromDuration('')).toThrowError('duration number');
    expect(() => misc.calcExpirationFromDuration('-1 day')).toThrowError('duration number');
});

test('parseLimitedFloat', () => {
    expect(misc.parseLimitedFloat('123.4567899999')).toBe(123.45679);
    expect(misc.parseLimitedFloat(123.4567899999)).toBe(123.45679);
    expect(misc.parseLimitedFloat(123.4567899999, 2)).toBe(123.46);
    expect(misc.parseLimitedFloat(0.1 + 0.2)).toBe(0.3);
});
301
core/lib/misc.ts
Normal file
@@ -0,0 +1,301 @@
import chalk from 'chalk';
|
||||||
|
import dateFormat from 'dateformat';
|
||||||
|
import humanizeDuration, { HumanizerOptions } from 'humanize-duration';
|
||||||
|
import { DeepReadonly } from 'utility-types';
|
||||||
|
|
||||||
|
export const regexHoursMinutes = /^(?<hours>[01]?[0-9]|2[0-4]):(?<minutes>[0-5][0-9])$/;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts hours and minutes from an string containing times
|
||||||
|
*/
|
||||||
|
export const parseSchedule = (scheduleTimes: string[]) => {
|
||||||
|
const valid: {
|
||||||
|
string: string;
|
||||||
|
hours: number;
|
||||||
|
minutes: number;
|
||||||
|
}[] = [];
|
||||||
|
const invalid = [];
|
||||||
|
for (const timeInput of scheduleTimes) {
|
||||||
|
if (typeof timeInput !== 'string') continue;
|
||||||
|
const timeTrim = timeInput.trim();
|
||||||
|
if (!timeTrim.length) continue;
|
||||||
|
|
||||||
|
const m = timeTrim.match(regexHoursMinutes);
|
||||||
|
if (m && m.groups?.hours && m.groups?.minutes) {
|
||||||
|
if (m.groups.hours === '24') m.groups.hours = '00'; //Americans, amirite?!?!
|
||||||
|
const timeStr = m.groups.hours.padStart(2, '0') + ':' + m.groups.minutes.padStart(2, '0');
|
||||||
|
if (valid.some(item => item.string === timeStr)) continue;
|
||||||
|
valid.push({
|
||||||
|
string: timeStr,
|
||||||
|
hours: parseInt(m.groups.hours),
|
||||||
|
minutes: parseInt(m.groups.minutes),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
invalid.push(timeTrim);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
valid.sort((a, b) => {
|
||||||
|
return a.hours - b.hours || a.minutes - b.minutes;
|
||||||
|
});
|
||||||
|
return { valid, invalid };
|
||||||
|
};
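A minimal usage sketch for parseSchedule (illustrative only, not part of the commit; the '@lib/misc' import alias is assumed from how other core modules import this file):

import { parseSchedule } from '@lib/misc';

// '24:00' is normalized to '00:00', duplicates collapse, and results come back sorted.
const { valid, invalid } = parseSchedule(['03:00', '24:00', '3:00', 'noon']);
// valid   -> [{ string: '00:00', hours: 0, minutes: 0 }, { string: '03:00', hours: 3, minutes: 0 }]
// invalid -> ['noon']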
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redacts known keys and tokens from a string
|
||||||
|
* @deprecated Use redactApiKeysArr instead
|
||||||
|
*/
|
||||||
|
export const redactApiKeys = (src: string) => {
|
||||||
|
if (typeof src !== 'string' || !src.length) return src;
|
||||||
|
return src
|
||||||
|
.replace(/licenseKey\s+["']?cfxk_\w{1,60}_(\w+)["']?.?$/gim, 'licenseKey [REDACTED cfxk...$1]')
|
||||||
|
.replace(/steam_webApiKey\s+["']?\w{32}["']?.?$/gim, 'steam_webApiKey [REDACTED]')
|
||||||
|
.replace(/sv_tebexSecret\s+["']?\w{40}["']?.?$/gim, 'sv_tebexSecret [REDACTED]')
|
||||||
|
.replace(/rcon_password\s+["']?[^"']+["']?.?$/gim, 'rcon_password [REDACTED]')
|
||||||
|
.replace(/mysql_connection_string\s+["']?[^"']+["']?.?$/gim, 'mysql_connection_string [REDACTED]')
|
||||||
|
.replace(/discord\.com\/api\/webhooks\/\d{17,20}\/[\w\-_./=]{10,}(.*)/gim, 'discord.com/api/webhooks/[REDACTED]/[REDACTED]');
|
||||||
|
};
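An illustrative sketch of the deprecated string-based redactor (not part of the commit), using an obviously fake key in the same style as the tests above:

import { redactApiKeys } from '@lib/misc';

// Only the trailing segment of the cfx key survives, mirroring the replacement pattern above.
redactApiKeys('sv_licenseKey cfxk_xxxxxxxxxxxxx_yyyyy');
// -> 'sv_licenseKey [REDACTED cfxk...yyyyy]'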
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redacts known keys and tokens from an array of startup arguments.
|
||||||
|
*/
|
||||||
|
export const redactStartupSecrets = (args: string[]): string[] => {
|
||||||
|
if (!Array.isArray(args) || args.length === 0) return args;
|
||||||
|
|
||||||
|
const redactionRules: ApiRedactionRuleset = {
|
||||||
|
sv_licenseKey: {
|
||||||
|
regex: /^cfxk_\w{1,60}_(\w+)$/i,
|
||||||
|
replacement: (_match, p1) => `[REDACTED cfxk...${p1}]`,
|
||||||
|
},
|
||||||
|
steam_webApiKey: {
|
||||||
|
regex: /^\w{32}$/i,
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
sv_tebexSecret: {
|
||||||
|
regex: /^\w{40}$/i,
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
rcon_password: {
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
mysql_connection_string: {
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
tx2faSecret: {
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
'txAdmin-luaComToken': {
|
||||||
|
replacement: '[REDACTED]',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
let outArgs: string[] = [];
|
||||||
|
for (let i = 0; i < args.length; i++) {
|
||||||
|
const currElem = args[i];
|
||||||
|
const currElemLower = currElem.toLocaleLowerCase();
|
||||||
|
const ruleMatchingPrefix = Object.keys(redactionRules).find((key) =>
|
||||||
|
currElemLower.includes(key.toLocaleLowerCase())
|
||||||
|
);
|
||||||
|
// If no rule matches or there is no subsequent element, just push the current element.
|
||||||
|
if (!ruleMatchingPrefix || i + 1 >= args.length) {
|
||||||
|
outArgs.push(currElem);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const rule = redactionRules[ruleMatchingPrefix];
|
||||||
|
const nextElem = args[i + 1];
|
||||||
|
// If the secret doesn't match the expected regex, treat it as a normal argument.
|
||||||
|
if (rule.regex && !nextElem.match(rule.regex)) {
|
||||||
|
outArgs.push(currElem);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// Push the key and then the redacted secret.
|
||||||
|
outArgs.push(currElem);
|
||||||
|
if (typeof rule.replacement === 'string') {
|
||||||
|
outArgs.push(rule.replacement);
|
||||||
|
} else if (rule.regex) {
|
||||||
|
outArgs.push(nextElem.replace(rule.regex, rule.replacement));
|
||||||
|
}
|
||||||
|
// Skip the secret value we just processed.
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Apply standalone redaction rules
|
||||||
|
outArgs = outArgs.map((arg) => arg
|
||||||
|
.replace(/discord\.com\/api\/webhooks\/\d{17,20}\/[\w\-_./=]{10,}(.*)/gim, 'discord.com/api/webhooks/[REDACTED]/[REDACTED]')
|
||||||
|
);
|
||||||
|
|
||||||
|
if (args.length !== outArgs.length) {
|
||||||
|
throw new Error('Input and output lengths are different after redaction.');
|
||||||
|
}
|
||||||
|
return outArgs;
|
||||||
|
};
|
||||||
|
|
||||||
|
type ApiRedactionRule = {
|
||||||
|
regex?: RegExp;
|
||||||
|
replacement: string | ((...args: any[]) => string);
|
||||||
|
};
|
||||||
|
|
||||||
|
type ApiRedactionRuleset = Record<string, ApiRedactionRule>;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the unix timestamp in seconds.
|
||||||
|
*/
|
||||||
|
export const now = () => Math.round(Date.now() / 1000);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the current time in HH:MM:ss format
|
||||||
|
*/
|
||||||
|
export const getTimeHms = (time?: string | number | Date) => dateFormat(time ?? new Date(), 'HH:MM:ss');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the current time in filename-friendly format
|
||||||
|
*/
|
||||||
|
export const getTimeFilename = (time?: string | number | Date) => dateFormat(time ?? new Date(), 'yyyy-mm-dd_HH-MM-ss');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a number of milliseconds to english words
|
||||||
|
* Accepts a humanizeDuration config object
|
||||||
|
* eg: msToDuration(ms, { units: ['h', 'm'] });
|
||||||
|
*/
|
||||||
|
export const msToDuration = humanizeDuration.humanizer({
|
||||||
|
round: true,
|
||||||
|
units: ['d', 'h', 'm'],
|
||||||
|
fallbacks: ['en'],
|
||||||
|
} satisfies HumanizerOptions);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a number of milliseconds to short-ish english words
|
||||||
|
*/
|
||||||
|
export const msToShortishDuration = humanizeDuration.humanizer({
|
||||||
|
round: true,
|
||||||
|
units: ['d', 'h', 'm'],
|
||||||
|
largest: 2,
|
||||||
|
language: 'shortishEn',
|
||||||
|
languages: {
|
||||||
|
shortishEn: {
|
||||||
|
y: (c) => 'year' + (c === 1 ? '' : 's'),
|
||||||
|
mo: (c) => 'month' + (c === 1 ? '' : 's'),
|
||||||
|
w: (c) => 'week' + (c === 1 ? '' : 's'),
|
||||||
|
d: (c) => 'day' + (c === 1 ? '' : 's'),
|
||||||
|
h: (c) => 'hr' + (c === 1 ? '' : 's'),
|
||||||
|
m: (c) => 'min' + (c === 1 ? '' : 's'),
|
||||||
|
s: (c) => 'sec' + (c === 1 ? '' : 's'),
|
||||||
|
ms: (c) => 'ms',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Converts a number of milliseconds to shortest english representation possible
|
||||||
|
*/
|
||||||
|
export const msToShortestDuration = humanizeDuration.humanizer({
|
||||||
|
round: true,
|
||||||
|
units: ['d', 'h', 'm', 's'],
|
||||||
|
delimiter: '',
|
||||||
|
spacer: '',
|
||||||
|
largest: 2,
|
||||||
|
language: 'shortestEn',
|
||||||
|
languages: {
|
||||||
|
shortestEn: {
|
||||||
|
y: () => 'y',
|
||||||
|
mo: () => 'mo',
|
||||||
|
w: () => 'w',
|
||||||
|
d: () => 'd',
|
||||||
|
h: () => 'h',
|
||||||
|
m: () => 'm',
|
||||||
|
s: () => 's',
|
||||||
|
ms: () => 'ms',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shorthand to convert seconds to the shortest english representation possible
|
||||||
|
*/
|
||||||
|
export const secsToShortestDuration = (ms: number, options?: humanizeDuration.Options) => {
|
||||||
|
return msToShortestDuration(ms * 1000, options);
|
||||||
|
};
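A short sketch of the humanizer flavors (illustrative, not part of the commit; the exact strings come from humanize-duration with the options above, so treat the outputs as approximate):

import { msToDuration, msToShortishDuration, msToShortestDuration, secsToShortestDuration } from '@lib/misc';

const ms = 90 * 60 * 1000; // 1.5 hours
msToDuration(ms);          // '1 hour, 30 minutes'
msToShortishDuration(ms);  // '1 hr, 30 mins'
msToShortestDuration(ms);  // '1h30m'
secsToShortestDuration(90 * 60); // same as above, but the input is in seconds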
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns true if any argument is undefined
|
||||||
|
*/
|
||||||
|
export const anyUndefined = (...args: any) => [...args].some((x) => (typeof x === 'undefined'));
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculates expiration and duration from a ban duration string like "1 day"
|
||||||
|
*/
|
||||||
|
export const calcExpirationFromDuration = (inputDuration: string) => {
|
||||||
|
let expiration;
|
||||||
|
let duration;
|
||||||
|
if (inputDuration === 'permanent') {
|
||||||
|
expiration = false as const;
|
||||||
|
} else {
|
||||||
|
const [multiplierInput, unit] = inputDuration.split(/\s+/);
|
||||||
|
const multiplier = parseInt(multiplierInput);
|
||||||
|
if (isNaN(multiplier) || multiplier < 1) {
|
||||||
|
throw new Error(`The duration number must be at least 1.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (unit.startsWith('hour')) {
|
||||||
|
duration = multiplier * 3600;
|
||||||
|
} else if (unit.startsWith('day')) {
|
||||||
|
duration = multiplier * 86400;
|
||||||
|
} else if (unit.startsWith('week')) {
|
||||||
|
duration = multiplier * 604800;
|
||||||
|
} else if (unit.startsWith('month')) {
|
||||||
|
duration = multiplier * 2592000; //30 days
|
||||||
|
} else {
|
||||||
|
throw new Error(`Invalid ban duration. Supported units: hours, days, weeks, months`);
|
||||||
|
}
|
||||||
|
expiration = now() + duration;
|
||||||
|
}
|
||||||
|
|
||||||
|
return { expiration, duration };
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses a number or string to a float with a limited precision.
|
||||||
|
*/
|
||||||
|
export const parseLimitedFloat = (src: number | string, precision = 6) => {
|
||||||
|
const srcAsNum = typeof src === 'string' ? parseFloat(src) : src;
|
||||||
|
return parseFloat(srcAsNum.toFixed(precision));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deeply freezes an object and all its nested properties
|
||||||
|
*/
|
||||||
|
export const deepFreeze = <T extends Record<string, any>>(obj: T) => {
|
||||||
|
Object.freeze(obj);
|
||||||
|
Object.getOwnPropertyNames(obj).forEach((prop) => {
|
||||||
|
if (Object.prototype.hasOwnProperty.call(obj, prop)
|
||||||
|
&& obj[prop] !== null
|
||||||
|
&& (typeof obj[prop] === 'object' || typeof obj[prop] === 'function')
|
||||||
|
&& !Object.isFrozen(obj[prop])
|
||||||
|
) {
|
||||||
|
deepFreeze(obj[prop] as object);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return obj;
|
||||||
|
//FIXME: using DeepReadonly<T> will cause ts errors in ConfigStore
|
||||||
|
// return obj as DeepReadonly<T>;
|
||||||
|
};
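A tiny sketch of deepFreeze in use (illustrative, not part of the commit):

import { deepFreeze } from '@lib/misc';

const cfg = deepFreeze({ server: { port: 30120 } });
Object.isFrozen(cfg);        // true
Object.isFrozen(cfg.server); // true — nested objects are frozen recursively
// In strict mode (ES modules), `cfg.server.port = 1` would throw a TypeError.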
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a chalk.inverse of a string with a 1ch padding
|
||||||
|
*/
|
||||||
|
export const chalkInversePad = (str: string) => chalk.inverse(` ${str} `);
|
57
core/lib/player/idUtils.test.ts
Normal file
|
@ -0,0 +1,57 @@
import { test, expect, suite, it } from 'vitest';
import * as idUtils from './idUtils';


test('parsePlayerId', () => {
    let result = idUtils.parsePlayerId('FIVEM:555555');
    expect(result.isIdValid).toBe(true);
    expect(result.idType).toBe('fivem');
    expect(result.idValue).toBe('555555');
    expect(result.idlowerCased).toBe('fivem:555555');

    result = idUtils.parsePlayerId('fivem:xxxxx');
    expect(result.isIdValid).toBe(false);
});

test('parsePlayerIds', () => {
    const result = idUtils.parsePlayerIds(['fivem:555555', 'fivem:xxxxx']);
    expect(result.validIdsArray).toEqual(['fivem:555555']);
    expect(result.invalidIdsArray).toEqual(['fivem:xxxxx']);
    expect(result.validIdsObject?.fivem).toBe('555555');
});

test('filterPlayerHwids', () => {
    const result = idUtils.filterPlayerHwids([
        '5:55555555000000002d267c6638c8873d55555555000000005555555500000000',
        'invalidHwid'
    ]);
    expect(result.validHwidsArray).toEqual(['5:55555555000000002d267c6638c8873d55555555000000005555555500000000']);
    expect(result.invalidHwidsArray).toEqual(['invalidHwid']);
});

test('parseLaxIdsArrayInput', () => {
    const result = idUtils.parseLaxIdsArrayInput('55555555000000009999, steam:1100001ffffffff, invalid');
    expect(result.validIds).toEqual(['discord:55555555000000009999', 'steam:1100001ffffffff']);
    expect(result.invalids).toEqual(['invalid']);
});

test('getIdFromOauthNameid', () => {
    expect(idUtils.getIdFromOauthNameid('https://forum.cfx.re/internal/user/555555')).toBe('fivem:555555');
    expect(idUtils.getIdFromOauthNameid('xxxxx')).toBe(false);
});

test('shortenId', () => {
    // Invalid ids
    expect(() => idUtils.shortenId(123 as any)).toThrow('id is not a string');
    expect(idUtils.shortenId('invalidFormat')).toBe('invalidFormat');
    expect(idUtils.shortenId(':1234567890123456')).toBe(':1234567890123456');
    expect(idUtils.shortenId('discord:')).toBe('discord:');

    // Valid ID with length greater than 10
    expect(idUtils.shortenId('discord:383919883341266945')).toBe('discord:3839…6945');
    expect(idUtils.shortenId('xbl:12345678901')).toBe('xbl:1234…8901');

    // Valid ID with length <= 10 (should not be shortened)
    expect(idUtils.shortenId('fivem:1234567890')).toBe('fivem:1234567890');
    expect(idUtils.shortenId('steam:1234')).toBe('steam:1234');
});
159
core/lib/player/idUtils.ts
Normal file
|
@ -0,0 +1,159 @@
|
||||||
|
import type { PlayerIdsObjectType } from "@shared/otherTypes";
|
||||||
|
import consts from "@shared/consts";
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates a single identifier and returns its parts lowercased
|
||||||
|
*/
|
||||||
|
export const parsePlayerId = (idString: string) => {
|
||||||
|
if (typeof idString !== 'string') {
|
||||||
|
return { isIdValid: false, idType: null, idValue: null, idlowerCased: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
const idlowerCased = idString.toLocaleLowerCase();
|
||||||
|
const [idType, idValue] = idlowerCased.split(':', 2);
|
||||||
|
if (idType === "ip") {
|
||||||
|
return { isIdValid: false, idType, idValue, idlowerCased };
|
||||||
|
}
|
||||||
|
return { isIdValid: true, idType, idValue, idlowerCased };
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get valid, invalid and license identifier from array of ids
|
||||||
|
*/
|
||||||
|
export const parsePlayerIds = (ids: string[]) => {
|
||||||
|
let invalidIdsArray: string[] = [];
|
||||||
|
let validIdsArray: string[] = [];
|
||||||
|
const validIdsObject: PlayerIdsObjectType = {}
|
||||||
|
|
||||||
|
for (const idString of ids) {
|
||||||
|
if (typeof idString !== 'string') continue;
|
||||||
|
const { isIdValid, idType, idValue } = parsePlayerId(idString);
|
||||||
|
if (isIdValid) {
|
||||||
|
validIdsArray.push(idString);
|
||||||
|
validIdsObject[idType as keyof PlayerIdsObjectType] = idValue;
|
||||||
|
} else {
|
||||||
|
invalidIdsArray.push(idString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { invalidIdsArray, validIdsArray, validIdsObject };
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get valid and invalid player HWIDs
|
||||||
|
*/
|
||||||
|
export const filterPlayerHwids = (hwids: string[]) => {
|
||||||
|
let invalidHwidsArray: string[] = [];
|
||||||
|
let validHwidsArray: string[] = [];
|
||||||
|
|
||||||
|
for (const hwidString of hwids) {
|
||||||
|
if (typeof hwidString !== 'string') continue;
|
||||||
|
if (consts.regexValidHwidToken.test(hwidString)) {
|
||||||
|
validHwidsArray.push(hwidString);
|
||||||
|
} else {
|
||||||
|
invalidHwidsArray.push(hwidString);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { invalidHwidsArray, validHwidsArray };
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Attempts to parse a user-provided string into an array of valid identifiers.
|
||||||
|
* This function is lenient and will attempt to parse any string into an array of valid identifiers.
|
||||||
|
* For non-prefixed ids, it will attempt to parse it as discord, fivem, steam, or license.
|
||||||
|
* Returns an array of valid ids/hwids, and array of invalid identifiers.
|
||||||
|
*
|
||||||
|
* Stricter version of this function is parsePlayerIds
|
||||||
|
*/
|
||||||
|
export const parseLaxIdsArrayInput = (fullInput: string) => {
|
||||||
|
const validIds: string[] = [];
|
||||||
|
const validHwids: string[] = [];
|
||||||
|
const invalids: string[] = [];
|
||||||
|
|
||||||
|
if (typeof fullInput !== 'string') {
|
||||||
|
return { validIds, validHwids, invalids };
|
||||||
|
}
|
||||||
|
const inputs = fullInput.toLowerCase().split(/[,;\s]+/g).filter(Boolean);
|
||||||
|
|
||||||
|
for (const input of inputs) {
|
||||||
|
if (input.includes(':')) {
|
||||||
|
if (consts.regexValidHwidToken.test(input)) {
|
||||||
|
validHwids.push(input);
|
||||||
|
} else if (Object.values(consts.validIdentifiers).some((regex) => regex.test(input))) {
|
||||||
|
validIds.push(input);
|
||||||
|
} else {
|
||||||
|
const [type, value] = input.split(':', 2);
|
||||||
|
if (consts.validIdentifierParts[type as keyof typeof consts.validIdentifierParts]?.test(value)) {
|
||||||
|
validIds.push(input);
|
||||||
|
} else {
|
||||||
|
invalids.push(input);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (consts.validIdentifierParts.discord.test(input)) {
|
||||||
|
validIds.push(`discord:${input}`);
|
||||||
|
} else if (consts.validIdentifierParts.fivem.test(input)) {
|
||||||
|
validIds.push(`fivem:${input}`);
|
||||||
|
} else if (consts.validIdentifierParts.license.test(input)) {
|
||||||
|
validIds.push(`license:${input}`);
|
||||||
|
} else if (consts.validIdentifierParts.steam.test(input)) {
|
||||||
|
validIds.push(`steam:${input}`);
|
||||||
|
} else {
|
||||||
|
invalids.push(input);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { validIds, validHwids, invalids };
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extracts the fivem:xxxxxx identifier from the nameid field of the userInfo oauth response.
|
||||||
|
* Example: https://forum.cfx.re/internal/user/271816 -> fivem:271816
|
||||||
|
*/
|
||||||
|
export const getIdFromOauthNameid = (nameid: string) => {
|
||||||
|
try {
|
||||||
|
const res = /\/user\/(\d{1,8})/.exec(nameid);
|
||||||
|
//@ts-expect-error
|
||||||
|
return `fivem:${res[1]}`;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shortens an ID/HWID string to just leading and trailing 4 characters.
|
||||||
|
* Unicode symbol alternatives: ‥,…,~,≈,-,•,◇
|
||||||
|
*/
|
||||||
|
export const shortenId = (id: string) => {
|
||||||
|
if (typeof id !== 'string') throw new Error(`id is not a string`);
|
||||||
|
|
||||||
|
const [idType, idValue] = id.split(':', 2);
|
||||||
|
if (!idType || !idValue) {
|
||||||
|
return id; // Invalid format, return as is
|
||||||
|
}
|
||||||
|
|
||||||
|
if (idValue.length <= 10) {
|
||||||
|
return id; // Do not shorten if ID value is 10 characters or fewer
|
||||||
|
}
|
||||||
|
|
||||||
|
const start = idValue.slice(0, 4);
|
||||||
|
const end = idValue.slice(-4);
|
||||||
|
return `${idType}:${start}…${end}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a string of shortened IDs/HWIDs
|
||||||
|
*/
|
||||||
|
export const summarizeIdsArray = (ids: string[]) => {
|
||||||
|
if (!Array.isArray(ids)) return '<invalid list>';
|
||||||
|
if (ids.length === 0) return '<empty list>';
|
||||||
|
const shortList = ids.map(shortenId).join(', ');
|
||||||
|
return `[${shortList}]`;
|
||||||
|
}
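An illustrative sketch of the shortening helpers (not part of the commit; the '@lib/player/idUtils' alias is the one used by playerClasses.ts below):

import { shortenId, summarizeIdsArray } from '@lib/player/idUtils';

shortenId('discord:383919883341266945'); // -> 'discord:3839…6945'
summarizeIdsArray(['discord:383919883341266945', 'steam:1234']);
// -> '[discord:3839…6945, steam:1234]'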
|
365
core/lib/player/playerClasses.ts
Normal file
|
@ -0,0 +1,365 @@
|
||||||
|
const modulename = 'Player';
|
||||||
|
import cleanPlayerName from '@shared/cleanPlayerName';
|
||||||
|
import { DatabaseActionWarnType, DatabasePlayerType, DatabaseWhitelistApprovalsType } from '@modules/Database/databaseTypes';
|
||||||
|
import { cloneDeep, union } from 'lodash-es';
|
||||||
|
import { now } from '@lib/misc';
|
||||||
|
import { parsePlayerIds } from '@lib/player/idUtils';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import consts from '@shared/consts';
|
||||||
|
import type FxPlayerlist from '@modules/FxPlayerlist';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base class for ServerPlayer and DatabasePlayer.
|
||||||
|
* NOTE: player classes are responsible for all (and only) the business logic regarding the player object in the database.
|
||||||
|
* In the future, when actions become part of the player object, also add them to these classes.
|
||||||
|
*/
|
||||||
|
export class BasePlayer {
|
||||||
|
displayName: string = 'unknown';
|
||||||
|
pureName: string = 'unknown';
|
||||||
|
ids: string[] = [];
|
||||||
|
hwids: string[] = [];
|
||||||
|
license: null | string = null; //extracted for convenience
|
||||||
|
dbData: false | DatabasePlayerType = false;
|
||||||
|
isConnected: boolean = false;
|
||||||
|
|
||||||
|
constructor(readonly uniqueId: Symbol) { }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mutates the database data based on a source object to be applied
|
||||||
|
* FIXME: if this is called for a disconnected ServerPlayer, it will not clean after 120s
|
||||||
|
*/
|
||||||
|
protected mutateDbData(srcData: object) {
|
||||||
|
if (!this.license) throw new Error(`cannot mutate database for a player that has no license`);
|
||||||
|
this.dbData = txCore.database.players.update(this.license, srcData, this.uniqueId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all available identifiers (current+db)
|
||||||
|
*/
|
||||||
|
getAllIdentifiers() {
|
||||||
|
if (this.dbData && this.dbData.ids) {
|
||||||
|
return union(this.ids, this.dbData.ids);
|
||||||
|
} else {
|
||||||
|
return [...this.ids];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all available hardware identifiers (current+db)
|
||||||
|
*/
|
||||||
|
getAllHardwareIdentifiers() {
|
||||||
|
if (this.dbData && this.dbData.hwids) {
|
||||||
|
return union(this.hwids, this.dbData.hwids);
|
||||||
|
} else {
|
||||||
|
return [...this.hwids];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all actions related to all available ids
|
||||||
|
* NOTE: theoretically ServerPlayer.setupDatabaseData() guarantees that DatabasePlayer.dbData.ids array
|
||||||
|
* will contain the license but may be better to also explicitly add it to the array here?
|
||||||
|
*/
|
||||||
|
getHistory() {
|
||||||
|
if (!this.ids.length) return [];
|
||||||
|
return txCore.database.actions.findMany(
|
||||||
|
this.getAllIdentifiers(),
|
||||||
|
this.getAllHardwareIdentifiers()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Saves notes for this player.
|
||||||
|
* NOTE: Technically, we should be checking this.isRegistered, but it is not available in BasePlayer
|
||||||
|
*/
|
||||||
|
setNote(text: string, author: string) {
|
||||||
|
if (!this.license) throw new Error(`cannot save notes for a player that has no license`);
|
||||||
|
this.mutateDbData({
|
||||||
|
notes: {
|
||||||
|
text,
|
||||||
|
lastAdmin: author,
|
||||||
|
tsLastEdit: now(),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Saves the whitelist status for this player
|
||||||
|
* NOTE: Technically, we should be checking this.isRegistered, but it is not available in BasePlayer
|
||||||
|
*/
|
||||||
|
setWhitelist(enabled: boolean) {
|
||||||
|
if (!this.license) throw new Error(`cannot set whitelist status for a player that has no license`);
|
||||||
|
this.mutateDbData({
|
||||||
|
tsWhitelisted: enabled ? now() : undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
//Remove entries from whitelistApprovals & whitelistRequests
|
||||||
|
const allIdsFilter = (x: DatabaseWhitelistApprovalsType) => {
|
||||||
|
return this.ids.includes(x.identifier);
|
||||||
|
}
|
||||||
|
txCore.database.whitelist.removeManyApprovals(allIdsFilter);
|
||||||
|
txCore.database.whitelist.removeManyRequests({ license: this.license });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
type PlayerDataType = {
|
||||||
|
name: string,
|
||||||
|
ids: string[],
|
||||||
|
hwids: string[],
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class to represent a player that is or was connected to the currently running server process.
|
||||||
|
*/
|
||||||
|
export class ServerPlayer extends BasePlayer {
|
||||||
|
readonly #fxPlayerlist: FxPlayerlist;
|
||||||
|
// readonly psid: string; //TODO: calculate player session id (sv mutex, netid, rollover id) here
|
||||||
|
readonly netid: number;
|
||||||
|
readonly tsConnected = now();
|
||||||
|
readonly isRegistered: boolean;
|
||||||
|
readonly #minuteCronInterval?: ReturnType<typeof setInterval>;
|
||||||
|
// #offlineDbDataCacheTimeout?: ReturnType<typeof setTimeout>;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
netid: number,
|
||||||
|
playerData: PlayerDataType,
|
||||||
|
fxPlayerlist: FxPlayerlist
|
||||||
|
) {
|
||||||
|
super(Symbol(`netid${netid}`));
|
||||||
|
this.#fxPlayerlist = fxPlayerlist;
|
||||||
|
this.netid = netid;
|
||||||
|
this.isConnected = true;
|
||||||
|
if (
|
||||||
|
playerData === null
|
||||||
|
|| typeof playerData !== 'object'
|
||||||
|
|| typeof playerData.name !== 'string'
|
||||||
|
|| !Array.isArray(playerData.ids)
|
||||||
|
|| !Array.isArray(playerData.hwids)
|
||||||
|
) {
|
||||||
|
throw new Error(`invalid player data`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Processing identifiers
|
||||||
|
//NOTE: ignoring IP completely
|
||||||
|
const { validIdsArray, validIdsObject } = parsePlayerIds(playerData.ids);
|
||||||
|
this.license = validIdsObject.license;
|
||||||
|
this.ids = validIdsArray;
|
||||||
|
this.hwids = playerData.hwids.filter(x => {
|
||||||
|
return typeof x === 'string' && consts.regexValidHwidToken.test(x);
|
||||||
|
});
|
||||||
|
|
||||||
|
//Processing player name
|
||||||
|
const { displayName, pureName } = cleanPlayerName(playerData.name);
|
||||||
|
this.displayName = displayName;
|
||||||
|
this.pureName = pureName;
|
||||||
|
|
||||||
|
//If this player is eligible to be on the database
|
||||||
|
if (this.license) {
|
||||||
|
this.#setupDatabaseData();
|
||||||
|
this.isRegistered = !!this.dbData;
|
||||||
|
this.#minuteCronInterval = setInterval(this.#minuteCron.bind(this), 60_000);
|
||||||
|
} else {
|
||||||
|
this.isRegistered = false;
|
||||||
|
}
|
||||||
|
console.log(167, this.isRegistered)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Registers or retrieves the player data from the database.
|
||||||
|
* NOTE: if player has license, we are guaranteeing license will be added to the database ids array
|
||||||
|
*/
|
||||||
|
#setupDatabaseData() {
|
||||||
|
if (!this.license || !this.isConnected) return;
|
||||||
|
|
||||||
|
//Make sure the database is ready - this should be impossible
|
||||||
|
if (!txCore.database.isReady) {
|
||||||
|
console.error(`Players database not yet ready, cannot read db status for player id ${this.displayName}.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Check if player is already on the database
|
||||||
|
try {
|
||||||
|
const dbPlayer = txCore.database.players.findOne(this.license);
|
||||||
|
if (dbPlayer) {
|
||||||
|
//Updates database data
|
||||||
|
this.dbData = dbPlayer;
|
||||||
|
this.mutateDbData({
|
||||||
|
displayName: this.displayName,
|
||||||
|
pureName: this.pureName,
|
||||||
|
tsLastConnection: this.tsConnected,
|
||||||
|
ids: union(dbPlayer.ids, this.ids),
|
||||||
|
hwids: union(dbPlayer.hwids, this.hwids),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
//Register player to the database
|
||||||
|
console.log(`Registering '${this.displayName}' to players database.`);
|
||||||
|
const toRegister = {
|
||||||
|
license: this.license,
|
||||||
|
ids: this.ids,
|
||||||
|
hwids: this.hwids,
|
||||||
|
displayName: this.displayName,
|
||||||
|
pureName: this.pureName,
|
||||||
|
playTime: 0,
|
||||||
|
tsLastConnection: this.tsConnected,
|
||||||
|
tsJoined: this.tsConnected,
|
||||||
|
};
|
||||||
|
txCore.database.players.register(toRegister);
|
||||||
|
this.dbData = toRegister;
|
||||||
|
console.verbose.ok(`Adding '${this.displayName}' to players database.`);
|
||||||
|
|
||||||
|
}
|
||||||
|
setImmediate(this.#sendInitialData.bind(this));
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to load/register player ${this.displayName} from/to the database with error:`);
|
||||||
|
console.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prepares the initial player data and reports to FxPlayerlist, which will dispatch to the server via command.
|
||||||
|
* TODO: adapt to be used for admin auth and player tags.
|
||||||
|
*/
|
||||||
|
#sendInitialData() {
|
||||||
|
if (!this.isRegistered) return;
|
||||||
|
if (!this.dbData) throw new Error(`cannot send initial data for a player that has no dbData`);
|
||||||
|
|
||||||
|
let oldestPendingWarn: undefined | DatabaseActionWarnType;
|
||||||
|
const actionHistory = this.getHistory();
|
||||||
|
for (const action of actionHistory) {
|
||||||
|
if (action.type !== 'warn' || action.revocation.timestamp !== null) continue;
|
||||||
|
if (!action.acked) {
|
||||||
|
oldestPendingWarn = action;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (oldestPendingWarn) {
|
||||||
|
this.#fxPlayerlist.dispatchInitialPlayerData(this.netid, oldestPendingWarn);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the dbData.
|
||||||
|
* Used when some other player instance mutates the database and we need to sync all players
|
||||||
|
* with the same license.
|
||||||
|
*/
|
||||||
|
syncUpstreamDbData(srcData: DatabasePlayerType) {
|
||||||
|
this.dbData = cloneDeep(srcData)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a clone of this.dbData.
|
||||||
|
* If the data is not available, it means the player was disconnected and dbData wiped to save memory,
|
||||||
|
* so start a 120s interval to wipe it from memory again. This period can be considered a "cache".
|
||||||
|
* FIXME: review dbData optimization, 50k players would be up to 50mb
|
||||||
|
*/
|
||||||
|
getDbData() {
|
||||||
|
if (this.dbData) {
|
||||||
|
return cloneDeep(this.dbData);
|
||||||
|
} else if (this.license && this.isRegistered) {
|
||||||
|
const dbPlayer = txCore.database.players.findOne(this.license);
|
||||||
|
if (!dbPlayer) return false;
|
||||||
|
|
||||||
|
this.dbData = dbPlayer;
|
||||||
|
// clearTimeout(this.#offlineDbDataCacheTimeout); //maybe not needed?
|
||||||
|
// this.#offlineDbDataCacheTimeout = setTimeout(() => {
|
||||||
|
// this.dbData = false;
|
||||||
|
// }, 120_000);
|
||||||
|
return cloneDeep(this.dbData);
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates dbData play time every minute
|
||||||
|
*/
|
||||||
|
#minuteCron() {
|
||||||
|
//FIXME: maybe use UIntXarray or mnemonist.Uint16Vector circular buffers to save memory
|
||||||
|
//TODO: rough draft of a playtime tracking system written before note above
|
||||||
|
// let list: [day: string, mins: number][] = [];
|
||||||
|
// const today = new Date;
|
||||||
|
// const currDay = today.toISOString().split('T')[0];
|
||||||
|
// if(!list.length){
|
||||||
|
// list.push([currDay, 1]);
|
||||||
|
// return;
|
||||||
|
// }
|
||||||
|
// if(list.at(-1)![0] === currDay){
|
||||||
|
// list.at(-1)![1]++;
|
||||||
|
// } else {
|
||||||
|
// //FIXME: move this cutoff to a const in the database or playerlist manager
|
||||||
|
// const cutoffTs = today.setUTCHours(0, 0, 0, 0) - 1000 * 60 * 60 * 24 * 28;
|
||||||
|
// const cutoffIndex = list.findIndex(x => new Date(x[0]).getTime() < cutoffTs);
|
||||||
|
// list = list.slice(cutoffIndex);
|
||||||
|
// list.push([currDay, 1]);
|
||||||
|
// }
|
||||||
|
|
||||||
|
|
||||||
|
if (!this.dbData || !this.isConnected) return;
|
||||||
|
try {
|
||||||
|
this.mutateDbData({ playTime: this.dbData.playTime + 1 });
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to update playtime for player ${this.displayName}:`);
|
||||||
|
console.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Marks this player as disconnected, clears dbData (mem optimization) and clears minute cron
|
||||||
|
*/
|
||||||
|
disconnect() {
|
||||||
|
this.isConnected = false;
|
||||||
|
// this.dbData = false;
|
||||||
|
clearInterval(this.#minuteCronInterval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class to represent players stored in the database.
|
||||||
|
*/
|
||||||
|
export class DatabasePlayer extends BasePlayer {
|
||||||
|
readonly isRegistered = true; //no need to check because otherwise constructor throws
|
||||||
|
|
||||||
|
constructor(license: string, srcPlayerData?: DatabasePlayerType) {
|
||||||
|
super(Symbol(`db${license}`));
|
||||||
|
if (typeof license !== 'string') {
|
||||||
|
throw new Error(`invalid player license`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Set dbData either from constructor params, or from querying the database
|
||||||
|
if (srcPlayerData) {
|
||||||
|
this.dbData = srcPlayerData;
|
||||||
|
} else {
|
||||||
|
const foundData = txCore.database.players.findOne(license);
|
||||||
|
if (!foundData) {
|
||||||
|
throw new Error(`player not found in database`);
|
||||||
|
} else {
|
||||||
|
this.dbData = foundData;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//fill in data
|
||||||
|
this.license = license;
|
||||||
|
this.ids = this.dbData.ids;
|
||||||
|
this.hwids = this.dbData.hwids;
|
||||||
|
this.displayName = this.dbData.displayName;
|
||||||
|
this.pureName = this.dbData.pureName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a clone of this.dbData
|
||||||
|
*/
|
||||||
|
getDbData() {
|
||||||
|
return cloneDeep(this.dbData);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
export type PlayerClass = ServerPlayer | DatabasePlayer;
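An illustrative sketch of working with a stored player (not part of the commit; it assumes the txCore global and the players database are already initialized, that the '@lib/player/playerClasses' alias follows the '@lib/player/...' pattern used above, and that the license value is hypothetical):

import { DatabasePlayer } from '@lib/player/playerClasses';

// Hypothetical license value, for illustration only.
const license = 'aaaaaaaabbbbbbbbccccccccddddddddeeeeeeee';
const player = new DatabasePlayer(license); // throws if the player is not in the database
player.setNote('manually verified', 'exampleAdmin'); // persisted through mutateDbData()
const snapshot = player.getDbData(); // deep clone of the stored record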
|
15
core/lib/player/playerFinder.ts
Normal file
|
@ -0,0 +1,15 @@
import { DatabasePlayerType } from "@modules/Database/databaseTypes.js";
import { DatabasePlayer } from "./playerClasses.js"


/**
 * Finds all players in the database with a particular matching identifier
 */
export const findPlayersByIdentifier = (identifier: string): DatabasePlayer[] => {
    if (typeof identifier !== 'string' || !identifier.length) throw new Error(`invalid identifier`);

    const filter = (player: DatabasePlayerType) => player.ids.includes(identifier);
    const playersData = txCore.database.players.findMany(filter);

    return playersData.map((dbData) => new DatabasePlayer(dbData.license, dbData))
}
62
core/lib/player/playerResolver.ts
Normal file
|
@ -0,0 +1,62 @@
import { SYM_CURRENT_MUTEX } from "@lib/symbols.js";
import { DatabasePlayer, ServerPlayer } from "./playerClasses.js"


/**
 * Resolves a ServerPlayer or DatabasePlayer based on mutex, netid and license.
 * When mutex#netid is present, it takes precedence over license.
 * If the mutex is not from the current server, search for the license in FxPlayerlist.licenseCache[]
 * and then search for the license in the database.
 */
export default (mutex: any, netid: any, license: any) => {
    const parsedNetid = parseInt(netid);
    let searchLicense = license;

    //For error clarification only
    let hasMutex = false;

    //Attempt to resolve current mutex, if needed
    if (mutex === SYM_CURRENT_MUTEX) {
        mutex = txCore.fxRunner.child?.mutex;
        if (!mutex) {
            throw new Error(`current mutex not available`);
        }
    }

    //If mutex+netid provided
    if (typeof mutex === 'string' && typeof netid === 'number' && !isNaN(parsedNetid)) {
        hasMutex = true;
        if (mutex && mutex === txCore.fxRunner.child?.mutex) {
            //If the mutex is from the server currently online
            const player = txCore.fxPlayerlist.getPlayerById(netid);
            if (player instanceof ServerPlayer) {
                return player;
            } else {
                throw new Error(`player not found in current server playerlist`);
            }
        } else {
            // If mutex is from previous server, overwrite any given license
            const searchRef = `${mutex}#${netid}`;
            const found = txCore.fxPlayerlist.licenseCache.find(c => c[0] === searchRef);
            if (found) searchLicense = found[1];
        }
    }

    //If license provided or resolved through licenseCache, search in the database
    if (typeof searchLicense === 'string' && searchLicense.length) {
        const onlineMatches = txCore.fxPlayerlist.getOnlinePlayersByLicense(searchLicense);
        if (onlineMatches.length) {
            return onlineMatches.at(-1) as ServerPlayer;
        } else {
            return new DatabasePlayer(searchLicense);
        }
    }

    //Player not found
    //If not found in the db, the search above already threw error
    if (hasMutex) {
        throw new Error(`could not resolve player by its net id which likely means it has disconnected long ago`);
    } else {
        throw new Error(`could not resolve this player`);
    }
}
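An illustrative sketch of the resolver (not part of the commit; it assumes txCore is initialized, a server is currently running, the '@lib/player/playerResolver' alias follows the pattern used above, and the license string is hypothetical):

import playerResolver from '@lib/player/playerResolver';
import { SYM_CURRENT_MUTEX } from '@lib/symbols.js';

// Resolve netid 42 on the currently running server (throws if it is not in the playerlist):
const online = playerResolver(SYM_CURRENT_MUTEX, 42, undefined);

// Resolve by license only — returns the most recent online match, or a DatabasePlayer otherwise:
const byLicense = playerResolver(undefined, undefined, 'aaaaaaaabbbbbbbbccccccccddddddddeeeeeeee');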
18
core/lib/quitProcess.ts
Normal file
|
@ -0,0 +1,18 @@
/**
 * Force Quits the process with a small delay and padding for the console.
 */
export default function quitProcess(code = 0): never {
    //Process.exit will not quit if there are listeners on exit
    process.removeAllListeners('SIGHUP');
    process.removeAllListeners('SIGINT');
    process.removeAllListeners('SIGTERM');

    //Hacky solution to guarantee the error is flushed
    //before fxserver double prints the exit code
    process.stdout.write('\n');
    process.stdout.write('\n');

    //This will make the process hang for 100ms before exiting
    Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 100);
    process.exit(code);
}
16
core/lib/symbols.ts
Normal file
|
@ -0,0 +1,16 @@
/**
 * ConfigStore Schemas
 */
//Symbols used to mark the validation fail behavior
export const SYM_FIXER_FATAL = Symbol('ConfigSchema:FixerFatalError');
export const SYM_FIXER_DEFAULT = Symbol('ConfigSchema:FixerFallbackDefault');

//Other symbols
export const SYM_RESET_CONFIG = Symbol('ConfigSchema:SaverResetConfig');


/**
 * Other symbols
 */
export const SYM_SYSTEM_AUTHOR = Symbol('Definition:AuthorIsSystem');
export const SYM_CURRENT_MUTEX = Symbol('Definition:CurrentServerMutex');
13
core/lib/xss.js
Normal file
|
@ -0,0 +1,13 @@
import xssClass from 'xss';


/**
 * Returns a function with the passed whitelist parameter.
 * https://github.com/leizongmin/js-xss#whitelist
 */
export default (customWL = []) => {
    const xss = new xssClass.FilterXSS({
        whiteList: customWL,
    });
    return (x) => xss.process(x);
};
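An illustrative sketch (not part of the commit; the '@lib/xss' alias is assumed, and the exact escaping follows js-xss defaults):

import xssInstancer from '@lib/xss';

const xss = xssInstancer(); // empty whitelist: every tag gets sanitized/escaped by js-xss
xss('<b>hello</b>'); // tags outside the whitelist are neutralized before display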
682
core/modules/AdminStore/index.js
Normal file
|
@ -0,0 +1,682 @@
|
||||||
|
const modulename = 'AdminStore';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { nanoid } from 'nanoid';
|
||||||
|
import { txHostConfig } from '@core/globalData';
|
||||||
|
import CfxProvider from './providers/CitizenFX.js';
|
||||||
|
import { createHash } from 'node:crypto';
|
||||||
|
import consoleFactory from '@lib/console.js';
|
||||||
|
import fatalError from '@lib/fatalError.js';
|
||||||
|
import { chalkInversePad } from '@lib/misc.js';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
//NOTE: The way I'm doing versioning right now is horrible but for now it's the best I can do
|
||||||
|
//NOTE: I do not need to version every admin, just the file itself
|
||||||
|
const ADMIN_SCHEMA_VERSION = 1;
|
||||||
|
|
||||||
|
|
||||||
|
//Helpers
|
||||||
|
const migrateProviderIdentifiers = (providerName, providerData) => {
|
||||||
|
if (providerName === 'citizenfx') {
|
||||||
|
// data may be empty, or nameid may be invalid
|
||||||
|
try {
|
||||||
|
const res = /\/user\/(\d{1,8})/.exec(providerData.data.nameid);
|
||||||
|
providerData.identifier = `fivem:${res[1]}`;
|
||||||
|
} catch (error) {
|
||||||
|
providerData.identifier = 'fivem:00000000';
|
||||||
|
}
|
||||||
|
} else if (providerName === 'discord') {
|
||||||
|
providerData.identifier = `discord:${providerData.id}`;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module responsible for storing, retrieving and validating admins data.
|
||||||
|
*/
|
||||||
|
export default class AdminStore {
|
||||||
|
constructor() {
|
||||||
|
this.adminsFile = txHostConfig.dataSubPath('admins.json');
|
||||||
|
this.adminsFileHash = null;
|
||||||
|
this.admins = null;
|
||||||
|
this.refreshRoutine = null;
|
||||||
|
|
||||||
|
//Not alphabetical order, but that's fine
|
||||||
|
//FIXME: move to a separate file
|
||||||
|
//TODO: maybe put in @shared so the frontend's UnauthorizedPage can use it
|
||||||
|
//TODO: when migrating the admins page to react, definitely put this in @shared so the front rendering doesn't depend on the backend response - lessons learned from the settings page.
|
||||||
|
//FIXME: if not using enums, definitely use so other type of type safety
|
||||||
|
//FIXME: maybe rename all_permissions to `administrator` (just like discord) or `super_admin` and rename the `Admins` page to `Users`. This fits better with how people use txAdmin as "mods" are not really admins
|
||||||
|
this.registeredPermissions = {
|
||||||
|
'all_permissions': 'All Permissions',
|
||||||
|
'manage.admins': 'Manage Admins', //will enable the "set admin" button in the player modal
|
||||||
|
'settings.view': 'Settings: View (no tokens)',
|
||||||
|
'settings.write': 'Settings: Change',
|
||||||
|
'console.view': 'Console: View',
|
||||||
|
'console.write': 'Console: Write',
|
||||||
|
'control.server': 'Start/Stop Server + Scheduler', //FIXME: horrible name
|
||||||
|
'announcement': 'Send Announcements',
|
||||||
|
'commands.resources': 'Start/Stop Resources',
|
||||||
|
'server.cfg.editor': 'Read/Write server.cfg', //FIXME: rename to server.cfg_editor
|
||||||
|
'txadmin.log.view': 'View System Logs', //FIXME: rename to system.log.view
|
||||||
|
'server.log.view': 'View Server Logs',
|
||||||
|
|
||||||
|
'menu.vehicle': 'Spawn / Fix Vehicles',
|
||||||
|
'menu.clear_area': 'Reset world area',
|
||||||
|
'menu.viewids': 'View Player IDs in-game', //be able to see the ID of the players
|
||||||
|
'players.direct_message': 'Direct Message',
|
||||||
|
'players.whitelist': 'Whitelist',
|
||||||
|
'players.warn': 'Warn',
|
||||||
|
'players.kick': 'Kick',
|
||||||
|
'players.ban': 'Ban',
|
||||||
|
'players.freeze': 'Freeze Players',
|
||||||
|
'players.heal': 'Heal', //self, everyone, and the "heal" button in player modal
|
||||||
|
'players.playermode': 'NoClip / God Mode', //self playermode, and also the player spectate option
|
||||||
|
'players.spectate': 'Spectate', //self playermode, and also the player spectate option
|
||||||
|
'players.teleport': 'Teleport', //self teleport, and the bring/go to on player modal
|
||||||
|
'players.troll': 'Troll Actions', //all the troll options in the player modal
|
||||||
|
};
|
||||||
|
//FIXME: can be removed, hardcode it in the cron function
|
||||||
|
this.hardConfigs = {
|
||||||
|
refreshInterval: 15e3,
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
//Load providers
|
||||||
|
//FIXME: could become a top-level singleton, it doesn't need to be in the class
|
||||||
|
try {
|
||||||
|
this.providers = {
|
||||||
|
discord: false,
|
||||||
|
citizenfx: new CfxProvider(),
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to load providers with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Check if admins file exists
|
||||||
|
let adminFileExists;
|
||||||
|
try {
|
||||||
|
fs.statSync(this.adminsFile, fs.constants.F_OK);
|
||||||
|
adminFileExists = true;
|
||||||
|
} catch (error) {
|
||||||
|
if (error.code === 'ENOENT') {
|
||||||
|
adminFileExists = false;
|
||||||
|
} else {
|
||||||
|
throw new Error(`Failed to check presence of admin file with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Printing PIN or starting loop
|
||||||
|
if (!adminFileExists) {
|
||||||
|
if (!txHostConfig.defaults.account) {
|
||||||
|
this.addMasterPin = (Math.random() * 10000).toFixed().padStart(4, '0');
|
||||||
|
this.admins = false;
|
||||||
|
} else {
|
||||||
|
const { username, fivemId, password } = txHostConfig.defaults.account;
|
||||||
|
this.createAdminsFile(
|
||||||
|
username,
|
||||||
|
fivemId ? `fivem:${fivemId}` : undefined,
|
||||||
|
undefined,
|
||||||
|
password,
|
||||||
|
password ? false : undefined,
|
||||||
|
);
|
||||||
|
console.ok(`Created master account ${chalkInversePad(username)} with credentials provided by ${txHostConfig.sourceName}.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.loadAdminsFile();
|
||||||
|
this.setupRefreshRoutine();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* sets the admins file refresh routine
|
||||||
|
*/
|
||||||
|
setupRefreshRoutine() {
|
||||||
|
this.refreshRoutine = setInterval(() => {
|
||||||
|
this.checkAdminsFile();
|
||||||
|
}, this.hardConfigs.refreshInterval);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates an admins.json file based on the first account
|
||||||
|
* @param {string} username
|
||||||
|
* @param {string|undefined} fivemId with the fivem: prefix
|
||||||
|
* @param {string|undefined} discordId with the discord: prefix
|
||||||
|
* @param {string|undefined} password backup password
|
||||||
|
* @param {boolean|undefined} isPlainTextPassword
|
||||||
|
* @returns {(boolean)} true or throws an error
|
||||||
|
*/
|
||||||
|
createAdminsFile(username, fivemId, discordId, password, isPlainTextPassword) {
|
||||||
|
//Sanity check
|
||||||
|
if (this.admins !== false && this.admins !== null) throw new Error('Admins file already exists.');
|
||||||
|
if (typeof username !== 'string' || username.length < 3) throw new Error('Invalid username parameter.');
|
||||||
|
|
||||||
|
//Handling password
|
||||||
|
let password_hash, password_temporary;
|
||||||
|
if(password){
|
||||||
|
password_hash = isPlainTextPassword ? GetPasswordHash(password) : password;
|
||||||
|
// password_temporary = false; //undefined will do the same
|
||||||
|
} else {
|
||||||
|
const veryRandomString = `${username}-password-not-meant-to-be-used-${nanoid()}`;
|
||||||
|
password_hash = GetPasswordHash(veryRandomString);
|
||||||
|
password_temporary = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Handling third party providers
|
||||||
|
const providers = {};
|
||||||
|
if (fivemId) {
|
||||||
|
providers.citizenfx = {
|
||||||
|
id: username,
|
||||||
|
identifier: fivemId,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (discordId) {
|
||||||
|
providers.discord = {
|
||||||
|
id: discordId,
|
||||||
|
identifier: `discord:${discordId}`,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
//Creating new admin
|
||||||
|
const newAdmin = {
|
||||||
|
$schema: ADMIN_SCHEMA_VERSION,
|
||||||
|
name: username,
|
||||||
|
master: true,
|
||||||
|
password_hash,
|
||||||
|
password_temporary,
|
||||||
|
providers,
|
||||||
|
permissions: [],
|
||||||
|
};
|
||||||
|
this.admins = [newAdmin];
|
||||||
|
this.addMasterPin = undefined;
|
||||||
|
|
||||||
|
//Saving admin file
|
||||||
|
try {
|
||||||
|
const jsonData = JSON.stringify(this.admins);
|
||||||
|
this.adminsFileHash = createHash('sha1').update(jsonData).digest('hex');
|
||||||
|
fs.writeFileSync(this.adminsFile, jsonData, { encoding: 'utf8', flag: 'wx' });
|
||||||
|
this.setupRefreshRoutine();
|
||||||
|
return newAdmin;
|
||||||
|
} catch (error) {
|
||||||
|
let message = `Failed to create '${this.adminsFile}' with error: ${error.message}`;
|
||||||
|
console.verbose.error(message);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of admins and permissions
|
||||||
|
*/
|
||||||
|
getAdminsList() {
|
||||||
|
if (this.admins == false) return [];
|
||||||
|
return this.admins.map((user) => {
|
||||||
|
return {
|
||||||
|
name: user.name,
|
||||||
|
master: user.master,
|
||||||
|
providers: Object.keys(user.providers),
|
||||||
|
permissions: user.permissions,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the raw array of admins, except for the hash
|
||||||
|
*/
|
||||||
|
getRawAdminsList() {
|
||||||
|
if (this.admins === false) return [];
|
||||||
|
return cloneDeep(this.admins);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all data from an admin by provider user id (ex discord id), or false
|
||||||
|
* @param {string} uid
|
||||||
|
*/
|
||||||
|
getAdminByProviderUID(uid) {
|
||||||
|
if (this.admins == false) return false;
|
||||||
|
let id = uid.trim().toLowerCase();
|
||||||
|
if (!id.length) return false;
|
||||||
|
let admin = this.admins.find((user) => {
|
||||||
|
return Object.keys(user.providers).find((provider) => {
|
||||||
|
return (id === user.providers[provider].id.toLowerCase());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return (admin) ? cloneDeep(admin) : false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array with all identifiers of the admins (fivem/discord)
|
||||||
|
*/
|
||||||
|
getAdminsIdentifiers() {
|
||||||
|
if (this.admins === false) return [];
|
||||||
|
const ids = [];
|
||||||
|
for (const admin of this.admins) {
|
||||||
|
admin.providers.citizenfx && ids.push(admin.providers.citizenfx.identifier);
|
||||||
|
admin.providers.discord && ids.push(admin.providers.discord.identifier);
|
||||||
|
}
|
||||||
|
return ids;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all data from an admin by their name, or false
|
||||||
|
* @param {string} uname
|
||||||
|
*/
|
||||||
|
getAdminByName(uname) {
|
||||||
|
if (!this.admins) return false;
|
||||||
|
const username = uname.trim().toLowerCase();
|
||||||
|
if (!username.length) return false;
|
||||||
|
const admin = this.admins.find((user) => {
|
||||||
|
return (username === user.name.toLowerCase());
|
||||||
|
});
|
||||||
|
return (admin) ? cloneDeep(admin) : false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all data from an admin by game identifier, or false
|
||||||
|
* @param {string[]} identifiers
|
||||||
|
*/
|
||||||
|
getAdminByIdentifiers(identifiers) {
|
||||||
|
if (!this.admins) return false;
|
||||||
|
identifiers = identifiers
|
||||||
|
.map((i) => i.trim().toLowerCase())
|
||||||
|
.filter((i) => i.length);
|
||||||
|
if (!identifiers.length) return false;
|
||||||
|
const admin = this.admins.find((user) =>
|
||||||
|
identifiers.find((identifier) =>
|
||||||
|
Object.keys(user.providers).find((provider) =>
|
||||||
|
(identifier === user.providers[provider].identifier.toLowerCase()))));
|
||||||
|
return (admin) ? cloneDeep(admin) : false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list with all registered permissions
|
||||||
|
*/
|
||||||
|
getPermissionsList() {
|
||||||
|
return cloneDeep(this.registeredPermissions);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes to storage the admins file
|
||||||
|
*/
|
||||||
|
async writeAdminsFile() {
|
||||||
|
const jsonData = JSON.stringify(this.admins, null, 2);
|
||||||
|
this.adminsFileHash = createHash('sha1').update(jsonData).digest('hex');
|
||||||
|
await fsp.writeFile(this.adminsFile, jsonData, 'utf8');
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks the integrity of the admins file and restores it if it was modified externally
|
||||||
|
*/
|
||||||
|
async checkAdminsFile() {
|
||||||
|
const restore = async () => {
|
||||||
|
try {
|
||||||
|
await this.writeAdminsFile();
|
||||||
|
console.ok('Restored admins.json file.');
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to restore admins.json file: ${error.message}`);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
const jsonData = await fsp.readFile(this.adminsFile, 'utf8');
|
||||||
|
const inboundHash = createHash('sha1').update(jsonData).digest('hex');
|
||||||
|
if (this.adminsFileHash !== inboundHash) {
|
||||||
|
console.warn('The admins.json file was modified or deleted by an external source, txAdmin will try to restore it.');
|
||||||
|
restore();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Cannot check admins file integrity: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a new admin to the admins file
|
||||||
|
* NOTE: I'm fully aware this could be optimized. Leaving it this way to improve readability and error verbosity
|
||||||
|
* @param {string} name
|
||||||
|
* @param {object|undefined} citizenfxData or false
|
||||||
|
* @param {object|undefined} discordData or false
|
||||||
|
* @param {string} password
|
||||||
|
* @param {array} permissions
|
||||||
|
*/
|
||||||
|
async addAdmin(name, citizenfxData, discordData, password, permissions) {
|
||||||
|
if (this.admins == false) throw new Error('Admins not set');
|
||||||
|
|
||||||
|
//Check if username is already taken
|
||||||
|
if (this.getAdminByName(name)) throw new Error('Username already taken');
|
||||||
|
|
||||||
|
//Preparing admin
|
||||||
|
const admin = {
|
||||||
|
$schema: ADMIN_SCHEMA_VERSION,
|
||||||
|
name,
|
||||||
|
master: false,
|
||||||
|
password_hash: GetPasswordHash(password),
|
||||||
|
password_temporary: true,
|
||||||
|
providers: {},
|
||||||
|
permissions,
|
||||||
|
};
|
||||||
|
|
||||||
|
//Check if provider uid already taken and inserting into admin object
|
||||||
|
if (citizenfxData) {
|
||||||
|
const existingCitizenFX = this.getAdminByProviderUID(citizenfxData.id);
|
||||||
|
if (existingCitizenFX) throw new Error('CitizenFX ID already taken');
|
||||||
|
admin.providers.citizenfx = {
|
||||||
|
id: citizenfxData.id,
|
||||||
|
identifier: citizenfxData.identifier,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (discordData) {
|
||||||
|
const existingDiscord = this.getAdminByProviderUID(discordData.id);
|
||||||
|
if (existingDiscord) throw new Error('Discord ID already taken');
|
||||||
|
admin.providers.discord = {
|
||||||
|
id: discordData.id,
|
||||||
|
identifier: discordData.identifier,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
//Saving admin file
|
||||||
|
this.admins.push(admin);
|
||||||
|
this.refreshOnlineAdmins().catch((e) => { });
|
||||||
|
try {
|
||||||
|
return await this.writeAdminsFile();
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to save admins.json with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit admin and save to the admins file
|
||||||
|
* @param {string} name
|
||||||
|
* @param {string|null} password
|
||||||
|
* @param {object|false} [citizenfxData] or false
|
||||||
|
* @param {object|false} [discordData] or false
|
||||||
|
* @param {string[]} [permissions]
|
||||||
|
*/
|
||||||
|
async editAdmin(name, password, citizenfxData, discordData, permissions) {
|
||||||
|
if (this.admins == false) throw new Error('Admins not set');
|
||||||
|
|
||||||
|
//Find admin index
|
||||||
|
let username = name.toLowerCase();
|
||||||
|
let adminIndex = this.admins.findIndex((user) => {
|
||||||
|
return (username === user.name.toLowerCase());
|
||||||
|
});
|
||||||
|
if (adminIndex == -1) throw new Error('Admin not found');
|
||||||
|
|
||||||
|
//Editing admin
|
||||||
|
if (password !== null) {
|
||||||
|
this.admins[adminIndex].password_hash = GetPasswordHash(password);
|
||||||
|
delete this.admins[adminIndex].password_temporary;
|
||||||
|
}
|
||||||
|
if (typeof citizenfxData !== 'undefined') {
|
||||||
|
if (!citizenfxData) {
|
||||||
|
delete this.admins[adminIndex].providers.citizenfx;
|
||||||
|
} else {
|
||||||
|
this.admins[adminIndex].providers.citizenfx = {
|
||||||
|
id: citizenfxData.id,
|
||||||
|
identifier: citizenfxData.identifier,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (typeof discordData !== 'undefined') {
|
||||||
|
if (!discordData) {
|
||||||
|
delete this.admins[adminIndex].providers.discord;
|
||||||
|
} else {
|
||||||
|
this.admins[adminIndex].providers.discord = {
|
||||||
|
id: discordData.id,
|
||||||
|
identifier: discordData.identifier,
|
||||||
|
data: {},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (typeof permissions !== 'undefined') this.admins[adminIndex].permissions = permissions;
|
||||||
|
|
||||||
|
//Prevent race condition, will allow the session to be updated before refreshing socket.io
|
||||||
|
//sessions which will cause reauth and closing of the temp password modal on first access
|
||||||
|
setTimeout(() => {
|
||||||
|
this.refreshOnlineAdmins().catch((e) => { });
|
||||||
|
}, 250);
|
||||||
|
|
||||||
|
//Saving admin file
|
||||||
|
try {
|
||||||
|
await this.writeAdminsFile();
|
||||||
|
return (password !== null) ? this.admins[adminIndex].password_hash : true;
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to save admins.json with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete admin and save to the admins file
|
||||||
|
* @param {string} name
|
||||||
|
*/
|
||||||
|
async deleteAdmin(name) {
|
||||||
|
if (this.admins == false) throw new Error('Admins not set');
|
||||||
|
|
||||||
|
//Delete admin
|
||||||
|
let username = name.toLowerCase();
|
||||||
|
let found = false;
|
||||||
|
this.admins = this.admins.filter((user) => {
|
||||||
|
if (username !== user.name.toLowerCase()) {
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
found = true;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (!found) throw new Error('Admin not found');
|
||||||
|
|
||||||
|
//Saving admin file
|
||||||
|
this.refreshOnlineAdmins().catch((e) => { });
|
||||||
|
try {
|
||||||
|
return await this.writeAdminsFile();
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to save admins.json with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Loads the admins.json file into the admins list
|
||||||
|
* NOTE: The verbosity here is driving me insane.
|
||||||
|
* But still seems not to be enough for people that don't read the README.
|
||||||
|
*/
|
||||||
|
async loadAdminsFile() {
|
||||||
|
let raw = null;
|
||||||
|
let jsonData = null;
|
||||||
|
let hasMigration = false;
|
||||||
|
|
||||||
|
const callError = (reason) => {
|
||||||
|
let details;
|
||||||
|
if (reason === 'cannot read file') {
|
||||||
|
details = ['This means the file doesn\'t exist or txAdmin doesn\'t have permission to read it.'];
|
||||||
|
} else {
|
||||||
|
details = [
|
||||||
|
'This likely means the file got somehow corrupted.',
|
||||||
|
'You can try restoring it or you can delete it and let txAdmin create a new one.',
|
||||||
|
];
|
||||||
|
}
|
||||||
|
fatalError.AdminStore(0, [
|
||||||
|
['Unable to load admins.json', reason],
|
||||||
|
...details,
|
||||||
|
['Admin File Path', this.adminsFile],
|
||||||
|
]);
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
raw = await fsp.readFile(this.adminsFile, 'utf8');
|
||||||
|
this.adminsFileHash = createHash('sha1').update(raw).digest('hex');
|
||||||
|
} catch (error) {
|
||||||
|
return callError('cannot read file');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!raw.length) {
|
||||||
|
return callError('empty file');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
jsonData = JSON.parse(raw);
|
||||||
|
} catch (error) {
|
||||||
|
return callError('json parse error');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Array.isArray(jsonData)) {
|
||||||
|
return callError('not an array');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!jsonData.length) {
|
||||||
|
return callError('no admins');
|
||||||
|
}
|
||||||
|
|
||||||
|
const structureIntegrityTest = jsonData.some((x) => {
|
||||||
|
if (typeof x.name !== 'string' || x.name.length < 3) return true;
|
||||||
|
if (typeof x.master !== 'boolean') return true;
|
||||||
|
if (typeof x.password_hash !== 'string' || !x.password_hash.startsWith('$2')) return true;
|
||||||
|
if (typeof x.providers !== 'object') return true;
|
||||||
|
const providersTest = Object.keys(x.providers).some((y) => {
|
||||||
|
if (!Object.keys(this.providers).includes(y)) return true;
|
||||||
|
if (typeof x.providers[y].id !== 'string' || x.providers[y].id.length < 3) return true;
|
||||||
|
if (typeof x.providers[y].data !== 'object') return true;
|
||||||
|
if (typeof x.providers[y].identifier === 'string') {
|
||||||
|
if (x.providers[y].identifier.length < 3) return true;
|
||||||
|
} else {
|
||||||
|
migrateProviderIdentifiers(y, x.providers[y]);
|
||||||
|
hasMigration = true;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (providersTest) return true;
|
||||||
|
if (!Array.isArray(x.permissions)) return true;
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
if (structureIntegrityTest) {
|
||||||
|
return callError('invalid data in the admins file');
|
||||||
|
}
|
||||||
|
|
||||||
|
const masters = jsonData.filter((x) => x.master);
|
||||||
|
if (masters.length !== 1) {
|
||||||
|
return callError('must have exactly 1 master account');
|
||||||
|
}
|
||||||
|
|
||||||
|
//Migrate admin stuff
|
||||||
|
jsonData.forEach((admin) => {
|
||||||
|
//Migration (tx v7.3.0)
|
||||||
|
if (admin.$schema === undefined) {
|
||||||
|
//adding schema version
|
||||||
|
admin.$schema = ADMIN_SCHEMA_VERSION;
|
||||||
|
hasMigration = true;
|
||||||
|
|
||||||
|
//separate DM and Announcement permissions
|
||||||
|
if (admin.permissions.includes('players.message')) {
|
||||||
|
hasMigration = true;
|
||||||
|
admin.permissions = admin.permissions.filter((perm) => perm !== 'players.message');
|
||||||
|
admin.permissions.push('players.direct_message');
|
||||||
|
admin.permissions.push('announcement');
|
||||||
|
}
|
||||||
|
|
||||||
|
//Adding the new permission, except if they have no permissions or all of them
|
||||||
|
if (admin.permissions.length && !admin.permissions.includes('all_permissions')) {
|
||||||
|
admin.permissions.push('server.log.view');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.admins = jsonData;
|
||||||
|
if (hasMigration) {
|
||||||
|
try {
|
||||||
|
await this.writeAdminsFile();
|
||||||
|
console.ok('The admins.json file was migrated to a new version.');
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to migrate admins.json with error: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Notify game server about admin changes
|
||||||
|
*/
|
||||||
|
async refreshOnlineAdmins() {
|
||||||
|
//Refresh auth of all admins connected to socket.io
|
||||||
|
txCore.webServer.webSocket.reCheckAdminAuths().catch((e) => { });
|
||||||
|
|
||||||
|
try {
|
||||||
|
//Getting all admin identifiers
|
||||||
|
const adminIDs = this.admins.reduce((ids, adm) => {
|
||||||
|
const adminIDs = Object.keys(adm.providers).map((pName) => adm.providers[pName].identifier);
|
||||||
|
return ids.concat(adminIDs);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
//Finding online admins
|
||||||
|
const playerList = txCore.fxPlayerlist.getPlayerList();
|
||||||
|
const onlineIDs = playerList.filter((p) => {
|
||||||
|
return p.ids.some((i) => adminIDs.includes(i));
|
||||||
|
}).map((p) => p.netid);
|
||||||
|
|
||||||
|
txCore.fxRunner.sendEvent('adminsUpdated', onlineIDs);
|
||||||
|
} catch (error) {
|
||||||
|
console.verbose.error('Failed to refreshOnlineAdmins() with error:');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a random token to be used as CSRF Token.
|
||||||
|
*/
|
||||||
|
genCsrfToken() {
|
||||||
|
return nanoid();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if there are admins configured or not.
|
||||||
|
* Optionally, prints the master PIN on the console.
|
||||||
|
*/
|
||||||
|
hasAdmins(printPin = false) {
|
||||||
|
if (Array.isArray(this.admins) && this.admins.length) {
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
if (printPin) {
|
||||||
|
console.warn('Use this PIN to add a new master account: ' + chalkInversePad(this.addMasterPin));
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the public name to display for that particular purpose
|
||||||
|
* TODO: maybe use enums for the purpose
|
||||||
|
*/
|
||||||
|
getAdminPublicName(name, purpose) {
|
||||||
|
if (!name || !purpose) throw new Error('Invalid parameters');
|
||||||
|
const replacer = txConfig.general.serverName ?? 'txAdmin';
|
||||||
|
|
||||||
|
if (purpose === 'punishment') {
|
||||||
|
return txConfig.gameFeatures.hideAdminInPunishments ? replacer : name;
|
||||||
|
} else if (purpose === 'message') {
|
||||||
|
return txConfig.gameFeatures.hideAdminInMessages ? replacer : name;
|
||||||
|
} else {
|
||||||
|
throw new Error(`Invalid purpose: ${purpose}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
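For reviewers: a minimal sketch of how the admin CRUD methods above chain together; the txCore.adminStore accessor and the example admin name are illustrative assumptions, and only permission strings that appear in this diff are used.

    // Illustrative only: assumes this module is exposed as txCore.adminStore
    const store = txCore.adminStore;
    await store.addAdmin('jane', undefined, undefined, 'temporary-pass-123', ['server.log.view', 'announcement']);
    // editAdmin() returns the new password hash when a password is passed, or true otherwise
    const hash = await store.editAdmin('jane', 'new-pass-456', undefined, undefined, undefined);
    await store.deleteAdmin('jane');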
107
core/modules/AdminStore/providers/CitizenFX.ts
Normal file
@@ -0,0 +1,107 @@
const modulename = 'AdminStore:CfxProvider';
import crypto from 'node:crypto';
import { BaseClient, Issuer, custom } from 'openid-client';
import { URL } from 'node:url';
import consoleFactory from '@lib/console';
import { z } from 'zod';
const console = consoleFactory(modulename);

const userInfoSchema = z.object({
    name: z.string().min(1),
    profile: z.string().min(1),
    nameid: z.string().min(1),
});
export type UserInfoType = z.infer<typeof userInfoSchema> & { picture: string | undefined };

const getOauthState = (stateKern: string) => {
    const stateSeed = `tx:cfxre:${stateKern}`;
    return crypto.createHash('SHA1').update(stateSeed).digest('hex');
};


export default class CfxProvider {
    private client?: BaseClient;

    constructor() {
        //NOTE: using static config due to performance concerns
        // const fivemIssuer = await Issuer.discover('https://idms.fivem.net/.well-known/openid-configuration');
        const fivemIssuer = new Issuer({ 'issuer': 'https://idms.fivem.net', 'jwks_uri': 'https://idms.fivem.net/.well-known/openid-configuration/jwks', 'authorization_endpoint': 'https://idms.fivem.net/connect/authorize', 'token_endpoint': 'https://idms.fivem.net/connect/token', 'userinfo_endpoint': 'https://idms.fivem.net/connect/userinfo', 'end_session_endpoint': 'https://idms.fivem.net/connect/endsession', 'check_session_iframe': 'https://idms.fivem.net/connect/checksession', 'revocation_endpoint': 'https://idms.fivem.net/connect/revocation', 'introspection_endpoint': 'https://idms.fivem.net/connect/introspect', 'device_authorization_endpoint': 'https://idms.fivem.net/connect/deviceauthorization', 'frontchannel_logout_supported': true, 'frontchannel_logout_session_supported': true, 'backchannel_logout_supported': true, 'backchannel_logout_session_supported': true, 'scopes_supported': ['openid', 'email', 'identify', 'offline_access'], 'claims_supported': ['sub', 'email', 'email_verified', 'nameid', 'name', 'picture', 'profile'], 'grant_types_supported': ['authorization_code', 'client_credentials', 'refresh_token', 'implicit', 'urn:ietf:params:oauth:grant-type:device_code'], 'response_types_supported': ['code', 'token', 'id_token', 'id_token token', 'code id_token', 'code token', 'code id_token token'], 'response_modes_supported': ['form_post', 'query', 'fragment'], 'token_endpoint_auth_methods_supported': ['client_secret_basic', 'client_secret_post'], 'subject_types_supported': ['public'], 'id_token_signing_alg_values_supported': ['RS256'], 'code_challenge_methods_supported': ['plain', 'S256'], 'request_parameter_supported': true });

        this.client = new fivemIssuer.Client({
            client_id: 'txadmin_test',
            client_secret: 'txadmin_test',
            response_types: ['openid'],
        });
        this.client[custom.clock_tolerance] = 2 * 60 * 60; //Two hours due to the DST change.
        custom.setHttpOptionsDefaults({
            timeout: 10000,
        });
    }


    /**
     * Returns the Provider Auth URL
     */
    getAuthURL(redirectUri: string, stateKern: string) {
        if (!this.client) throw new Error(`${modulename} is not ready`);

        const url = this.client.authorizationUrl({
            redirect_uri: redirectUri,
            state: getOauthState(stateKern),
            response_type: 'code',
            scope: 'openid identify',
        });
        if (typeof url !== 'string') throw new Error('url is not string');
        return url;
    }


    /**
     * Processes the callback and returns the tokenSet
     */
    async processCallback(sessionCallbackUri: string, sessionStateKern: string, callbackUri: string) {
        if (!this.client) throw new Error(`${modulename} is not ready`);

        //Process the request
        const parsedUri = new URL(callbackUri);
        const callback = parsedUri.searchParams;
        const callbackCode = callback.get('code');
        const callbackState = callback.get('state');
        if (typeof callbackCode !== 'string') throw new Error('code not present');
        if (typeof callbackState !== 'string') throw new Error('state not present');

        //Exchange code for token
        const tokenSet = await this.client.callback(
            sessionCallbackUri,
            {
                code: callbackCode,
                state: callbackState,
            },
            {
                state: getOauthState(sessionStateKern)
            }
        );
        if (typeof tokenSet !== 'object') throw new Error('tokenSet is not an object');
        if (typeof tokenSet.access_token == 'undefined') throw new Error('access_token not present');
        if (typeof tokenSet.expires_at == 'undefined') throw new Error('expires_at not present');
        return tokenSet;
    }


    /**
     * Gets user info via access token
     */
    async getUserInfo(accessToken: string): Promise<UserInfoType> {
        if (!this.client) throw new Error(`${modulename} is not ready`);

        //Perform introspection
        const userInfo = await this.client.userinfo(accessToken);
        const parsed = userInfoSchema.parse(userInfo);
        let picture: string | undefined;
        if (typeof userInfo.picture == 'string' && userInfo.picture.startsWith('https://')) {
            picture = userInfo.picture;
        }

        return { ...parsed, picture };
    }
};
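For context, the intended login flow through the new CfxProvider, as a hedged sketch; the redirect URI, session state value, and callback URL below are placeholders, not values from this commit.

    const provider = new CfxProvider();
    // 1) send the admin to the Cfx.re identity server
    const authUrl = provider.getAuthURL('https://txadmin.example.com/auth/cfxre/callback', sessionStateKern);
    // 2) on callback, exchange the code for a token set and fetch the user info
    const tokenSet = await provider.processCallback('https://txadmin.example.com/auth/cfxre/callback', sessionStateKern, fullCallbackUrl);
    const userInfo = await provider.getUserInfo(tokenSet.access_token as string);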
143
core/modules/CacheStore.ts
Normal file
@@ -0,0 +1,143 @@
const modulename = 'CacheStore';
import fsp from 'node:fs/promises';
import throttle from 'lodash-es/throttle.js';
import consoleFactory from '@lib/console';
import { txDevEnv, txEnv } from '@core/globalData';
import type { z, ZodSchema } from 'zod';
import type { UpdateConfigKeySet } from './ConfigStore/utils';
const console = consoleFactory(modulename);


//NOTE: due to limitations on how we compare value changes we can only accept these types
//This is to prevent saving the same value repeatedly (eg sv_maxClients every 3 seconds)
export const isBoolean = (val: any): val is boolean => typeof val === 'boolean';
export const isNull = (val: any): val is null => val === null;
export const isNumber = (val: any): val is number => typeof val === 'number';
export const isString = (val: any): val is string => typeof val === 'string';
type IsTypeFunctions = typeof isBoolean | typeof isNull | typeof isNumber | typeof isString;
type InferValType<T> = T extends (val: any) => val is infer R ? R : never;
type AcceptedCachedTypes = boolean | null | number | string;
type CacheMap = Map<string, AcceptedCachedTypes>;
const isAcceptedType = (val: any): val is AcceptedCachedTypes => {
    const valType = typeof val;
    return (val === null || valType === 'string' || valType === 'boolean' || valType === 'number');
}

const CACHE_FILE_NAME = 'cachedData.json';


/**
 * Dead-simple Map-based persistent cache, saved in txData/<profile>/cachedData.json.
 * This is not meant to store anything super important, the async save does not throw in case of failure,
 * and it will reset the cache in case it fails to load.
 */
export default class CacheStore {
    static readonly configKeysWatched = [
        'server.dataPath',
        'server.cfgPath',
    ];

    private cache: CacheMap = new Map();
    readonly cacheFilePath = `${txEnv.profilePath}/data/${CACHE_FILE_NAME}`;
    readonly throttledSaveCache = throttle(
        this.saveCache.bind(this),
        5000,
        { leading: false, trailing: true }
    );

    constructor() {
        this.loadCachedData();

        //TODO: handle shutdown? copied from Metrics.svRuntime
        // this.throttledSaveCache.cancel({ upcomingOnly: true });
        // this.saveCache();
    }

    //Resets the fxsRuntime cache on server reset
    public handleConfigUpdate(updatedConfigs: UpdateConfigKeySet) {
        this.delete('fxsRuntime:gameName'); //from logger
        this.delete('fxsRuntime:cfxId'); //from fd3
        this.delete('fxsRuntime:maxClients'); //from /dynamic.json

        //from /info.json
        this.delete('fxsRuntime:bannerConnecting');
        this.delete('fxsRuntime:bannerDetail');
        this.delete('fxsRuntime:iconFilename');
        this.delete('fxsRuntime:locale');
        this.delete('fxsRuntime:projectDesc');
        this.delete('fxsRuntime:projectName');
        this.delete('fxsRuntime:tags');
    }

    public has(key: string) {
        return this.cache.has(key);
    }

    public get(key: string) {
        return this.cache.get(key);
    }

    public getTyped<T extends IsTypeFunctions>(key: string, typeChecker: T) {
        const value = this.cache.get(key);
        if (!value) return undefined;
        if (typeChecker(value)) return value as InferValType<T>;
        return undefined;
    }

    public set(key: string, value: AcceptedCachedTypes) {
        if (!isAcceptedType(value)) throw new Error(`Value of type ${typeof value} is not acceptable.`);
        const currValue = this.cache.get(key);
        if (currValue !== value) {
            this.cache.set(key, value);
            this.throttledSaveCache();
        }
    }

    public upsert(key: string, value: AcceptedCachedTypes | undefined) {
        if (value === undefined) {
            this.delete(key);
        } else {
            this.set(key, value);
        }
    }

    public delete(key: string) {
        const deleteResult = this.cache.delete(key);
        this.throttledSaveCache();
        return deleteResult;
    }

    private async saveCache() {
        try {
            const serializer = (txDevEnv.ENABLED)
                ? (obj: any) => JSON.stringify(obj, null, 4)
                : JSON.stringify
            const toSave = serializer([...this.cache.entries()]);
            await fsp.writeFile(this.cacheFilePath, toSave);
            // console.verbose.debug(`Saved ${CACHE_FILE_NAME} with ${this.cache.size} entries.`);
        } catch (error) {
            console.error(`Unable to save ${CACHE_FILE_NAME} with error: ${(error as Error).message}`);
        }
    }

    private async loadCachedData() {
        try {
            const rawFileData = await fsp.readFile(this.cacheFilePath, 'utf8');
            const fileData = JSON.parse(rawFileData);
            if (!Array.isArray(fileData)) throw new Error('data_is_not_an_array');
            this.cache = new Map(fileData);
            console.verbose.ok(`Loaded ${CACHE_FILE_NAME} with ${this.cache.size} entries.`);
        } catch (error) {
            this.cache = new Map();
            if ((error as any)?.code === 'ENOENT') {
                console.verbose.debug(`${CACHE_FILE_NAME} not found, making a new one.`);
            } else if ((error as any)?.message === 'data_is_not_an_array') {
                console.warn(`Failed to load ${CACHE_FILE_NAME} due to invalid data.`);
                console.warn('Since this is not a critical file, it will be reset.');
            } else {
                console.warn(`Failed to load ${CACHE_FILE_NAME} with message: ${(error as any).message}`);
                console.warn('Since this is not a critical file, it will be reset.');
            }
        }
    }
};
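A short usage sketch of the new CacheStore; the txCore.cacheStore accessor is an assumption, while the keys reused here are the fxsRuntime:* ones already cleared by handleConfigUpdate().

    txCore.cacheStore.set('fxsRuntime:maxClients', 48);                         // persisted to cachedData.json via the throttled save
    const max = txCore.cacheStore.getTyped('fxsRuntime:maxClients', isNumber);  // number | undefined
    txCore.cacheStore.upsert('fxsRuntime:cfxId', undefined);                    // undefined deletes the key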
30
core/modules/ConfigStore/changelog.ts
Normal file
@@ -0,0 +1,30 @@
import { z } from "zod";

// Configs
const daysMs = 24 * 60 * 60 * 1000;
export const CCLOG_SIZE_LIMIT = 32;
export const CCLOG_RETENTION = 120 * daysMs;
export const CCLOG_VERSION = 1;

//Schemas
const ConfigChangelogEntrySchema = z.object({
    author: z.string().min(1),
    ts: z.number().int().nonnegative(),
    keys: z.string().array(),
});
export const ConfigChangelogFileSchema = z.object({
    version: z.literal(1),
    log: z.array(ConfigChangelogEntrySchema),
});
export type ConfigChangelogEntry = z.infer<typeof ConfigChangelogEntrySchema>;
export type ConfigChangelogFile = z.infer<typeof ConfigChangelogFileSchema>;

//Optimizer
export const truncateConfigChangelog = (log: ConfigChangelogEntry[]): ConfigChangelogEntry[] => {
    if (!log.length) return [];

    const now = Date.now();
    return log
        .filter(entry => (now - entry.ts) <= CCLOG_RETENTION)
        .slice(-CCLOG_SIZE_LIMIT);
}
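In practice truncateConfigChangelog() drops entries older than CCLOG_RETENTION (120 days) and then keeps only the last CCLOG_SIZE_LIMIT (32) of what remains; a tiny example with made-up entries and authors:

    const trimmed = truncateConfigChangelog([
        { author: 'admin1', ts: Date.now() - 200 * 24 * 60 * 60 * 1000, keys: ['server.cfgPath'] }, // dropped, older than the retention window
        { author: 'admin2', ts: Date.now(), keys: ['general.serverName'] },                         // kept
    ]);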
84
core/modules/ConfigStore/configMigrations.ts
Normal file
@@ -0,0 +1,84 @@
const modulename = 'ConfigStore:Migration';
import fs from 'node:fs';
import { ConfigFileData, PartialTxConfigs } from './schema/index';
import { txEnv } from '@core/globalData';
import { cloneDeep } from 'lodash-es';
import fatalError from '@lib/fatalError';
import { CONFIG_VERSION } from './index'; //FIXME: circular_dependency
import { migrateOldConfig } from './schema/oldConfig';
import consoleFactory from '@lib/console';
import { chalkInversePad } from '@lib/misc';
const console = consoleFactory(modulename);


/**
 * Saves a backup of the current config file
 */
const saveBackupFile = (version: number) => {
    const bkpFileName = `config.backup.v${version}.json`;
    fs.copyFileSync(
        `${txEnv.profilePath}/config.json`,
        `${txEnv.profilePath}/${bkpFileName}`,
    );
    console.log(`A backup of your config file was saved as: ${chalkInversePad(bkpFileName)}`);
}


/**
 * Migrates the old config file to the new schema
 */
export const migrateConfigFile = (fileData: any): ConfigFileData => {
    const oldConfig = cloneDeep(fileData);
    let newConfig: ConfigFileData | undefined;
    let oldVersion: number | undefined;

    //Sanity check
    if ('version' in fileData && typeof fileData.version !== 'number') {
        fatalError.ConfigStore(20, 'Your txAdmin config.json version is not a number!');
    }
    if (typeof fileData.version === 'number' && fileData.version > CONFIG_VERSION) {
        fatalError.ConfigStore(21, [
            `Your config.json file is on v${fileData.version}, and this txAdmin supports up to v${CONFIG_VERSION}.`,
            'This means you likely downgraded your txAdmin or FXServer.',
            'Please make sure your txAdmin is updated!',
            '',
            'If you want to downgrade FXServer (the "artifact") but keep txAdmin updated,',
            'you can move the updated "citizen/system_resources/monitor" folder',
            'to the older FXServer artifact, replacing the old files.',
            `Alternatively, you can restore the v${fileData.version} backup on the folder below.`,
            ['File Path', `${txEnv.profilePath}/config.json`],
        ]);
    }
    //The v1 is implicit, if explicit then it's a problem
    if (fileData.version === 1) {
        throw new Error(`File with explicit version '1' should not exist.`);
    }


    //Migrate from v1 (no version) to v2
    //- remapping the old config to the new structure
    //- applying some default changes and migrations
    //- extracting just the non-default values
    //- truncating the serverName to 18 chars
    //- generating new banlist template IDs
    if (!('version' in fileData) && 'global' in fileData && 'fxRunner' in fileData) {
        console.warn('Updating your txAdmin config.json from v1 to v2.');
        oldVersion ??= 1;

        //Final object
        const justNonDefaults = migrateOldConfig(oldConfig) as PartialTxConfigs;
        newConfig = {
            version: 2,
            ...justNonDefaults,
        }
    }


    //Final check
    if (oldVersion && newConfig && newConfig.version === CONFIG_VERSION) {
        saveBackupFile(oldVersion);
        return newConfig;
    } else {
        throw new Error(`Unknown file version: ${fileData.version}`);
    }
}
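A hypothetical v1 config file (recognized by the absence of a "version" field plus the old "global" and "fxRunner" sections) and how it is expected to flow through this migration; the inner keys below are made up for illustration:

    const oldFile = { global: { serverName: 'MyServer' }, fxRunner: { serverDataPath: '/srv/fxdata' } };
    const migrated = migrateConfigFile(oldFile);
    // expected: config.backup.v1.json is written next to config.json and { version: 2, ...nonDefaults } is returned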
274
core/modules/ConfigStore/configParser.test.ts
Normal file
@@ -0,0 +1,274 @@
import { suite, it, expect } from 'vitest';
import { parseConfigFileData, bootstrapConfigProcessor, getConfigDefaults, runtimeConfigProcessor } from './configParser';
import { z } from 'zod';
import { typeDefinedConfig, typeNullableConfig } from './schema/utils';
import ConfigStore from '.';
import { SYM_FIXER_DEFAULT, SYM_FIXER_FATAL, SYM_RESET_CONFIG } from '@lib/symbols';


suite('parseConfigFileData', () => {
    it('should correctly parse a valid config file', () => {
        const configFileData = {
            version: 1,
            example: {
                serverName: 'MyServer',
                enabled: true,
            },
            server: {
                dataPath: '/path/to/data',
            },
        };
        const result = parseConfigFileData(configFileData);
        expect(result).toEqual([
            { scope: 'example', key: 'serverName', value: 'MyServer' },
            { scope: 'example', key: 'enabled', value: true },
            { scope: 'server', key: 'dataPath', value: '/path/to/data' },
        ]);
    });

    it('should ignore the version key', () => {
        const configFileData = {
            version: 1,
            example: {
                serverName: 'MyServer',
            },
        };
        const result = parseConfigFileData(configFileData);
        expect(result).toEqual([
            { scope: 'example', key: 'serverName', value: 'MyServer' },
        ]);
    });

    it('should handle empty config file', () => {
        const configFileData = {};
        const result = parseConfigFileData(configFileData);
        expect(result).toEqual([]);
    });

    it('should handle undefined items', () => {
        const configFileData = {
            example: {
                aaa: 'whatever',
                bbb: undefined,
            },
        };
        const result = parseConfigFileData(configFileData);
        expect(result).toEqual([
            { scope: 'example', key: 'aaa', value: 'whatever' },
        ]);
    });

    it('should handle nested scopes', () => {
        const configFileData = {
            version: 1,
            example: {
                serverName: 'MyServer',
                enabled: true,
            },
            server: {
                dataPath: { path: '/path/to/data' },
            },
        };
        const result = parseConfigFileData(configFileData as any);
        expect(result).toEqual([
            { scope: 'example', key: 'serverName', value: 'MyServer' },
            { scope: 'example', key: 'enabled', value: true },
            { scope: 'server', key: 'dataPath', value: { path: '/path/to/data' } },
        ]);
    });
});


suite('bootstrapConfigProcessor', () => {
    const allConfigScopes = {
        example: {
            serverName: typeDefinedConfig({
                name: 'Server Name',
                default: 'change-me',
                validator: z.string().min(1).max(18),
                fixer: SYM_FIXER_DEFAULT,
            }),
            enabled: typeDefinedConfig({
                name: 'Enabled',
                default: true,
                validator: z.boolean(),
                fixer: SYM_FIXER_DEFAULT,
            }),
        },
        server: {
            dataPath: typeNullableConfig({
                name: 'Data Path',
                default: null,
                validator: z.string().min(1).nullable(),
                fixer: SYM_FIXER_FATAL,
            }),
        }
    };
    const defaultConfigs = getConfigDefaults(allConfigScopes);

    it('should process valid config items', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: 'MyServer' },
            { scope: 'server', key: 'dataPath', value: '/path/to/data' },
        ];
        const result = bootstrapConfigProcessor(parsedInput, allConfigScopes, defaultConfigs);
        expect(result.stored.example.serverName).toBe('MyServer');
        expect(result.stored.server.dataPath).toBe('/path/to/data');
        expect(result.active.example.serverName).toBe('MyServer');
        expect(result.active.server.dataPath).toBe('/path/to/data');
    });

    it('should handle unknown config items', () => {
        const parsedInput = [
            { scope: 'unknownScope', key: 'key1', value: 'value1' },
        ];
        const result = bootstrapConfigProcessor(parsedInput, allConfigScopes, defaultConfigs);
        expect(result.unknown.unknownScope.key1).toBe('value1');
    });

    it('should apply default for active but not stored', () => {
        const parsedInput = [
            { scope: 'unknownScope', key: 'key1', value: 'value1' },
        ];
        const result = bootstrapConfigProcessor(parsedInput, allConfigScopes, defaultConfigs);
        expect(result.stored?.example?.serverName).toBeUndefined();
        expect(result.active.example.serverName).toBe(defaultConfigs.example.serverName);
    });

    it('should apply default values for invalid config items', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: '' },
        ];
        const result = bootstrapConfigProcessor(parsedInput, allConfigScopes, defaultConfigs);
        expect(result.stored.example.serverName).toBe(defaultConfigs.example.serverName);
        expect(result.active.example.serverName).toBe(defaultConfigs.example.serverName);
    });

    it('should throw error for unfixable invalid config items', () => {
        const parsedInput = [
            { scope: 'server', key: 'dataPath', value: '' },
        ];
        expect(() => bootstrapConfigProcessor(parsedInput, allConfigScopes, defaultConfigs)).toThrow();
    });
});


suite('runtimeConfigProcessor', () => {
    const allConfigScopes = {
        example: {
            serverName: typeDefinedConfig({
                name: 'Server Name',
                default: 'change-me',
                validator: z.string().min(1).max(18),
                fixer: SYM_FIXER_DEFAULT,
            }),
            enabled: typeDefinedConfig({
                name: 'Enabled',
                default: true,
                validator: z.boolean(),
                fixer: SYM_FIXER_DEFAULT,
            }),
        },
        server: {
            dataPath: typeNullableConfig({
                name: 'Data Path',
                default: null,
                validator: z.string().min(1).nullable(),
                fixer: SYM_FIXER_FATAL,
            }),
            scheduledRestarts: typeDefinedConfig({
                name: 'Scheduled Restarts',
                default: [],
                validator: z.array(z.number().int()).default([]),
                fixer: SYM_FIXER_DEFAULT,
            }),
        },
    };
    const storedConfigs = {
        example: {
            serverName: 'StoredServer',
            enabled: false,
        },
        server: {
            dataPath: '/stored/path',
        }
    };
    const activeConfigs = {
        example: {
            serverName: 'ActiveServer',
            enabled: true,
        },
        server: {
            dataPath: '/active/path',
        }
    };

    it('should process valid config changes', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: 'NewServer' },
        ];
        const result = runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs);
        expect(result.stored.example.serverName).toBe('NewServer');
        expect(result.active.example.serverName).toBe('NewServer');
        expect(result.active.server.dataPath).toBe('/active/path');
        expect(result.storedKeysChanges.list).toEqual(['example.serverName']);
        expect(result.activeKeysChanges.list).toEqual(['example.serverName']);
    });

    it('should reset config to default', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: SYM_RESET_CONFIG },
        ];
        const result = runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs);
        expect(result.stored.example.serverName).toBeUndefined();
        expect(result.active.example.serverName).toBe('change-me');
        expect(result.storedKeysChanges.list).toEqual(['example.serverName']);
        expect(result.activeKeysChanges.list).toEqual(['example.serverName']);
    });

    it('should list the correct changes', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: 'StoredServer' },
            { scope: 'server', key: 'dataPath', value: '/active/path' },
        ];
        const result = runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs);
        expect(result.storedKeysChanges.list).toEqual(['server.dataPath']);
        expect(result.activeKeysChanges.list).toEqual(['example.serverName']);
    });

    it('should throw error for invalid config changes', () => {
        const parsedInput = [
            { scope: 'example', key: 'serverName', value: false },
        ];
        expect(() => runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs)).toThrow();
    });

    it('should handle unknown config items', () => {
        const parsedInput = [
            { scope: 'unknownScope', key: 'key1', value: 'value1' },
        ];
        expect(() => runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs)).toThrow();
    });

    it('should handle default equality checking', () => {
        const parsedInput = [
            { scope: 'server', key: 'scheduledRestarts', value: [] },
        ];
        const result = runtimeConfigProcessor(parsedInput, allConfigScopes, storedConfigs, activeConfigs);
        expect(result.stored.server.scheduledRestarts).toBeUndefined();
        expect(result.active.server.scheduledRestarts).toEqual([]);
    });
});


suite('schema sanity check', () => {
    it('should have the same keys in all schemas', () => {
        for (const [scopeName, scopeConfigs] of Object.entries(ConfigStore.Schema)) {
            for (const [configKey, configData] of Object.entries(scopeConfigs)) {
                expect(configData.default).toBeDefined();
                expect(configData.validator).toBeDefined();
                expect(configData.fixer).toBeDefined();
            }
        }
    });
});
218
core/modules/ConfigStore/configParser.ts
Normal file
@@ -0,0 +1,218 @@
const modulename = 'ConfigStore:Parser';
import consoleFactory from "@lib/console";
import { ConfigFileData, ConfigScaffold } from "./schema";
import { ConfigScope, ListOf, ScopeConfigItem } from "./schema/utils";
import { confx, UpdateConfigKeySet } from "./utils";
import { cloneDeep } from "lodash";
import { dequal } from 'dequal/lite';
import { fromZodError } from "zod-validation-error";
import { SYM_FIXER_DEFAULT, SYM_RESET_CONFIG } from "@lib/symbols";
const console = consoleFactory(modulename);


// Returns object with all the scopes empty
// export const getConfigScaffold = (allConfigScopes: ListOf<ConfigScope>) => {
//     const scaffold: ConfigScaffold = Object.fromEntries(
//         Object.entries(allConfigScopes).map(([k, s]) => [k, {} as any])
//     );
//     return scaffold;
// };


// Returns object scope containing all the valid config values
export const getScopeDefaults = <T>(scope: ConfigScope): T => {
    return Object.fromEntries(
        Object.entries(scope)
            .map(([key, schema]) => [key, schema.default])
    ) as T;
};


// Returns object with all the scopes and their default values
export const getConfigDefaults = (allConfigScopes: ListOf<ConfigScope>) => {
    const defaults: ConfigScaffold = Object.fromEntries(
        Object.entries(allConfigScopes).map(([k, s]) => [k, getScopeDefaults(s)])
    );
    return defaults;
}


/**
 * Convert a config structure into a list of parsed config items
 */
export const parseConfigFileData = (configFileData: ConfigScaffold | ConfigFileData) => {
    const parsedConfigItems: ParsedConfigItem[] = [];
    for (const [scope, values] of Object.entries(configFileData)) {
        if (scope === 'version') continue;
        for (const [key, value] of Object.entries(values)) {
            if (value === undefined) continue;
            parsedConfigItems.push({ scope, key, value });
        }
    }
    return parsedConfigItems;
}
type ParsedConfigItem = {
    scope: string;
    key: string;
    value: any;
}


/**
 * Attempt to fix the value - USED DURING BOOTSTRAP ONLY
 */
const attemptConfigFix = (scope: string, key: string, value: any, configSchema: ScopeConfigItem) => {
    const shouldBeArray = Array.isArray(configSchema.default);
    if (configSchema.fixer === SYM_FIXER_DEFAULT) {
        if (shouldBeArray) {
            console.error(`Invalid value for '${scope}.${key}', applying default value.`);
        } else {
            console.error(`Invalid value for '${scope}.${key}', applying default value:`, configSchema.default);
        }
        return {
            success: true,
            value: configSchema.default,
        };
    } else if (typeof configSchema.fixer === 'function') {
        try {
            const fixed = configSchema.fixer(value);
            if (shouldBeArray) {
                console.error(`Invalid value for '${scope}.${key}' has been automatically fixed.`);
            } else {
                console.error(`Invalid value for '${scope}.${key}', the value has been fixed to:`, fixed);
            }
            return {
                success: true,
                value: fixed,
            };
        } catch (error) {
            console.error(`Invalid value for '${scope}.${key}', fixer failed with reason: ${(error as any).message}`);
            return {
                success: false,
                error,
            };
        }
    }
    return {
        success: false,
    };
}


/**
 * Processes a parsed config based on a schema to get the stored and active values
 */
export const bootstrapConfigProcessor = (
    parsedInput: ParsedConfigItem[],
    allConfigScopes: ListOf<ConfigScope>,
    defaultConfigs: ConfigScaffold,
) => {
    //Scaffold the objects
    const unknown: ListOf<any> = {};
    const stored: ListOf<any> = {};
    const active = cloneDeep(defaultConfigs);

    //Process each item
    for (const { scope, key, value } of parsedInput) {
        //Check if the scope is known
        const configSchema = allConfigScopes?.[scope]?.[key];
        if (!configSchema) {
            console.warn(`Unknown config: ${scope}.${key}`);
            unknown[scope] ??= {};
            unknown[scope][key] = value;
            continue;
        }
        stored[scope] ??= {};

        //Validate the value
        const zResult = configSchema.validator.safeParse(value);
        if (zResult.success) {
            stored[scope][key] = zResult.data;
            active[scope][key] = zResult.data;
            continue;
        }

        //Attempt to fix the value
        const fResult = attemptConfigFix(scope, key, value, configSchema);
        if (fResult.success && fResult.value !== undefined) {
            stored[scope][key] = fResult.value;
            active[scope][key] = fResult.value;
        } else {
            console.warn(`Invalid value for '${scope}.${key}': ${(zResult.error as any).message}`);
            throw fResult?.error ?? fromZodError(zResult.error, { prefix: `${scope}.${key}` });
        }
    }

    return { unknown, stored, active };
}


/**
 * Diff the parsed input against the stored and active configs, and validate the changes
 */
export const runtimeConfigProcessor = (
    parsedInput: ParsedConfigItem[],
    allConfigScopes: ListOf<ConfigScope>,
    storedConfigs: ConfigScaffold,
    activeConfigs: ConfigScaffold,
) => {
    //Scaffold the objects
    const storedKeysChanges = new UpdateConfigKeySet();
    const activeKeysChanges = new UpdateConfigKeySet();
    const thisStoredCopy = cloneDeep(storedConfigs);
    const thisActiveCopy = cloneDeep(activeConfigs);

    //Process each item
    for (const { scope, key, value } of parsedInput) {
        //Check if the scope is known
        const configSchema = confx(allConfigScopes).get(scope, key) as ScopeConfigItem;
        if (!configSchema) throw new Error(`Unknown config: ${scope}.${key}`);

        //Restore or Validate the value
        let newValue: any;
        if (value === SYM_RESET_CONFIG) {
            newValue = configSchema.default;
        } else {
            const zResult = configSchema.validator.safeParse(value);
            if (!zResult.success) {
                throw fromZodError(zResult.error, { prefix: configSchema.name });
            }
            newValue = zResult.data;
        }

        //Check if the value is different from the stored value
        const defaultValue = configSchema.default;
        const storedValue = confx(thisStoredCopy).get(scope, key);
        const isNewValueDefault = dequal(newValue, defaultValue);
        if (storedValue === undefined) {
            if (!isNewValueDefault) {
                storedKeysChanges.add(scope, key);
                confx(thisStoredCopy).set(scope, key, newValue);
            }
        } else if (!dequal(newValue, storedValue)) {
            storedKeysChanges.add(scope, key);
            if (!isNewValueDefault) {
                //NOTE: if default, it's being removed below already
                confx(thisStoredCopy).set(scope, key, newValue);
            }
        }

        //If the value is the default, remove
        if (isNewValueDefault) {
            confx(thisStoredCopy).unset(scope, key);
        }

        //Check if the value is different from the active value
        if (!dequal(newValue, confx(thisActiveCopy).get(scope, key))) {
            activeKeysChanges.add(scope, key);
            confx(thisActiveCopy).set(scope, key, newValue);
        }
    }

    return {
        storedKeysChanges,
        activeKeysChanges,
        stored: thisStoredCopy,
        active: thisActiveCopy,
    }
}
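The split between the two processors above: bootstrapConfigProcessor() is lenient at boot time (it applies fixers or falls back to defaults where the schema allows it), while runtimeConfigProcessor() throws on any invalid value, since it handles live saves. A condensed sketch, assuming the ConfigSchemas_v2 object from the schema folder and a hypothetical userInput object:

    const items = parseConfigFileData(fileData);
    const boot = bootstrapConfigProcessor(items, ConfigSchemas_v2, getConfigDefaults(ConfigSchemas_v2));
    // later, when an admin saves a settings page:
    const diff = runtimeConfigProcessor(parseConfigFileData(userInput), ConfigSchemas_v2, boot.stored, boot.active);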
273
core/modules/ConfigStore/index.ts
Normal file
273
core/modules/ConfigStore/index.ts
Normal file
|
@ -0,0 +1,273 @@
|
||||||
|
const modulename = 'ConfigStore';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import fatalError from '@lib/fatalError';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import { ConfigFileData, ConfigSchemas_v2, PartialTxConfigs, PartialTxConfigsToSave, TxConfigs } from './schema';
|
||||||
|
import { migrateConfigFile } from './configMigrations';
|
||||||
|
import { deepFreeze } from '@lib/misc';
|
||||||
|
import { parseConfigFileData, bootstrapConfigProcessor, runtimeConfigProcessor, getConfigDefaults } from './configParser';
|
||||||
|
import { ListOf } from './schema/utils';
|
||||||
|
import { CCLOG_VERSION, ConfigChangelogEntry, ConfigChangelogFileSchema, truncateConfigChangelog } from './changelog';
|
||||||
|
import { UpdateConfigKeySet } from './utils';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Types
|
||||||
|
export type RefreshConfigKey = { full: string, scope: string, key: string };
|
||||||
|
export type RefreshConfigFunc = (updatedConfigs: UpdateConfigKeySet) => void;
|
||||||
|
type RefreshConfigRegistry = {
|
||||||
|
moduleName: string,
|
||||||
|
callback: RefreshConfigFunc,
|
||||||
|
rules: string[],
|
||||||
|
}[];
|
||||||
|
|
||||||
|
//Consts
|
||||||
|
export const CONFIG_VERSION = 2;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module to handle the configuration file, validation, defaults and retrieval.
|
||||||
|
* The setup is fully sync, as nothing else can start without the config.
|
||||||
|
*/
|
||||||
|
export default class ConfigStore /*does not extend TxModuleBase*/ {
|
||||||
|
//Statics
|
||||||
|
public static readonly Schema = ConfigSchemas_v2;
|
||||||
|
public static readonly SchemaDefaults = getConfigDefaults(ConfigSchemas_v2) as TxConfigs;
|
||||||
|
public static getEmptyConfigFile() {
|
||||||
|
return { version: CONFIG_VERSION };
|
||||||
|
}
|
||||||
|
|
||||||
|
//Instance
|
||||||
|
private readonly changelogFilePath = `${txEnv.profilePath}/data/configChangelog.json`;
|
||||||
|
private readonly configFilePath = `${txEnv.profilePath}/config.json`;
|
||||||
|
private readonly moduleRefreshCallbacks: RefreshConfigRegistry = []; //Modules are in boot order
|
||||||
|
private unknownConfigs: ListOf<any>; //keeping so we can save it back
|
||||||
|
private storedConfigs: PartialTxConfigs;
|
||||||
|
private activeConfigs: TxConfigs;
|
||||||
|
private changelog: ConfigChangelogEntry[] = [];
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
//Load raw file
|
||||||
|
//TODO: create a lock file to prevent starting twice the same config file?
|
||||||
|
let fileRaw;
|
||||||
|
try {
|
||||||
|
fileRaw = fs.readFileSync(this.configFilePath, 'utf8');
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.ConfigStore(10, [
|
||||||
|
'Unable to read configuration file (filesystem error).',
|
||||||
|
['Path', this.configFilePath],
|
||||||
|
['Error', (error as Error).message],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Json parse
|
||||||
|
let fileData: ConfigFileData;
|
||||||
|
try {
|
||||||
|
fileData = JSON.parse(fileRaw);
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.ConfigStore(11, [
|
||||||
|
'Unable to parse configuration file (invalid JSON).',
|
||||||
|
'This means the file somehow got corrupted and is not a valid anymore.',
|
||||||
|
['Path', this.configFilePath],
|
||||||
|
['Error', (error as Error).message],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Check version & migrate if needed
|
||||||
|
let fileMigrated = false;
|
||||||
|
if (fileData?.version !== CONFIG_VERSION) {
|
||||||
|
try {
|
||||||
|
fileData = migrateConfigFile(fileData);
|
||||||
|
fileMigrated = true;
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.ConfigStore(25, [
|
||||||
|
'Unable to migrate configuration file.',
|
||||||
|
['Path', this.configFilePath],
|
||||||
|
['File version', String(fileData?.version)],
|
||||||
|
['Supported version', String(CONFIG_VERSION)],
|
||||||
|
], error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Parse & validate
|
||||||
|
try {
|
||||||
|
const configItems = parseConfigFileData(fileData);
|
||||||
|
if (!configItems.length) console.verbose.debug('Empty configuration file.');
|
||||||
|
const config = bootstrapConfigProcessor(configItems, ConfigSchemas_v2, ConfigStore.SchemaDefaults);
|
||||||
|
this.unknownConfigs = config.unknown;
|
||||||
|
this.storedConfigs = config.stored as PartialTxConfigs;
|
||||||
|
this.activeConfigs = config.active as TxConfigs;
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.ConfigStore(14, [
|
||||||
|
'Unable to process configuration file.',
|
||||||
|
], error);
|
||||||
|
}
|
||||||
|
|
||||||
|
//If migrated, write the new file
|
||||||
|
if (fileMigrated) {
|
||||||
|
try {
|
||||||
|
this.saveFile(this.storedConfigs);
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.ConfigStore(26, [
|
||||||
|
'Unable to save the updated config.json file.',
|
||||||
|
['Path', this.configFilePath],
|
||||||
|
], error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Reflect to global
|
||||||
|
this.updatePublicConfig();
|
||||||
|
|
||||||
|
//Load changelog
|
||||||
|
setImmediate(() => {
|
||||||
|
this.loadChangelog();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mirrors the #config object to the public deep frozen config object
|
||||||
|
*/
|
||||||
|
private updatePublicConfig() {
|
||||||
|
(globalThis as any).txConfig = deepFreeze(cloneDeep(this.activeConfigs));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the stored config object, with only the known keys
|
||||||
|
*/
|
||||||
|
public getStoredConfig() {
|
||||||
|
return cloneDeep(this.storedConfigs);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the changelog
|
||||||
|
* TODO: add filters to be used in pages like ban templates
|
||||||
|
* TODO: increase CCLOG_SIZE_LIMIT to a few hundred
|
||||||
|
* TODO: increase CCLOG_RETENTION to a year, or deprecate it in favor of a full log
|
||||||
|
*/
|
||||||
|
public getChangelog() {
|
||||||
|
return cloneDeep(this.changelog);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Applies an input config object to the stored and active configs, then saves it to the file
|
||||||
|
*/
|
||||||
|
public saveConfigs(inputConfig: PartialTxConfigsToSave, author: string | null) {
|
||||||
|
//Process each item
|
||||||
|
const parsedInput = parseConfigFileData(inputConfig);
|
||||||
|
const processed = runtimeConfigProcessor(
|
||||||
|
parsedInput,
|
||||||
|
ConfigSchemas_v2,
|
||||||
|
this.storedConfigs,
|
||||||
|
this.activeConfigs,
|
||||||
|
);
|
||||||
|
|
||||||
|
//If nothing thrown, update the state, file, and
|
||||||
|
this.saveFile(processed.stored);
|
||||||
|
this.storedConfigs = processed.stored as PartialTxConfigs;
|
||||||
|
this.activeConfigs = processed.active as TxConfigs;
|
||||||
|
this.logChanges(author ?? 'txAdmin', processed.storedKeysChanges.list);
|
||||||
|
this.updatePublicConfig(); //before callbacks
|
||||||
|
this.processCallbacks(processed.activeKeysChanges);
|
||||||
|
return processed.storedKeysChanges;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Saves the config.json file, maintaining the unknown configs
|
||||||
|
*/
|
||||||
|
private saveFile(toStore: PartialTxConfigs) {
|
||||||
|
const outFile = {
|
||||||
|
version: CONFIG_VERSION,
|
||||||
|
...this.unknownConfigs,
|
||||||
|
...toStore,
|
||||||
|
};
|
||||||
|
fs.writeFileSync(this.configFilePath, JSON.stringify(outFile, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logs changes to logger and changelog file
|
||||||
|
* FIXME: ignore banlist.templates? or join consequent changes?
|
||||||
|
*/
|
||||||
|
private logChanges(author: string, keysUpdated: string[]) {
|
||||||
|
txCore.logger.admin.write(author, `Config changes: ${keysUpdated.join(', ')}`);
|
||||||
|
this.changelog.push({
|
||||||
|
author,
|
||||||
|
ts: Date.now(),
|
||||||
|
keys: keysUpdated,
|
||||||
|
});
|
||||||
|
this.changelog = truncateConfigChangelog(this.changelog);
|
||||||
|
setImmediate(async () => {
|
||||||
|
try {
|
||||||
|
const json = JSON.stringify({
|
||||||
|
version: CCLOG_VERSION,
|
||||||
|
log: this.changelog,
|
||||||
|
});
|
||||||
|
await fsp.writeFile(this.changelogFilePath, json);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to save ${this.changelogFilePath} with message: ${(error as any).message}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Loads the changelog file
|
||||||
|
*/
|
||||||
|
private async loadChangelog() {
|
||||||
|
try {
|
||||||
|
const rawFileData = await fsp.readFile(this.changelogFilePath, 'utf8');
|
||||||
|
const fileData = JSON.parse(rawFileData);
|
||||||
|
if (fileData?.version !== CCLOG_VERSION) throw new Error(`invalid_version`);
|
||||||
|
const changelogData = ConfigChangelogFileSchema.parse(fileData);
|
||||||
|
this.changelog = truncateConfigChangelog(changelogData.log);
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as any)?.code === 'ENOENT') {
|
||||||
|
console.verbose.debug(`${this.changelogFilePath} not found, making a new one.`);
|
||||||
|
} else if ((error as any)?.message === 'invalid_version') {
|
||||||
|
console.warn(`Failed to load ${this.changelogFilePath} due to invalid version.`);
|
||||||
|
console.warn('Since this is not a critical file, it will be reset.');
|
||||||
|
} else {
|
||||||
|
console.warn(`Failed to load ${this.changelogFilePath} with message: ${(error as any).message}`);
|
||||||
|
console.warn('Since this is not a critical file, it will be reset.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process the callbacks for the modules that registered for config changes
|
||||||
|
*/
|
||||||
|
private processCallbacks(updatedConfigs: UpdateConfigKeySet) {
|
||||||
|
for (const txModule of this.moduleRefreshCallbacks) {
|
||||||
|
if (!updatedConfigs.hasMatch(txModule.rules)) continue;
|
||||||
|
setImmediate(() => {
|
||||||
|
try {
|
||||||
|
console.verbose.debug(`Triggering update callback for module ${txModule.moduleName}`);
|
||||||
|
txModule.callback(updatedConfigs);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error in config update callback for module ${txModule.moduleName}: ${(error as any).message}`);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a callback to be called when the config is updated
|
||||||
|
*/
|
||||||
|
public registerUpdateCallback(moduleName: string, rules: string[], callback: RefreshConfigFunc) {
|
||||||
|
this.moduleRefreshCallbacks.push({
|
||||||
|
moduleName,
|
||||||
|
callback,
|
||||||
|
rules,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
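
For context, a minimal sketch of how another txAdmin module could hook into the callback contract defined by this class. The txCore.configStore accessor, the module name, and the rule string are assumptions made for illustration only; registerUpdateCallback() and hasMatch() are the methods shown above.

//Hypothetical consumer module (illustrative, not part of this commit)
const scheduleCache = {
    current: txConfig.restarter.schedule as string[],
};

//Rules accept exact 'scope.key' strings or wildcards such as 'restarter.*' / '*.schedule'
txCore.configStore.registerUpdateCallback(
    'exampleScheduler',
    ['restarter.schedule'],
    (updated) => {
        //updated is the UpdateConfigKeySet passed in by processCallbacks()
        if (updated.hasMatch('restarter.schedule')) {
            scheduleCache.current = txConfig.restarter.schedule;
        }
    },
);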
|
92
core/modules/ConfigStore/schema/banlist.ts
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
|
||||||
|
import { alphanumeric } from 'nanoid-dictionary';
|
||||||
|
import { customAlphabet } from "nanoid";
|
||||||
|
import { SYM_FIXER_DEFAULT, SYM_FIXER_FATAL } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Ban templates
|
||||||
|
*/
|
||||||
|
export const BAN_TEMPLATE_ID_LENGTH = 21;
|
||||||
|
|
||||||
|
export const genBanTemplateId = customAlphabet(alphanumeric, BAN_TEMPLATE_ID_LENGTH);
|
||||||
|
|
||||||
|
export const BanDurationTypeSchema = z.union([
|
||||||
|
z.literal('permanent'),
|
||||||
|
z.object({
|
||||||
|
value: z.number().positive(),
|
||||||
|
unit: z.enum(['hours', 'days', 'weeks', 'months']),
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
export type BanDurationType = z.infer<typeof BanDurationTypeSchema>;
|
||||||
|
|
||||||
|
export const BanTemplatesDataSchema = z.object({
|
||||||
|
id: z.string().length(BAN_TEMPLATE_ID_LENGTH), //nanoid fixed at 21 chars
|
||||||
|
reason: z.string().min(3).max(2048), //should be way less, but just in case
|
||||||
|
duration: BanDurationTypeSchema,
|
||||||
|
});
|
||||||
|
export type BanTemplatesDataType = z.infer<typeof BanTemplatesDataSchema>;
|
||||||
|
|
||||||
|
//Ensure all templates have unique ids
|
||||||
|
export const polishBanTemplatesArray = (input: BanTemplatesDataType[]) => {
|
||||||
|
const ids = new Set();
|
||||||
|
const unique: BanTemplatesDataType[] = [];
|
||||||
|
for (const template of input) {
|
||||||
|
if (ids.has(template.id)) {
|
||||||
|
unique.push({
|
||||||
|
...template,
|
||||||
|
id: genBanTemplateId(),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
unique.push(template);
|
||||||
|
}
|
||||||
|
ids.add(template.id);
|
||||||
|
}
|
||||||
|
return unique;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Default
|
||||||
|
*/
|
||||||
|
const enabled = typeDefinedConfig({
|
||||||
|
name: 'Ban Checking Enabled',
|
||||||
|
default: true,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const rejectionMessage = typeDefinedConfig({
|
||||||
|
name: 'Ban Rejection Message',
|
||||||
|
default: 'You can join http://discord.gg/example to appeal this ban.',
|
||||||
|
validator: z.string(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const requiredHwidMatches = typeDefinedConfig({
|
||||||
|
name: 'Required Ban HWID Matches',
|
||||||
|
default: 1,
|
||||||
|
validator: z.number().int().min(0),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const templates = typeDefinedConfig({
|
||||||
|
name: 'Ban Templates',
|
||||||
|
default: [],
|
||||||
|
validator: BanTemplatesDataSchema.array().transform(polishBanTemplatesArray),
|
||||||
|
//NOTE: if someone messed with their templates and broke it, we don't want to wipe it all out
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
enabled,
|
||||||
|
rejectionMessage,
|
||||||
|
requiredHwidMatches,
|
||||||
|
templates,
|
||||||
|
} as const;
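
A brief usage sketch of the helpers above (illustrative only, the sample data is made up): BanTemplatesDataSchema validates a single template, and polishBanTemplatesArray re-issues a fresh nanoid whenever it finds a duplicated id.

const sampleTemplates: BanTemplatesDataType[] = [
    { id: genBanTemplateId(), reason: 'Cheating', duration: 'permanent' },
    { id: genBanTemplateId(), reason: 'VDM', duration: { value: 2, unit: 'days' } },
];
//Throws a ZodError if any template has the wrong shape
const parsed = BanTemplatesDataSchema.array().parse(sampleTemplates);
//Appending a duplicate: the third entry keeps its data but receives a brand new id
const deduped = polishBanTemplatesArray([...parsed, parsed[0]]);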
|
69
core/modules/ConfigStore/schema/discordBot.ts
Normal file
|
@ -0,0 +1,69 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { discordSnowflakeSchema, typeDefinedConfig, typeNullableConfig } from "./utils";
|
||||||
|
import { defaultEmbedConfigJson, defaultEmbedJson } from "@modules/DiscordBot/defaultJsons";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
const enabled = typeDefinedConfig({
|
||||||
|
name: 'Bot Enabled',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const token = typeNullableConfig({
|
||||||
|
name: 'Bot Token',
|
||||||
|
default: null,
|
||||||
|
validator: z.string().min(1).nullable(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const guild = typeNullableConfig({
|
||||||
|
name: 'Server ID',
|
||||||
|
default: null,
|
||||||
|
validator: discordSnowflakeSchema.nullable(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const warningsChannel = typeNullableConfig({
|
||||||
|
name: 'Warnings Channel ID',
|
||||||
|
default: null,
|
||||||
|
validator: discordSnowflakeSchema.nullable(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
//We are not validating the JSON, only that it is a string
|
||||||
|
export const attemptMinifyJsonString = (input: string) => {
|
||||||
|
try {
|
||||||
|
return JSON.stringify(JSON.parse(input));
|
||||||
|
} catch (error) {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const embedJson = typeDefinedConfig({
|
||||||
|
name: 'Status Embed JSON',
|
||||||
|
default: defaultEmbedJson,
|
||||||
|
validator: z.string().min(1).transform(attemptMinifyJsonString),
|
||||||
|
//NOTE: no true validation in here, done in the module only
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const embedConfigJson = typeDefinedConfig({
|
||||||
|
name: 'Status Config JSON',
|
||||||
|
default: defaultEmbedConfigJson,
|
||||||
|
validator: z.string().min(1).transform(attemptMinifyJsonString),
|
||||||
|
//NOTE: no true validation in here, done in the module only
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
enabled,
|
||||||
|
token,
|
||||||
|
guild,
|
||||||
|
warningsChannel,
|
||||||
|
embedJson,
|
||||||
|
embedConfigJson,
|
||||||
|
} as const;
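
Illustrative behavior of attemptMinifyJsonString (not part of the diff): it never throws, so malformed input is stored verbatim and only rejected later by the DiscordBot module itself.

attemptMinifyJsonString('{ "title":  "Status" }'); //-> '{"title":"Status"}'
attemptMinifyJsonString('not json at all');        //-> 'not json at all' (returned unchanged)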
|
88
core/modules/ConfigStore/schema/gameFeatures.ts
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
const menuEnabled = typeDefinedConfig({
|
||||||
|
name: 'Menu Enabled',
|
||||||
|
default: true,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const menuAlignRight = typeDefinedConfig({
|
||||||
|
name: 'Align Menu Right',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const menuPageKey = typeDefinedConfig({
|
||||||
|
name: 'Menu Page Switch Key',
|
||||||
|
default: 'Tab',
|
||||||
|
validator: z.string().min(1),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const playerModePtfx = typeDefinedConfig({
|
||||||
|
name: 'Player Mode Change Effect',
|
||||||
|
default: true,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideAdminInPunishments = typeDefinedConfig({
|
||||||
|
name: 'Hide Admin Name In Punishments',
|
||||||
|
default: true,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideAdminInMessages = typeDefinedConfig({
|
||||||
|
name: 'Hide Admin Name In Messages',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideDefaultAnnouncement = typeDefinedConfig({
|
||||||
|
name: 'Hide Announcement Notifications',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideDefaultDirectMessage = typeDefinedConfig({
|
||||||
|
name: 'Hide Direct Message Notification',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideDefaultWarning = typeDefinedConfig({
|
||||||
|
name: 'Hide Warning Notification',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const hideDefaultScheduledRestartWarning = typeDefinedConfig({
|
||||||
|
name: 'Hide Scheduled Restart Warnings',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
menuEnabled,
|
||||||
|
menuAlignRight,
|
||||||
|
menuPageKey,
|
||||||
|
playerModePtfx,
|
||||||
|
hideAdminInPunishments,
|
||||||
|
hideAdminInMessages,
|
||||||
|
hideDefaultAnnouncement,
|
||||||
|
hideDefaultDirectMessage,
|
||||||
|
hideDefaultWarning,
|
||||||
|
hideDefaultScheduledRestartWarning,
|
||||||
|
} as const;
|
28
core/modules/ConfigStore/schema/general.ts
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
import localeMap from "@shared/localeMap";
|
||||||
|
|
||||||
|
|
||||||
|
const serverName = typeDefinedConfig({
|
||||||
|
name: 'Server Name',
|
||||||
|
default: 'change-me',
|
||||||
|
validator: z.string().min(1).max(18),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const language = typeDefinedConfig({
|
||||||
|
name: 'Language',
|
||||||
|
default: 'en',
|
||||||
|
validator: z.string().min(2).refine(
|
||||||
|
(value) => (value === 'custom' || localeMap[value] !== undefined),
|
||||||
|
(value) => ({ message: `Invalid language code \`${value ?? '??'}\`.` }),
|
||||||
|
),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
serverName,
|
||||||
|
language,
|
||||||
|
} as const;
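
A quick sketch of how the language refinement behaves, assuming this scope's default export is imported as general: any key present in @shared/localeMap passes, the literal 'custom' always passes, and everything else fails with the message built above.

general.language.validator.parse('custom');  //ok, always accepted
general.language.validator.parse('en');      //ok, as long as 'en' is a key of localeMap
//general.language.validator.parse('zz');    //throws: Invalid language code `zz`.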
|
55
core/modules/ConfigStore/schema/index.ts
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { ConfigScope, ListOf } from "./utils";
|
||||||
|
import general from "./general";
|
||||||
|
import server from "./server";
|
||||||
|
import restarter from "./restarter";
|
||||||
|
import banlist from "./banlist";
|
||||||
|
import whitelist from "./whitelist";
|
||||||
|
import discordBot from "./discordBot";
|
||||||
|
import gameFeatures from "./gameFeatures";
|
||||||
|
import webServer from "./webServer";
|
||||||
|
import logger from "./logger";
|
||||||
|
import { SYM_RESET_CONFIG } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
//Type inference utils
|
||||||
|
type InferConfigScopes<S extends ConfigScope> = InferConfigValues<S>;
|
||||||
|
type InferConfigValues<S extends ConfigScope> = {
|
||||||
|
[K in keyof S]: S[K]['default'] | z.infer<S[K]['validator']>;
|
||||||
|
}
|
||||||
|
type WritableValues<T> = {
|
||||||
|
-readonly [P in keyof T]: T[P]
|
||||||
|
};
|
||||||
|
type InferConfigScopesToSave<S extends ConfigScope> = InferConfigValuesToSave<WritableValues<S>>;
|
||||||
|
type InferConfigValuesToSave<S extends ConfigScope> = WritableValues<{
|
||||||
|
[K in keyof S]: S[K]['default'] | z.infer<S[K]['validator']> | typeof SYM_RESET_CONFIG;
|
||||||
|
}>;
|
||||||
|
|
||||||
|
//Exporting the schemas
|
||||||
|
export const ConfigSchemas_v2 = {
|
||||||
|
general,
|
||||||
|
server,
|
||||||
|
restarter,
|
||||||
|
banlist,
|
||||||
|
whitelist,
|
||||||
|
discordBot,
|
||||||
|
gameFeatures,
|
||||||
|
webServer,
|
||||||
|
logger,
|
||||||
|
} satisfies ListOf<ConfigScope>;
|
||||||
|
|
||||||
|
//Exporting the types
|
||||||
|
export type TxConfigScopes = keyof typeof ConfigSchemas_v2;
|
||||||
|
export type TxConfigs = {
|
||||||
|
[K in TxConfigScopes]: InferConfigScopes<typeof ConfigSchemas_v2[K]>
|
||||||
|
};
|
||||||
|
export type PartialTxConfigs = Partial<{
|
||||||
|
[K in TxConfigScopes]: Partial<InferConfigScopes<typeof ConfigSchemas_v2[K]>>
|
||||||
|
}>;
|
||||||
|
export type PartialTxConfigsToSave = Partial<{
|
||||||
|
[K in TxConfigScopes]: Partial<InferConfigScopesToSave<typeof ConfigSchemas_v2[K]>>
|
||||||
|
}>;
|
||||||
|
export type ConfigFileData = PartialTxConfigs & { version: number };
|
||||||
|
|
||||||
|
//Allow unknown scopes/keys
|
||||||
|
export type ConfigScaffold = ListOf<ListOf<any>>;
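
A type-level sketch of what the inference helpers above produce (illustrative only): each item contributes `default | z.infer<validator>`, so a scope such as general collapses to plain strings, while the *ToSave variants additionally admit the reset symbol.

type GeneralScope = TxConfigs['general']; //-> { serverName: string; language: string }
const example: GeneralScope = { serverName: 'My Server', language: 'en' };

//When saving, any key may be replaced by SYM_RESET_CONFIG to request a reset to its default
const toSave: PartialTxConfigsToSave = {
    general: { serverName: SYM_RESET_CONFIG },
};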
|
41
core/modules/ConfigStore/schema/logger.ts
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_FATAL } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
The logger module passes the options to the library which is responsible for evaluating them.
|
||||||
|
There have never been strict definitions about those settings in txAdmin.
|
||||||
|
The only exception is setting it to false for disabling the specific logger.
|
||||||
|
Ref: https://github.com/iccicci/rotating-file-stream#options
|
||||||
|
*/
|
||||||
|
const rfsOptionValidator = z.union([
|
||||||
|
z.literal(false),
|
||||||
|
z.object({}).passthrough(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
|
||||||
|
//NOTE: don't fallback to default because storage issues might crash the server
|
||||||
|
export default {
|
||||||
|
//admin & some system logs
|
||||||
|
admin: typeDefinedConfig({
|
||||||
|
name: 'Admin Logs',
|
||||||
|
default: {},
|
||||||
|
validator: rfsOptionValidator,
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
}),
|
||||||
|
//fxserver output
|
||||||
|
fxserver: typeDefinedConfig({
|
||||||
|
name: 'FXServer Logs',
|
||||||
|
default: {},
|
||||||
|
validator: rfsOptionValidator,
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
}),
|
||||||
|
//in-game logs
|
||||||
|
server: typeDefinedConfig({
|
||||||
|
name: 'Server Logs',
|
||||||
|
default: {},
|
||||||
|
validator: rfsOptionValidator,
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
}),
|
||||||
|
} as const;
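
Illustrative only: each entry is either the literal false (disable that logger) or an options object handed to rotating-file-stream as-is; the object's shape is deliberately not validated here.

rfsOptionValidator.parse(false);                           //ok, disables that logger
rfsOptionValidator.parse({ interval: '1d', maxFiles: 7 }); //ok, any object passes through
//rfsOptionValidator.parse('1d');                          //throws, plain strings are rejected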
|
168
core/modules/ConfigStore/schema/oldConfig.ts
Normal file
|
@ -0,0 +1,168 @@
|
||||||
|
import { dequal } from 'dequal/lite';
|
||||||
|
import parseArgsStringToArgv from "string-argv";
|
||||||
|
import { ConfigSchemas_v2 } from "./index";
|
||||||
|
import { ListOf } from "./utils";
|
||||||
|
import { genBanTemplateId } from "./banlist";
|
||||||
|
import { getConfigDefaults } from "../configParser";
|
||||||
|
import { confx } from "../utils";
|
||||||
|
|
||||||
|
const restructureOldConfig = (old: any) => {
|
||||||
|
//Apply the legacy migrations (mutation)
|
||||||
|
old.playerDatabase ??= old.playerDatabase ?? old.playerController ?? {};
|
||||||
|
if (old.global.language === 'pt_PT' || old.global.language === 'pt_BR') {
|
||||||
|
old.global.language = 'pt';
|
||||||
|
}
|
||||||
|
if (typeof old.monitor.resourceStartingTolerance === 'string') {
|
||||||
|
old.monitor.resourceStartingTolerance = parseInt(old.monitor.resourceStartingTolerance);
|
||||||
|
if (isNaN(old.monitor.resourceStartingTolerance)) {
|
||||||
|
old.monitor.resourceStartingTolerance = 120;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Remap the old config to the new structure
|
||||||
|
const remapped: TxConfigs = {
|
||||||
|
general: { //NOTE:renamed
|
||||||
|
serverName: old?.global?.serverName,
|
||||||
|
language: old?.global?.language,
|
||||||
|
},
|
||||||
|
webServer: {
|
||||||
|
disableNuiSourceCheck: old?.webServer?.disableNuiSourceCheck,
|
||||||
|
limiterMinutes: old?.webServer?.limiterMinutes,
|
||||||
|
limiterAttempts: old?.webServer?.limiterAttempts,
|
||||||
|
},
|
||||||
|
discordBot: {
|
||||||
|
enabled: old?.discordBot?.enabled,
|
||||||
|
token: old?.discordBot?.token,
|
||||||
|
guild: old?.discordBot?.guild,
|
||||||
|
warningsChannel: old?.discordBot?.announceChannel, //NOTE:renamed
|
||||||
|
embedJson: old?.discordBot?.embedJson,
|
||||||
|
embedConfigJson: old?.discordBot?.embedConfigJson,
|
||||||
|
},
|
||||||
|
server: {//NOTE:renamed
|
||||||
|
dataPath: old?.fxRunner?.serverDataPath, //NOTE:renamed
|
||||||
|
cfgPath: old?.fxRunner?.cfgPath,
|
||||||
|
startupArgs: old?.fxRunner?.commandLine, //NOTE:renamed
|
||||||
|
onesync: old?.fxRunner?.onesync,
|
||||||
|
autoStart: old?.fxRunner?.autostart, //NOTE:renamed
|
||||||
|
quiet: old?.fxRunner?.quiet,
|
||||||
|
shutdownNoticeDelayMs: old?.fxRunner?.shutdownNoticeDelay, //NOTE:renamed
|
||||||
|
restartSpawnDelayMs: old?.fxRunner?.restartDelay, //NOTE:renamed
|
||||||
|
},
|
||||||
|
restarter: {
|
||||||
|
schedule: old?.monitor?.restarterSchedule, //NOTE:renamed
|
||||||
|
bootGracePeriod: old?.monitor?.cooldown, //NOTE:renamed
|
||||||
|
resourceStartingTolerance: old?.monitor?.resourceStartingTolerance,
|
||||||
|
},
|
||||||
|
banlist: { //NOTE: All Renamed
|
||||||
|
enabled: old?.playerDatabase?.onJoinCheckBan,
|
||||||
|
rejectionMessage: old?.playerDatabase?.banRejectionMessage,
|
||||||
|
requiredHwidMatches: old?.playerDatabase?.requiredBanHwidMatches,
|
||||||
|
templates: old?.banTemplates,
|
||||||
|
},
|
||||||
|
whitelist: { //NOTE: All Renamed
|
||||||
|
mode: old?.playerDatabase?.whitelistMode,
|
||||||
|
rejectionMessage: old?.playerDatabase?.whitelistRejectionMessage,
|
||||||
|
discordRoles: old?.playerDatabase?.whitelistedDiscordRoles,
|
||||||
|
},
|
||||||
|
gameFeatures: {
|
||||||
|
menuEnabled: old?.global?.menuEnabled,
|
||||||
|
menuAlignRight: old?.global?.menuAlignRight,
|
||||||
|
menuPageKey: old?.global?.menuPageKey,
|
||||||
|
playerModePtfx: true, //NOTE: new config
|
||||||
|
hideAdminInPunishments: old?.global?.hideAdminInPunishments,
|
||||||
|
hideAdminInMessages: old?.global?.hideAdminInMessages,
|
||||||
|
hideDefaultAnnouncement: old?.global?.hideDefaultAnnouncement,
|
||||||
|
hideDefaultDirectMessage: old?.global?.hideDefaultDirectMessage,
|
||||||
|
hideDefaultWarning: old?.global?.hideDefaultWarning,
|
||||||
|
hideDefaultScheduledRestartWarning: old?.global?.hideDefaultScheduledRestartWarning,
|
||||||
|
},
|
||||||
|
logger: {
|
||||||
|
admin: old?.logger?.admin,
|
||||||
|
fxserver: old?.logger?.fxserver,
|
||||||
|
server: old?.logger?.server,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return remapped;
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
export const migrateOldConfig = (old: any) => {
|
||||||
|
//Get the old configs in the new structure
|
||||||
|
const remapped = restructureOldConfig(old) as any;
|
||||||
|
|
||||||
|
//Some migrations before comparing because defaults changed
|
||||||
|
if (typeof remapped.restarter?.bootGracePeriod === 'number') {
|
||||||
|
remapped.restarter.bootGracePeriod = Math.round(remapped.restarter.bootGracePeriod);
|
||||||
|
if (remapped.restarter.bootGracePeriod === 60) {
|
||||||
|
remapped.restarter.bootGracePeriod = 45;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (typeof remapped.server?.shutdownNoticeDelayMs === 'number') {
|
||||||
|
remapped.server.shutdownNoticeDelayMs *= 1000;
|
||||||
|
}
|
||||||
|
if (remapped.server?.restartSpawnDelayMs === 750) {
|
||||||
|
remapped.server.restartSpawnDelayMs = 500;
|
||||||
|
}
|
||||||
|
if (remapped.whitelist?.mode === 'guildMember') {
|
||||||
|
remapped.whitelist.mode = 'discordMember';
|
||||||
|
}
|
||||||
|
if (remapped.whitelist?.mode === 'guildRoles') {
|
||||||
|
remapped.whitelist.mode = 'discordRoles';
|
||||||
|
}
|
||||||
|
|
||||||
|
//Migrating the menu ptfx convar (can't do anything about it being set in server.cfg tho)
|
||||||
|
if (typeof remapped.server?.startupArgs === 'string') {
|
||||||
|
try {
|
||||||
|
const str = remapped.server.startupArgs.trim();
|
||||||
|
const convarSetRegex = /\+setr?\s+['"]?txAdmin-menuPtfxDisable['"]?\s+['"]?(?<value>\w+)['"]?\s?/g;
|
||||||
|
const matches = [...str.matchAll(convarSetRegex)];
|
||||||
|
if (matches.length) {
|
||||||
|
const valueSet = matches[matches.length - 1].groups?.value;
|
||||||
|
remapped.gameFeatures.playerModePtfx = valueSet !== 'true';
|
||||||
|
remapped.server.startupArgs = str.replaceAll(convarSetRegex, '');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to migrate the menuPtfxDisable convar. Assuming it\'s unset.');
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
remapped.server.startupArgs = remapped.server.startupArgs.length
|
||||||
|
? parseArgsStringToArgv(remapped.server.startupArgs)
|
||||||
|
: [];
|
||||||
|
}
|
||||||
|
|
||||||
|
//Removing stuff from unconfigured profile
|
||||||
|
if (remapped.general?.serverName === null) {
|
||||||
|
delete remapped.general.serverName;
|
||||||
|
}
|
||||||
|
if (remapped.server?.cfgPath === null) {
|
||||||
|
delete remapped.server.cfgPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Extract just the non-default values
|
||||||
|
const baseConfigs = getConfigDefaults(ConfigSchemas_v2) as TxConfigs;
|
||||||
|
const justNonDefaults: ListOf<any> = {};
|
||||||
|
for (const [scopeName, scopeConfigs] of Object.entries(baseConfigs)) {
|
||||||
|
for (const [configKey, configDefault] of Object.entries(scopeConfigs)) {
|
||||||
|
const configValue = confx(remapped).get(scopeName, configKey);
|
||||||
|
if (configValue === undefined) continue;
|
||||||
|
if (!dequal(configValue, configDefault)) {
|
||||||
|
confx(justNonDefaults).set(scopeName, configKey, configValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Last migrations
|
||||||
|
if (typeof justNonDefaults.general?.serverName === 'string') {
|
||||||
|
justNonDefaults.general.serverName = justNonDefaults.general.serverName.slice(0, 18);
|
||||||
|
}
|
||||||
|
if (Array.isArray(justNonDefaults.banlist?.templates)) {
|
||||||
|
for (const tpl of justNonDefaults.banlist.templates) {
|
||||||
|
if (typeof tpl.id !== 'string') continue;
|
||||||
|
tpl.id = genBanTemplateId();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Final object
|
||||||
|
return justNonDefaults;
|
||||||
|
}
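
For clarity, a small sketch of how the convar-stripping regex above behaves on a legacy command line (the sample string is made up):

const legacyArgs = '+set onesync on +setr "txAdmin-menuPtfxDisable" "true" +exec extra.cfg';
const convarSetRegex = /\+setr?\s+['"]?txAdmin-menuPtfxDisable['"]?\s+['"]?(?<value>\w+)['"]?\s?/g;
const matches = [...legacyArgs.matchAll(convarSetRegex)];
//matches[0].groups?.value === 'true'        -> gameFeatures.playerModePtfx becomes false
//legacyArgs.replaceAll(convarSetRegex, '')  -> '+set onesync on +exec extra.cfg'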
|
39
core/modules/ConfigStore/schema/restarter.ts
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
import { parseSchedule, regexHoursMinutes } from "@lib/misc";
|
||||||
|
|
||||||
|
export const polishScheduleTimesArray = (input: string[]) => {
|
||||||
|
return parseSchedule(input).valid.map((v) => v.string);
|
||||||
|
};
|
||||||
|
|
||||||
|
const schedule = typeDefinedConfig({
|
||||||
|
name: 'Restart Schedule',
|
||||||
|
default: [],
|
||||||
|
validator: z.string().regex(regexHoursMinutes).array().transform(polishScheduleTimesArray),
|
||||||
|
fixer: (input: any) => {
|
||||||
|
if(!Array.isArray(input)) return [];
|
||||||
|
return polishScheduleTimesArray(input);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootGracePeriod = typeDefinedConfig({
|
||||||
|
name: 'Boot Grace Period',
|
||||||
|
default: 45,
|
||||||
|
validator: z.number().int().min(15),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const resourceStartingTolerance = typeDefinedConfig({
|
||||||
|
name: 'Resource Starting Tolerance',
|
||||||
|
default: 90,
|
||||||
|
validator: z.number().int().min(30),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
schedule,
|
||||||
|
bootGracePeriod,
|
||||||
|
resourceStartingTolerance,
|
||||||
|
} as const;
|
72
core/modules/ConfigStore/schema/server.ts
Normal file
|
@ -0,0 +1,72 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig, typeNullableConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT, SYM_FIXER_FATAL } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
const dataPath = typeNullableConfig({
|
||||||
|
name: 'Server Data Path',
|
||||||
|
default: null,
|
||||||
|
validator: z.string().min(1).nullable(),
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
});
|
||||||
|
|
||||||
|
const cfgPath = typeDefinedConfig({
|
||||||
|
name: 'CFG File Path',
|
||||||
|
default: 'server.cfg',
|
||||||
|
validator: z.string().min(1),
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
});
|
||||||
|
|
||||||
|
const startupArgs = typeDefinedConfig({
|
||||||
|
name: 'Startup Arguments',
|
||||||
|
default: [],
|
||||||
|
validator: z.string().array(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const onesync = typeDefinedConfig({
|
||||||
|
name: 'OneSync',
|
||||||
|
default: 'on',
|
||||||
|
validator: z.enum(['on', 'legacy', 'off']),
|
||||||
|
fixer: SYM_FIXER_FATAL,
|
||||||
|
});
|
||||||
|
|
||||||
|
const autoStart = typeDefinedConfig({
|
||||||
|
name: 'Autostart',
|
||||||
|
default: true,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const quiet = typeDefinedConfig({
|
||||||
|
name: 'Quiet Mode',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const shutdownNoticeDelayMs = typeDefinedConfig({
|
||||||
|
name: 'Shutdown Notice Delay',
|
||||||
|
default: 5000,
|
||||||
|
validator: z.number().int().min(0).max(60_000),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const restartSpawnDelayMs = typeDefinedConfig({
|
||||||
|
name: 'Restart Spawn Delay',
|
||||||
|
default: 500,
|
||||||
|
validator: z.number().int().min(0).max(15_000),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
dataPath,
|
||||||
|
cfgPath,
|
||||||
|
startupArgs,
|
||||||
|
onesync,
|
||||||
|
autoStart,
|
||||||
|
quiet,
|
||||||
|
shutdownNoticeDelayMs,
|
||||||
|
restartSpawnDelayMs,
|
||||||
|
} as const;
|
53
core/modules/ConfigStore/schema/utils.ts
Normal file
|
@ -0,0 +1,53 @@
|
||||||
|
import { z } from 'zod';
|
||||||
|
import { SYM_FIXER_DEFAULT, SYM_FIXER_FATAL } from '@lib/symbols';
|
||||||
|
import consts from '@shared/consts';
|
||||||
|
import { fromError } from 'zod-validation-error';
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Types
|
||||||
|
*/
|
||||||
|
//Definitions
|
||||||
|
export type ConfigScope = ListOf<DefinedConfigItem | NulledConfigItem>;
|
||||||
|
export type ConfigItemFixer<T> = (value: any) => T;
|
||||||
|
interface BaseConfigItem<T = unknown> {
|
||||||
|
name: string;
|
||||||
|
validator: z.Schema<T>;
|
||||||
|
fixer: typeof SYM_FIXER_FATAL | typeof SYM_FIXER_DEFAULT | ConfigItemFixer<T>;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Utilities
|
||||||
|
export type ListOf<T> = { [key: string]: T };
|
||||||
|
export interface DefinedConfigItem<T = unknown> extends BaseConfigItem<T> {
|
||||||
|
default: T extends null ? never : T;
|
||||||
|
}
|
||||||
|
export interface NulledConfigItem<T = unknown> extends BaseConfigItem<T> {
|
||||||
|
default: null;
|
||||||
|
}
|
||||||
|
export type ScopeConfigItem = DefinedConfigItem | NulledConfigItem;
|
||||||
|
|
||||||
|
//NOTE: Split into two just because I couldn't figure out how to make the default value be null
|
||||||
|
export const typeDefinedConfig = <T>(config: DefinedConfigItem<T>): DefinedConfigItem<T> => config;
|
||||||
|
export const typeNullableConfig = <T>(config: NulledConfigItem<T>): NulledConfigItem<T> => config;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Common Schemas
|
||||||
|
*/
|
||||||
|
export const discordSnowflakeSchema = z.string().regex(
|
||||||
|
consts.regexDiscordSnowflake,
|
||||||
|
'The ID should be a 17-20 digit number.'
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MARK: Utilities
|
||||||
|
*/
|
||||||
|
export const getSchemaChainError = (chain: [schema: ScopeConfigItem, val: any][]) => {
|
||||||
|
for (const [schema, val] of chain) {
|
||||||
|
const res = schema.validator.safeParse(val);
|
||||||
|
if (!res.success) {
|
||||||
|
return fromError(res.error, { prefix: schema.name }).message;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
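
A usage sketch for getSchemaChainError (illustrative; it assumes the discordBot scope is imported as discordBot): pairs are checked in order and the first failure is returned as a human-readable message prefixed with the item name, or undefined when every pair validates.

const err = getSchemaChainError([
    [discordBot.guild, '1234'],          //too short to be a snowflake, so this pair fails
    [discordBot.warningsChannel, null],  //never evaluated, the first failure returns early
]);
//err is roughly 'Server ID: The ID should be a 17-20 digit number.'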
|
32
core/modules/ConfigStore/schema/webServer.ts
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
|
||||||
|
|
||||||
|
const disableNuiSourceCheck = typeDefinedConfig({
|
||||||
|
name: 'Disable NUI Source Check',
|
||||||
|
default: false,
|
||||||
|
validator: z.boolean(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const limiterMinutes = typeDefinedConfig({
|
||||||
|
name: 'Rate Limiter Minutes',
|
||||||
|
default: 15,
|
||||||
|
validator: z.number().int().min(1),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const limiterAttempts = typeDefinedConfig({
|
||||||
|
name: 'Rate Limiter Attempts',
|
||||||
|
default: 10,
|
||||||
|
validator: z.number().int().min(5),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
disableNuiSourceCheck,
|
||||||
|
limiterMinutes,
|
||||||
|
limiterAttempts,
|
||||||
|
} as const;
|
43
core/modules/ConfigStore/schema/whitelist.ts
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
import { discordSnowflakeSchema, typeDefinedConfig } from "./utils";
|
||||||
|
import { SYM_FIXER_DEFAULT } from "@lib/symbols";
|
||||||
|
import consts from "@shared/consts";
|
||||||
|
|
||||||
|
|
||||||
|
const mode = typeDefinedConfig({
|
||||||
|
name: 'Whitelist Mode',
|
||||||
|
default: 'disabled',
|
||||||
|
validator: z.enum(['disabled', 'adminOnly', 'approvedLicense', 'discordMember', 'discordRoles']),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
const rejectionMessage = typeDefinedConfig({
|
||||||
|
name: 'Whitelist Rejection Message',
|
||||||
|
default: 'Please join http://discord.gg/example and request to be whitelisted.',
|
||||||
|
validator: z.string(),
|
||||||
|
fixer: SYM_FIXER_DEFAULT,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const polishDiscordRolesArray = (input: string[]) => {
|
||||||
|
const unique = [...new Set(input)];
|
||||||
|
unique.sort((a, b) => Number(a) - Number(b));
|
||||||
|
return unique;
|
||||||
|
}
|
||||||
|
|
||||||
|
const discordRoles = typeDefinedConfig({
|
||||||
|
name: 'Whitelisted Discord Roles',
|
||||||
|
default: [],
|
||||||
|
validator: discordSnowflakeSchema.array().transform(polishDiscordRolesArray),
|
||||||
|
fixer: (input: any) => {
|
||||||
|
if (!Array.isArray(input)) return [];
|
||||||
|
const valid = input.filter(item => consts.regexDiscordSnowflake.test(item));
|
||||||
|
return polishDiscordRolesArray(valid);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default {
|
||||||
|
mode,
|
||||||
|
rejectionMessage,
|
||||||
|
discordRoles,
|
||||||
|
} as const;
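
Illustrative only (sample ids are made up): role id lists are deduplicated and sorted numerically before being stored.

polishDiscordRolesArray([
    '272800190639898628',
    '272800190639898628', //duplicate, dropped
    '101664357731487744',
]);
//-> ['101664357731487744', '272800190639898628']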
|
157
core/modules/ConfigStore/utils.test.ts
Normal file
|
@ -0,0 +1,157 @@
|
||||||
|
import { suite, it, expect } from 'vitest';
|
||||||
|
import { ConfigScaffold } from './schema';
|
||||||
|
import { confx, UpdateConfigKeySet } from './utils';
|
||||||
|
|
||||||
|
|
||||||
|
suite('confx utility', () => {
|
||||||
|
it('should check if a value exists (has)', () => {
|
||||||
|
const config: ConfigScaffold = {
|
||||||
|
scope1: {
|
||||||
|
key1: 'value1',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const conf = confx(config);
|
||||||
|
|
||||||
|
expect(conf.has('scope1', 'key1')).toBe(true);
|
||||||
|
expect(conf.has('scope1', 'key2')).toBe(false);
|
||||||
|
expect(conf.has('scope2', 'key1')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should retrieve a value (get)', () => {
|
||||||
|
const config: ConfigScaffold = {
|
||||||
|
scope1: {
|
||||||
|
key1: 'value1',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const conf = confx(config);
|
||||||
|
|
||||||
|
expect(conf.get('scope1', 'key1')).toBe('value1');
|
||||||
|
expect(conf.get('scope1', 'key2')).toBeUndefined();
|
||||||
|
expect(conf.get('scope2', 'key1')).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set a value (set)', () => {
|
||||||
|
const config: ConfigScaffold = {};
|
||||||
|
const conf = confx(config);
|
||||||
|
|
||||||
|
conf.set('scope1', 'key1', 'value1');
|
||||||
|
expect(config.scope1?.key1).toBe('value1');
|
||||||
|
|
||||||
|
conf.set('scope1', 'key2', 'value2');
|
||||||
|
expect(config.scope1?.key2).toBe('value2');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should unset a value (unset)', () => {
|
||||||
|
const config: ConfigScaffold = {
|
||||||
|
scope1: {
|
||||||
|
key1: 'value1',
|
||||||
|
key2: 'value2',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const conf = confx(config);
|
||||||
|
|
||||||
|
conf.unset('scope1', 'key1');
|
||||||
|
expect(config.scope1?.key1).toBeUndefined();
|
||||||
|
expect(config.scope1?.key2).toBe('value2');
|
||||||
|
|
||||||
|
conf.unset('scope1', 'key2');
|
||||||
|
expect(config.scope1).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle nested configurations properly', () => {
|
||||||
|
const config: ConfigScaffold = {
|
||||||
|
scope1: {
|
||||||
|
key1: { nested: 'value' },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const conf = confx(config);
|
||||||
|
|
||||||
|
expect(conf.get('scope1', 'key1')).toEqual({ nested: 'value' });
|
||||||
|
conf.set('scope1', 'key2', { another: 'value' });
|
||||||
|
expect(config.scope1?.key2).toEqual({ another: 'value' });
|
||||||
|
|
||||||
|
conf.unset('scope1', 'key1');
|
||||||
|
expect(config.scope1?.key1).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
suite('UpdateConfigKeySet', () => {
|
||||||
|
it('should add keys with scope and key separately', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
expect(set.raw).toEqual([{
|
||||||
|
full: 'example.serverName',
|
||||||
|
scope: 'example',
|
||||||
|
key: 'serverName'
|
||||||
|
}]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add keys with dot notation', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example.serverName');
|
||||||
|
expect(set.raw).toEqual([{
|
||||||
|
full: 'example.serverName',
|
||||||
|
scope: 'example',
|
||||||
|
key: 'serverName'
|
||||||
|
}]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match exact keys', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
expect(set.hasMatch('example.serverName')).toBe(true);
|
||||||
|
expect(set.hasMatch('example.enabled')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match wildcard patterns when checking', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
set.add('example', 'enabled');
|
||||||
|
|
||||||
|
expect(set.hasMatch('example.*')).toBe(true);
|
||||||
|
expect(set.hasMatch('server.*')).toBe(false);
|
||||||
|
expect(set.hasMatch('example.whatever')).toBe(false);
|
||||||
|
expect(set.hasMatch('*.serverName')).toBe(true);
|
||||||
|
expect(set.hasMatch('*.*')).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match when providing an array of patterns', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
set.add('monitor', 'enabled');
|
||||||
|
|
||||||
|
expect(set.hasMatch(['example.serverName', 'monitor.status'])).toBe(true);
|
||||||
|
expect(set.hasMatch(['server.*', 'example.*'])).toBe(true);
|
||||||
|
expect(set.hasMatch(['other.thing', 'another.config'])).toBe(false);
|
||||||
|
expect(set.hasMatch(['*.enabled', '*.disabled'])).toBe(true);
|
||||||
|
expect(set.hasMatch([])).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not allow adding wildcard', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
expect(() => set.add('example.*')).toThrow();
|
||||||
|
expect(() => set.add('example', '*')).toThrow();
|
||||||
|
expect(() => set.add('*.example')).toThrow();
|
||||||
|
expect(() => set.add('*', 'example')).toThrow();
|
||||||
|
expect(() => set.add('*.*')).toThrow();
|
||||||
|
expect(() => set.add('*', '*')).toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should track size correctly', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
expect(set.size).toBe(0);
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
expect(set.size).toBe(1);
|
||||||
|
set.add('example.enabled');
|
||||||
|
expect(set.size).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should list all added items', () => {
|
||||||
|
const set = new UpdateConfigKeySet();
|
||||||
|
set.add('example', 'serverName');
|
||||||
|
expect(set.list).toEqual(['example.serverName']);
|
||||||
|
set.add('example', 'enabled');
|
||||||
|
expect(set.list).toEqual(['example.serverName','example.enabled']);
|
||||||
|
});
|
||||||
|
});
|
120
core/modules/ConfigStore/utils.ts
Normal file
|
@ -0,0 +1,120 @@
|
||||||
|
import type { RefreshConfigKey } from "./index";
|
||||||
|
import { ConfigScaffold } from "./schema";
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A utility for manipulating a configuration scaffold with nested key-value structures.
|
||||||
|
* Provides convenient methods to check, retrieve, set, and remove values in the configuration object.
|
||||||
|
*/
|
||||||
|
export const confx = (cfg: any) => {
|
||||||
|
return {
|
||||||
|
//Check if the config has a value defined
|
||||||
|
has: (scope: string, key: string) => {
|
||||||
|
return scope in cfg && key in cfg[scope] && cfg[scope][key] !== undefined;
|
||||||
|
},
|
||||||
|
//Get a value from the config
|
||||||
|
get: (scope: string, key: string) => {
|
||||||
|
return cfg[scope]?.[key] as any | undefined;
|
||||||
|
},
|
||||||
|
//Set a value in the config
|
||||||
|
set: (scope: string, key: string, value: any) => {
|
||||||
|
cfg[scope] ??= {};
|
||||||
|
cfg[scope][key] = value;
|
||||||
|
},
|
||||||
|
//Remove a value from the config
|
||||||
|
unset: (scope: string, key: string) => {
|
||||||
|
let deleted = false;
|
||||||
|
if (scope in cfg && key in cfg[scope]) {
|
||||||
|
delete cfg[scope][key];
|
||||||
|
deleted = true;
|
||||||
|
if (Object.keys(cfg[scope]).length === 0) {
|
||||||
|
delete cfg[scope];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return deleted;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Not really intended for use, but it's a more type-safe version if you need it
|
||||||
|
*/
|
||||||
|
export const confxTyped = <T extends ConfigScaffold>(cfg: T) => {
|
||||||
|
return {
|
||||||
|
//Check if the config has a value defined
|
||||||
|
has: (scope: keyof T, key: keyof T[typeof scope]) => {
|
||||||
|
return scope in cfg && key in cfg[scope] && cfg[scope][key] !== undefined;
|
||||||
|
},
|
||||||
|
//Get a value from the config
|
||||||
|
get: (scope: keyof T, key: keyof T[typeof scope]) => {
|
||||||
|
if (scope in cfg && key in cfg[scope]) {
|
||||||
|
return cfg[scope][key] as T[typeof scope][typeof key];
|
||||||
|
} else {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
//Set a value in the config
|
||||||
|
set: (scope: keyof T, key: keyof T[typeof scope], value: any) => {
|
||||||
|
cfg[scope] ??= {} as T[typeof scope];
|
||||||
|
cfg[scope][key] = value;
|
||||||
|
},
|
||||||
|
//Remove a value from the config
|
||||||
|
unset: (scope: keyof T, key: keyof T[typeof scope]) => {
|
||||||
|
if (scope in cfg && key in cfg[scope]) {
|
||||||
|
delete cfg[scope][key];
|
||||||
|
if (Object.keys(cfg[scope]).length === 0) {
|
||||||
|
delete cfg[scope];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper class to deal with config keys
|
||||||
|
*/
|
||||||
|
export class UpdateConfigKeySet {
|
||||||
|
public readonly raw: RefreshConfigKey[] = [];
|
||||||
|
|
||||||
|
public add(input1: string, input2?: string) {
|
||||||
|
let full, scope, key;
|
||||||
|
if (input2) {
|
||||||
|
full = `${input1}.${input2}`;
|
||||||
|
scope = input1;
|
||||||
|
key = input2;
|
||||||
|
} else {
|
||||||
|
full = input1;
|
||||||
|
[scope, key] = input1.split('.');
|
||||||
|
}
|
||||||
|
if (full.includes('*')) {
|
||||||
|
throw new Error('Wildcards are not allowed when adding config keys');
|
||||||
|
}
|
||||||
|
this.raw.push({ full, scope, key });
|
||||||
|
}
|
||||||
|
|
||||||
|
private _hasMatch(rule: string) {
|
||||||
|
const [inputScope, inputKey] = rule.split('.');
|
||||||
|
return this.raw.some(rawCfg =>
|
||||||
|
(inputScope === '*' || rawCfg.scope === inputScope) &&
|
||||||
|
(inputKey === '*' || rawCfg.key === inputKey)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public hasMatch(rule: string | string[]) {
|
||||||
|
if (Array.isArray(rule)) {
|
||||||
|
return rule.some(f => this._hasMatch(f));
|
||||||
|
} else {
|
||||||
|
return this._hasMatch(rule);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
get size() {
|
||||||
|
return this.raw.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
get list() {
|
||||||
|
return this.raw.map(x => x.full);
|
||||||
|
}
|
||||||
|
}
|
240
core/modules/Database/dao/actions.ts
Normal file
|
@ -0,0 +1,240 @@
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { DbInstance, SavePriority } from "../instance";
|
||||||
|
import { DatabaseActionBanType, DatabaseActionType, DatabaseActionWarnType } from "../databaseTypes";
|
||||||
|
import { genActionID } from "../dbUtils";
|
||||||
|
import { now } from '@lib/misc';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
const console = consoleFactory('DatabaseDao');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access object for the database "actions" collection.
|
||||||
|
*/
|
||||||
|
export default class ActionsDao {
|
||||||
|
constructor(private readonly db: DbInstance) { }
|
||||||
|
|
||||||
|
private get dbo() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
private get chain() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj.chain;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches for an action in the database by the id, returns action or null if not found
|
||||||
|
*/
|
||||||
|
findOne(actionId: string): DatabaseActionType | null {
|
||||||
|
if (typeof actionId !== 'string' || !actionId.length) throw new Error('Invalid actionId.');
|
||||||
|
|
||||||
|
//Performing search
|
||||||
|
const a = this.chain.get('actions')
|
||||||
|
.find({ id: actionId })
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
return (typeof a === 'undefined') ? null : a;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches for any registered action in the database by a list of identifiers and optional filters
|
||||||
|
* Usage example: findMany(['license:xxx'], undefined, {type: 'ban', revocation.timestamp: null})
|
||||||
|
*/
|
||||||
|
findMany<T extends DatabaseActionType>(
|
||||||
|
idsArray: string[],
|
||||||
|
hwidsArray?: string[],
|
||||||
|
customFilter: ((action: DatabaseActionType) => action is T) | object = {}
|
||||||
|
): T[] {
|
||||||
|
if (!Array.isArray(idsArray)) throw new Error('idsArray should be an array');
|
||||||
|
if (hwidsArray && !Array.isArray(hwidsArray)) throw new Error('hwidsArray should be an array or undefined');
|
||||||
|
const idsFilter = (action: DatabaseActionType) => idsArray.some((fi) => action.ids.includes(fi))
|
||||||
|
const hwidsFilter = (action: DatabaseActionType) => {
|
||||||
|
if ('hwids' in action && action.hwids) {
|
||||||
|
const count = hwidsArray!.filter((fi) => action.hwids?.includes(fi)).length;
|
||||||
|
return count >= txConfig.banlist.requiredHwidMatches;
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
//small optimization
|
||||||
|
const idsMatchFilter = hwidsArray && hwidsArray.length && txConfig.banlist.requiredHwidMatches
|
||||||
|
? (a: DatabaseActionType) => idsFilter(a) || hwidsFilter(a)
|
||||||
|
: (a: DatabaseActionType) => idsFilter(a)
|
||||||
|
|
||||||
|
return this.chain.get('actions')
|
||||||
|
.filter(customFilter as (a: DatabaseActionType) => a is T)
|
||||||
|
.filter(idsMatchFilter)
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to search for a registered action database with error: ${(error as Error).message}`;
|
||||||
|
console.verbose.error(msg);
|
||||||
|
throw new Error(msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Registers a ban action and returns its id
|
||||||
|
*/
|
||||||
|
registerBan(
|
||||||
|
ids: string[],
|
||||||
|
author: string,
|
||||||
|
reason: string,
|
||||||
|
expiration: number | false,
|
||||||
|
playerName: string | false = false,
|
||||||
|
hwids?: string[], //only used for bans
|
||||||
|
): string {
|
||||||
|
//Sanity check
|
||||||
|
if (!Array.isArray(ids) || !ids.length) throw new Error('Invalid ids array.');
|
||||||
|
if (typeof author !== 'string' || !author.length) throw new Error('Invalid author.');
|
||||||
|
if (typeof reason !== 'string' || !reason.length) throw new Error('Invalid reason.');
|
||||||
|
if (expiration !== false && (typeof expiration !== 'number')) throw new Error('Invalid expiration.');
|
||||||
|
if (playerName !== false && (typeof playerName !== 'string' || !playerName.length)) throw new Error('Invalid playerName.');
|
||||||
|
if (hwids && !Array.isArray(hwids)) throw new Error('Invalid hwids array.');
|
||||||
|
|
||||||
|
//Saves it to the database
|
||||||
|
const timestamp = now();
|
||||||
|
try {
|
||||||
|
const actionID = genActionID(this.dbo, 'ban');
|
||||||
|
const toDB: DatabaseActionBanType = {
|
||||||
|
id: actionID,
|
||||||
|
type: 'ban',
|
||||||
|
ids,
|
||||||
|
hwids,
|
||||||
|
playerName,
|
||||||
|
reason,
|
||||||
|
author,
|
||||||
|
timestamp,
|
||||||
|
expiration,
|
||||||
|
revocation: {
|
||||||
|
timestamp: null,
|
||||||
|
author: null,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
this.chain.get('actions')
|
||||||
|
.push(toDB)
|
||||||
|
.value();
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
return actionID;
|
||||||
|
} catch (error) {
|
||||||
|
let msg = `Failed to register ban to database with message: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Registers a warn action and returns its id
|
||||||
|
*/
|
||||||
|
registerWarn(
|
||||||
|
ids: string[],
|
||||||
|
author: string,
|
||||||
|
reason: string,
|
||||||
|
playerName: string | false = false,
|
||||||
|
): string {
|
||||||
|
//Sanity check
|
||||||
|
if (!Array.isArray(ids) || !ids.length) throw new Error('Invalid ids array.');
|
||||||
|
if (typeof author !== 'string' || !author.length) throw new Error('Invalid author.');
|
||||||
|
if (typeof reason !== 'string' || !reason.length) throw new Error('Invalid reason.');
|
||||||
|
if (playerName !== false && (typeof playerName !== 'string' || !playerName.length)) throw new Error('Invalid playerName.');
|
||||||
|
|
||||||
|
//Saves it to the database
|
||||||
|
const timestamp = now();
|
||||||
|
try {
|
||||||
|
const actionID = genActionID(this.dbo, 'warn');
|
||||||
|
const toDB: DatabaseActionWarnType = {
|
||||||
|
id: actionID,
|
||||||
|
type: 'warn',
|
||||||
|
ids,
|
||||||
|
playerName,
|
||||||
|
reason,
|
||||||
|
author,
|
||||||
|
timestamp,
|
||||||
|
expiration: false,
|
||||||
|
acked: false,
|
||||||
|
revocation: {
|
||||||
|
timestamp: null,
|
||||||
|
author: null,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
this.chain.get('actions')
|
||||||
|
.push(toDB)
|
||||||
|
.value();
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
return actionID;
|
||||||
|
} catch (error) {
|
||||||
|
let msg = `Failed to register warn to database with message: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Marks a warning as acknowledged
|
||||||
|
*/
|
||||||
|
ackWarn(actionId: string) {
|
||||||
|
if (typeof actionId !== 'string' || !actionId.length) throw new Error('Invalid actionId.');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const action = this.chain.get('actions')
|
||||||
|
.find({ id: actionId })
|
||||||
|
.value();
|
||||||
|
if (!action) throw new Error(`action not found`);
|
||||||
|
if (action.type !== 'warn') throw new Error(`action is not a warn`);
|
||||||
|
action.acked = true;
|
||||||
|
this.db.writeFlag(SavePriority.MEDIUM);
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to ack warn with message: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Revoke an action (ban, warn)
|
||||||
|
*/
|
||||||
|
revoke(
|
||||||
|
actionId: string,
|
||||||
|
author: string,
|
||||||
|
allowedTypes: string[] | true = true
|
||||||
|
): DatabaseActionType {
|
||||||
|
if (typeof actionId !== 'string' || !actionId.length) throw new Error('Invalid actionId.');
|
||||||
|
if (typeof author !== 'string' || !author.length) throw new Error('Invalid author.');
|
||||||
|
if (allowedTypes !== true && !Array.isArray(allowedTypes)) throw new Error('Invalid allowedTypes.');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const action = this.chain.get('actions')
|
||||||
|
.find({ id: actionId })
|
||||||
|
.value();
|
||||||
|
|
||||||
|
if (!action) throw new Error(`action not found`);
|
||||||
|
if (allowedTypes !== true && !allowedTypes.includes(action.type)) {
|
||||||
|
throw new Error(`you do not have permission to revoke this action`);
|
||||||
|
}
|
||||||
|
|
||||||
|
action.revocation = {
|
||||||
|
timestamp: now(),
|
||||||
|
author,
|
||||||
|
};
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
return cloneDeep(action);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to revoke action with message: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
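
A hedged usage sketch for this DAO (the txCore.database.actions accessor and the sample identifiers are assumptions for illustration; the signatures are the ones defined above):

const banId = txCore.database.actions.registerBan(
    ['license:0123456789abcdef'],  //player identifiers (sample value)
    'tabarra',                     //author
    'Cheating',                    //reason
    now() + 7 * 24 * 60 * 60,      //expiration as a unix timestamp, or false for permanent
    'Some Player',                 //optional player name
    ['5:0123456789abcdef'],        //optional hwids, only meaningful for bans
);
//Revoking it later; allowedTypes limits which action types the caller is permitted to revoke
txCore.database.actions.revoke(banId, 'tabarra', ['ban']);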
|
140
core/modules/Database/dao/cleanup.ts
Normal file
|
@ -0,0 +1,140 @@
|
||||||
|
import { DbInstance, SavePriority } from "../instance";
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { DatabasePlayerType, DatabaseWhitelistApprovalsType, DatabaseWhitelistRequestsType } from '../databaseTypes';
|
||||||
|
import { now } from '@lib/misc';
|
||||||
|
const console = consoleFactory('DatabaseDao');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access object for cleaning up the database.
|
||||||
|
*/
|
||||||
|
export default class CleanupDao {
|
||||||
|
constructor(private readonly db: DbInstance) { }
|
||||||
|
|
||||||
|
private get dbo() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
private get chain() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj.chain;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleans the database by removing every entry that matches the provided filter function.
|
||||||
|
* @returns {number} number of removed items
|
||||||
|
*/
|
||||||
|
bulkRemove(
|
||||||
|
tableName: 'players' | 'actions' | 'whitelistApprovals' | 'whitelistRequests',
|
||||||
|
filterFunc: Function
|
||||||
|
): number {
|
||||||
|
if (!Array.isArray(this.dbo.data[tableName])) throw new Error('Table selected isn\'t an array.');
|
||||||
|
if (typeof filterFunc !== 'function') throw new Error('filterFunc must be a function.');
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
const removed = this.chain.get(tableName)
|
||||||
|
.remove(filterFunc as any)
|
||||||
|
.value();
|
||||||
|
return removed.length;
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to clean database with error: ${(error as Error).message}`;
|
||||||
|
console.verbose.error(msg);
|
||||||
|
throw new Error(msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleans the hwids from the database.
|
||||||
|
* @returns {number} number of removed HWIDs
|
||||||
|
*/
|
||||||
|
wipeHwids(
|
||||||
|
fromPlayers: boolean,
|
||||||
|
fromBans: boolean,
|
||||||
|
): number {
|
||||||
|
if (!Array.isArray(this.dbo.data.players)) throw new Error('Players table isn\'t an array yet.');
|
||||||
|
if (!Array.isArray(this.dbo.data.actions)) throw new Error('Actions table isn\'t an array yet.');
|
||||||
|
if (typeof fromPlayers !== 'boolean' || typeof fromBans !== 'boolean') throw new Error('The parameters should be booleans.');
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
let removed = 0;
|
||||||
|
if (fromPlayers) {
|
||||||
|
this.chain.get('players')
|
||||||
|
.map(player => {
|
||||||
|
removed += player.hwids.length;
|
||||||
|
player.hwids = [];
|
||||||
|
return player;
|
||||||
|
})
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
if (fromBans)
|
||||||
|
this.chain.get('actions')
|
||||||
|
.map(action => {
|
||||||
|
if (action.type !== 'ban' || !action.hwids) {
|
||||||
|
return action;
|
||||||
|
} else {
|
||||||
|
removed += action.hwids.length;
|
||||||
|
action.hwids = [];
|
||||||
|
return action;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.value();
|
||||||
|
return removed;
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to clean database with error: ${(error as Error).message}`;
|
||||||
|
console.verbose.error(msg);
|
||||||
|
throw new Error(msg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cron function that optimizes the database by removing stale players and whitelist requests/approvals
|
||||||
|
*/
|
||||||
|
runDailyOptimizer() {
|
||||||
|
const oneDay = 24 * 60 * 60;
|
||||||
|
|
||||||
|
//Optimize players
|
||||||
|
//Players that have not connected in the last 16 days and have less than 2 hours of playtime
|
||||||
|
let playerRemoved;
|
||||||
|
try {
|
||||||
|
const sixteenDaysAgo = now() - (16 * oneDay);
|
||||||
|
const filter = (p: DatabasePlayerType) => {
|
||||||
|
return (p.tsLastConnection < sixteenDaysAgo && p.playTime < 120);
|
||||||
|
}
|
||||||
|
playerRemoved = this.bulkRemove('players', filter);
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to optimize players database with error: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Optimize whitelistRequests + whitelistApprovals
|
||||||
|
//Removing the ones older than 7 days
|
||||||
|
let wlRequestsRemoved, wlApprovalsRemoved;
|
||||||
|
const sevenDaysAgo = now() - (7 * oneDay);
|
||||||
|
try {
|
||||||
|
const wlRequestsFilter = (req: DatabaseWhitelistRequestsType) => {
|
||||||
|
return (req.tsLastAttempt < sevenDaysAgo);
|
||||||
|
}
|
||||||
|
wlRequestsRemoved = txCore.database.whitelist.removeManyRequests(wlRequestsFilter).length;
|
||||||
|
|
||||||
|
const wlApprovalsFilter = (req: DatabaseWhitelistApprovalsType) => {
|
||||||
|
return (req.tsApproved < sevenDaysAgo);
|
||||||
|
}
|
||||||
|
wlApprovalsRemoved = txCore.database.whitelist.removeManyApprovals(wlApprovalsFilter).length;
|
||||||
|
} catch (error) {
|
||||||
|
const msg = `Failed to optimize whitelist database with error: ${(error as Error).message}`;
|
||||||
|
console.error(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
console.ok(`Internal Database optimized. This applies only for the txAdmin internal database, and does not affect your MySQL or framework (ESX/QBCore/etc) databases.`);
|
||||||
|
console.ok(`- ${playerRemoved} players that haven't connected in the past 16 days and had less than 2 hours of playtime.`);
|
||||||
|
console.ok(`- ${wlRequestsRemoved} whitelist requests older than a week.`);
|
||||||
|
console.ok(`- ${wlApprovalsRemoved} whitelist approvals older than a week.`);
|
||||||
|
}
|
||||||
|
}
|
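For context, a minimal usage sketch of CleanupDao, assuming the module is reachable as txCore.database.cleanup (as wired in core/modules/Database/index.ts further down in this commit). The zero-playtime filter is purely illustrative and not something the commit itself performs:

import { DatabasePlayerType } from '../databaseTypes';

// Remove every stored player that never accumulated playtime; bulkRemove forwards
// the predicate straight to lodash's remove() and returns how many entries matched.
const removedCount = txCore.database.cleanup.bulkRemove(
    'players',
    (p: DatabasePlayerType) => p.playTime === 0,
);
console.log(`Pruned ${removedCount} players with zero playtime`);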
110
core/modules/Database/dao/players.ts
Normal file
110
core/modules/Database/dao/players.ts
Normal file
|
@@ -0,0 +1,110 @@
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { DbInstance, SavePriority } from "../instance";
|
||||||
|
import { DatabasePlayerType } from "../databaseTypes";
|
||||||
|
import { DuplicateKeyError } from "../dbUtils";
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
const console = consoleFactory('DatabaseDao');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access object for the database "players" collection.
|
||||||
|
*/
|
||||||
|
export default class PlayersDao {
|
||||||
|
constructor(private readonly db: DbInstance) { }
|
||||||
|
|
||||||
|
private get dbo() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
private get chain() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj.chain;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches for a player in the database by the license, returns null if not found or false in case of error
|
||||||
|
*/
|
||||||
|
findOne(license: string): DatabasePlayerType | null {
|
||||||
|
//Performing search
|
||||||
|
const p = this.chain.get('players')
|
||||||
|
.find({ license })
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
return (typeof p === 'undefined') ? null : p;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Searches for players in the database, returning every player that matches the filter
|
||||||
|
*/
|
||||||
|
findMany(filter: object | Function): DatabasePlayerType[] {
|
||||||
|
return this.chain.get('players')
|
||||||
|
.filter(filter as any)
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a player to the database
|
||||||
|
*/
|
||||||
|
register(player: DatabasePlayerType): void {
|
||||||
|
//TODO: validate player data vs DatabasePlayerType props
|
||||||
|
|
||||||
|
//Check for duplicated license
|
||||||
|
const found = this.chain.get('players')
|
||||||
|
.filter({ license: player.license })
|
||||||
|
.value();
|
||||||
|
if (found.length) throw new DuplicateKeyError(`this license is already registered`);
|
||||||
|
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
this.chain.get('players')
|
||||||
|
.push(player)
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates a player by assigning the srcData props to the database player.
|
||||||
|
* The source data object is deep cloned to prevent weird side effects.
|
||||||
|
*/
|
||||||
|
update(license: string, srcData: object, srcUniqueId: Symbol): DatabasePlayerType {
|
||||||
|
if (typeof (srcData as any).license !== 'undefined') {
|
||||||
|
throw new Error(`cannot update the license field`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const playerDbObj = this.chain.get('players').find({ license });
|
||||||
|
if (!playerDbObj.value()) throw new Error('Player not found in database');
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
const newData = playerDbObj
|
||||||
|
.assign(cloneDeep(srcData))
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
txCore.fxPlayerlist.handleDbDataSync(newData, srcUniqueId);
|
||||||
|
return newData;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Revokes whitelist status of all players that match a filter function
|
||||||
|
* @returns the number of revoked whitelists
|
||||||
|
*/
|
||||||
|
bulkRevokeWhitelist(filterFunc: Function): number {
|
||||||
|
if (typeof filterFunc !== 'function') throw new Error('filterFunc must be a function.');
|
||||||
|
|
||||||
|
let cntChanged = 0;
|
||||||
|
const srcSymbol = Symbol('bulkRevokePlayerWhitelist');
|
||||||
|
this.dbo.data!.players.forEach((player) => {
|
||||||
|
if (player.tsWhitelisted && filterFunc(player)) {
|
||||||
|
cntChanged++;
|
||||||
|
player.tsWhitelisted = undefined;
|
||||||
|
txCore.fxPlayerlist.handleDbDataSync(cloneDeep(player), srcSymbol);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.db.writeFlag(SavePriority.HIGH);
|
||||||
|
return cntChanged;
|
||||||
|
}
|
||||||
|
}
|
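A hypothetical usage sketch of PlayersDao (exposed as txCore.database.players, per index.ts later in this commit); the license hash and the names are placeholders:

const license = '0123456789abcdef0123456789abcdef01234567'; // placeholder license hash
const nowTs = Math.floor(Date.now() / 1000);

// register() throws DuplicateKeyError for an already-known license, so check first.
if (!txCore.database.players.findOne(license)) {
    txCore.database.players.register({
        license,
        ids: [`license:${license}`],
        hwids: [],
        displayName: 'Example Player',
        pureName: 'example player',
        playTime: 0,
        tsJoined: nowTs,
        tsLastConnection: nowTs,
    });
}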
111
core/modules/Database/dao/stats.ts
Normal file
111
core/modules/Database/dao/stats.ts
Normal file
|
@@ -0,0 +1,111 @@
|
||||||
|
import { DbInstance } from "../instance";
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { MultipleCounter } from '@modules/Metrics/statsUtils';
|
||||||
|
import { now } from '@lib/misc';
|
||||||
|
const console = consoleFactory('DatabaseDao');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access object for collecting stats from the database.
|
||||||
|
*/
|
||||||
|
export default class StatsDao {
|
||||||
|
constructor(private readonly db: DbInstance) { }
|
||||||
|
|
||||||
|
private get dbo() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
private get chain() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj.chain;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns players stats for the database (for Players page callouts)
|
||||||
|
*/
|
||||||
|
getPlayersStats() {
|
||||||
|
const oneDayAgo = now() - (24 * 60 * 60);
|
||||||
|
const sevenDaysAgo = now() - (7 * 24 * 60 * 60);
|
||||||
|
const startingValue = {
|
||||||
|
total: 0,
|
||||||
|
playedLast24h: 0,
|
||||||
|
joinedLast24h: 0,
|
||||||
|
joinedLast7d: 0,
|
||||||
|
};
|
||||||
|
const playerStats = this.chain.get('players')
|
||||||
|
.reduce((acc, p, ind) => {
|
||||||
|
acc.total++;
|
||||||
|
if (p.tsLastConnection > oneDayAgo) acc.playedLast24h++;
|
||||||
|
if (p.tsJoined > oneDayAgo) acc.joinedLast24h++;
|
||||||
|
if (p.tsJoined > sevenDaysAgo) acc.joinedLast7d++;
|
||||||
|
return acc;
|
||||||
|
}, startingValue)
|
||||||
|
.value();
|
||||||
|
|
||||||
|
return playerStats;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns action stats for the database
|
||||||
|
*/
|
||||||
|
getActionStats() {
|
||||||
|
const sevenDaysAgo = now() - (7 * 24 * 60 * 60);
|
||||||
|
const startingValue = {
|
||||||
|
totalWarns: 0,
|
||||||
|
warnsLast7d: 0,
|
||||||
|
totalBans: 0,
|
||||||
|
bansLast7d: 0,
|
||||||
|
groupedByAdmins: new MultipleCounter(),
|
||||||
|
};
|
||||||
|
const actionStats = this.chain.get('actions')
|
||||||
|
.reduce((acc, action, ind) => {
|
||||||
|
if (action.type == 'ban') {
|
||||||
|
acc.totalBans++;
|
||||||
|
if (action.timestamp > sevenDaysAgo) acc.bansLast7d++;
|
||||||
|
} else if (action.type == 'warn') {
|
||||||
|
acc.totalWarns++;
|
||||||
|
if (action.timestamp > sevenDaysAgo) acc.warnsLast7d++;
|
||||||
|
}
|
||||||
|
acc.groupedByAdmins.count(action.author);
|
||||||
|
return acc;
|
||||||
|
}, startingValue)
|
||||||
|
.value();
|
||||||
|
|
||||||
|
return {
|
||||||
|
...actionStats,
|
||||||
|
groupedByAdmins: actionStats.groupedByAdmins.toJSON(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns actions/players stats for the database
|
||||||
|
* NOTE: used by diagnostics and reporting
|
||||||
|
*/
|
||||||
|
getDatabaseStats() {
|
||||||
|
const actionStats = this.chain.get('actions')
|
||||||
|
.reduce((acc, a, ind) => {
|
||||||
|
if (a.type == 'ban') {
|
||||||
|
acc.bans++;
|
||||||
|
} else if (a.type == 'warn') {
|
||||||
|
acc.warns++;
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
}, { bans: 0, warns: 0 })
|
||||||
|
.value();
|
||||||
|
|
||||||
|
const playerStats = this.chain.get('players')
|
||||||
|
.reduce((acc, p, ind) => {
|
||||||
|
acc.players++;
|
||||||
|
acc.playTime += p.playTime;
|
||||||
|
if (p.tsWhitelisted) acc.whitelists++;
|
||||||
|
return acc;
|
||||||
|
}, { players: 0, playTime: 0, whitelists: 0 })
|
||||||
|
.value();
|
||||||
|
|
||||||
|
return { ...actionStats, ...playerStats }
|
||||||
|
}
|
||||||
|
}
|
133
core/modules/Database/dao/whitelist.ts
Normal file
133
core/modules/Database/dao/whitelist.ts
Normal file
|
@@ -0,0 +1,133 @@
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { DbInstance, SavePriority } from "../instance";
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { DatabaseWhitelistApprovalsType, DatabaseWhitelistRequestsType } from '../databaseTypes';
|
||||||
|
import { DuplicateKeyError, genWhitelistRequestID } from '../dbUtils';
|
||||||
|
const console = consoleFactory('DatabaseDao');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Data access object for the database whitelist collections.
|
||||||
|
*/
|
||||||
|
export default class WhitelistDao {
|
||||||
|
constructor(private readonly db: DbInstance) { }
|
||||||
|
|
||||||
|
private get dbo() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
private get chain() {
|
||||||
|
if (!this.db.obj || !this.db.isReady) throw new Error(`database not ready yet`);
|
||||||
|
return this.db.obj.chain;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all whitelist approvals, which can be optionally filtered
|
||||||
|
*/
|
||||||
|
findManyApprovals(
|
||||||
|
filter?: object | Function
|
||||||
|
): DatabaseWhitelistApprovalsType[] {
|
||||||
|
return this.chain.get('whitelistApprovals')
|
||||||
|
.filter(filter as any)
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes whitelist approvals based on a filter.
|
||||||
|
*/
|
||||||
|
removeManyApprovals(
|
||||||
|
filter: object | Function
|
||||||
|
): DatabaseWhitelistApprovalsType[] {
|
||||||
|
this.db.writeFlag(SavePriority.MEDIUM);
|
||||||
|
return this.chain.get('whitelistApprovals')
|
||||||
|
.remove(filter as any)
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Registers a whitelist approval to the database
|
||||||
|
*/
|
||||||
|
registerApproval(approval: DatabaseWhitelistApprovalsType): void {
|
||||||
|
//TODO: validate player data vs DatabaseWhitelistApprovalsType props
|
||||||
|
|
||||||
|
//Check for duplicated license
|
||||||
|
const found = this.chain.get('whitelistApprovals')
|
||||||
|
.filter({ identifier: approval.identifier })
|
||||||
|
.value();
|
||||||
|
if (found.length) throw new DuplicateKeyError(`this identifier is already whitelisted`);
|
||||||
|
|
||||||
|
//Register new
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
this.chain.get('whitelistApprovals')
|
||||||
|
.push(cloneDeep(approval))
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns all whitelist requests, which can be optionally filtered
|
||||||
|
*/
|
||||||
|
findManyRequests(
|
||||||
|
filter?: object | Function
|
||||||
|
): DatabaseWhitelistRequestsType[] {
|
||||||
|
return this.chain.get('whitelistRequests')
|
||||||
|
.filter(filter as any)
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes whitelist requests based on a filter.
|
||||||
|
*/
|
||||||
|
removeManyRequests(
|
||||||
|
filter: object | Function
|
||||||
|
): DatabaseWhitelistRequestsType[] {
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
return this.chain.get('whitelistRequests')
|
||||||
|
.remove(filter as any)
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates a whitelist request by assigning the srcData props to the database object.
|
||||||
|
* The source data object is deep cloned to prevent weird side effects.
|
||||||
|
*/
|
||||||
|
updateRequest(license: string, srcData: object): DatabaseWhitelistRequestsType {
|
||||||
|
if (typeof (srcData as any).id !== 'undefined' || typeof (srcData as any).license !== 'undefined') {
|
||||||
|
throw new Error(`cannot update id or license fields`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestDbObj = this.chain.get('whitelistRequests').find({ license });
|
||||||
|
if (!requestDbObj.value()) throw new Error('Request not found in database');
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
return requestDbObj
|
||||||
|
.assign(cloneDeep(srcData))
|
||||||
|
.cloneDeep()
|
||||||
|
.value();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a whitelist request to the database
|
||||||
|
*/
|
||||||
|
registerRequest(request: Omit<DatabaseWhitelistRequestsType, "id">): string {
|
||||||
|
//TODO: validate player data vs DatabaseWhitelistRequestsType props
|
||||||
|
if (typeof (request as any).id !== 'undefined') {
|
||||||
|
throw new Error(`cannot manually set the id field`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const id = genWhitelistRequestID(this.dbo);
|
||||||
|
this.db.writeFlag(SavePriority.LOW);
|
||||||
|
this.chain.get('whitelistRequests')
|
||||||
|
.push({ id, ...cloneDeep(request) })
|
||||||
|
.value();
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
}
|
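A hypothetical sketch of the request/approval flow implemented above (again via txCore.database.whitelist); all literal values are placeholders:

const license = '0123456789abcdef0123456789abcdef01234567'; // placeholder license hash
const nowTs = Math.floor(Date.now() / 1000);

// A join attempt files a request and gets back an "R####" id generated by genWhitelistRequestID.
const requestId = txCore.database.whitelist.registerRequest({
    license,
    playerDisplayName: 'Example Player',
    playerPureName: 'example player',
    tsLastAttempt: nowTs,
});

// An admin approval is stored against the full identifier string.
txCore.database.whitelist.registerApproval({
    identifier: `license:${license}`,
    playerName: 'Example Player',
    playerAvatar: null,
    tsApproved: nowTs,
    approvedBy: 'example_admin',
});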
68
core/modules/Database/databaseTypes.ts
Normal file
68
core/modules/Database/databaseTypes.ts
Normal file
|
@@ -0,0 +1,68 @@
|
||||||
|
export type DatabasePlayerType = {
|
||||||
|
license: string;
|
||||||
|
ids: string[];
|
||||||
|
hwids: string[];
|
||||||
|
displayName: string;
|
||||||
|
pureName: string;
|
||||||
|
playTime: number;
|
||||||
|
tsLastConnection: number;
|
||||||
|
tsJoined: number;
|
||||||
|
tsWhitelisted?: number;
|
||||||
|
notes?: {
|
||||||
|
text: string;
|
||||||
|
lastAdmin: string | null;
|
||||||
|
tsLastEdit: number | null;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DatabaseActionBaseType = {
|
||||||
|
id: string;
|
||||||
|
ids: string[];
|
||||||
|
playerName: string | false;
|
||||||
|
reason: string;
|
||||||
|
author: string;
|
||||||
|
timestamp: number;
|
||||||
|
//FIXME: the revocation object itself should be optional instead of nullable properties
|
||||||
|
//BUT DO REMEMBER THE `'XXX' IN YYY` ISSUE!
|
||||||
|
revocation: {
|
||||||
|
timestamp: number | null;
|
||||||
|
author: string | null;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
export type DatabaseActionBanType = {
|
||||||
|
type: 'ban';
|
||||||
|
hwids?: string[];
|
||||||
|
expiration: number | false;
|
||||||
|
} & DatabaseActionBaseType;
|
||||||
|
export type DatabaseActionWarnType = {
|
||||||
|
type: 'warn';
|
||||||
|
expiration: false; //FIXME: remove - BUT DO REMEMBER THE `'XXX' IN YYY` ISSUE!
|
||||||
|
acked: boolean; //if the player has acknowledged the warning
|
||||||
|
} & DatabaseActionBaseType;
|
||||||
|
export type DatabaseActionType = DatabaseActionBanType | DatabaseActionWarnType;
|
||||||
|
|
||||||
|
export type DatabaseWhitelistApprovalsType = {
|
||||||
|
identifier: string;
|
||||||
|
playerName: string; //always filled, even with `unknown` or license `xxxxxx...xxxxxx`
|
||||||
|
playerAvatar: string | null,
|
||||||
|
tsApproved: number,
|
||||||
|
approvedBy: string
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DatabaseWhitelistRequestsType = {
|
||||||
|
id: string, //R####
|
||||||
|
license: string,
|
||||||
|
playerDisplayName: string,
|
||||||
|
playerPureName: string,
|
||||||
|
discordTag?: string,
|
||||||
|
discordAvatar?: string, //first try to get from GuildMember, then client.users.fetch()
|
||||||
|
tsLastAttempt: number,
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DatabaseDataType = {
|
||||||
|
version: number,
|
||||||
|
players: DatabasePlayerType[],
|
||||||
|
actions: DatabaseActionType[],
|
||||||
|
whitelistApprovals: DatabaseWhitelistApprovalsType[],
|
||||||
|
whitelistRequests: DatabaseWhitelistRequestsType[],
|
||||||
|
};
|
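To make the action union above concrete, a hypothetical object satisfying DatabaseActionBanType; every value is invented, and the id merely follows the rough shape produced by genActionID in dbUtils.ts:

import { DatabaseActionBanType } from './databaseTypes';

const exampleBan: DatabaseActionBanType = {
    id: 'BABC-1234', // type initial + 3 chars + '-' + 4 chars
    type: 'ban',
    ids: ['license:0123456789abcdef0123456789abcdef01234567'],
    hwids: [],
    playerName: 'Example Player',
    reason: 'example reason',
    author: 'example_admin',
    timestamp: 1700000000,
    expiration: false, // permanent ban
    revocation: { timestamp: null, author: null },
};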
124
core/modules/Database/dbUtils.ts
Normal file
124
core/modules/Database/dbUtils.ts
Normal file
|
@@ -0,0 +1,124 @@
|
||||||
|
const modulename = 'IDGen';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import * as nanoidSecure from 'nanoid';
|
||||||
|
import * as nanoidNonSecure from 'nanoid/non-secure';
|
||||||
|
import consts from '@shared/consts';
|
||||||
|
import getOsDistro from '@lib/host/getOsDistro.js';
|
||||||
|
import { txEnv, txHostConfig } from '@core/globalData';
|
||||||
|
import type { DatabaseObjectType } from './instance';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { msToDuration } from '@lib/misc';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
//Consts
|
||||||
|
type IdStorageTypes = DatabaseObjectType | Set<string>;
|
||||||
|
const maxAttempts = 10;
|
||||||
|
const noIdErrorMessage = 'Unable to generate a new random ID, possibly due to decreased available entropy. Please send a screenshot of the detailed information in the terminal to the txAdmin devs.';
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prints a diagnostics message to the console that should help identify what the problem is and what the potential solution might be
|
||||||
|
*/
|
||||||
|
const printDiagnostics = async () => {
|
||||||
|
let uptime;
|
||||||
|
let entropy;
|
||||||
|
try {
|
||||||
|
uptime = msToDuration(process.uptime() * 1000);
|
||||||
|
entropy = (await fsp.readFile('/proc/sys/kernel/random/entropy_avail', 'utf8')).trim();
|
||||||
|
} catch (error) {
|
||||||
|
entropy = (error as Error).message;
|
||||||
|
}
|
||||||
|
|
||||||
|
const secureStorage = new Set();
|
||||||
|
for (let i = 0; i < 100; i++) {
|
||||||
|
const randID = nanoidSecure.customAlphabet(consts.actionIdAlphabet, 4)();
|
||||||
|
if (!secureStorage.has(randID)) secureStorage.add(randID);
|
||||||
|
}
|
||||||
|
|
||||||
|
const nonsecureStorage = new Set();
|
||||||
|
for (let i = 0; i < 100; i++) {
|
||||||
|
const randID = nanoidNonSecure.customAlphabet(consts.actionIdAlphabet, 4)();
|
||||||
|
if (!nonsecureStorage.has(randID)) nonsecureStorage.add(randID);
|
||||||
|
}
|
||||||
|
|
||||||
|
const osDistro = await getOsDistro();
|
||||||
|
console.error(noIdErrorMessage);
|
||||||
|
console.error(`Uptime: ${uptime}`);
|
||||||
|
console.error(`Entropy: ${entropy}`);
|
||||||
|
console.error(`Distro: ${osDistro}`);
|
||||||
|
console.error(`txAdmin: ${txEnv.txaVersion}`);
|
||||||
|
console.error(`FXServer: ${txEnv.fxsVersionTag}`);
|
||||||
|
console.error(`Provider: ${txHostConfig.providerName ?? 'none'}`);
|
||||||
|
console.error(`Unique Test: secure ${secureStorage.size}/100, non-secure ${nonsecureStorage.size}/100`);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks in a storage whether the ID is unique or not.
|
||||||
|
* @param storage the Set or lowdb instance
|
||||||
|
* @param id the ID to check
|
||||||
|
* @param lowdbTable the lowdb table to check
|
||||||
|
* @returns if is unique
|
||||||
|
*/
|
||||||
|
const checkUniqueness = (storage: IdStorageTypes, id: string, lowdbTable: string) => {
|
||||||
|
if (storage instanceof Set) {
|
||||||
|
return !storage.has(id);
|
||||||
|
} else {
|
||||||
|
//@ts-ignore: typing as ('actions' | 'whitelistRequests') did not work
|
||||||
|
return !storage.chain.get(lowdbTable).find({ id }).value();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a unique whitelist request ID, or throws an error
|
||||||
|
* @param storage set or lowdb instance
|
||||||
|
* @returns id
|
||||||
|
*/
|
||||||
|
export const genWhitelistRequestID = (storage: IdStorageTypes) => {
|
||||||
|
let attempts = 0;
|
||||||
|
while (attempts < maxAttempts) {
|
||||||
|
attempts++;
|
||||||
|
const randFunc = (attempts <= 5) ? nanoidSecure : nanoidNonSecure;
|
||||||
|
const id = 'R' + randFunc.customAlphabet(consts.actionIdAlphabet, 4)();
|
||||||
|
if (checkUniqueness(storage, id, 'whitelistRequests')) {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
printDiagnostics().catch((e) => { });
|
||||||
|
throw new Error(noIdErrorMessage);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates a unique action ID, or throws an error
|
||||||
|
*/
|
||||||
|
export const genActionID = (storage: IdStorageTypes, actionType: string) => {
|
||||||
|
let attempts = 0;
|
||||||
|
while (attempts < maxAttempts) {
|
||||||
|
attempts++;
|
||||||
|
const randFunc = (attempts <= 5) ? nanoidSecure : nanoidNonSecure;
|
||||||
|
const id = actionType[0].toUpperCase()
|
||||||
|
+ randFunc.customAlphabet(consts.actionIdAlphabet, 3)()
|
||||||
|
+ '-'
|
||||||
|
+ randFunc.customAlphabet(consts.actionIdAlphabet, 4)();
|
||||||
|
if (checkUniqueness(storage, id, 'actions')) {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
printDiagnostics().catch((e) => { });
|
||||||
|
throw new Error(noIdErrorMessage);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Error class for key uniqueness violations
|
||||||
|
*/
|
||||||
|
export class DuplicateKeyError extends Error {
|
||||||
|
readonly code = 'DUPLICATE_KEY';
|
||||||
|
constructor(message: string) {
|
||||||
|
super(message);
|
||||||
|
}
|
||||||
|
}
|
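A short sketch of how the generators above are meant to be called, using a plain Set as the uniqueness storage the same way migrations.js does further down; the exact characters depend on consts.actionIdAlphabet:

import { genActionID, genWhitelistRequestID } from './dbUtils';

const usedIds = new Set<string>();

const banId = genActionID(usedIds, 'ban');   // e.g. "B???-????" (first letter of the action type)
usedIds.add(banId);                          // the caller records it; genActionID only checks uniqueness

const warnId = genActionID(usedIds, 'warn'); // e.g. "W???-????"
usedIds.add(warnId);

const requestId = genWhitelistRequestID(usedIds); // e.g. "R????"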
76
core/modules/Database/index.ts
Normal file
76
core/modules/Database/index.ts
Normal file
|
@@ -0,0 +1,76 @@
|
||||||
|
const modulename = 'Database';
|
||||||
|
import { DbInstance } from './instance';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
|
||||||
|
import PlayersDao from './dao/players';
|
||||||
|
import ActionsDao from './dao/actions';
|
||||||
|
import WhitelistDao from './dao/whitelist';
|
||||||
|
import StatsDao from './dao/stats';
|
||||||
|
import CleanupDao from './dao/cleanup';
|
||||||
|
import { TxConfigState } from '@shared/enums';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This module is a hub for all database-related operations.
|
||||||
|
*/
|
||||||
|
export default class Database {
|
||||||
|
readonly #db: DbInstance;
|
||||||
|
|
||||||
|
//Database Methods
|
||||||
|
readonly players: PlayersDao;
|
||||||
|
readonly actions: ActionsDao;
|
||||||
|
readonly whitelist: WhitelistDao;
|
||||||
|
readonly stats: StatsDao;
|
||||||
|
readonly cleanup: CleanupDao;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.#db = new DbInstance();
|
||||||
|
this.players = new PlayersDao(this.#db);
|
||||||
|
this.actions = new ActionsDao(this.#db);
|
||||||
|
this.whitelist = new WhitelistDao(this.#db);
|
||||||
|
this.stats = new StatsDao(this.#db);
|
||||||
|
this.cleanup = new CleanupDao(this.#db);
|
||||||
|
|
||||||
|
//Database optimization cron function
|
||||||
|
const optimizerTask = () => {
|
||||||
|
if(txManager.configState === TxConfigState.Ready) {
|
||||||
|
this.cleanup.runDailyOptimizer();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
setTimeout(optimizerTask, 30_000);
|
||||||
|
setInterval(optimizerTask, 24 * 60 * 60_000);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Graceful shutdown handler - passing down to the db instance
|
||||||
|
*/
|
||||||
|
public handleShutdown() {
|
||||||
|
this.#db.handleShutdown();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns whether the lowdb instance is ready
|
||||||
|
*/
|
||||||
|
get isReady() {
|
||||||
|
return this.#db.isReady;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the size of the database file
|
||||||
|
*/
|
||||||
|
get fileSize() {
|
||||||
|
return (this.#db.obj?.adapter as any)?.fileSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the entire lowdb object. Please be careful with it :)
|
||||||
|
*/
|
||||||
|
getDboRef() {
|
||||||
|
if (!this.#db.obj) throw new Error(`database not ready yet`);
|
||||||
|
return this.#db.obj;
|
||||||
|
}
|
||||||
|
};
|
260
core/modules/Database/instance.ts
Normal file
260
core/modules/Database/instance.ts
Normal file
|
@@ -0,0 +1,260 @@
|
||||||
|
const modulename = 'Database';
|
||||||
|
import fsp from 'node:fs/promises';
|
||||||
|
import { ExpChain } from 'lodash';
|
||||||
|
//@ts-ignore: I have no idea why this errors, but I couldn't solve it
|
||||||
|
import lodash from 'lodash-es';
|
||||||
|
import { Low, Adapter } from 'lowdb';
|
||||||
|
import { TextFile } from 'lowdb/node';
|
||||||
|
import { txDevEnv, txEnv } from '@core/globalData';
|
||||||
|
import { DatabaseDataType } from './databaseTypes.js';
|
||||||
|
import migrations from './migrations.js';
|
||||||
|
import consoleFactory from '@lib/console.js';
|
||||||
|
import fatalError from '@lib/fatalError.js';
|
||||||
|
import { TimeCounter } from '@modules/Metrics/statsUtils.js';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
//Consts & helpers
|
||||||
|
export const DATABASE_VERSION = 5;
|
||||||
|
export const defaultDatabase = {
|
||||||
|
version: DATABASE_VERSION,
|
||||||
|
actions: [],
|
||||||
|
players: [],
|
||||||
|
whitelistApprovals: [],
|
||||||
|
whitelistRequests: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
export enum SavePriority {
|
||||||
|
STANDBY,
|
||||||
|
LOW,
|
||||||
|
MEDIUM,
|
||||||
|
HIGH,
|
||||||
|
}
|
||||||
|
|
||||||
|
const SAVE_CONFIG = {
|
||||||
|
[SavePriority.STANDBY]: {
|
||||||
|
name: 'standby',
|
||||||
|
interval: 5 * 60 * 1000,
|
||||||
|
},
|
||||||
|
[SavePriority.LOW]: {
|
||||||
|
name: 'low',
|
||||||
|
interval: 60 * 1000,
|
||||||
|
},
|
||||||
|
[SavePriority.MEDIUM]: {
|
||||||
|
name: 'medium',
|
||||||
|
interval: 30 * 1000,
|
||||||
|
},
|
||||||
|
[SavePriority.HIGH]: {
|
||||||
|
name: 'high',
|
||||||
|
interval: 15 * 1000,
|
||||||
|
},
|
||||||
|
} as Record<SavePriority, { interval: number; name: string }>;
|
||||||
|
|
||||||
|
|
||||||
|
//Reimplementing the adapter to minify JSON on prod builds
|
||||||
|
class JSONFile<T> implements Adapter<T> {
|
||||||
|
private readonly adapter: TextFile;
|
||||||
|
private readonly serializer: Function;
|
||||||
|
public fileSize: number = 0;
|
||||||
|
|
||||||
|
constructor(filename: string) {
|
||||||
|
this.adapter = new TextFile(filename);
|
||||||
|
this.serializer = (txDevEnv.ENABLED)
|
||||||
|
? (obj: any) => JSON.stringify(obj, null, 4)
|
||||||
|
: JSON.stringify;
|
||||||
|
}
|
||||||
|
|
||||||
|
async read(): Promise<T | null> {
|
||||||
|
const data = await this.adapter.read();
|
||||||
|
if (data === null) {
|
||||||
|
return null;
|
||||||
|
} else {
|
||||||
|
return JSON.parse(data) as T;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
write(obj: T): Promise<void> {
|
||||||
|
const serialized = this.serializer(obj);
|
||||||
|
this.fileSize = serialized.length;
|
||||||
|
return this.adapter.write(serialized);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Extend Low class with a new `chain` field
|
||||||
|
//NOTE: lodash-es doesn't have ExpChain exported, so we need it from the original lodash
|
||||||
|
class LowWithLodash<T> extends Low<T> {
|
||||||
|
chain: ExpChain<this['data']> = lodash.chain(this).get('data')
|
||||||
|
}
|
||||||
|
export type DatabaseObjectType = LowWithLodash<DatabaseDataType>;
|
||||||
|
|
||||||
|
|
||||||
|
export class DbInstance {
|
||||||
|
readonly dbPath: string;
|
||||||
|
readonly backupPath: string;
|
||||||
|
obj: DatabaseObjectType | undefined = undefined;
|
||||||
|
#writePending: SavePriority = SavePriority.STANDBY;
|
||||||
|
lastWrite: number = 0;
|
||||||
|
isReady: boolean = false;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.dbPath = `${txEnv.profilePath}/data/playersDB.json`;
|
||||||
|
this.backupPath = `${txEnv.profilePath}/data/playersDB.backup.json`;
|
||||||
|
|
||||||
|
//Start database instance
|
||||||
|
this.setupDatabase();
|
||||||
|
|
||||||
|
//Cron functions
|
||||||
|
setInterval(() => {
|
||||||
|
this.checkWriteNeeded();
|
||||||
|
}, SAVE_CONFIG[SavePriority.HIGH].interval);
|
||||||
|
setInterval(() => {
|
||||||
|
this.backupDatabase();
|
||||||
|
}, SAVE_CONFIG[SavePriority.STANDBY].interval);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start lowdb instance and set defaults
|
||||||
|
*/
|
||||||
|
async setupDatabase() {
|
||||||
|
//Tries to load the database
|
||||||
|
let dbo;
|
||||||
|
try {
|
||||||
|
const adapterAsync = new JSONFile<DatabaseDataType>(this.dbPath);
|
||||||
|
dbo = new LowWithLodash(adapterAsync, defaultDatabase);
|
||||||
|
await dbo.read();
|
||||||
|
} catch (errorMain) {
|
||||||
|
const errTitle = 'Your txAdmin player/actions database could not be loaded.';
|
||||||
|
try {
|
||||||
|
await fsp.copyFile(this.backupPath, this.dbPath);
|
||||||
|
const adapterAsync = new JSONFile<DatabaseDataType>(this.dbPath);
|
||||||
|
dbo = new LowWithLodash(adapterAsync, defaultDatabase);
|
||||||
|
await dbo.read();
|
||||||
|
console.warn(errTitle);
|
||||||
|
console.warn('The database file was restored with the automatic backup file.');
|
||||||
|
console.warn('A rollback of up to five minutes of data is expected.');
|
||||||
|
} catch (errorBackup) {
|
||||||
|
fatalError.Database(0, [
|
||||||
|
errTitle,
|
||||||
|
'It was also not possible to load the automatic backup file.',
|
||||||
|
['Main error', (errorMain as Error).message],
|
||||||
|
['Backup error', (errorBackup as Error).message],
|
||||||
|
['Database path', this.dbPath],
|
||||||
|
'If there is a file in that location, you may try to delete or restore it manually.',
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Setting up loaded database
|
||||||
|
try {
|
||||||
|
//Need to write the database, in case it is new
|
||||||
|
await dbo.write();
|
||||||
|
|
||||||
|
//Need to chain after setting defaults
|
||||||
|
dbo.chain = lodash.chain(dbo.data);
|
||||||
|
|
||||||
|
//If old database
|
||||||
|
if (dbo.data.version !== DATABASE_VERSION) {
|
||||||
|
await this.backupDatabase(`${txEnv.profilePath}/data/playersDB.backup.v${dbo.data.version}.json`);
|
||||||
|
this.obj = await migrations(dbo);
|
||||||
|
} else {
|
||||||
|
this.obj = dbo;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Checking basic structure integrity
|
||||||
|
if (
|
||||||
|
!Array.isArray(this.obj!.data.actions)
|
||||||
|
|| !Array.isArray(this.obj!.data.players)
|
||||||
|
|| !Array.isArray(this.obj!.data.whitelistApprovals)
|
||||||
|
|| !Array.isArray(this.obj!.data.whitelistRequests)
|
||||||
|
) {
|
||||||
|
fatalError.Database(2, [
|
||||||
|
'Your txAdmin player/actions database is corrupted!',
|
||||||
|
'It is missing one of the required arrays (players, actions, whitelistApprovals, whitelistRequests).',
|
||||||
|
'If you modified the database file manually, you may try to restore it from the automatic backup file.',
|
||||||
|
['Database path', this.dbPath],
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.lastWrite = Date.now();
|
||||||
|
this.isReady = true;
|
||||||
|
} catch (error) {
|
||||||
|
fatalError.Database(1, 'Failed to setup database object.', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes the database to the disk if pending.
|
||||||
|
*/
|
||||||
|
public handleShutdown() {
|
||||||
|
if (this.#writePending !== SavePriority.STANDBY) {
|
||||||
|
this.writeDatabase();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a copy of the database file
|
||||||
|
*/
|
||||||
|
async backupDatabase(targetPath?: string) {
|
||||||
|
try {
|
||||||
|
await fsp.copyFile(this.dbPath, targetPath ?? this.backupPath);
|
||||||
|
// console.verbose.debug('Database file backed up.');
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to backup database file '${this.dbPath}'`);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set write pending flag
|
||||||
|
*/
|
||||||
|
writeFlag(flag = SavePriority.MEDIUM) {
|
||||||
|
if (flag < SavePriority.LOW || flag > SavePriority.HIGH) {
|
||||||
|
throw new Error('unknown priority flag!');
|
||||||
|
}
|
||||||
|
if (flag > this.#writePending) {
|
||||||
|
const flagName = SAVE_CONFIG[flag].name;
|
||||||
|
console.verbose.debug(`writeFlag > ${flagName}`);
|
||||||
|
this.#writePending = flag;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if it's time to write the database to disk, taking into consideration the priority flag
|
||||||
|
*/
|
||||||
|
private async checkWriteNeeded() {
|
||||||
|
//Check if the database is ready
|
||||||
|
if (!this.obj) return;
|
||||||
|
|
||||||
|
const timeStart = Date.now();
|
||||||
|
const sinceLastWrite = timeStart - this.lastWrite;
|
||||||
|
|
||||||
|
if (this.#writePending === SavePriority.HIGH || sinceLastWrite > SAVE_CONFIG[this.#writePending].interval) {
|
||||||
|
const writeTime = new TimeCounter();
|
||||||
|
await this.writeDatabase();
|
||||||
|
const timeElapsed = writeTime.stop();
|
||||||
|
this.#writePending = SavePriority.STANDBY;
|
||||||
|
this.lastWrite = timeStart;
|
||||||
|
// console.verbose.debug(`DB file saved, took ${timeElapsed.milliseconds}ms.`);
|
||||||
|
txCore.metrics.txRuntime.databaseSaveTime.count(timeElapsed.milliseconds);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Writes the database to the disk NOW
|
||||||
|
* NOTE: separate function so it can also be called by the shutdown handler
|
||||||
|
*/
|
||||||
|
private async writeDatabase() {
|
||||||
|
try {
|
||||||
|
await this.obj?.write();
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Failed to save players database with error: ${(error as Error).message}`);
|
||||||
|
console.verbose.dir(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
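The write scheduling above boils down to a simple rule, restated here as a sketch for clarity (intervals copied from SAVE_CONFIG): checkWriteNeeded() runs on the 15-second HIGH tick and flushes either when the pending flag is HIGH, or when the pending flag's own interval has already elapsed since the last successful write.

// Standalone restatement of the debounce rule in checkWriteNeeded(), for illustration only.
const SAVE_INTERVALS_MS: Record<string, number> = {
    standby: 5 * 60 * 1000,
    low: 60 * 1000,
    medium: 30 * 1000,
    high: 15 * 1000,
};

const shouldFlush = (pending: string, msSinceLastWrite: number) =>
    pending === 'high' || msSinceLastWrite > SAVE_INTERVALS_MS[pending];

shouldFlush('medium', 20_000); // false - a MEDIUM flag waits for the 30s mark
shouldFlush('medium', 31_000); // true  - flushed on the next 15s tick after that
shouldFlush('high', 1_000);    // true  - HIGH is flushed on the very next tick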
173
core/modules/Database/migrations.js
Normal file
173
core/modules/Database/migrations.js
Normal file
|
@@ -0,0 +1,173 @@
|
||||||
|
const modulename = 'DBMigration';
|
||||||
|
import lodash from 'lodash-es'; //needed for cloneDeep() in the v1 migration below
import { genActionID } from './dbUtils.js';
|
||||||
|
import cleanPlayerName from '@shared/cleanPlayerName.js';
|
||||||
|
import { DATABASE_VERSION, defaultDatabase } from './instance.js'; //FIXME: circular_dependency
|
||||||
|
import { now } from '@lib/misc.js';
|
||||||
|
import consoleFactory from '@lib/console.js';
|
||||||
|
import fatalError from '@lib/fatalError.js';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles the migration of the database
|
||||||
|
*/
|
||||||
|
export default async (dbo) => {
|
||||||
|
if (dbo.data.version === DATABASE_VERSION) {
|
||||||
|
return dbo;
|
||||||
|
}
|
||||||
|
if (typeof dbo.data.version !== 'number') {
|
||||||
|
fatalError.Database(50, 'Your players database version is not a number!');
|
||||||
|
}
|
||||||
|
if (dbo.data.version > DATABASE_VERSION) {
|
||||||
|
fatalError.Database(51, [
|
||||||
|
`Your players database is on v${dbo.data.version}, and this txAdmin supports up to v${DATABASE_VERSION}.`,
|
||||||
|
'This means you likely downgraded your txAdmin or FXServer.',
|
||||||
|
'Please make sure your txAdmin is updated!',
|
||||||
|
'',
|
||||||
|
'If you want to downgrade FXServer (the "artifact") but keep txAdmin updated,',
|
||||||
|
'you can move the updated "citizen/system_resources/monitor" folder',
|
||||||
|
'to the older FXServer artifact, replacing the old files.',
|
||||||
|
`Alternatively, you can restore the database v${dbo.data.version} backup on the data folder.`,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Migrate database
|
||||||
|
if (dbo.data.version < 1) {
|
||||||
|
console.warn(`Updating your players database from v${dbo.data.version} to v1. Wiping all the data.`);
|
||||||
|
dbo.data = lodash.cloneDeep(defaultDatabase);
|
||||||
|
dbo.data.version = 1;
|
||||||
|
await dbo.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if (dbo.data.version === 1) {
|
||||||
|
console.warn('Updating your players database from v1 to v2.');
|
||||||
|
console.warn('This process will change any duplicated action ID and wipe pending whitelist.');
|
||||||
|
const actionIDStore = new Set();
|
||||||
|
const actionsToFix = [];
|
||||||
|
dbo.chain.get('actions').forEach((a) => {
|
||||||
|
if (!actionIDStore.has(a.id)) {
|
||||||
|
actionIDStore.add(a.id);
|
||||||
|
} else {
|
||||||
|
actionsToFix.push(a);
|
||||||
|
}
|
||||||
|
}).value();
|
||||||
|
console.warn(`Actions to fix: ${actionsToFix.length}`);
|
||||||
|
for (let i = 0; i < actionsToFix.length; i++) {
|
||||||
|
const action = actionsToFix[i];
|
||||||
|
action.id = genActionID(actionIDStore, action.type);
|
||||||
|
actionIDStore.add(action.id);
|
||||||
|
}
|
||||||
|
dbo.data.pendingWL = [];
|
||||||
|
dbo.data.version = 2;
|
||||||
|
await dbo.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dbo.data.version === 2) {
|
||||||
|
console.warn('Updating your players database from v2 to v3.');
|
||||||
|
console.warn('This process will:');
|
||||||
|
console.warn('\t- process player names for better readability/searchability');
|
||||||
|
console.warn('\t- allow txAdmin to save old player identifiers');
|
||||||
|
console.warn('\t- remove the whitelist action in favor of player property');
|
||||||
|
console.warn('\t- remove empty notes');
|
||||||
|
console.warn('\t- improve whitelist handling');
|
||||||
|
console.warn('\t- change the warn action prefix from A to W');
|
||||||
|
|
||||||
|
//Removing all whitelist actions
|
||||||
|
const ts = now();
|
||||||
|
const whitelists = new Map();
|
||||||
|
dbo.data.actions = dbo.data.actions.filter((action) => {
|
||||||
|
if (action.type !== 'whitelist') return true;
|
||||||
|
if (
|
||||||
|
(!action.expiration || action.expiration > ts)
|
||||||
|
&& (!action.revocation.timestamp)
|
||||||
|
&& action.identifiers.length
|
||||||
|
&& typeof action.identifiers[0] === 'string'
|
||||||
|
&& action.identifiers[0].startsWith('license:')
|
||||||
|
) {
|
||||||
|
const license = action.identifiers[0].substring(8);
|
||||||
|
whitelists.set(license, action.timestamp);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
|
||||||
|
//Changing Warn actions id prefix to W
|
||||||
|
dbo.data.actions.forEach((action) => {
|
||||||
|
if (action.type === 'warn') {
|
||||||
|
action.id = `W${action.id.substring(1)}`;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
//Migrating players
|
||||||
|
for (const player of dbo.data.players) {
|
||||||
|
const { displayName, pureName } = cleanPlayerName(player.name);
|
||||||
|
player.displayName = displayName;
|
||||||
|
player.pureName = pureName;
|
||||||
|
player.name = undefined;
|
||||||
|
player.ids = [`license:${player.license}`];
|
||||||
|
|
||||||
|
//adding whitelist
|
||||||
|
const tsWhitelisted = whitelists.get(player.license);
|
||||||
|
if (tsWhitelisted) player.tsWhitelisted = tsWhitelisted;
|
||||||
|
|
||||||
|
//removing empty notes
|
||||||
|
if (!player.notes.text) player.notes = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Setting new whitelist schema
|
||||||
|
dbo.data.pendingWL = undefined;
|
||||||
|
dbo.data.whitelistApprovals = [];
|
||||||
|
dbo.data.whitelistRequests = [];
|
||||||
|
|
||||||
|
//Saving db
|
||||||
|
dbo.data.version = 3;
|
||||||
|
await dbo.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dbo.data.version === 3) {
|
||||||
|
console.warn('Updating your players database from v3 to v4.');
|
||||||
|
console.warn('This process will add a HWIDs array to the player data.');
|
||||||
|
console.warn('It will also rename \'action[].identifiers\' to \'action[].ids\'.');
|
||||||
|
|
||||||
|
//Migrating players
|
||||||
|
for (const player of dbo.data.players) {
|
||||||
|
player.hwids = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
//Migrating actions
|
||||||
|
for (const action of dbo.data.actions) {
|
||||||
|
action.ids = action.identifiers;
|
||||||
|
action.identifiers = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Saving db
|
||||||
|
dbo.data.version = 4;
|
||||||
|
await dbo.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dbo.data.version === 4) {
|
||||||
|
console.warn('Updating your players database from v4 to v5.');
|
||||||
|
console.warn('This process will allow for offline warns.');
|
||||||
|
|
||||||
|
//Migrating actions
|
||||||
|
for (const action of dbo.data.actions) {
|
||||||
|
if (action.type === 'warn') {
|
||||||
|
action.acked = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Saving db
|
||||||
|
dbo.data.version = 5;
|
||||||
|
await dbo.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dbo.data.version !== DATABASE_VERSION) {
|
||||||
|
fatalError.Database(52, [
|
||||||
|
'Unexpected migration error: Did not reach the expected database version.',
|
||||||
|
`Your players database is on v${dbo.data.version}, but the expected version is v${DATABASE_VERSION}.`,
|
||||||
|
'Please make sure your txAdmin is on the most updated version!',
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
console.ok('Database migrated successfully');
|
||||||
|
return dbo;
|
||||||
|
};
|
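Purely as an illustration of the pattern above (not part of this commit): a hypothetical v5 -> v6 step would follow the same guard-mutate-bump-write shape, together with bumping DATABASE_VERSION in instance.ts; someNewField is an invented name:

// Hypothetical next migration step, shown only to illustrate the established pattern.
async function exampleMigrateV5toV6(dbo) {
    if (dbo.data.version === 5) {
        console.warn('Updating your players database from v5 to v6.');
        for (const player of dbo.data.players) {
            player.someNewField = null; // invented per-player field
        }
        dbo.data.version = 6;
        await dbo.write();
    }
    return dbo;
}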
147
core/modules/DiscordBot/commands/info.ts
Normal file
147
core/modules/DiscordBot/commands/info.ts
Normal file
|
@@ -0,0 +1,147 @@
|
||||||
|
const modulename = 'DiscordBot:cmd:info';
|
||||||
|
import { APIEmbedField, CommandInteraction, EmbedBuilder, EmbedData } from 'discord.js';
|
||||||
|
import { parsePlayerId } from '@lib/player/idUtils';
|
||||||
|
import { embedder } from '../discordHelpers';
|
||||||
|
import { findPlayersByIdentifier } from '@lib/player/playerFinder';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import humanizeDuration from 'humanize-duration';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { msToShortishDuration } from '@lib/misc';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
//Consts
|
||||||
|
const footer = {
|
||||||
|
iconURL: 'https://cdn.discordapp.com/emojis/1062339910654246964.webp?size=96&quality=lossless',
|
||||||
|
text: `txAdmin ${txEnv.txaVersion}`,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handler for /info
|
||||||
|
*/
|
||||||
|
export default async (interaction: CommandInteraction) => {
|
||||||
|
const tsToLocaleDate = (ts: number) => {
|
||||||
|
return new Date(ts * 1000).toLocaleDateString(
|
||||||
|
txCore.translator.canonical,
|
||||||
|
{ dateStyle: 'long' }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Check for admininfo & permission
|
||||||
|
let includeAdminInfo = false;
|
||||||
|
//@ts-ignore: somehow vscode is resolving interaction as CommandInteraction
|
||||||
|
const adminInfoFlag = interaction.options.getBoolean('admininfo');
|
||||||
|
if (adminInfoFlag) {
|
||||||
|
const admin = txCore.adminStore.getAdminByProviderUID(interaction.user.id);
|
||||||
|
if (!admin) {
|
||||||
|
return await interaction.reply(embedder.danger('You cannot use the `admininfo` option if you are not a txAdmin admin.'));
|
||||||
|
} else {
|
||||||
|
includeAdminInfo = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Detect search identifier
|
||||||
|
let searchId;
|
||||||
|
//@ts-ignore: somehow vscode is resolving interaction as CommandInteraction
|
||||||
|
const subcommand = interaction.options.getSubcommand();
|
||||||
|
if (subcommand === 'self') {
|
||||||
|
const targetId = interaction.member?.user.id;
|
||||||
|
if (!targetId) {
|
||||||
|
return await interaction.reply(embedder.danger('Could not resolve your Discord ID.'));
|
||||||
|
}
|
||||||
|
searchId = `discord:${targetId}`;
|
||||||
|
|
||||||
|
} else if (subcommand === 'member') {
|
||||||
|
const member = interaction.options.getMember('member');
|
||||||
|
if(!member || !('user' in member)){
|
||||||
|
return await interaction.reply(embedder.danger(`Failed to resolve member ID.`));
|
||||||
|
}
|
||||||
|
searchId = `discord:${member.user.id}`;
|
||||||
|
|
||||||
|
} else if (subcommand === 'id') {
|
||||||
|
//@ts-ignore: somehow vscode is resolving interaction as CommandInteraction
|
||||||
|
const input = interaction.options.getString('id', true).trim();
|
||||||
|
if (!input.length) {
|
||||||
|
return await interaction.reply(embedder.danger('Invalid identifier.'));
|
||||||
|
}
|
||||||
|
|
||||||
|
const { isIdValid, idType, idValue, idlowerCased } = parsePlayerId(input);
|
||||||
|
if (!isIdValid || !idType || !idValue || !idlowerCased) {
|
||||||
|
return await interaction.reply(embedder.danger(`The provided identifier (\`${input}\`) does not seem to be valid.`));
|
||||||
|
}
|
||||||
|
searchId = idlowerCased;
|
||||||
|
|
||||||
|
} else {
|
||||||
|
throw new Error(`Subcommand ${subcommand} not found.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Searching for players
|
||||||
|
const players = findPlayersByIdentifier(searchId);
|
||||||
|
if (!players.length) {
|
||||||
|
return await interaction.reply(embedder.warning(`Identifier (\`${searchId}\`) does not seem to be associated with any player in the txAdmin Database.`));
|
||||||
|
} else if (players.length > 10) {
|
||||||
|
return await interaction.reply(embedder.warning(`The identifier (\`${searchId}\`) is associated with more than 10 players, please use the txAdmin Web Panel to search for it.`));
|
||||||
|
}
|
||||||
|
|
||||||
|
//Format players
|
||||||
|
const embeds = [];
|
||||||
|
for (const player of players) {
|
||||||
|
const dbData = player.getDbData();
|
||||||
|
if (!dbData) continue;
|
||||||
|
|
||||||
|
//Basic data
|
||||||
|
const bodyText: Record<string, string> = {
|
||||||
|
'Play time': msToShortishDuration(dbData.playTime * 60 * 1000),
|
||||||
|
'Join date': tsToLocaleDate(dbData.tsJoined),
|
||||||
|
'Last connection': tsToLocaleDate(dbData.tsLastConnection),
|
||||||
|
'Whitelisted': (dbData.tsWhitelisted)
|
||||||
|
? tsToLocaleDate(dbData.tsWhitelisted)
|
||||||
|
: 'not yet',
|
||||||
|
};
|
||||||
|
|
||||||
|
//If admin query
|
||||||
|
let fields: APIEmbedField[] | undefined;
|
||||||
|
if (includeAdminInfo) {
|
||||||
|
//Counting bans/warns
|
||||||
|
const actionHistory = player.getHistory();
|
||||||
|
const actionCount = { ban: 0, warn: 0 };
|
||||||
|
for (const log of actionHistory) {
|
||||||
|
actionCount[log.type]++;
|
||||||
|
}
|
||||||
|
const banText = (actionCount.ban === 1) ? '1 ban' : `${actionCount.ban} bans`;
|
||||||
|
const warnText = (actionCount.warn === 1) ? '1 warn' : `${actionCount.warn} warns`;
|
||||||
|
bodyText['Log'] = `${banText}, ${warnText}`;
|
||||||
|
|
||||||
|
//Filling notes + identifiers
|
||||||
|
const notesText = (dbData.notes) ? dbData.notes.text : 'nothing here';
|
||||||
|
const idsText = (dbData.ids.length) ? dbData.ids.join('\n') : 'nothing here';
|
||||||
|
fields = [
|
||||||
|
{
|
||||||
|
name: '• Notes:',
|
||||||
|
value: `\`\`\`${notesText}\`\`\``
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: '• Identifiers:',
|
||||||
|
value: `\`\`\`${idsText}\`\`\``
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
//Preparing embed
|
||||||
|
const description = Object.entries(bodyText)
|
||||||
|
.map(([label, value]) => `**• ${label}:** \`${value}\``)
|
||||||
|
.join('\n')
|
||||||
|
const embedData: EmbedData = {
|
||||||
|
title: player.displayName,
|
||||||
|
fields,
|
||||||
|
description,
|
||||||
|
footer,
|
||||||
|
};
|
||||||
|
embeds.push(new EmbedBuilder(embedData).setColor('#4262e2'));
|
||||||
|
}
|
||||||
|
|
||||||
|
//Send embeds :)
|
||||||
|
return await interaction.reply({ embeds });
|
||||||
|
}
|
294
core/modules/DiscordBot/commands/status.ts
Normal file
294
core/modules/DiscordBot/commands/status.ts
Normal file
|
@@ -0,0 +1,294 @@
|
||||||
|
const modulename = 'DiscordBot:cmd:status';
|
||||||
|
import humanizeDuration from 'humanize-duration';
|
||||||
|
import { ActionRowBuilder, ButtonBuilder, ButtonStyle, ChannelType, ChatInputCommandInteraction, ColorResolvable, EmbedBuilder } from 'discord.js';
|
||||||
|
import { txEnv } from '@core/globalData';
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { embedder, ensurePermission, isValidButtonEmoji, isValidEmbedUrl, logDiscordAdminAction } from '../discordHelpers';
|
||||||
|
import consoleFactory from '@lib/console';
|
||||||
|
import { msToShortishDuration } from '@lib/misc';
|
||||||
|
import { FxMonitorHealth } from '@shared/enums';
|
||||||
|
const console = consoleFactory(modulename);
|
||||||
|
|
||||||
|
|
||||||
|
const isValidButtonConfig = (btn: any) => {
|
||||||
|
const btnType = typeof btn;
|
||||||
|
return (
|
||||||
|
btn !== null && btnType === 'object'
|
||||||
|
&& typeof btn.label === 'string'
|
||||||
|
&& btn.label.length
|
||||||
|
&& typeof btn.url === 'string'
|
||||||
|
// && btn.url.length //let the function handle it
|
||||||
|
&& (typeof btn.emoji === 'string' || btn.emoji === undefined)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const invalidUrlMessage = `Every URL must start with one of (\`http://\`, \`https://\`, \`discord://\`).
|
||||||
|
URLs cannot be empty; if you do not want a URL, remove the URL line.`;
|
||||||
|
|
||||||
|
const invalidPlaceholderMessage = `Your URL starts with \`{{\`, try removing it.
|
||||||
|
If you just tried to edit a placeholder like \`{{serverBrowserUrl}}\` or \`{{serverJoinUrl}}\`, remember that those placeholders are replaced automatically by txAdmin, meaning you do not need to edit them at all.`
|
||||||
|
|
||||||
|
const invalidEmojiMessage = `All emojis must be one of:
|
||||||
|
- UTF-8 emoji ('😄')
|
||||||
|
- Valid emoji ID ('1062339910654246964')
|
||||||
|
- Discord custom emoji (\`<:name:id>\` or \`<a:name:id>\`).
|
||||||
|
To get the full emoji code, insert it into Discord, add a \`\\\` before it, then send the message`
|
||||||
|
|
||||||
|
|
||||||
|
export const generateStatusMessage = (
|
||||||
|
rawEmbedJson: string = txConfig.discordBot.embedJson,
|
||||||
|
rawEmbedConfigJson: string = txConfig.discordBot.embedConfigJson
|
||||||
|
) => {
|
||||||
|
//Parsing decoded JSONs
|
||||||
|
let embedJson;
|
||||||
|
try {
|
||||||
|
embedJson = JSON.parse(rawEmbedJson);
|
||||||
|
if (!(embedJson instanceof Object)) throw new Error(`not an Object`);
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Embed JSON Error: ${(error as Error).message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
let embedConfigJson;
|
||||||
|
try {
|
||||||
|
embedConfigJson = JSON.parse(rawEmbedConfigJson);
|
||||||
|
if (!(embedConfigJson instanceof Object)) throw new Error(`not an Object`);
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Embed Config JSON Error: ${(error as Error).message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Prepare placeholders
|
||||||
|
//NOTE: serverCfxId can be undefined, breaking the URLs, but there is no easy clean way to deal with this issue
|
||||||
|
const serverCfxId = txCore.cacheStore.get('fxsRuntime:cfxId');
|
||||||
|
const fxMonitorStatus = txCore.fxMonitor.status;
|
||||||
|
const placeholders = {
|
||||||
|
serverName: txConfig.general.serverName,
|
||||||
|
statusString: 'Unknown',
|
||||||
|
statusColor: '#4C3539',
|
||||||
|
serverCfxId,
|
||||||
|
serverBrowserUrl: `https://servers.fivem.net/servers/detail/${serverCfxId}`,
|
        serverJoinUrl: `https://cfx.re/join/${serverCfxId}`,
        serverMaxClients: txCore.cacheStore.get('fxsRuntime:maxClients') ?? 'unknown',
        serverClients: txCore.fxPlayerlist.onlineCount,
        nextScheduledRestart: 'unknown',
        uptime: (fxMonitorStatus.uptime > 0)
            ? msToShortishDuration(fxMonitorStatus.uptime)
            : '--',
    }

    //Prepare scheduler placeholder
    const schedule = txCore.fxScheduler.getStatus();
    if (typeof schedule.nextRelativeMs !== 'number') {
        placeholders.nextScheduledRestart = 'not scheduled';
    } else if (schedule.nextSkip) {
        placeholders.nextScheduledRestart = 'skipped';
    } else {
        const tempFlag = (schedule.nextIsTemp) ? '(tmp)' : '';
        const relativeTime = msToShortishDuration(schedule.nextRelativeMs);
        const isLessThanMinute = schedule.nextRelativeMs < 60_000;
        if (isLessThanMinute) {
            placeholders.nextScheduledRestart = `right now ${tempFlag}`;
        } else {
            placeholders.nextScheduledRestart = `in ${relativeTime} ${tempFlag}`;
        }
    }

    //Prepare status placeholders
    if (fxMonitorStatus.health === FxMonitorHealth.ONLINE) {
        placeholders.statusString = embedConfigJson?.onlineString ?? '🟢 Online';
        placeholders.statusColor = embedConfigJson?.onlineColor ?? "#0BA70B";
    } else if (fxMonitorStatus.health === FxMonitorHealth.PARTIAL) {
        placeholders.statusString = embedConfigJson?.partialString ?? '🟡 Partial';
        placeholders.statusColor = embedConfigJson?.partialColor ?? "#FFF100";
    } else if (fxMonitorStatus.health === FxMonitorHealth.OFFLINE) {
        placeholders.statusString = embedConfigJson?.offlineString ?? '🔴 Offline';
        placeholders.statusColor = embedConfigJson?.offlineColor ?? "#A70B28";
    }

    //Processing embed
    function replacePlaceholders(inputString: string) {
        Object.entries(placeholders).forEach(([key, value]) => {
            inputString = inputString.replaceAll(`{{${key}}}`, String(value));
        });
        return inputString;
    }
    function processValue(inputValue: any): any {
        if (typeof inputValue === 'string') {
            return replacePlaceholders(inputValue);
        } else if (Array.isArray(inputValue)) {
            return inputValue.map((arrValue) => processValue(arrValue));
        } else if (inputValue !== null && typeof inputValue === 'object') {
            return processObject(inputValue);
        } else {
            return inputValue;
        }
    }
    function processObject(inputData: object) {
        const input = cloneDeep(inputData);
        const out: any = {};
        for (const [key, value] of Object.entries(input)) {
            const processed = processValue(value);
            if (key === 'url' && !isValidEmbedUrl(processed)) {
                const messageHead = processed.length
                    ? `Invalid URL \`${processed}\`.`
                    : `Empty URL.`;
                const badPlaceholderMessage = processed.startsWith('{{')
                    ? invalidPlaceholderMessage
                    : '';
                throw new Error([
                    messageHead,
                    invalidUrlMessage,
                    badPlaceholderMessage
                ].join('\n'));
            }
            out[key] = processed;
        }
        return out;
    }
    const processedEmbedData = processObject(embedJson);

    //Attempting to instantiate embed class
    let embed;
    try {
        embed = new EmbedBuilder(processedEmbedData);
        embed.setColor(placeholders.statusColor as ColorResolvable);
        embed.setTimestamp();
        embed.setFooter({
            iconURL: 'https://cdn.discordapp.com/emojis/1062339910654246964.webp?size=96&quality=lossless',
            text: `txAdmin ${txEnv.txaVersion} • Updated every minute`,
        });
    } catch (error) {
        throw new Error(`**Embed Class Error:** ${(error as Error).message}`);
    }

    //Attempting to instantiate buttons
    let buttonsRow: ActionRowBuilder<ButtonBuilder> | undefined;
    try {
        if (Array.isArray(embedConfigJson?.buttons) && embedConfigJson.buttons.length) {
            if (embedConfigJson.buttons.length > 5) {
                throw new Error(`Over limit of 5 buttons.`);
            }
            buttonsRow = new ActionRowBuilder<ButtonBuilder>();
            for (const cfgButton of embedConfigJson.buttons) {
                if (!isValidButtonConfig(cfgButton)) {
                    throw new Error(`Invalid button in Discord Status Embed Config.
All buttons must have:
- Label: string, not empty
- URL: string, not empty, valid URL`);
                }
                const processedUrl = processValue(cfgButton.url);
                if (!isValidEmbedUrl(processedUrl)) {
                    const messageHead = processedUrl.length
                        ? `Invalid URL \`${processedUrl}\``
                        : `Empty URL`;
                    const badPlaceholderMessage = processedUrl.startsWith('{{')
                        ? invalidPlaceholderMessage
                        : '';
                    throw new Error([
                        `${messageHead} for button \`${cfgButton.label}\`.`,
                        invalidUrlMessage,
                        badPlaceholderMessage
                    ].join('\n'));
                }
                const btn = new ButtonBuilder({
                    style: ButtonStyle.Link,
                    label: processValue(cfgButton.label),
                    url: processedUrl,
                });
                if (cfgButton.emoji !== undefined) {
                    if (!isValidButtonEmoji(cfgButton.emoji)) {
                        throw new Error(`Invalid emoji for button \`${cfgButton.label}\`.\n${invalidEmojiMessage}`);
                    }
                    btn.setEmoji(cfgButton.emoji);
                }
                buttonsRow.addComponents(btn);
            }
        }
    } catch (error) {
        throw new Error(`**Embed Buttons Error:** ${(error as Error).message}`);
    }

    return {
        embeds: [embed],
        components: buttonsRow ? [buttonsRow] : undefined,
    };
}


export const removeOldEmbed = async (interaction: ChatInputCommandInteraction) => {
    const oldChannelId = txCore.cacheStore.get('discord:status:channelId');
    const oldMessageId = txCore.cacheStore.get('discord:status:messageId');
    if (typeof oldChannelId === 'string' && typeof oldMessageId === 'string') {
        const oldChannel = await interaction.client.channels.fetch(oldChannelId);
        if (oldChannel?.type === ChannelType.GuildText || oldChannel?.type === ChannelType.GuildAnnouncement) {
            await oldChannel.messages.delete(oldMessageId);
        } else {
            throw new Error(`oldChannel is not a guild text or announcement channel`);
        }
    } else {
        throw new Error(`no old message id saved, maybe was never sent, maybe it was removed`);
    }
}


export default async (interaction: ChatInputCommandInteraction) => {
    //Check permissions
    const adminName = await ensurePermission(interaction, 'settings.write');
    if (typeof adminName !== 'string') return;

    //Attempt to remove old message
    const isRemoveOnly = (interaction.options.getSubcommand() === 'remove');
    try {
        await removeOldEmbed(interaction);
        txCore.cacheStore.delete('discord:status:channelId');
        txCore.cacheStore.delete('discord:status:messageId');
        if (isRemoveOnly) {
            const msg = `Old status embed removed.`;
            logDiscordAdminAction(adminName, msg);
            return await interaction.reply(embedder.success(msg, true));
        }
    } catch (error) {
        if (isRemoveOnly) {
            return await interaction.reply(
                embedder.warning(`**Failed to remove old status embed:**\n${(error as Error).message}`, true)
            );
        }
    }

    //Generate new message
    let newStatusMessage;
    try {
        newStatusMessage = generateStatusMessage();
    } catch (error) {
        return await interaction.reply(
            embedder.warning(`**Failed to generate new embed:**\n${(error as Error).message}`, true)
        );
    }

    //Attempt to send new message
    try {
        if (interaction.channel?.type !== ChannelType.GuildText && interaction.channel?.type !== ChannelType.GuildAnnouncement) {
            throw new Error(`channel type not supported`);
        }
        const placeholderEmbed = new EmbedBuilder({
            description: '_placeholder message, attempting to edit with embed..._\n**Note:** If you are seeing this message, it probably means that something was wrong with the configured Embed JSONs and Discord\'s API rejected the request to replace this placeholder.'
        })
        const newMessage = await interaction.channel.send({ embeds: [placeholderEmbed] });
        await newMessage.edit(newStatusMessage);
        txCore.cacheStore.set('discord:status:channelId', interaction.channelId);
        txCore.cacheStore.set('discord:status:messageId', newMessage.id);
    } catch (error) {
        let msg: string;
        if ((error as any).code === 50013) {
            msg = `This bot does not have permission to send embed messages in this channel.
Please change the channel permissions and give this bot the \`Embed Links\` and \`Send Messages\` permissions.`
        } else {
            msg = (error as Error).message;
        }
        return await interaction.reply(
            embedder.warning(`**Failed to send new embed:**\n${msg}`, true)
        );
    }

    const msg = `Status embed saved.`;
    logDiscordAdminAction(adminName, msg);
    return await interaction.reply(embedder.success(msg, true));
}
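The status command above fills the configured embed by plain string substitution: every `{{key}}` token is swapped for the matching placeholder value, recursively through nested objects and arrays of the embed JSON. A minimal standalone sketch of that substitution step, using made-up placeholder values and hypothetical helper names (not the actual txAdmin code):

//Illustrative only: mirrors the replacePlaceholders/processValue walk with assumed sample values.
const samplePlaceholders: Record<string, string | number> = {
    serverName: 'My RP Server',
    serverClients: 8,
    serverMaxClients: 48,
};

const substitute = (input: unknown): unknown => {
    if (typeof input === 'string') {
        //replace every {{key}} occurrence with its stringified value
        return Object.entries(samplePlaceholders).reduce(
            (acc, [key, value]) => acc.replaceAll(`{{${key}}}`, String(value)),
            input,
        );
    } else if (Array.isArray(input)) {
        return input.map(substitute);
    } else if (input !== null && typeof input === 'object') {
        return Object.fromEntries(Object.entries(input).map(([k, v]) => [k, substitute(v)]));
    }
    return input;
};

//'{{serverName}}: {{serverClients}}/{{serverMaxClients}}' becomes 'My RP Server: 8/48'
console.log(substitute({ title: '{{serverName}}', fields: [{ value: '{{serverClients}}/{{serverMaxClients}}' }] }));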
120
core/modules/DiscordBot/commands/whitelist.ts
Normal file
@ -0,0 +1,120 @@
const modulename = 'DiscordBot:cmd:whitelist';
import { CommandInteraction as ChatInputCommandInteraction, ImageURLOptions } from 'discord.js';
import { now } from '@lib/misc';
import { DuplicateKeyError } from '@modules/Database/dbUtils';
import { embedder, ensurePermission, logDiscordAdminAction } from '../discordHelpers';
import consoleFactory from '@lib/console';
const console = consoleFactory(modulename);


/**
 * Command /whitelist member <mention>
 */
const handleMemberSubcommand = async (interaction: ChatInputCommandInteraction, adminName: string) => {
    //Preparing player id/name/avatar
    const member = interaction.options.getMember('member');
    if (!member || !('user' in member)) {
        return await interaction.reply(embedder.danger(`Failed to resolve member ID.`));
    }
    const identifier = `discord:${member.id}`;
    const playerName = member.nickname ?? member.user.username;
    const avatarOptions: ImageURLOptions = { size: 64, forceStatic: true };
    const playerAvatar = member.displayAvatarURL(avatarOptions) ?? member.user.displayAvatarURL(avatarOptions);

    //Registering approval
    try {
        txCore.database.whitelist.registerApproval({
            identifier,
            playerName,
            playerAvatar,
            tsApproved: now(),
            approvedBy: adminName,
        });
        txCore.fxRunner.sendEvent('whitelistPreApproval', {
            action: 'added',
            identifier,
            playerName,
            adminName,
        });
    } catch (error) {
        return await interaction.reply(embedder.danger(`Failed to save whitelist approval: ${(error as Error).message}`));
    }

    const msg = `Added whitelist approval for ${playerName}.`;
    logDiscordAdminAction(adminName, msg);
    return await interaction.reply(embedder.success(msg));
}


/**
 * Command /whitelist request <id>
 */
const handleRequestSubcommand = async (interaction: ChatInputCommandInteraction, adminName: string) => {
    //@ts-ignore: somehow vscode is resolving interaction as CommandInteraction
    const input = interaction.options.getString('id', true);
    const reqId = input.trim().toUpperCase();
    if (reqId.length !== 5 || reqId[0] !== 'R') {
        return await interaction.reply(embedder.danger('Invalid request ID.'));
    }

    //Find request
    const requests = txCore.database.whitelist.findManyRequests({ id: reqId });
    if (!requests.length) {
        return await interaction.reply(embedder.warning(`Whitelist request ID \`${reqId}\` not found.`));
    }
    const req = requests[0]; //just getting the first

    //Register whitelistApprovals
    const identifier = `license:${req.license}`;
    const playerName = req.discordTag ?? req.playerDisplayName;
    try {
        txCore.database.whitelist.registerApproval({
            identifier,
            playerName,
            playerAvatar: (req.discordAvatar) ? req.discordAvatar : null,
            tsApproved: now(),
            approvedBy: adminName,
        });
        txCore.fxRunner.sendEvent('whitelistRequest', {
            action: 'approved',
            playerName,
            requestId: req.id,
            license: req.license,
            adminName,
        });
    } catch (error) {
        if (!(error instanceof DuplicateKeyError)) {
            return await interaction.reply(embedder.danger(`Failed to save wl approval: ${(error as Error).message}`));
        }
    }

    //Remove record from whitelistRequests
    try {
        txCore.database.whitelist.removeManyRequests({ id: reqId });
    } catch (error) {
        return await interaction.reply(embedder.danger(`Failed to remove wl request: ${(error as Error).message}`));
    }

    const msg = `Approved whitelist request \`${reqId}\` from ${playerName}.`;
    logDiscordAdminAction(adminName, msg);
    return await interaction.reply(embedder.success(msg));
}


/**
 * Handler for /whitelist
 */
export default async (interaction: ChatInputCommandInteraction) => {
    //Check permissions
    const adminName = await ensurePermission(interaction, 'players.whitelist');
    if (typeof adminName !== 'string') return;

    //@ts-ignore: somehow vscode is resolving interaction as CommandInteraction
    const subcommand = interaction.options.getSubcommand();
    if (subcommand === 'member') {
        return await handleMemberSubcommand(interaction, adminName);
    } else if (subcommand === 'request') {
        return await handleRequestSubcommand(interaction, adminName);
    }
    throw new Error(`Subcommand ${subcommand} not found.`);
}
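The request subcommand above only accepts a 5-character ID starting with `R`, and turns the matched request into a `license:`-prefixed approval. A small sketch of just that normalization/validation step, with hypothetical inputs and a helper name of my own (not part of the file above):

//Illustrative only: mirrors the reqId trim/uppercase/shape check used by /whitelist request.
const normalizeRequestId = (input: string): string | null => {
    const reqId = input.trim().toUpperCase();
    return (reqId.length === 5 && reqId[0] === 'R') ? reqId : null;
};

console.log(normalizeRequestId(' r1b2c '));  // 'R1B2C' (accepted)
console.log(normalizeRequestId('12345'));    // null (must start with R)
console.log(normalizeRequestId('R123456'));  // null (must be exactly 5 characters)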
65
core/modules/DiscordBot/defaultJsons.ts
Normal file
@ -0,0 +1,65 @@
import { txEnv } from "@core/globalData";

export const defaultEmbedJson = JSON.stringify({
    "title": "{{serverName}}",
    "url": "{{serverBrowserUrl}}",
    "description": "You can configure this embed in `txAdmin > Settings > Discord Bot`, and edit everything from it (except footer).",
    "fields": [
        {
            "name": "> STATUS",
            "value": "```\n{{statusString}}\n```",
            "inline": true
        },
        {
            "name": "> PLAYERS",
            "value": "```\n{{serverClients}}/{{serverMaxClients}}\n```",
            "inline": true
        },
        {
            "name": "> F8 CONNECT COMMAND",
            "value": "```\nconnect 123.123.123.123\n```"
        },
        {
            "name": "> NEXT RESTART",
            "value": "```\n{{nextScheduledRestart}}\n```",
            "inline": true
        },
        {
            "name": "> UPTIME",
            "value": "```\n{{uptime}}\n```",
            "inline": true
        }
    ],
    "image": {
        "url": "https://forum-cfx-re.akamaized.net/original/5X/e/e/c/b/eecb4664ee03d39e34fcd82a075a18c24add91ed.png"
    },
    "thumbnail": {
        "url": "https://forum-cfx-re.akamaized.net/original/5X/9/b/d/7/9bd744dc2b21804e18c3bb331e8902c930624e44.png"
    }
});

export const defaultEmbedConfigJson = JSON.stringify({
    "onlineString": "🟢 Online",
    "onlineColor": "#0BA70B",
    "partialString": "🟡 Partial",
    "partialColor": "#FFF100",
    "offlineString": "🔴 Offline",
    "offlineColor": "#A70B28",
    "buttons": [
        {
            "emoji": "1062338355909640233",
            "label": "Connect",
            "url": "{{serverJoinUrl}}"
        },
        {
            "emoji": "1062339910654246964",
            "label": "txAdmin Discord",
            "url": "https://discord.gg/txAdmin"
        },
        txEnv.displayAds ? {
            "emoji": "😏",
            "label": "ZAP-Hosting",
            "url": "https://zap-hosting.com/txadmin6"
        } : undefined,
    ].filter(Boolean)
});
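The default config above includes the ZAP-Hosting button only when `txEnv.displayAds` is truthy, using a ternary that yields `undefined` followed by `.filter(Boolean)`. A minimal sketch of that conditional-element pattern, with an assumed local flag standing in for `txEnv.displayAds`:

//Illustrative only: the ternary + filter(Boolean) pattern used for the optional button.
const displayAds = false; //assumed flag, stands in for txEnv.displayAds

const buttons = [
    { label: 'Connect', url: '{{serverJoinUrl}}' },
    { label: 'txAdmin Discord', url: 'https://discord.gg/txAdmin' },
    displayAds ? { label: 'ZAP-Hosting', url: 'https://zap-hosting.com/txadmin6' } : undefined,
].filter(Boolean);

console.log(buttons.length); //2 when displayAds is false, 3 when it is true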
113
core/modules/DiscordBot/discordHelpers.ts
Normal file
@ -0,0 +1,113 @@
const modulename = 'DiscordBot:cmd';
import orderedEmojis from 'unicode-emoji-json/data-ordered-emoji';
import { ColorResolvable, CommandInteraction, EmbedBuilder, InteractionReplyOptions } from "discord.js";
import consoleFactory from '@lib/console';
const console = consoleFactory(modulename);
const allEmojis = new Set(orderedEmojis);


/**
 * Generic embed generation functions
 */
const genericEmbed = (
    msg: string,
    ephemeral = false,
    color: ColorResolvable | null = null,
    emoji?: string
): InteractionReplyOptions => {
    return {
        ephemeral,
        embeds: [new EmbedBuilder({
            description: emoji ? `:${emoji}: ${msg}` : msg,
        }).setColor(color)],
    }
}

export const embedColors = {
    info: '#1D76C9',
    success: '#0BA70B',
    warning: '#FFF100',
    danger: '#A70B28',
} as const;

export const embedder = {
    generic: genericEmbed,
    info: (msg: string, ephemeral = false) => genericEmbed(msg, ephemeral, embedColors.info, 'information_source'),
    success: (msg: string, ephemeral = false) => genericEmbed(msg, ephemeral, embedColors.success, 'white_check_mark'),
    warning: (msg: string, ephemeral = false) => genericEmbed(msg, ephemeral, embedColors.warning, 'warning'),
    danger: (msg: string, ephemeral = false) => genericEmbed(msg, ephemeral, embedColors.danger, 'no_entry_sign'),
}


/**
 * Ensure that the discord interaction author has the required permission
 */
export const ensurePermission = async (interaction: CommandInteraction, reqPerm: string) => {
    const admin = txCore.adminStore.getAdminByProviderUID(interaction.user.id);
    if (!admin) {
        await interaction.reply(
            embedder.warning(`**Your account does not have txAdmin access.** :face_with_monocle:\nIf you are already registered in txAdmin, visit the Admin Manager page, and make sure the Discord ID for your user is set to \`${interaction.user.id}\`.`, true)
        );
        return false;
    }
    if (
        admin.master !== true
        && !admin.permissions.includes('all_permissions')
        && !admin.permissions.includes(reqPerm)
    ) {
        //@ts-ignore: not important
        const permName = txCore.adminStore.registeredPermissions[reqPerm] ?? 'Unknown';
        await interaction.reply(
            embedder.danger(`Your txAdmin account does not have the "${permName}" permissions required for this action.`, true)
        );
        return false;
    }

    return admin.name;
}


/**
 * Equivalent to ctx.admin.logAction()
 */
export const logDiscordAdminAction = async (adminName: string, message: string) => {
    txCore.logger.admin.write(adminName, message);
}


/**
 * Tests if an embed url is valid or not
 */
export const isValidEmbedUrl = (url: unknown) => {
    return typeof url === 'string' && /^(https?|discord):\/\//.test(url);
}


/**
 * Tests if an emoji STRING is valid or not.
 * Acceptable options:
 * - UTF-8 emoji ('😄')
 * - Valid emoji ID ('1062339910654246964')
 * - Discord custom emoji (`<:name:id>` or `<a:name:id>`)
 */
export const isValidButtonEmoji = (emoji: unknown) => {
    if (typeof emoji !== 'string') return false;
    if (/^\d{17,19}$/.test(emoji)) return true;
    if (/^<a?:\w{2,32}:\d{17,19}>$/.test(emoji)) return true;
    return allEmojis.has(emoji);
}


//Works
// console.dir(isValidEmoji('<:txicon:1062339910654246964>'))
// console.dir(isValidEmoji('1062339910654246964'))
// console.dir(isValidEmoji('😄'))
// console.dir(isValidEmoji('🇵🇼'))
// console.dir(isValidEmoji('\u{1F469}\u{200D}\u{2764}\u{FE0F}\u{200D}\u{1F48B}\u{200D}\u{1F469}'))

//Discord throws api error
// console.dir(isValidEmoji(':smile:'))
// console.dir(isValidEmoji('smile'))
// console.dir(isValidEmoji({name: 'smile'}))
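The commented checks above reference `isValidEmoji`, presumably an earlier name of the exported `isValidButtonEmoji`. A short sketch exercising the two validators as exported here, with expected results following from the regexes above; the relative import path is an assumption for a sibling file in the same directory:

//Illustrative only: expected results based on the validators defined above.
import { isValidEmbedUrl, isValidButtonEmoji } from './discordHelpers';

console.log(isValidEmbedUrl('https://cfx.re/join/abc123'));        //true
console.log(isValidEmbedUrl('{{serverJoinUrl}}'));                 //false (unreplaced placeholder)
console.log(isValidButtonEmoji('1062339910654246964'));            //true (emoji ID)
console.log(isValidButtonEmoji('<:txicon:1062339910654246964>'));  //true (custom emoji)
console.log(isValidButtonEmoji(':smile:'));                        //false (shortcodes are rejected)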