This commit is contained in:
Your Name
2023-04-14 23:38:34 +08:00
commit ff5da79d57
1392 changed files with 378574 additions and 0 deletions

401
scripts/SDL_scancode.h Normal file

@ -0,0 +1,401 @@
/*
Simple DirectMedia Layer
Copyright (C) 1997-2014 Sam Lantinga <slouken@libsdl.org>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
/**
* \file SDL_scancode.h
*
* Defines keyboard scancodes.
*/
#ifndef _SDL_scancode_h
#define _SDL_scancode_h
#include "SDL_stdinc.h"
/**
* \brief The SDL keyboard scancode representation.
*
* Values of this type are used to represent keyboard keys, among other places
* in the \link SDL_Keysym::scancode key.keysym.scancode \endlink field of the
* SDL_Event structure.
*
* The values in this enumeration are based on the USB usage page standard:
* http://www.usb.org/developers/devclass_docs/Hut1_12v2.pdf
*/
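/*
 * A minimal usage sketch (illustrative only, not part of the original
 * header): scancodes identify physical key positions, so they are
 * typically compared against SDL_Event::key.keysym.scancode in an
 * event loop:
 *
 *     SDL_Event event;
 *     while (SDL_PollEvent(&event)) {
 *         if (event.type == SDL_KEYDOWN &&
 *             event.key.keysym.scancode == SDL_SCANCODE_ESCAPE) {
 *             // physical Escape key, independent of the keyboard layout
 *         }
 *     }
 */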
typedef enum
{
SDL_SCANCODE_UNKNOWN = 0,
/**
* \name Usage page 0x07
*
* These values are from usage page 0x07 (USB keyboard page).
*/
/* @{ */
SDL_SCANCODE_A = 4,
SDL_SCANCODE_B = 5,
SDL_SCANCODE_C = 6,
SDL_SCANCODE_D = 7,
SDL_SCANCODE_E = 8,
SDL_SCANCODE_F = 9,
SDL_SCANCODE_G = 10,
SDL_SCANCODE_H = 11,
SDL_SCANCODE_I = 12,
SDL_SCANCODE_J = 13,
SDL_SCANCODE_K = 14,
SDL_SCANCODE_L = 15,
SDL_SCANCODE_M = 16,
SDL_SCANCODE_N = 17,
SDL_SCANCODE_O = 18,
SDL_SCANCODE_P = 19,
SDL_SCANCODE_Q = 20,
SDL_SCANCODE_R = 21,
SDL_SCANCODE_S = 22,
SDL_SCANCODE_T = 23,
SDL_SCANCODE_U = 24,
SDL_SCANCODE_V = 25,
SDL_SCANCODE_W = 26,
SDL_SCANCODE_X = 27,
SDL_SCANCODE_Y = 28,
SDL_SCANCODE_Z = 29,
SDL_SCANCODE_1 = 30,
SDL_SCANCODE_2 = 31,
SDL_SCANCODE_3 = 32,
SDL_SCANCODE_4 = 33,
SDL_SCANCODE_5 = 34,
SDL_SCANCODE_6 = 35,
SDL_SCANCODE_7 = 36,
SDL_SCANCODE_8 = 37,
SDL_SCANCODE_9 = 38,
SDL_SCANCODE_0 = 39,
SDL_SCANCODE_RETURN = 40,
SDL_SCANCODE_ESCAPE = 41,
SDL_SCANCODE_BACKSPACE = 42,
SDL_SCANCODE_TAB = 43,
SDL_SCANCODE_SPACE = 44,
SDL_SCANCODE_MINUS = 45,
SDL_SCANCODE_EQUALS = 46,
SDL_SCANCODE_LEFTBRACKET = 47,
SDL_SCANCODE_RIGHTBRACKET = 48,
SDL_SCANCODE_BACKSLASH = 49, /**< Located at the lower left of the return
* key on ISO keyboards and at the right end
* of the QWERTY row on ANSI keyboards.
* Produces REVERSE SOLIDUS (backslash) and
* VERTICAL LINE in a US layout, REVERSE
* SOLIDUS and VERTICAL LINE in a UK Mac
* layout, NUMBER SIGN and TILDE in a UK
* Windows layout, DOLLAR SIGN and POUND SIGN
* in a Swiss German layout, NUMBER SIGN and
* APOSTROPHE in a German layout, GRAVE
* ACCENT and POUND SIGN in a French Mac
* layout, and ASTERISK and MICRO SIGN in a
* French Windows layout.
*/
SDL_SCANCODE_NONUSHASH = 50, /**< ISO USB keyboards actually use this code
* instead of 49 for the same key, but all
* OSes I've seen treat the two codes
* identically. So, as an implementor, unless
* your keyboard generates both of those
* codes and your OS treats them differently,
* you should generate SDL_SCANCODE_BACKSLASH
* instead of this code. As a user, you
* should not rely on this code because SDL
* will never generate it with most (all?)
* keyboards.
*/
SDL_SCANCODE_SEMICOLON = 51,
SDL_SCANCODE_APOSTROPHE = 52,
SDL_SCANCODE_GRAVE = 53, /**< Located in the top left corner (on both ANSI
* and ISO keyboards). Produces GRAVE ACCENT and
* TILDE in a US Windows layout and in US and UK
* Mac layouts on ANSI keyboards, GRAVE ACCENT
* and NOT SIGN in a UK Windows layout, SECTION
* SIGN and PLUS-MINUS SIGN in US and UK Mac
* layouts on ISO keyboards, SECTION SIGN and
* DEGREE SIGN in a Swiss German layout (Mac:
* only on ISO keyboards), CIRCUMFLEX ACCENT and
* DEGREE SIGN in a German layout (Mac: only on
* ISO keyboards), SUPERSCRIPT TWO and TILDE in a
* French Windows layout, COMMERCIAL AT and
* NUMBER SIGN in a French Mac layout on ISO
* keyboards, and LESS-THAN SIGN and GREATER-THAN
* SIGN in a Swiss German, German, or French Mac
* layout on ANSI keyboards.
*/
SDL_SCANCODE_COMMA = 54,
SDL_SCANCODE_PERIOD = 55,
SDL_SCANCODE_SLASH = 56,
SDL_SCANCODE_CAPSLOCK = 57,
SDL_SCANCODE_F1 = 58,
SDL_SCANCODE_F2 = 59,
SDL_SCANCODE_F3 = 60,
SDL_SCANCODE_F4 = 61,
SDL_SCANCODE_F5 = 62,
SDL_SCANCODE_F6 = 63,
SDL_SCANCODE_F7 = 64,
SDL_SCANCODE_F8 = 65,
SDL_SCANCODE_F9 = 66,
SDL_SCANCODE_F10 = 67,
SDL_SCANCODE_F11 = 68,
SDL_SCANCODE_F12 = 69,
SDL_SCANCODE_PRINTSCREEN = 70,
SDL_SCANCODE_SCROLLLOCK = 71,
SDL_SCANCODE_PAUSE = 72,
SDL_SCANCODE_INSERT = 73, /**< insert on PC, help on some Mac keyboards (but
does send code 73, not 117) */
SDL_SCANCODE_HOME = 74,
SDL_SCANCODE_PAGEUP = 75,
SDL_SCANCODE_DELETE = 76,
SDL_SCANCODE_END = 77,
SDL_SCANCODE_PAGEDOWN = 78,
SDL_SCANCODE_RIGHT = 79,
SDL_SCANCODE_LEFT = 80,
SDL_SCANCODE_DOWN = 81,
SDL_SCANCODE_UP = 82,
SDL_SCANCODE_NUMLOCKCLEAR = 83, /**< num lock on PC, clear on Mac keyboards
*/
SDL_SCANCODE_KP_DIVIDE = 84,
SDL_SCANCODE_KP_MULTIPLY = 85,
SDL_SCANCODE_KP_MINUS = 86,
SDL_SCANCODE_KP_PLUS = 87,
SDL_SCANCODE_KP_ENTER = 88,
SDL_SCANCODE_KP_1 = 89,
SDL_SCANCODE_KP_2 = 90,
SDL_SCANCODE_KP_3 = 91,
SDL_SCANCODE_KP_4 = 92,
SDL_SCANCODE_KP_5 = 93,
SDL_SCANCODE_KP_6 = 94,
SDL_SCANCODE_KP_7 = 95,
SDL_SCANCODE_KP_8 = 96,
SDL_SCANCODE_KP_9 = 97,
SDL_SCANCODE_KP_0 = 98,
SDL_SCANCODE_KP_PERIOD = 99,
SDL_SCANCODE_NONUSBACKSLASH = 100, /**< This is the additional key that ISO
* keyboards have over ANSI ones,
* located between left shift and Y.
* Produces GRAVE ACCENT and TILDE in a
* US or UK Mac layout, REVERSE SOLIDUS
* (backslash) and VERTICAL LINE in a
* US or UK Windows layout, and
* LESS-THAN SIGN and GREATER-THAN SIGN
* in a Swiss German, German, or French
* layout. */
SDL_SCANCODE_APPLICATION = 101, /**< windows contextual menu, compose */
SDL_SCANCODE_POWER = 102, /**< The USB document says this is a status flag,
* not a physical key - but some Mac keyboards
* do have a power key. */
SDL_SCANCODE_KP_EQUALS = 103,
SDL_SCANCODE_F13 = 104,
SDL_SCANCODE_F14 = 105,
SDL_SCANCODE_F15 = 106,
SDL_SCANCODE_F16 = 107,
SDL_SCANCODE_F17 = 108,
SDL_SCANCODE_F18 = 109,
SDL_SCANCODE_F19 = 110,
SDL_SCANCODE_F20 = 111,
SDL_SCANCODE_F21 = 112,
SDL_SCANCODE_F22 = 113,
SDL_SCANCODE_F23 = 114,
SDL_SCANCODE_F24 = 115,
SDL_SCANCODE_EXECUTE = 116,
SDL_SCANCODE_HELP = 117,
SDL_SCANCODE_MENU = 118,
SDL_SCANCODE_SELECT = 119,
SDL_SCANCODE_STOP = 120,
SDL_SCANCODE_AGAIN = 121, /**< redo */
SDL_SCANCODE_UNDO = 122,
SDL_SCANCODE_CUT = 123,
SDL_SCANCODE_COPY = 124,
SDL_SCANCODE_PASTE = 125,
SDL_SCANCODE_FIND = 126,
SDL_SCANCODE_MUTE = 127,
SDL_SCANCODE_VOLUMEUP = 128,
SDL_SCANCODE_VOLUMEDOWN = 129,
/* not sure whether there's a reason to enable these */
/* SDL_SCANCODE_LOCKINGCAPSLOCK = 130, */
/* SDL_SCANCODE_LOCKINGNUMLOCK = 131, */
/* SDL_SCANCODE_LOCKINGSCROLLLOCK = 132, */
SDL_SCANCODE_KP_COMMA = 133,
SDL_SCANCODE_KP_EQUALSAS400 = 134,
SDL_SCANCODE_INTERNATIONAL1 = 135, /**< used on Asian keyboards, see
footnotes in USB doc */
SDL_SCANCODE_INTERNATIONAL2 = 136,
SDL_SCANCODE_INTERNATIONAL3 = 137, /**< Yen */
SDL_SCANCODE_INTERNATIONAL4 = 138,
SDL_SCANCODE_INTERNATIONAL5 = 139,
SDL_SCANCODE_INTERNATIONAL6 = 140,
SDL_SCANCODE_INTERNATIONAL7 = 141,
SDL_SCANCODE_INTERNATIONAL8 = 142,
SDL_SCANCODE_INTERNATIONAL9 = 143,
SDL_SCANCODE_LANG1 = 144, /**< Hangul/English toggle */
SDL_SCANCODE_LANG2 = 145, /**< Hanja conversion */
SDL_SCANCODE_LANG3 = 146, /**< Katakana */
SDL_SCANCODE_LANG4 = 147, /**< Hiragana */
SDL_SCANCODE_LANG5 = 148, /**< Zenkaku/Hankaku */
SDL_SCANCODE_LANG6 = 149, /**< reserved */
SDL_SCANCODE_LANG7 = 150, /**< reserved */
SDL_SCANCODE_LANG8 = 151, /**< reserved */
SDL_SCANCODE_LANG9 = 152, /**< reserved */
SDL_SCANCODE_ALTERASE = 153, /**< Erase-Eaze */
SDL_SCANCODE_SYSREQ = 154,
SDL_SCANCODE_CANCEL = 155,
SDL_SCANCODE_CLEAR = 156,
SDL_SCANCODE_PRIOR = 157,
SDL_SCANCODE_RETURN2 = 158,
SDL_SCANCODE_SEPARATOR = 159,
SDL_SCANCODE_OUT = 160,
SDL_SCANCODE_OPER = 161,
SDL_SCANCODE_CLEARAGAIN = 162,
SDL_SCANCODE_CRSEL = 163,
SDL_SCANCODE_EXSEL = 164,
SDL_SCANCODE_KP_00 = 176,
SDL_SCANCODE_KP_000 = 177,
SDL_SCANCODE_THOUSANDSSEPARATOR = 178,
SDL_SCANCODE_DECIMALSEPARATOR = 179,
SDL_SCANCODE_CURRENCYUNIT = 180,
SDL_SCANCODE_CURRENCYSUBUNIT = 181,
SDL_SCANCODE_KP_LEFTPAREN = 182,
SDL_SCANCODE_KP_RIGHTPAREN = 183,
SDL_SCANCODE_KP_LEFTBRACE = 184,
SDL_SCANCODE_KP_RIGHTBRACE = 185,
SDL_SCANCODE_KP_TAB = 186,
SDL_SCANCODE_KP_BACKSPACE = 187,
SDL_SCANCODE_KP_A = 188,
SDL_SCANCODE_KP_B = 189,
SDL_SCANCODE_KP_C = 190,
SDL_SCANCODE_KP_D = 191,
SDL_SCANCODE_KP_E = 192,
SDL_SCANCODE_KP_F = 193,
SDL_SCANCODE_KP_XOR = 194,
SDL_SCANCODE_KP_POWER = 195,
SDL_SCANCODE_KP_PERCENT = 196,
SDL_SCANCODE_KP_LESS = 197,
SDL_SCANCODE_KP_GREATER = 198,
SDL_SCANCODE_KP_AMPERSAND = 199,
SDL_SCANCODE_KP_DBLAMPERSAND = 200,
SDL_SCANCODE_KP_VERTICALBAR = 201,
SDL_SCANCODE_KP_DBLVERTICALBAR = 202,
SDL_SCANCODE_KP_COLON = 203,
SDL_SCANCODE_KP_HASH = 204,
SDL_SCANCODE_KP_SPACE = 205,
SDL_SCANCODE_KP_AT = 206,
SDL_SCANCODE_KP_EXCLAM = 207,
SDL_SCANCODE_KP_MEMSTORE = 208,
SDL_SCANCODE_KP_MEMRECALL = 209,
SDL_SCANCODE_KP_MEMCLEAR = 210,
SDL_SCANCODE_KP_MEMADD = 211,
SDL_SCANCODE_KP_MEMSUBTRACT = 212,
SDL_SCANCODE_KP_MEMMULTIPLY = 213,
SDL_SCANCODE_KP_MEMDIVIDE = 214,
SDL_SCANCODE_KP_PLUSMINUS = 215,
SDL_SCANCODE_KP_CLEAR = 216,
SDL_SCANCODE_KP_CLEARENTRY = 217,
SDL_SCANCODE_KP_BINARY = 218,
SDL_SCANCODE_KP_OCTAL = 219,
SDL_SCANCODE_KP_DECIMAL = 220,
SDL_SCANCODE_KP_HEXADECIMAL = 221,
SDL_SCANCODE_LCTRL = 224,
SDL_SCANCODE_LSHIFT = 225,
SDL_SCANCODE_LALT = 226, /**< alt, option */
SDL_SCANCODE_LGUI = 227, /**< windows, command (apple), meta */
SDL_SCANCODE_RCTRL = 228,
SDL_SCANCODE_RSHIFT = 229,
SDL_SCANCODE_RALT = 230, /**< alt gr, option */
SDL_SCANCODE_RGUI = 231, /**< windows, command (apple), meta */
SDL_SCANCODE_MODE = 257, /**< I'm not sure if this is really not covered
* by any of the above, but since there's a
* special KMOD_MODE for it I'm adding it here
*/
/* @} *//* Usage page 0x07 */
/**
* \name Usage page 0x0C
*
* These values are mapped from usage page 0x0C (USB consumer page).
*/
/* @{ */
SDL_SCANCODE_AUDIONEXT = 258,
SDL_SCANCODE_AUDIOPREV = 259,
SDL_SCANCODE_AUDIOSTOP = 260,
SDL_SCANCODE_AUDIOPLAY = 261,
SDL_SCANCODE_AUDIOMUTE = 262,
SDL_SCANCODE_MEDIASELECT = 263,
SDL_SCANCODE_WWW = 264,
SDL_SCANCODE_MAIL = 265,
SDL_SCANCODE_CALCULATOR = 266,
SDL_SCANCODE_COMPUTER = 267,
SDL_SCANCODE_AC_SEARCH = 268,
SDL_SCANCODE_AC_HOME = 269,
SDL_SCANCODE_AC_BACK = 270,
SDL_SCANCODE_AC_FORWARD = 271,
SDL_SCANCODE_AC_STOP = 272,
SDL_SCANCODE_AC_REFRESH = 273,
SDL_SCANCODE_AC_BOOKMARKS = 274,
/* @} *//* Usage page 0x0C */
/**
* \name Walther keys
*
* These are values that Christian Walther added (for mac keyboard?).
*/
/* @{ */
SDL_SCANCODE_BRIGHTNESSDOWN = 275,
SDL_SCANCODE_BRIGHTNESSUP = 276,
SDL_SCANCODE_DISPLAYSWITCH = 277, /**< display mirroring/dual display
switch, video mode switch */
SDL_SCANCODE_KBDILLUMTOGGLE = 278,
SDL_SCANCODE_KBDILLUMDOWN = 279,
SDL_SCANCODE_KBDILLUMUP = 280,
SDL_SCANCODE_EJECT = 281,
SDL_SCANCODE_SLEEP = 282,
SDL_SCANCODE_APP1 = 283,
SDL_SCANCODE_APP2 = 284,
/* @} *//* Walther keys */
/* Add any other keys here. */
SDL_NUM_SCANCODES = 512 /**< not a key, just marks the number of scancodes
for array bounds */
} SDL_Scancode;
#endif /* _SDL_scancode_h */
/* vi: set ts=4 sw=4 expandtab: */

53
scripts/android/README.md Normal file

@ -0,0 +1,53 @@
Requirements for building:
==========================
- Android NDK (tested with NDK 23; at least version 23 is required), located where Android Studio would unpack it (~/Android/Sdk/ndk/)
- Android SDK build tools, version 30.0.3
- ddnet-libs with Android libs
- Java JDK 11+
- 7zip (for building ddnet-libs)
- ninja
- curl runtime
How to build:
=============
- Run a terminal inside the source directory:
  `scripts/android/cmake_android.sh <x86/x86_64/arm/arm64/all> <Game name> <Debug/Release>`
  The first parameter is the arch (`all` builds all arches), the second is the APK name, which must be equal to the library name (if you want to rename the APK, do it after the build),
  and the third parameter defines the build type.
- To build with a signing key for the APK:
  Generate one with
  `keytool -genkey -v -keystore my-release-key.jks -keyalg RSA -keysize 2048 -validity 10000 -alias my-alias`
  and export the environment variables for the script:
```
export TW_KEY_NAME=<key name>
export TW_KEY_PW=<key password>
export TW_KEY_ALIAS=<key alias>
```
so for example:
```
keytool -genkey -v -keystore Teeworlds.jks -keyalg RSA -keysize 2048 -validity 10000 -alias Teeworlds-Key
(it will prompt for input:)
Input keystore-password: mypassword
export TW_KEY_NAME=Teeworlds.jks
export TW_KEY_PW=mypassword
export TW_KEY_ALIAS=Teeworlds-Key
scripts/android/cmake_android.sh all DDNet Release
```
You can also specify the build version code and build version string before running the build script, e.g.:
```
export TW_VERSION_CODE=20210819
export TW_VERSION_NAME="1.0"
```
How to build the ddnet-libs for Android:
========================================
- There is a script that automatically downloads and builds all repositories; it requires an active internet connection:
  `scripts/compile_libs/gen_libs.sh <directory to build in> android`
  Warning: DO NOT CHOOSE A DIRECTORY INSIDE THE SOURCE TREE!
  After the script finishes, it should have created a ddnet-libs directory containing all libs in the right directory format, which can be merged with the ddnet-libs directory in the source tree, e.g. as sketched below.
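
As a sketch (assuming a build directory `/tmp/ddnet-libs-build` outside the source tree, and that `rsync` is available), a full libs build and merge could look like:
```
scripts/compile_libs/gen_libs.sh /tmp/ddnet-libs-build android
# merge the generated libs back into the source tree's ddnet-libs
rsync -a /tmp/ddnet-libs-build/ddnet-libs/ ddnet-libs/
```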

234
scripts/android/cmake_android.sh Executable file

@ -0,0 +1,234 @@
#!/bin/bash
export ANDROID_HOME=~/Android/Sdk
export MAKEFLAGS=-j32
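# pick the newest NDK installed under $ANDROID_HOME/ndk (directory name = version)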
ANDROID_NDK_VERSION="$(cd "$ANDROID_HOME/ndk" && find . -maxdepth 1 | sort -n | tail -1)"
ANDROID_NDK_VERSION="${ANDROID_NDK_VERSION:2}"
export ANDROID_NDK_VERSION
ANDROID_NDK="$ANDROID_HOME/ndk/$ANDROID_NDK_VERSION"
_DEFAULT_ANDROID_BUILD=x86
_DEFAULT_GAME_NAME=DDNet
_DEFAULT_BUILD_TYPE=Debug
_ANDROID_API_LEVEL=android-24
_ANDROID_SUB_BUILD_DIR=build_arch
_SHOW_USAGE_INFO=0
if [ -z ${1+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass android build type, using default: ${_DEFAULT_ANDROID_BUILD}"
_SHOW_USAGE_INFO=1
else
_DEFAULT_ANDROID_BUILD=$1
fi
if [ -z ${2+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass game name, using default: ${_DEFAULT_GAME_NAME}"
_SHOW_USAGE_INFO=1
else
_DEFAULT_GAME_NAME=$2
fi
if [ -z ${3+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass build type, using default: ${_DEFAULT_BUILD_TYPE}"
_SHOW_USAGE_INFO=1
else
_DEFAULT_BUILD_TYPE=$3
fi
_ANDROID_JAR_KEY_NAME=~/.android/debug.keystore
_ANDROID_JAR_KEY_PW=android
_ANDROID_JAR_KEY_ALIAS=androiddebugkey
if [ -z ${TW_KEY_NAME+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass a key for the jar signer, using default: ${_ANDROID_JAR_KEY_NAME}"
else
_ANDROID_JAR_KEY_NAME=$TW_KEY_NAME
fi
if [ -z ${TW_KEY_PW+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass a key pw for the jar signer, using default: ${_ANDROID_JAR_KEY_PW}"
else
_ANDROID_JAR_KEY_PW=$TW_KEY_PW
fi
if [ -z ${TW_KEY_ALIAS+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass a key alias for the jar signer, using default: ${_ANDROID_JAR_KEY_ALIAS}"
else
_ANDROID_JAR_KEY_ALIAS=$TW_KEY_ALIAS
fi
export TW_KEY_NAME="${_ANDROID_JAR_KEY_NAME}"
export TW_KEY_PW=$_ANDROID_JAR_KEY_PW
export TW_KEY_ALIAS=$_ANDROID_JAR_KEY_ALIAS
_ANDROID_VERSION_CODE=1
if [ -z ${TW_VERSION_CODE+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass a version code, using default: ${_ANDROID_VERSION_CODE}"
else
_ANDROID_VERSION_CODE=$TW_VERSION_CODE
fi
export TW_VERSION_CODE=$_ANDROID_VERSION_CODE
_ANDROID_VERSION_NAME="1.0"
if [ -z ${TW_VERSION_NAME+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass a version name, using default: ${_ANDROID_VERSION_NAME}"
else
_ANDROID_VERSION_NAME=$TW_VERSION_NAME
fi
export TW_VERSION_NAME=$_ANDROID_VERSION_NAME
printf "\e[31m%s\e[1m\n" "Building with setting, for arch: ${_DEFAULT_ANDROID_BUILD}, with build type: ${_DEFAULT_BUILD_TYPE}, with name: ${_DEFAULT_GAME_NAME}"
if [ $_SHOW_USAGE_INFO == 1 ]; then
printf "\e[31m%s\e[1m\n" "Usage: ./cmake_android.sh <x86/x86_64/arm/arm64/all> <Game name> <Debug/Release>"
fi
printf "\e[33mBuilding cmake\e[0m\n"
function build_for_type() {
cmake \
-H. \
-G "Ninja" \
-DPREFER_BUNDLED_LIBS=ON \
-DCMAKE_BUILD_TYPE="${_DEFAULT_BUILD_TYPE}" \
-DANDROID_NATIVE_API_LEVEL="$_ANDROID_API_LEVEL" \
-DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" \
-DANDROID_NDK="$ANDROID_NDK" \
-DANDROID_ABI="${2}" \
-DANDROID_ARM_NEON=TRUE \
-Bbuild_android/"$_ANDROID_SUB_BUILD_DIR/$1" \
-DSERVER=OFF \
-DTOOLS=OFF \
-DDEV=TRUE \
-DCMAKE_CROSSCOMPILING=ON \
-DVULKAN=ON \
-DVIDEORECORDER=OFF
(
cd "build_android/$_ANDROID_SUB_BUILD_DIR/$1" || exit 1
cmake --build . --target DDNet
)
}
mkdir build_android
if [[ "${_DEFAULT_ANDROID_BUILD}" == "arm" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
build_for_type arm armeabi-v7a arm eabi &
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "arm64" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
build_for_type arm64 arm64-v8a aarch64 &
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "x86" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
build_for_type x86 x86 i686 &
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "x86_64" || "${_DEFAULT_ANDROID_BUILD}" == "x64" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
build_for_type x86_64 x86_64 x86_64 &
fi
wait
printf "\e[36mPreparing gradle build\n"
cd build_android || exit 1
mkdir -p src/main
mkdir -p src/main/res/mipmap
function copy_dummy_files() {
rm ./"$2"
cp ../"$1" "$2"
}
function copy_dummy_files_rec() {
rm -R ./"$2"/"$1"
cp -R ../"$1" "$2"
}
copy_dummy_files scripts/android/files/build.sh build.sh
copy_dummy_files scripts/android/files/gradle-wrapper.jar gradle-wrapper.jar
copy_dummy_files scripts/android/files/build.gradle build.gradle
copy_dummy_files scripts/android/files/gradle-wrapper.properties gradle-wrapper.properties
copy_dummy_files scripts/android/files/gradle.properties gradle.properties
copy_dummy_files scripts/android/files/local.properties local.properties
copy_dummy_files scripts/android/files/proguard-rules.pro proguard-rules.pro
copy_dummy_files scripts/android/files/settings.gradle settings.gradle
copy_dummy_files scripts/android/files/AndroidManifest.xml src/main/AndroidManifest.xml
copy_dummy_files_rec scripts/android/files/res src/main
copy_dummy_files other/icons/DDNet_256x256x32.png src/main/res/mipmap/ic_launcher.png
copy_dummy_files other/icons/DDNet_256x256x32.png src/main/res/mipmap/ic_launcher_round.png
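# copy the native libraries built above into the gradle project: $1 = build dir suffix, $2 = ABI directory under lib/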
function copy_libs() {
mkdir -p "lib/$2"
cp "$_ANDROID_SUB_BUILD_DIR/$1/libDDNet.so" "lib/$2"
cp "$_ANDROID_SUB_BUILD_DIR/$1/libs/libSDL2.so" "lib/$2"
cp "$_ANDROID_SUB_BUILD_DIR/$1/libs/libhidapi.so" "lib/$2"
}
if [[ "${_DEFAULT_ANDROID_BUILD}" == "arm" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
copy_libs arm armeabi-v7a arm eabi
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "arm64" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
copy_libs arm64 arm64-v8a aarch64
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "x86" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
copy_libs x86 x86 i686
fi
if [[ "${_DEFAULT_ANDROID_BUILD}" == "x86_64" || "${_DEFAULT_ANDROID_BUILD}" == "x64" || "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
copy_libs x86_64 x86_64 x86_64
fi
_DEFAULT_ANDROID_BUILD_DUMMY=$_DEFAULT_ANDROID_BUILD
if [[ "${_DEFAULT_ANDROID_BUILD}" == "all" ]]; then
_DEFAULT_ANDROID_BUILD_DUMMY=arm
fi
mkdir -p assets/asset_integrity_files
cp -R "$_ANDROID_SUB_BUILD_DIR/$_DEFAULT_ANDROID_BUILD_DUMMY/data" ./assets/asset_integrity_files
curl --remote-name --time-cond cacert.pem https://curl.se/ca/cacert.pem
cp ./cacert.pem ./assets/asset_integrity_files/data/cacert.pem
# create integrity file for extracting assets
(
cd assets/asset_integrity_files || exit 1
tmpfile="$(mktemp /tmp/hash_strings.XXX)"
find data -iname "*" -type f -print0 | while IFS= read -r -d $'\0' file; do
sha_hash=$(sha256sum "$file" | cut -d' ' -f 1)
echo "$file $sha_hash" >> "$tmpfile"
done
full_hash="$(sha256sum "$tmpfile" | cut -d' ' -f 1)"
rm "integrity.txt"
{
echo "$full_hash"
cat "$tmpfile"
} > "integrity.txt"
)
printf "\e[0m"
echo "Building..."
rm -R src/main/java/tw
mkdir -p src/main/java/tw/DDNet
cp ../scripts/android/files/java/tw/DDNet/NativeMain.java src/main/java/tw/DDNet/NativeMain.java
rm -R src/main/java/org
cp -R ../scripts/android/files/java/org src/main/java/
cp -R ../ddnet-libs/sdl/java/org src/main/java/
# shellcheck disable=SC1091
source ./build.sh "$ANDROID_HOME" "$_DEFAULT_GAME_NAME" "$_DEFAULT_BUILD_TYPE"
cd ..

41
scripts/android/files/AndroidManifest.xml Normal file

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="tw.DDNet">
<uses-feature
android:glEsVersion="0x00030000" />
<!-- Teeworlds broadcasts over local networks -->
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/>
<uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET" />
<!-- usesCleartextTraffic because the game sends unencrypted UDP packets -->
<application
android:usesCleartextTraffic="true"
android:label="@string/app_name"
android:hasCode="true"
android:extractNativeLibs="true"
android:supportsRtl="true"
android:isGame="true"
android:icon="@mipmap/ic_launcher"
android:roundIcon="@mipmap/ic_launcher_round"
>
<activity
android:name=".NativeMain"
android:configChanges="orientation|screenSize|screenLayout|keyboardHidden">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
<meta-data android:name="android.app.lib_name"
android:value="DDNet" />
</activity>
</application>
</manifest>

80
scripts/android/files/build.gradle Normal file

@ -0,0 +1,80 @@
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
buildscript {
ext.kotlin_version = '+'
repositories {
google()
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.2.1'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.3"
ndkVersion "TW_NDK_VERSION"
defaultConfig {
applicationId "tw.DDNet"
minSdkVersion 24
targetSdkVersion 30
versionCode TW_VERSION_CODE
versionName "TW_VERSION_NAME"
}
signingConfigs {
release {
storeFile file("TW_KEY_NAME")
storePassword "TW_KEY_PW"
keyAlias "TW_KEY_ALIAS"
keyPassword "TW_KEY_PW"
}
}
buildTypes {
release {
signingConfig signingConfigs.release
minifyEnabled true
shrinkResources true
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
debug {
minifyEnabled false
shrinkResources false
}
}
sourceSets {
main {
assets.srcDirs = ['assets']
jniLibs.srcDirs = ['lib']
//TW_ENABLE_RESOURCESresources.srcDirs = ['resources']
}
}
lintOptions {
abortOnError false
}
}
allprojects {
repositories {
google()
mavenCentral()
}
gradle.projectsEvaluated {
tasks.withType(JavaCompile) {
options.compilerArgs << "-Xlint:unchecked" << "-Xlint:deprecation"
}
}
}
dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.core:core-ktx:1.3.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
}

88
scripts/android/files/build.sh Executable file

@ -0,0 +1,88 @@
#!/bin/bash
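# invoked by cmake_android.sh from inside build_android:
# $1 = ANDROID_SDK_ROOT, $2 = APK/library name, $3 = Debug|Release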
[ "$1" == "" ] && {
printf '\e[31mDid not pass ANDROID_SDK_ROOT to build script\e[30m\n'
exit 1
}
[ "$2" == "" ] && {
printf '\e[31mDid not pass APK name to build script\e[30m\n'
exit 1
}
[ "$3" == "" ] && {
printf '\e[31mDid not pass build type to build script\e[30m\n'
exit 1
}
_APK_BASENAME="$2"
sed -i "s/DDNet/${2}/g" settings.gradle
_REPLACE_PACKAGE_NAME_STR="tw.${2,,}"
sed -i "s/tw.DDNet/${_REPLACE_PACKAGE_NAME_STR}/g" build.gradle
TW_KEY_NAME_ESCAPED=$(echo "$TW_KEY_NAME"|sed 's/\//\\\//g')
TW_KEY_PW_ESCAPED=$(echo "$TW_KEY_PW"|sed 's/\//\\\//g')
TW_KEY_ALIAS_ESCAPED=$(echo "$TW_KEY_ALIAS"|sed 's/\//\\\//g')
sed -i "s/TW_KEY_NAME/${TW_KEY_NAME_ESCAPED}/g" build.gradle
sed -i "s/TW_KEY_PW/${TW_KEY_PW_ESCAPED}/g" build.gradle
sed -i "s/TW_KEY_ALIAS/${TW_KEY_ALIAS_ESCAPED}/g" build.gradle
sed -i "s/DDNet/${2}/g" src/main/res/values/strings.xml
sed -i "s/\"DDNet\"/\"${2}\"/g" src/main/AndroidManifest.xml
sed -i "s/tw.DDNet/${_REPLACE_PACKAGE_NAME_STR}/g" src/main/AndroidManifest.xml
__TW_HOME_DIR=$(echo "$HOME"|sed 's/\//\\\//g')
sed -i "s/TW_HOME_DIR/${__TW_HOME_DIR}/g" local.properties
sed -i "s/TW_NDK_VERSION/${ANDROID_NDK_VERSION}/g" build.gradle
sed -i "s/TW_VERSION_CODE/${TW_VERSION_CODE}/g" build.gradle
sed -i "s/TW_VERSION_NAME/${TW_VERSION_NAME}/g" build.gradle
mv src/main/java/tw/DDNet src/main/java/tw/"${2}"
sed -i "s/tw.DDNet/${_REPLACE_PACKAGE_NAME_STR}/g" src/main/java/tw/"${2}"/NativeMain.java
sed -i "s/tw.DDNet/${_REPLACE_PACKAGE_NAME_STR}/g" proguard-rules.pro
# disable hid manager for now
sed -i "s/mHIDDeviceManager = HIDDeviceManager.acquire(this);/mHIDDeviceManager=null;/g" src/main/java/org/libsdl/app/SDLActivity.java
if [[ "${3}" == "Debug" ]]; then
sed -i "s/android.enableR8.fullMode=true/android.enableR8.fullMode=false/g" gradle.properties
fi
if [[ -z ${GE_NO_APK_BUILD} || "${GE_NO_APK_BUILD}" != "1" ]]; then
_RELEASE_TYPE_NAME=debug
_RELEASE_TYPE_APK_NAME=
if [[ "${3}" == "Debug" ]]; then
_RELEASE_TYPE_NAME=debug
fi
if [[ "${3}" == "Release" ]]; then
_RELEASE_TYPE_NAME=release
_RELEASE_TYPE_APK_NAME=
fi
APP_BASE_NAME=Gradle
CLASSPATH=gradle-wrapper.jar
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all
if [[ "${3}" == "Debug" ]]; then
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all builddebug
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all assembleDebug
else
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all buildrelease
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all assembleRelease
fi
cp build/outputs/apk/"$_RELEASE_TYPE_NAME"/"$_APK_BASENAME"-"$_RELEASE_TYPE_NAME""$_RELEASE_TYPE_APK_NAME".apk "$_APK_BASENAME".apk
if [[ "${3}" == "Release" ]]; then
java "-Dorg.gradle.appname=${APP_BASE_NAME}" -classpath "${CLASSPATH}" org.gradle.wrapper.GradleWrapperMain --warning-mode all bundleRelease
cp build/outputs/bundle/"$_RELEASE_TYPE_NAME"/"$_APK_BASENAME"-"$_RELEASE_TYPE_NAME""$_RELEASE_TYPE_APK_NAME".aab "$_APK_BASENAME".aab
fi
fi

scripts/android/files/gradle-wrapper.jar Normal file
Binary file not shown.

5
scripts/android/files/gradle-wrapper.properties Normal file

@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

22
scripts/android/files/gradle.properties Normal file

@ -0,0 +1,22 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx3536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
android.enableR8.fullMode=true

27
scripts/android/files/java/tw/DDNet/NativeMain.java Normal file

@ -0,0 +1,27 @@
package tw.DDNet;
import android.app.NativeActivity;
import org.libsdl.app.SDLActivity;
import android.os.Bundle;
import android.content.pm.ActivityInfo;
public class NativeMain extends SDLActivity {
static {
System.loadLibrary("DDNet");
}
@Override
protected String[] getLibraries() {
return new String[] {
// disable hid API for now
// "hidapi",
// "SDL2",
"DDNet",
};
}
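// Force landscape orientation before SDLActivity sets up its surface.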
@Override
public void onCreate(Bundle savedInstanceState) {
	setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
	super.onCreate(savedInstanceState);
}
}

10
scripts/android/files/local.properties Normal file

@ -0,0 +1,10 @@
## This file is automatically generated by Android Studio.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file should *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
sdk.dir="TW_HOME_DIR/Android/Sdk"

35
scripts/android/files/proguard-rules.pro Normal file

@ -0,0 +1,35 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
-keepclassmembers, allowoptimization public class tw.DDNet.NativeMain {
*;
}
-keepclassmembers, allowoptimization public class org.libsdl.app.* {
*;
}
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
############################################
## OUR
############################################
############################################
## OTHER
############################################

4
scripts/android/files/res/values/strings.xml Normal file

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">DDNet</string>
</resources>

1
scripts/android/files/settings.gradle Normal file

@ -0,0 +1 @@
rootProject.name='DDNet'

28
scripts/check_dilate.sh Executable file

@ -0,0 +1,28 @@
#!/bin/bash
#set -x
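# usage: check_dilate.sh <path to dir containing the dilate tool> <directory with PNGs>
# reports every PNG whose pixels change when run through the dilate tool,
# i.e. images that were committed without being dilated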
result=
dil_path=$1
result=$(find "$2" -iname '*.png' -print0 | while IFS= read -r -d $'\0' file; do
new_file=$(mktemp --tmpdir "$(basename "$file" .png).XXX.png")
cp "$file" "$new_file"
convert "$new_file" "${new_file}_old.bmp" > /dev/null
"${dil_path}"/dilate "$new_file" > /dev/null
convert "$new_file" "${new_file}_new.bmp" > /dev/null
orig_hash=$(identify -quiet -format "%#" "${new_file}_old.bmp")
new_hash=$(identify -quiet -format "%#" "${new_file}_new.bmp")
rm "$new_file"
rm "${new_file}_old.bmp"
rm "${new_file}_new.bmp"
if [ "$orig_hash" != "$new_hash" ]; then
echo "$file is not dilated"
fi
done)
if [[ "$result" != "" ]]; then
echo -n "$result"
exit 1
fi
exit 0

50
scripts/check_header_guards.py Executable file

@ -0,0 +1,50 @@
#!/usr/bin/env python3
import os
import sys
os.chdir(os.path.dirname(__file__) + "/..")
PATH = "src/"
EXCEPTIONS = [
"src/base/unicode/confusables.h",
"src/base/unicode/confusables_data.h",
"src/base/unicode/tolower.h",
"src/base/unicode/tolower_data.h",
"src/tools/config_common.h"
]
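# Returns True (= error) if the file is missing a header guard or its guard
# does not match the one derived from the file path.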
def check_file(filename):
if filename in EXCEPTIONS:
return False
error = False
with open(filename, encoding="utf-8") as file:
for line in file:
if line == "// This file can be included several times.\n":
break
if line[0] == "/" or line[0] == "*" or line[0] == "\r" or line[0] == "\n" or line[0] == "\t":
continue
header_guard = "#ifndef " + ("_".join(filename.split(PATH)[1].split("/"))[:-2]).upper() + "_H"
if line.startswith("#ifndef"):
if line[:-1] != header_guard:
error = True
print(f"Wrong header guard in {filename}, is: {line[:-1]}, should be: {header_guard}")
else:
error = True
print(f"Missing header guard in {filename}, should be: {header_guard}")
break
return error
def check_dir(directory):
errors = 0
file_list = os.listdir(directory)
for file in file_list:
path = directory + file
if os.path.isdir(path):
if file not in ("external", "generated", "rust-bridge"):
errors += check_dir(path + "/")
elif file.endswith(".h") and file != "keynames.h":
errors += check_file(path)
return errors
if __name__ == '__main__':
sys.exit(int(check_dir(PATH) != 0))

72
scripts/check_identifiers.py Normal file

@ -0,0 +1,72 @@
import argparse
import csv
import sys
def check_name(kind, qualifiers, typ, name):
if kind == "variable":
return check_variable_name(qualifiers, typ, name)
if kind in "class struct".split():
if name[0] not in "CI":
return "should start with 'C' (or 'I' for interfaces)"
if len(name) < 2:
return "must be at least two characters long"
if not name[1].isupper():
return "must start with an uppercase letter"
if kind == "enum_constant":
if not name.isupper():
return "must only contain uppercase letters, digits and underscores"
return None
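# Short coordinate-pair names that are allowed even though they lack the
# usual type prefix.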
ALLOW = set("""
dx dy
fx fy
mx my
ix iy
px py
sx sy
wx wy
x0 x1
y0 y1
""".split())
def check_variable_name(qualifiers, typ, name):
if qualifiers == "" and typ == "" and name == "argc":
return None
if qualifiers == "" and typ == "pp" and name == "argv":
return None
if qualifiers == "cs":
# Allow all uppercase names for constant statics.
if name.isupper():
return None
qualifiers = "s"
# Allow single lowercase letters as member and variable names.
if qualifiers in ["m", ""] and len(name) == 1 and name.islower():
return None
prefix = "".join([qualifiers, "_" if qualifiers else "", typ])
if not name.startswith(prefix):
return f"should start with {prefix!r}"
if name in ALLOW:
return None
name = name[len(prefix):]
if not name[0].isupper():
if prefix:
return f"should start with an uppercase letter after the prefix {prefix!r}"
return "should start with an uppercase letter"
return None
def main():
p = argparse.ArgumentParser(description="Check identifiers (input via stdin in CSV format from extract_identifiers.py) for naming style in DDNet code")
p.parse_args()
identifiers = list(csv.DictReader(sys.stdin))
unclean = False
for i in identifiers:
error = check_name(i["kind"], i["qualifiers"], i["type"], i["name"])
if error:
unclean = True
print(f"{i['file']}:{i['line']}:{i['column']}: {i['name']}: {error}")
return unclean
if __name__ == "__main__":
sys.exit(main())

33
scripts/check_standard_headers.sh Executable file

@ -0,0 +1,33 @@
#!/usr/bin/env bash
# List of C headers and their corresponding C++ headers
c_headers=(assert complex ctype errno fenv float inttypes iso646 limits locale math setjmp signal stdarg stdbool stddef stdint stdio stdlib string tgmath time wchar wctype)
c_headers_map=(cassert complex cctype cerrno cfenv cfloat cinttypes ciso646 climits clocale cmath csetjmp csignal cstdarg cstdbool cstddef cstdint cstdio cstdlib cstring ctgmath ctime cwchar cwctype)
# Create regex dynamically from the array to match any C header
c_headers_regex=$(IFS="|"; echo "${c_headers[*]}")
# Find all C++ source and header files
files=$(find ./src -type f \( -name '*.cpp' -o -name '*.hpp' -o -name '*.h' \) ! -path "./src/engine/external/*")
error_found=0
# Check each source file for C headers
for file in $files; do
# First check if the file includes any C headers for more efficiency when no C header is used
if grep -E "#include\s+<($c_headers_regex)\.h>" "$file" >/dev/null; then
# Check each C header individually to print an error message with the appropriate replacement C++ header
for (( i=0; i < ${#c_headers[@]}; i++ )); do
if grep -E "#include\s+<${c_headers[i]}\.h>" "$file" >/dev/null; then
echo "Error: '$file' includes C header '${c_headers[i]}.h'. Include the C++ header '${c_headers_map[i]}' instead."
fi
done
error_found=1
fi
done
if [ $error_found -eq 1 ]; then
exit 1
fi
echo "Success: No standard C headers are used."

41
scripts/checksum.py Normal file

@ -0,0 +1,41 @@
#!/usr/bin/env python3
import argparse
import hashlib
import os
os.chdir(os.path.dirname(__file__) + "/..")
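# Each hash is a truncated SHA-256, rendered as a C hex literal.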
def hash_bytes(b):
return f"0x{hashlib.sha256(b).hexdigest()[:8]}"
def hash_file(filename):
with open(filename, "rb") as f:
return hash_bytes(f.read())
def main():
p = argparse.ArgumentParser(description="Checksums source files")
p.add_argument("list_file", metavar="LIST_FILE", help="File listing all the files to hash")
p.add_argument("extra_file", metavar="EXTRA_FILE", help="File containing extra strings to be hashed")
args = p.parse_args()
with open(args.list_file, encoding="utf-8") as f:
files = f.read().splitlines()
with open(args.extra_file, "rb") as f:
extra = f.read().splitlines()
hashes_files = [hash_file(file) for file in files]
hashes_extra = [hash_bytes(line) for line in extra]
hashes = hashes_files + hashes_extra
print("""\
#include <engine/client/checksum.h>
void CChecksumData::InitFiles()
{
""", end="")
print(f"\tm_NumFiles = {len(hashes_files)};")
print(f"\tm_NumExtra = {len(hashes_extra)};")
for i, h in enumerate(hashes):
print(f"\tm_aFiles[0x{i:03x}] = {h};")
print("}")
if __name__ == "__main__":
main()

81
scripts/compile_libs/cmake_lib_compile.sh Executable file

@ -0,0 +1,81 @@
#!/bin/bash
ANDROID_HOME=~/Android/Sdk
ANDROID_NDK="$(find "$ANDROID_HOME/ndk" -maxdepth 1 | sort -n | tail -1)"
echo "$ANDROID_NDK"
export MAKEFLAGS=-j32
if [[ "${2}" == "webasm" ]]; then
COMPILEFLAGS="-pthread -O3 -g -s USE_PTHREADS=1"
LINKFLAGS="-pthread -O3 -g -s USE_PTHREADS=1 -s ASYNCIFY=1"
fi
COMPILEFLAGS=$3
LINKFLAGS=$4
function compile_source() {
if [[ "${4}" == "android" ]]; then
cmake \
-H. \
-G "Unix Makefiles" \
-DCMAKE_BUILD_TYPE=Release \
-DANDROID_NATIVE_API_LEVEL="android-$1" \
-DCMAKE_TOOLCHAIN_FILE="$ANDROID_NDK/build/cmake/android.toolchain.cmake" \
-DANDROID_ABI="${3}" \
-DANDROID_ARM_NEON=TRUE \
-B"$2" \
-DBUILD_SHARED_LIBS=OFF \
-DHIDAPI_SKIP_LIBUSB=TRUE \
-DCURL_USE_OPENSSL=ON \
-DSDL_HIDAPI=OFF \
-DOP_DISABLE_HTTP=ON \
-DOP_DISABLE_EXAMPLES=ON \
-DOP_DISABLE_DOCS=ON \
-DOPENSSL_ROOT_DIR="$PWD"/../openssl/"$2" \
-DOPENSSL_CRYPTO_LIBRARY="$PWD"/../openssl/"$2"/libcrypto.a \
-DOPENSSL_SSL_LIBRARY="$PWD"/../openssl/"$2"/libssl.a \
-DOPENSSL_INCLUDE_DIR="${PWD}/../openssl/include;${PWD}/../openssl/${2}/include"
(
cd "$2" || exit 1
cmake --build .
)
else
${5} cmake \
-H. \
-DCMAKE_BUILD_TYPE=Release \
-B"$2" \
-DSDL_STATIC=TRUE \
-DFT_DISABLE_HARFBUZZ=ON \
-DFT_DISABLE_BZIP2=ON \
-DFT_DISABLE_BROTLI=ON \
-DFT_REQUIRE_ZLIB=TRUE \
-DCMAKE_C_FLAGS="$COMPILEFLAGS -DGLEW_STATIC" -DCMAKE_CXX_FLAGS="$COMPILEFLAGS" -DCMAKE_CXX_FLAGS_RELEASE="$COMPILEFLAGS" -DCMAKE_C_FLAGS_RELEASE="$COMPILEFLAGS" \
-DCMAKE_SHARED_LINKER_FLAGS="$LINKFLAGS" -DCMAKE_SHARED_LINKER_FLAGS_RELEASE="$LINKFLAGS" \
-DSDL_PTHREADS=ON -DSDL_THREADS=ON \
-DCURL_USE_OPENSSL=ON \
-DOPUS_HARDENING=OFF \
-DOPUS_STACK_PROTECTOR=OFF \
-DOPENSSL_ROOT_DIR="$PWD"/../openssl/"$2" \
-DOPENSSL_CRYPTO_LIBRARY="$PWD"/../openssl/"$2"/libcrypto.a \
-DOPENSSL_SSL_LIBRARY="$PWD"/../openssl/"$2"/libssl.a \
-DOPENSSL_INCLUDE_DIR="${PWD}/../openssl/include;${PWD}/../openssl/${2}/include" \
-DZLIB_LIBRARY="${PWD}/../zlib/${2}/libz.a" -DZLIB_INCLUDE_DIR="${PWD}/../zlib;${PWD}/../zlib/${2}"
(
cd "$2" || exit 1
cmake --build .
)
fi
}
if [[ "${2}" == "android" ]]; then
compile_source "$1" build_"$2"_arm armeabi-v7a "$2" "" &
compile_source "$1" build_"$2"_arm64 arm64-v8a "$2" "" &
compile_source "$1" build_"$2"_x86 x86 "$2" "" &
compile_source "$1" build_"$2"_x86_64 x86_64 "$2" "" &
elif [[ "${2}" == "webasm" ]]; then
sed -i "s/include(CheckSizes)//g" CMakeLists.txt
compile_source "$1" build_"$2"_wasm wasm "$2" emcmake &
fi
wait

253
scripts/compile_libs/gen_libs.sh Executable file

@ -0,0 +1,253 @@
#!/bin/bash
CURDIR="$PWD"
if [ -z ${1+x} ]; then
echo "Give a destination path where to run this script, please choose a path other than in the source directory"
exit 1
fi
if [ -z ${2+x} ]; then
echo "Specify the target system"
exit 1
fi
OS_NAME=$2
COMPILEFLAGS="-fPIC"
LINKFLAGS="-fPIC"
if [[ "${OS_NAME}" == "webasm" ]]; then
COMPILEFLAGS="-pthread -O3 -g -s USE_PTHREADS=1"
LINKFLAGS="-pthread -O3 -g -s USE_PTHREADS=1 -s ASYNCIFY=1 -s WASM=1"
fi
if [[ "${OS_NAME}" == "android" ]]; then
OS_NAME_PATH="android"
elif [[ "${OS_NAME}" == "windows" ]]; then
OS_NAME_PATH="windows"
elif [[ "${OS_NAME}" == "linux" ]]; then
OS_NAME_PATH="linux"
elif [[ "${OS_NAME}" == "webasm" ]]; then
OS_NAME_PATH="webasm"
fi
COMP_HAS_ARM32=0
COMP_HAS_ARM64=0
COMP_HAS_x86=0
COMP_HAS_x64=0
COMP_HAS_WEBASM=0
if [[ "${OS_NAME}" == "android" ]]; then
COMP_HAS_ARM32=1
COMP_HAS_ARM64=1
COMP_HAS_x86=1
COMP_HAS_x64=1
elif [[ "${OS_NAME}" == "linux" ]]; then
COMP_HAS_x64=1
elif [[ "${OS_NAME}" == "windows" ]]; then
COMP_HAS_x86=1
COMP_HAS_x64=1
elif [[ "${OS_NAME}" == "webasm" ]]; then
COMP_HAS_WEBASM=1
fi
mkdir -p "$1"
cd "$1" || exit 1
function build_cmake_lib() {
if [ ! -d "${1}" ]; then
git clone "${2}" "${1}"
fi
(
cd "${1}" || exit 1
cp "${CURDIR}"/scripts/compile_libs/cmake_lib_compile.sh cmake_lib_compile.sh
./cmake_lib_compile.sh "$_ANDROID_ABI_LEVEL" "$OS_NAME" "$COMPILEFLAGS" "$LINKFLAGS"
)
}
_ANDROID_ABI_LEVEL=24
mkdir -p compile_libs
cd compile_libs || exit 1
# start with openssl
(
_WAS_THERE_SSLFILE=1
if [ ! -d "openssl" ]; then
git clone https://github.com/openssl/openssl openssl
_WAS_THERE_SSLFILE=0
fi
(
cd openssl || exit 1
if [[ "$_WAS_THERE_SSLFILE" == 0 ]]; then
./autogen.sh
fi
cp "${CURDIR}"/scripts/compile_libs/make_lib_openssl.sh make_lib_openssl.sh
./make_lib_openssl.sh "$_ANDROID_ABI_LEVEL" "$OS_NAME" "$COMPILEFLAGS" "$LINKFLAGS"
)
)
build_cmake_lib zlib https://github.com/madler/zlib
build_cmake_lib png https://github.com/glennrp/libpng
build_cmake_lib curl https://github.com/curl/curl
build_cmake_lib freetype2 https://gitlab.freedesktop.org/freetype/freetype
build_cmake_lib sdl https://github.com/libsdl-org/SDL
build_cmake_lib ogg https://github.com/xiph/ogg
build_cmake_lib opus https://github.com/xiph/opus
(
_WAS_THERE_OPUSFILE=1
if [ ! -d "opusfile" ]; then
git clone https://github.com/xiph/opusfile opusfile
_WAS_THERE_OPUSFILE=0
fi
cd opusfile || exit 1
if [[ "$_WAS_THERE_OPUSFILE" == 0 ]]; then
./autogen.sh
fi
cp "${CURDIR}"/scripts/compile_libs/make_lib_opusfile.sh make_lib_opusfile.sh
./make_lib_opusfile.sh "$_ANDROID_ABI_LEVEL" "$OS_NAME" "$COMPILEFLAGS" "$LINKFLAGS"
)
# SQLite: just download and build by hand
if [ ! -d "sqlite3" ]; then
wget https://www.sqlite.org/2021/sqlite-amalgamation-3360000.zip
7z e sqlite-amalgamation-3360000.zip -osqlite3
fi
(
cd sqlite3 || exit 1
cp "${CURDIR}"/scripts/compile_libs/make_lib_sqlite3.sh make_lib_sqlite3.sh
./make_lib_sqlite3.sh "$_ANDROID_ABI_LEVEL" "$OS_NAME" "$COMPILEFLAGS" "$LINKFLAGS"
)
cd ..
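# run $1 once per arch enabled for this target system; $1 receives <build arch> <lib dir suffix>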
function copy_arches_for_lib() {
if [[ "$COMP_HAS_ARM32" == "1" ]]; then
${1} arm arm
fi
if [[ "$COMP_HAS_ARM64" == "1" ]]; then
${1} arm64 arm64
fi
if [[ "$COMP_HAS_x86" == "1" ]]; then
${1} x86 32
fi
if [[ "$COMP_HAS_x64" == "1" ]]; then
${1} x86_64 64
fi
if [[ "$COMP_HAS_WEBASM" == "1" ]]; then
${1} wasm wasm
fi
}
mkdir ddnet-libs
function _copy_curl() {
mkdir -p ddnet-libs/curl/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/curl/build_"$OS_NAME"_"$1"/lib/libcurl.a ddnet-libs/curl/"$OS_NAME_PATH"/lib"$2"/libcurl.a
}
copy_arches_for_lib _copy_curl
mkdir ddnet-libs
function _copy_freetype2() {
mkdir -p ddnet-libs/freetype/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/freetype2/build_"$OS_NAME"_"$1"/libfreetype.a ddnet-libs/freetype/"$OS_NAME_PATH"/lib"$2"/libfreetype.a
}
copy_arches_for_lib _copy_freetype2
mkdir ddnet-libs
function _copy_sdl() {
mkdir -p ddnet-libs/sdl/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/sdl/build_"$OS_NAME"_"$1"/libSDL2.a ddnet-libs/sdl/"$OS_NAME_PATH"/lib"$2"/libSDL2.a
cp compile_libs/sdl/build_"$OS_NAME"_"$1"/libSDL2main.a ddnet-libs/sdl/"$OS_NAME_PATH"/lib"$2"/libSDL2main.a
if [ ! -d "ddnet-libs/sdl/include/$OS_NAME_PATH" ]; then
mkdir -p ddnet-libs/sdl/include/"$OS_NAME_PATH"
fi
cp -R compile_libs/sdl/include/* ddnet-libs/sdl/include/"$OS_NAME_PATH"
}
copy_arches_for_lib _copy_sdl
# copy java code from SDL2
if [[ "$OS_NAME" == "android" ]]; then
rm -R ddnet-libs/sdl/java
mkdir -p ddnet-libs/sdl/java
cp -R compile_libs/sdl/android-project/app/src/main/java/org ddnet-libs/sdl/java/
fi
mkdir ddnet-libs
function _copy_ogg() {
mkdir -p ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/ogg/build_"$OS_NAME"_"$1"/libogg.a ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"/libogg.a
}
copy_arches_for_lib _copy_ogg
mkdir ddnet-libs
function _copy_opus() {
mkdir -p ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/opus/build_"$OS_NAME"_"$1"/libopus.a ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"/libopus.a
}
copy_arches_for_lib _copy_opus
mkdir ddnet-libs
function _copy_opusfile() {
mkdir -p ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/opusfile/build_"$OS_NAME"_"$1"/libopusfile.a ddnet-libs/opus/"$OS_NAME_PATH"/lib"$2"/libopusfile.a
}
copy_arches_for_lib _copy_opusfile
mkdir ddnet-libs
function _copy_sqlite3() {
mkdir -p ddnet-libs/sqlite3/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/sqlite3/build_"$OS_NAME"_"$1"/sqlite3.a ddnet-libs/sqlite3/"$OS_NAME_PATH"/lib"$2"/libsqlite3.a
}
copy_arches_for_lib _copy_sqlite3
mkdir ddnet-libs
function _copy_openssl() {
mkdir -p ddnet-libs/openssl/"$OS_NAME_PATH"/lib"$2"
mkdir -p ddnet-libs/openssl/include
mkdir -p ddnet-libs/openssl/include/"$OS_NAME_PATH"
cp compile_libs/openssl/build_"$OS_NAME"_"$1"/libcrypto.a ddnet-libs/openssl/"$OS_NAME_PATH"/lib"$2"/libcrypto.a
cp compile_libs/openssl/build_"$OS_NAME"_"$1"/libssl.a ddnet-libs/openssl/"$OS_NAME_PATH"/lib"$2"/libssl.a
cp -R compile_libs/openssl/build_"$OS_NAME"_"$1"/include/* ddnet-libs/openssl/include/"$OS_NAME_PATH"
cp -R compile_libs/openssl/include/* ddnet-libs/openssl/include
}
copy_arches_for_lib _copy_openssl
mkdir ddnet-libs
function _copy_zlib() {
# copy headers
(
cd compile_libs/zlib || exit 1
find . -maxdepth 1 -iname '*.h' -print0 | while IFS= read -r -d $'\0' file; do
mkdir -p ../../ddnet-libs/zlib/include/"$(dirname "$file")"
cp "$file" ../../ddnet-libs/zlib/include/"$(dirname "$file")"
done
cd build_"$OS_NAME"_"$1" || exit 1
find . -maxdepth 1 -iname '*.h' -print0 | while IFS= read -r -d $'\0' file; do
mkdir -p ../../../ddnet-libs/zlib/include/"$OS_NAME_PATH"/"$(dirname "$file")"
cp "$file" ../../../ddnet-libs/zlib/include/"$OS_NAME_PATH"/"$(dirname "$file")"
done
)
mkdir -p ddnet-libs/zlib/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/zlib/build_"$OS_NAME"_"$1"/libz.a ddnet-libs/zlib/"$OS_NAME_PATH"/lib"$2"/libz.a
}
copy_arches_for_lib _copy_zlib
mkdir ddnet-libs
function _copy_png() {
mkdir -p ddnet-libs/png/"$OS_NAME_PATH"/lib"$2"
cp compile_libs/png/build_"$OS_NAME"_"$1"/libpng16.a ddnet-libs/png/"$OS_NAME_PATH"/lib"$2"/libpng16.a
}
copy_arches_for_lib _copy_png

48
scripts/compile_libs/make_lib_openssl.sh Executable file

@ -0,0 +1,48 @@
#!/bin/bash
ANDROID_HOME=~/Android/Sdk
ANDROID_NDK="$(find "$ANDROID_HOME/ndk" -maxdepth 1 | sort -n | tail -1)"
export MAKEFLAGS=-j32
export CXXFLAGS="$3"
export CFLAGS="$3"
export CPPFLAGS="$4"
export LDFLAGS="$4"
export ANDROID_NDK_ROOT=$ANDROID_NDK
PATH=$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin:$ANDROID_NDK_ROOT/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin:$PATH
function build_openssl() {
_EXISTS_PROJECT=0
if [ -d "$1" ]; then
_EXISTS_PROJECT=1
else
mkdir "$1"
fi
(
cd "$1" || exit 1
if [[ "$_EXISTS_PROJECT" == "0" ]]; then
if [[ "${4}" == "webasm" ]]; then
emconfigure ../Configure "$2" -no-tests -no-asm -static -no-afalgeng -DOPENSSL_SYS_NETWARE -DSIG_DFL=0 -DSIG_IGN=0 -DHAVE_FORK=0 -DOPENSSL_NO_AFALGENG=1 --with-rand-seed=getrandom
sed -i 's|^CROSS_COMPILE.*$|CROSS_COMPILE=|g' Makefile
else
../Configure "$2" no-asm no-shared
fi
fi
${5} make $MAKEFLAGS build_generated
${5} make $MAKEFLAGS libcrypto.a
${5} make $MAKEFLAGS libssl.a
cd ..
)
}
if [[ "${2}" == "android" ]]; then
	build_openssl build_"$2"_arm android-arm "$1" "$2" ""
	build_openssl build_"$2"_arm64 android-arm64 "$1" "$2" ""
	build_openssl build_"$2"_x86 android-x86 "$1" "$2" ""
	build_openssl build_"$2"_x86_64 android-x86_64 "$1" "$2" ""
elif [[ "${2}" == "webasm" ]]; then
	build_openssl build_"$2"_wasm linux-generic64 "$1" "$2" emmake
fi

80
scripts/compile_libs/make_lib_opusfile.sh Executable file

@ -0,0 +1,80 @@
#!/bin/bash
ANDROID_HOME=~/Android/Sdk
ANDROID_NDK="$(find "$ANDROID_HOME/ndk" -maxdepth 1 | sort -n | tail -1)"
export MAKEFLAGS=-j32
export CXXFLAGS="$3"
export CFLAGS="$3"
export CPPFLAGS="$4"
export LDFLAGS="$4"
export ANDROID_NDK_ROOT="$ANDROID_NDK"
function make_opusfile() {
_EXISTS_PROJECT=0
if [ -d "$1" ]; then
_EXISTS_PROJECT=1
else
mkdir "$1"
fi
(
cd "$1" || exit 1
if [[ "$_EXISTS_PROJECT" == 0 ]]; then
#not nice but doesn't matter
cp -R ../../ogg/include .
cp -R ../../opus/include .
cp -R ../../ogg/"$2"/include/ogg/* include/ogg/
cp ../../ogg/"$2"/libogg.a libogg.a
cp ../../opus/"$2"/libopus.a libopus.a
fi
TMP_COMPILER=""
TMP_AR=""
if [[ "${5}" == "android" ]]; then
TMP_COMPILER="$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin/$3$4-clang"
TMP_AR="$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar"
elif [[ "${5}" == "webasm" ]]; then
TMP_COMPILER="emcc"
TMP_AR="emar"
fi
${TMP_COMPILER} \
-c \
-fPIC \
-I"${PWD}"/../include \
-I"${PWD}"/include \
../src/opusfile.c \
../src/info.c \
../src/internal.c
${TMP_COMPILER} \
-c \
-fPIC \
-I"${PWD}"/../include \
-I"${PWD}"/include \
-include stdio.h \
../src/stream.c
${TMP_AR} \
rvs \
libopusfile.a \
opusfile.o \
info.o \
stream.o \
internal.o
)
}
function compile_all_opusfile() {
if [[ "${2}" == "android" ]]; then
make_opusfile build_"$2"_arm build_"$2"_arm armv7a-linux-androideabi "$1" "$2"
make_opusfile build_"$2"_arm64 build_"$2"_arm64 aarch64-linux-android "$1" "$2"
make_opusfile build_"$2"_x86 build_"$2"_x86 i686-linux-android "$1" "$2"
make_opusfile build_"$2"_x86_64 build_"$2"_x86_64 x86_64-linux-android "$1" "$2"
elif [[ "${2}" == "webasm" ]]; then
make_opusfile build_"$2"_wasm build_"$2"_wasm "" "$1" "$2"
fi
}
compile_all_opusfile "$1" "$2"

65
scripts/compile_libs/make_lib_sqlite3.sh Executable file

@ -0,0 +1,65 @@
#!/bin/bash
ANDROID_HOME=~/Android/Sdk
ANDROID_NDK="$(find "$ANDROID_HOME/ndk" -maxdepth 1 | sort -n | tail -1)"
export MAKEFLAGS=-j32
export CXXFLAGS="$3"
export CFLAGS="$3"
export CPPFLAGS="$4"
LINKER_FLAGS="$4"
export ANDROID_NDK_ROOT="$ANDROID_NDK"
PATH="$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin:$ANDROID_NDK_ROOT/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin:$PATH"
_LD_LIBRARY_PATH=".:$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin:$ANDROID_NDK_ROOT/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin:$LD_LIBRARY_PATH"
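# compile the SQLite amalgamation into a static library:
# $1 = build dir, $3$4 = clang triple + API level (android), $5 = target system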
function make_sqlite3() {
(
mkdir -p "$1"
cd "$1" || exit 1
TMP_COMPILER=""
TMP_AR=""
if [[ "${5}" == "android" ]]; then
TMP_COMPILER="$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin/$3$4-clang"
TMP_AR="$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar"
elif [[ "${5}" == "webasm" ]]; then
TMP_COMPILER="emcc"
TMP_AR="emar"
fi
LDFLAGS="${LINKER_FLAGS} -L./" \
LD_LIBRARY_PATH="$_LD_LIBRARY_PATH" \
${TMP_COMPILER} \
-c \
-fPIC \
-DSQLITE_ENABLE_ATOMIC_WRITE=1 \
-DSQLITE_ENABLE_BATCH_ATOMIC_WRITE=1 \
-DSQLITE_ENABLE_MULTITHREADED_CHECKS=1 \
-DSQLITE_THREADSAFE=1 \
../sqlite3.c \
-o sqlite3.o
LDFLAGS="${LINKER_FLAGS} -L./" \
LD_LIBRARY_PATH="$_LD_LIBRARY_PATH" \
${TMP_AR} \
rvs \
sqlite3.a \
sqlite3.o
)
}
function compile_all_sqlite3() {
if [[ "${2}" == "android" ]]; then
make_sqlite3 build_"$2"_arm build_"$2"_arm armv7a-linux-androideabi "$1" "$2"
make_sqlite3 build_"$2"_arm64 build_"$2"_arm64 aarch64-linux-android "$1" "$2"
make_sqlite3 build_"$2"_x86 build_"$2"_x86 i686-linux-android "$1" "$2"
make_sqlite3 build_"$2"_x86_64 build_"$2"_x86_64 x86_64-linux-android "$1" "$2"
elif [[ "${2}" == "webasm" ]]; then
make_sqlite3 build_"$2"_wasm build_"$2"_wasm "" "$1" "$2"
fi
}
compile_all_sqlite3 "$1" "$2"

43
scripts/darwin_strip_rpath.py Normal file

@ -0,0 +1,43 @@
import argparse
import subprocess
import re
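# Parse the text output of `otool -l` into one list of lines per load command.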
def split_cmds(lines):
cmds = []
current = []
load_cmd_regex = re.compile(r"^Load command \d+$")
for line in lines:
if load_cmd_regex.match(line):
cmds.append(current)
current = []
continue
current.append(line.strip())
return cmds[1:]
def main():
p = argparse.ArgumentParser(description="Strip LC_RPATH commands from executable")
p.add_argument('otool', help="Path to otool")
p.add_argument('install_name_tool', help="Path to install_name_tool")
p.add_argument('executable', metavar="EXECUTABLE", help="The executable to strip")
args = p.parse_args()
otool = args.otool
install_name_tool = args.install_name_tool
executable = args.executable
cmds = split_cmds(subprocess.check_output([otool, "-l", executable]).decode().splitlines())
lc_rpath_cmds = [cmd for cmd in cmds if cmd[0] == "cmd LC_RPATH"]
path_regex = re.compile(r"^path (.*) \(offset \d+\)$")
rpaths = {k[0] for k in [[path_regex.match(part).group(1) for part in cmd if path_regex.match(part)] for cmd in lc_rpath_cmds]}
print("Found paths:")
for path in rpaths:
print("\t" + path)
subprocess.check_call([install_name_tool, "-delete_rpath", path, executable])
if __name__ == '__main__':
main()

169
scripts/extract_identifiers.py Normal file

@ -0,0 +1,169 @@
import argparse
import csv
import os
import sys
import clang.cindex # pylint: disable=import-error
from clang.cindex import CursorKind, LinkageKind, StorageClass, TypeKind # pylint: disable=import-error
try:
from tqdm import tqdm
except ImportError:
def tqdm(it, *_args, **_kwargs):
return it
def traverse_namespaced(root, filter_files=None, skip_namespaces=1, namespace=()):
if root.location.file is not None and root.location.file.name not in filter_files:
return
yield namespace, root
if root.displayname != "":
if skip_namespaces > 0:
skip_namespaces -= 1
else:
namespace += (root.spelling,)
for node in root.get_children():
yield from traverse_namespaced(node, filter_files, skip_namespaces, namespace)
INTERESTING_NODE_KINDS = {
CursorKind.CLASS_DECL: "class",
CursorKind.CLASS_TEMPLATE: "class",
CursorKind.ENUM_DECL: "enum",
CursorKind.ENUM_CONSTANT_DECL: "enum_constant",
CursorKind.FIELD_DECL: "variable",
CursorKind.PARM_DECL: "variable",
CursorKind.STRUCT_DECL: "struct",
CursorKind.UNION_DECL: "union",
CursorKind.VAR_DECL: "variable",
CursorKind.FUNCTION_DECL: "function",
}
def is_array_type(typ):
return typ.kind in (TypeKind.CONSTANTARRAY, TypeKind.DEPENDENTSIZEDARRAY, TypeKind.INCOMPLETEARRAY)
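# Map a clang type to the Hungarian-style prefix used in DDNet names:
# "p" = pointer, "a" = array, "fn" = function, "" = plain value.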
def get_complex_type(typ):
if typ.spelling in ("IOHANDLE", "LOCK"):
return ""
if typ.kind == TypeKind.AUTO:
return get_complex_type(typ.get_canonical())
if typ.kind == TypeKind.LVALUEREFERENCE:
return get_complex_type(typ.get_pointee())
if typ.kind == TypeKind.POINTER:
return "p" + get_complex_type(typ.get_pointee())
    if is_array_type(typ):
return "a" + get_complex_type(typ.element_type)
if typ.kind == TypeKind.FUNCTIONPROTO:
return "fn"
if typ.kind == TypeKind.TYPEDEF:
return get_complex_type(typ.get_declaration().underlying_typedef_type)
if typ.kind == TypeKind.ELABORATED:
return get_complex_type(typ.get_named_type())
if typ.kind in (TypeKind.UNEXPOSED, TypeKind.RECORD):
if typ.get_declaration().spelling in "shared_ptr unique_ptr".split():
return "p" + get_complex_type(typ.get_template_argument_type(0))
if typ.get_declaration().spelling in "array sorted_array".split():
return "a" + get_complex_type(typ.get_template_argument_type(0))
return ""
def is_static_member_definition_hack(node):
last_colons = False
for t in node.get_tokens():
t = t.spelling
if t == "::":
last_colons = True
elif last_colons:
if t.startswith("ms_"):
return True
last_colons = False
if t == "=":
return False
return False
def is_const(typ):
if typ.is_const_qualified():
return True
    if is_array_type(typ):
return is_const(typ.element_type)
return False
class ParseError(RuntimeError):
pass
def process_source_file(out, file, extra_args, break_on):
args = extra_args + ["-Isrc"]
if file.endswith(".c"):
header = f"{file[:-2]}.h"
elif file.endswith(".cpp"):
header = f"{file[:-4]}.h"
else:
raise ValueError(f"unrecognized source file: {file}")
index = clang.cindex.Index.create()
unit = index.parse(file, args=args)
errors = list(unit.diagnostics)
if errors:
for error in errors:
print(f"{file}: {error.format()}", file=sys.stderr)
print(args, file=sys.stderr)
raise ParseError(f"failed parsing {file}")
filter_files = frozenset([file, header])
for namespace, node in traverse_namespaced(unit.cursor, filter_files=filter_files):
cur_file = None
if node.location.file is not None:
cur_file = node.location.file.name
if cur_file is None or cur_file not in (file, header):
continue
if node.kind in INTERESTING_NODE_KINDS and node.spelling:
typ = get_complex_type(node.type)
qualifiers = ""
if INTERESTING_NODE_KINDS[node.kind] in {"variable", "function"}:
is_member = node.semantic_parent.kind in {CursorKind.CLASS_DECL, CursorKind.CLASS_TEMPLATE, CursorKind.STRUCT_DECL, CursorKind.UNION_DECL}
is_static = node.storage_class == StorageClass.STATIC or is_static_member_definition_hack(node)
if is_static:
qualifiers = "s" + qualifiers
if is_member:
qualifiers = "m" + qualifiers
if is_static and not is_member and is_const(node.type):
qualifiers = "c" + qualifiers
if node.linkage == LinkageKind.EXTERNAL and not is_member:
qualifiers = "g" + qualifiers
out.writerow({
"file": cur_file,
"line": node.location.line,
"column": node.location.column,
"kind": INTERESTING_NODE_KINDS[node.kind],
"path": "::".join(namespace),
"qualifiers": qualifiers,
"type": typ,
"name": node.spelling,
})
if node.spelling == break_on:
breakpoint() # pylint: disable=forgotten-debug-statement
def main():
p = argparse.ArgumentParser(description="Extracts identifier data from a Teeworlds source file and its header, outputting the data as CSV to stdout")
p.add_argument("file", metavar="FILE", nargs="+", help="Source file to analyze")
p.add_argument("--break-on", help="Break on a specific variable name, useful to debug issues with the script")
args = p.parse_args()
extra_args = []
if "CXXFLAGS" in os.environ:
extra_args = os.environ["CXXFLAGS"].split()
out = csv.DictWriter(sys.stdout, "file line column kind path qualifiers type name".split())
out.writeheader()
files = args.file
if len(files) > 1:
files = tqdm(files, leave=False)
error = False
for file in files:
try:
process_source_file(out, file, extra_args, args.break_on)
except ParseError:
error = True
return int(error)
if __name__ == "__main__":
sys.exit(main())

63
scripts/fix_style.py Executable file

@ -0,0 +1,63 @@
#!/usr/bin/env python3
import os
import subprocess
import sys
import argparse
os.chdir(os.path.dirname(__file__) + "/..")
def recursive_file_list(path):
result = []
for dirpath, _, filenames in os.walk(path):
result += [os.path.join(dirpath, filename) for filename in filenames]
return result
IGNORE_FILES = [
"src/engine/client/keynames.h",
"src/engine/keys.h",
]
def filter_ignored(filenames):
return [filename for filename in filenames
if filename not in IGNORE_FILES
and not filename.startswith("src/game/generated/")
and not filename.startswith("src/rust-bridge")]
def filter_cpp(filenames):
return [filename for filename in filenames
if any(filename.endswith(ext) for ext in ".c .cpp .h".split())]
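# Look for a clang-format binary on PATH and in a static fallback location,
# accepting it only if --version reports the requested major version.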
def find_clang_format(version):
for binary in (
"clang-format",
f"clang-format-{version}",
f"/opt/clang-format-static/clang-format-{version}"):
try:
out = subprocess.check_output([binary, "--version"])
except FileNotFoundError:
continue
if f"clang-format version {version}." in out.decode("utf-8"):
return binary
print(f"Found no clang-format {version}")
sys.exit(-1)
clang_format_bin = find_clang_format(10)
def reformat(filenames):
subprocess.check_call([clang_format_bin, "-i"] + filenames)
def warn(filenames):
return subprocess.call([clang_format_bin, "-Werror", "--dry-run"] + filenames)
def main():
p = argparse.ArgumentParser(description="Check and fix style of changed files")
p.add_argument("-n", "--dry-run", action="store_true", help="Don't fix, only warn")
args = p.parse_args()
filenames = filter_ignored(filter_cpp(recursive_file_list("src")))
if not args.dry_run:
reformat(filenames)
else:
sys.exit(warn(filenames))
if __name__ == "__main__":
main()

131
scripts/gen_keys.py Normal file

@ -0,0 +1,131 @@
# pylint: skip-file
# generate keys.h file
f = open("src/engine/keys.h", "w")
keynames = []
for i in range(0, 512):
keynames += [f"&{int(i)}"]
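# keynames starts out with placeholder names ("&0" .. "&511") which are
# overwritten below for every key that has a proper name.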
print("#ifndef ENGINE_KEYS_H", file=f)
print("#define ENGINE_KEYS_H", file=f)
# KEY_EXECUTE already exists on windows platforms
print("#if defined(CONF_FAMILY_WINDOWS)", file=f)
print(" #undef KEY_EXECUTE", file=f)
print("#endif", file=f)
print('/* AUTO GENERATED! DO NOT EDIT MANUALLY! */', file=f)
print("enum", file=f)
print("{", file=f)
print("\tKEY_FIRST = 0,", file=f)
highestid = 0
for line in open("scripts/SDL_scancode.h"):
l = line.strip().split("=")
if len(l) == 2 and "SDL_SCANCODE_" in line:
key = l[0].strip().replace("SDL_SCANCODE_", "KEY_")
if key[0:2] == "/*":
continue
value = int(l[1].split(",")[0].strip())
print(f"\t{key} = {int(value)},", file=f)
keynames[value] = key.replace("KEY_", "").lower()
if value > highestid:
highestid = value
highestid += 1
print("", file=f)
print(f"\tKEY_MOUSE_1 = {int(highestid)},", file=f); keynames[highestid] = "mouse1"; highestid += 1
print(f"\tKEY_MOUSE_2 = {int(highestid)},", file=f); keynames[highestid] = "mouse2"; highestid += 1
print(f"\tKEY_MOUSE_3 = {int(highestid)},", file=f); keynames[highestid] = "mouse3"; highestid += 1
print(f"\tKEY_MOUSE_4 = {int(highestid)},", file=f); keynames[highestid] = "mouse4"; highestid += 1
print(f"\tKEY_MOUSE_5 = {int(highestid)},", file=f); keynames[highestid] = "mouse5"; highestid += 1
print(f"\tKEY_MOUSE_6 = {int(highestid)},", file=f); keynames[highestid] = "mouse6"; highestid += 1
print(f"\tKEY_MOUSE_7 = {int(highestid)},", file=f); keynames[highestid] = "mouse7"; highestid += 1
print(f"\tKEY_MOUSE_8 = {int(highestid)},", file=f); keynames[highestid] = "mouse8"; highestid += 1
print(f"\tKEY_MOUSE_9 = {int(highestid)},", file=f); keynames[highestid] = "mouse9"; highestid += 1
print(f"\tKEY_MOUSE_WHEEL_UP = {int(highestid)},", file=f); keynames[highestid] = "mousewheelup"; highestid += 1
print(f"\tKEY_MOUSE_WHEEL_DOWN = {int(highestid)},", file=f); keynames[highestid] = "mousewheeldown"; highestid += 1
print(f"\tKEY_MOUSE_WHEEL_LEFT = {int(highestid)},", file=f); keynames[highestid] = "mousewheelleft"; highestid += 1
print(f"\tKEY_MOUSE_WHEEL_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "mousewheelright"; highestid += 1
print("", file=f)
print(f"\tKEY_JOYSTICK_BUTTON_0 = {int(highestid)},", file=f); keynames[highestid] = "joystick0"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_1 = {int(highestid)},", file=f); keynames[highestid] = "joystick1"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_2 = {int(highestid)},", file=f); keynames[highestid] = "joystick2"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_3 = {int(highestid)},", file=f); keynames[highestid] = "joystick3"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_4 = {int(highestid)},", file=f); keynames[highestid] = "joystick4"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_5 = {int(highestid)},", file=f); keynames[highestid] = "joystick5"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_6 = {int(highestid)},", file=f); keynames[highestid] = "joystick6"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_7 = {int(highestid)},", file=f); keynames[highestid] = "joystick7"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_8 = {int(highestid)},", file=f); keynames[highestid] = "joystick8"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_9 = {int(highestid)},", file=f); keynames[highestid] = "joystick9"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_10 = {int(highestid)},", file=f); keynames[highestid] = "joystick10"; highestid += 1
print(f"\tKEY_JOYSTICK_BUTTON_11 = {int(highestid)},", file=f); keynames[highestid] = "joystick11"; highestid += 1
print("", file=f)
print(f"\tKEY_JOY_HAT0_UP = {int(highestid)},", file=f); keynames[highestid] = "joy_hat0_up"; highestid += 1
print(f"\tKEY_JOY_HAT0_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_hat0_left"; highestid += 1
print(f"\tKEY_JOY_HAT0_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_hat0_right"; highestid += 1
print(f"\tKEY_JOY_HAT0_DOWN = {int(highestid)},", file=f); keynames[highestid] = "joy_hat0_down"; highestid += 1
print(f"\tKEY_JOY_HAT1_UP = {int(highestid)},", file=f); keynames[highestid] = "joy_hat1_up"; highestid += 1
print(f"\tKEY_JOY_HAT1_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_hat1_left"; highestid += 1
print(f"\tKEY_JOY_HAT1_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_hat1_right"; highestid += 1
print(f"\tKEY_JOY_HAT1_DOWN = {int(highestid)},", file=f); keynames[highestid] = "joy_hat1_down"; highestid += 1
print("", file=f)
print(f"\tKEY_JOY_AXIS_0_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis0_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_0_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis0_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_1_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis1_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_1_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis1_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_2_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis2_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_2_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis2_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_3_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis3_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_3_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis3_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_4_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis4_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_4_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis4_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_5_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis5_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_5_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis5_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_6_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis6_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_6_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis6_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_7_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis7_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_7_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis7_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_8_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis8_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_8_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis8_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_9_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis9_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_9_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis9_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_10_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis10_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_10_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis10_right"; highestid += 1
print(f"\tKEY_JOY_AXIS_11_LEFT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis11_left"; highestid += 1
print(f"\tKEY_JOY_AXIS_11_RIGHT = {int(highestid)},", file=f); keynames[highestid] = "joy_axis11_right"; highestid += 1
print("", file=f)
print("\tKEY_LAST = 512,", file=f)
print("", file=f)
print("\tNUM_JOYSTICK_BUTTONS = KEY_JOYSTICK_BUTTON_11 - KEY_JOYSTICK_BUTTON_0 + 1,", file=f)
print("\tNUM_JOYSTICK_AXES_BUTTONS = KEY_JOY_AXIS_11_RIGHT - KEY_JOY_AXIS_0_LEFT + 1,", file=f)
print("\tNUM_JOYSTICK_BUTTONS_PER_AXIS = KEY_JOY_AXIS_0_RIGHT - KEY_JOY_AXIS_0_LEFT + 1,", file=f)
print("\tNUM_JOYSTICK_AXES = NUM_JOYSTICK_AXES_BUTTONS / NUM_JOYSTICK_BUTTONS_PER_AXIS,", file=f)
print("\tNUM_JOYSTICK_HAT_BUTTONS = KEY_JOY_HAT1_DOWN - KEY_JOY_HAT0_UP + 1,", file=f)
print("\tNUM_JOYSTICK_BUTTONS_PER_HAT = KEY_JOY_HAT1_DOWN - KEY_JOY_HAT1_UP + 1,", file=f)
print("\tNUM_JOYSTICK_HATS = NUM_JOYSTICK_HAT_BUTTONS / NUM_JOYSTICK_BUTTONS_PER_HAT,", file=f)
print("};", file=f)
print("", file=f)
print("#endif", file=f)
f.close()
# generate keynames.h file
f = open("src/engine/client/keynames.h", "w")
print('/* AUTO GENERATED! DO NOT EDIT MANUALLY! */', file=f)
print('', file=f)
print('#ifndef KEYS_INCLUDE', file=f)
print('#error do not include this header!', file=f)
print('#endif', file=f)
print('', file=f)
print("const char g_aaKeyStrings[512][20] = // NOLINT(misc-definitions-in-headers)", file=f)
print("{", file=f)
for n in keynames:
print(f'\t"{n}",', file=f)
print("};", file=f)
print("", file=f)
f.close()


@ -0,0 +1,74 @@
#!/usr/bin/env python3
import argparse
import os
import re
import subprocess
import tempfile
os.chdir(os.path.dirname(__file__) + "/..")
PATH = "src/"
CURL_RE=re.compile(r"\bcurl_\w*")
def get_curl_calls(path):
names = set()
for directory, _, files in os.walk(path):
for filename in files:
if (filename.endswith(".cpp") or
filename.endswith(".c") or
filename.endswith(".h")):
with open(os.path.join(directory, filename), encoding="utf-8") as f:
contents = f.read()
names = names.union(CURL_RE.findall(contents))
return names
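# The generated assembly only declares each symbol as a global function label
# with no body; that is enough for the linker to resolve the symbols, since
# the real libcurl is loaded at runtime anyway.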
def assembly_source(names):
names = sorted(names)
result = []
for name in names:
result.append(f".type {name},@function")
for name in names:
result.append(f".global {name}")
for name in names:
result.append(f"{name}:")
return "\n".join(result + [""])
DEFAULT_OUTPUT="libcurl.so"
DEFAULT_SONAME="libcurl.so.4"
def main():
p = argparse.ArgumentParser(description="Create a stub shared object for linking")
p.add_argument("-k", "--keep", action="store_true", help="Keep the intermediary assembly file")
p.add_argument("--output", help=f"Output filename (default: {DEFAULT_OUTPUT})", default=DEFAULT_OUTPUT)
p.add_argument("--soname", help=f"soname of the produced shared object (default: {DEFAULT_SONAME})", default=DEFAULT_SONAME)
p.add_argument("--functions", metavar="FUNCTION", nargs="+", help="Function symbols that should be put into the shared object (default: look for curl_* names in the source code)")
p.add_argument("--link-args", help="Colon-separated list of additional linking arguments")
args = p.parse_args()
if args.functions is not None:
functions = args.functions
else:
functions = get_curl_calls(PATH)
extra_link_args = []
if args.link_args:
extra_link_args = args.link_args.split(":")
with tempfile.NamedTemporaryFile("w", suffix=".s", delete=not args.keep) as f:
if args.keep:
print(f"using {f.name} as temporary file")
f.write(assembly_source(functions))
f.flush()
subprocess.check_call([
"cc",
] + extra_link_args + [
"-shared",
"-nostdlib", # don't need to link to libc
f"-Wl,-soname,{args.soname}",
"-o", args.output,
f.name,
])
subprocess.check_call(["strip", args.output])
if __name__ == '__main__':
main()

135
scripts/generate_unicode_confusables_data.py Normal file

@ -0,0 +1,135 @@
# Needs UnicodeData.txt and confusables.txt in the current directory.
#
# Those can be obtained from unicode.org:
# - http://www.unicode.org/Public/security/<VERSION>/confusables.txt
# - http://www.unicode.org/Public/<VERSION>/ucd/UnicodeData.txt
#
# If executed as a script, it will generate the contents of the files
# python3 scripts/generate_unicode_confusables_data.py header > `src/base/unicode/confusables.h`,
# python3 scripts/generate_unicode_confusables_data.py data > `src/base/unicode/confusables_data.h`.
import sys
import unicode
def generate_decompositions():
ud = unicode.data()
con = unicode.confusables()
def category(x):
return {unicode.unhex(u["Value"]) for u in ud if u["General_Category"].startswith(x)}
# TODO: Is this correct? They changed the decompositioning format
nfd = {unicode.unhex(u["Value"]): unicode.unhex_sequence(u["Decomposition_Type"]) for u in ud}
nfd = {k: v for k, v in nfd.items() if v}
con = {unicode.unhex(c["Value"]): unicode.unhex_sequence(c["Target"]) for c in con}
# C: Control
# M: Combining
# Z: Space
ignore = category("C") | category("M") | category("Z")
con[0x006C] = [0x0069] # LATIN SMALL LETTER L -> LATIN SMALL LETTER I
con[0x00A1] = [0x0069] # INVERTED EXCLAMATION MARK -> LATIN SMALL LETTER I
con[0x2800] = [] # BRAILLE PATTERN BLANK
con[0xFFFC] = [] # OBJECT REPLACEMENT CHARACTER
interesting = ignore | set(nfd) | set(con)
def apply(l, replacements):
return [d for c in l for d in replacements.get(c, [c])]
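# Expand a character by alternately applying canonical decomposition (nfd)
# and confusable mapping (con) until a fixpoint is reached, then drop
# ignorable (control/combining/space) characters.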
def gen(c):
result = [c]
while True:
first = apply(result, nfd)
second = apply(first, con)
# Apply substitutions until convergence.
if result == first and result == second:
break
result = second
return [c for c in result if c not in ignore]
return {c: gen(c) for c in interesting}
def gen_header(decompositions, len_set):
print("""\
#include <cstdint>
struct DECOMP_SLICE
{
\tuint16_t offset : 13;
\tuint16_t length : 3;
};
""")
print("enum")
print("{")
print(f"\tNUM_DECOMP_LENGTHS = {len(len_set)},")
print(f"\tNUM_DECOMPS = {len(decompositions)},")
print("};")
print()
print("extern const uint8_t decomp_lengths[NUM_DECOMP_LENGTHS];")
print("extern const int32_t decomp_chars[NUM_DECOMPS];")
print("extern const struct DECOMP_SLICE decomp_slices[NUM_DECOMPS];")
print("extern const int32_t decomp_data[];")
def gen_data(decompositions, decomposition_set, decomposition_offsets, len_set):
print("""\
#ifndef CONFUSABLES_DATA
#error "This file should only be included in `confusables.cpp`"
#endif
""")
print("const uint8_t decomp_lengths[NUM_DECOMP_LENGTHS] = {")
for l in len_set:
print(f"\t{l},")
print("};")
print()
print("const int32_t decomp_chars[NUM_DECOMPS] = {")
for k in sorted(decompositions):
print(f"\t0x{k:x},")
print("};")
print()
print("const struct DECOMP_SLICE decomp_slices[NUM_DECOMPS] = {")
for k in sorted(decompositions):
d = decompositions[k]
i = decomposition_set.index(tuple(d))
l = len_set.index(len(d))
print(f"\t{{{decomposition_offsets[i]}, {l}}},")
print("};")
print()
print("const int32_t decomp_data[] = {")
for d in decomposition_set:
for c in d:
print(f"\t0x{c:x},")
print("};")
def main():
decompositions = generate_decompositions()
# Deduplicate
decomposition_set = sorted(set(tuple(x) for x in decompositions.values()))
len_set = sorted(set(len(x) for x in decomposition_set))
if len(len_set) > 8:
raise ValueError("Can't pack offset (13 bit) together with len (>3bit)")
cur_offset = 0
decomposition_offsets = []
for d in decomposition_set:
decomposition_offsets.append(cur_offset)
cur_offset += len(d)
header = "header" in sys.argv
data = "data" in sys.argv
if header:
gen_header(decompositions, len_set)
elif data:
gen_data(decompositions, decomposition_set, decomposition_offsets, len_set)
if __name__ == '__main__':
main()

57
scripts/generate_unicode_tolower.py Normal file

@ -0,0 +1,57 @@
# Needs UnicodeData.txt in the current directory.
#
# It can be obtained from unicode.org:
# - http://www.unicode.org/Public/<VERSION>/ucd/UnicodeData.txt
#
# If executed as a script, it will generate the contents of the files
# python3 scripts/generate_unicode_tolower.py header > `src/base/unicode/tolower.h`,
# python3 scripts/generate_unicode_tolower.py data > `src/base/unicode/tolower_data.h`.
import sys
import unicode
def generate_cases():
ud = unicode.data()
return [(unicode.unhex(u["Value"]), unicode.unhex(u["Simple_Lowercase_Mapping"])) for u in ud if u["Simple_Lowercase_Mapping"]]
def gen_header(cases):
print(f"""\
#include <cstdint>
struct UPPER_LOWER
{{
\tint32_t upper;
\tint32_t lower;
}};
enum
{{
\tNUM_TOLOWER = {len(cases)},
}};
extern const struct UPPER_LOWER tolowermap[];""")
def gen_data(cases):
print("""\
#ifndef TOLOWER_DATA
#error "This file must only be included in `tolower.cpp`"
#endif
const struct UPPER_LOWER tolowermap[] = {""")
for upper_code, lower_code in cases:
print(f"\t{{{upper_code}, {lower_code}}},")
print("};")
def main():
cases = generate_cases()
header = "header" in sys.argv
data = "data" in sys.argv
if header:
gen_header(cases)
elif data:
gen_data(cases)
if __name__ == '__main__':
main()

12
scripts/git_revision.py Normal file

@ -0,0 +1,12 @@
import os
import subprocess
git_hash = os.environ.get("DDNET_GIT_SHORTREV_HASH")
try:
git_hash = git_hash or subprocess.check_output(["git", "rev-parse", "--short=16", "HEAD"], stderr=subprocess.DEVNULL).decode().strip()
except (FileNotFoundError, subprocess.CalledProcessError):
pass
if git_hash is not None:
definition = f'"{git_hash}"'
else:
definition = "0"
print(f"const char *GIT_SHORTREV_HASH = {definition};")

85
scripts/hash_passwords.py Normal file

@ -0,0 +1,85 @@
import argparse
import tempfile
import binascii
import hashlib
import os
import re
import sys
AUTH_ADD_REGEX=re.compile(r'^\s*auth_add\s+(?P<username>"[^"]*"|[^"\s]+)\s+(?P<level>"[^"]*"|[^"\s]+)\s+(?P<password>"[^"]*"|[^"\s]+)\s*$')
AUTH_ADD_PRESENT_REGEX=re.compile(r'(^|\W)auth_add($|\W)')
def hash_password(password):
salt = os.urandom(8)
h = hashlib.md5()
h.update(password.encode())
h.update(salt)
return h.hexdigest(), binascii.hexlify(salt).decode('ascii')
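# The hash is MD5 over the password bytes followed by a random 8-byte salt;
# digest and salt are both emitted hex-encoded, as expected by auth_add_p.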
def auth_add_p_line(username, level, pwhash, salt):
if level not in ('admin', 'mod', 'moderator', 'helper'):
print(f"Warning: level ({level}) not one of admin, mod or helper.", file=sys.stderr)
if repr(level) != f"'{level}'":
print(f"Warning: level ({level}) contains weird symbols, config line is possibly malformed.", file=sys.stderr)
if repr(username) != f"'{username}'":
print(f"Warning: username ({username}) contains weird symbols, config line is possibly malformed.", file=sys.stderr)
username = username.replace('"', '\\"')
if ' ' in username or ';' in username:
username = f'"{username}"'
return f"auth_add_p {username} {level} {pwhash} {salt}"
def auth_add_p_line_from_pw(username, level, password):
if len(password) < 8:
print("Warning: password too short for a long-term password.", file=sys.stderr)
pwhash, salt = hash_password(password)
return auth_add_p_line(username, level, pwhash, salt)
def parse_line(line):
m = AUTH_ADD_REGEX.match(line)
if not m:
if AUTH_ADD_PRESENT_REGEX.search(line):
print("Warning: Funny-looking line with 'auth_add', not touching it:")
print(line, end="")
return None
password = m.group('password')
if password.startswith('"'):
password = password[1:-1] # Strip quotes.
return m.group('username'), m.group('level'), password
def main():
p = argparse.ArgumentParser(description="Hash passwords in a way suitable for DDNet configs.")
p.add_argument('--new', '-n', nargs=3, metavar=("USERNAME", "LEVEL", "PASSWORD"), action='append', default=[], help="username, level and password of the new user")
p.add_argument('config', nargs='?', metavar="CONFIG", help="config file to update.")
args = p.parse_args()
if not args.new and args.config is None:
p.error("expected at least one of --new and CONFIG")
use_stdio = args.config is None or args.config == '-'
if use_stdio:
if args.config is None:
input_file = open(os.devnull, encoding="utf-8")
else:
input_file = sys.stdin
output_file = sys.stdout
else:
input_file = open(args.config, encoding="utf-8") # pylint: disable=consider-using-with
output_file = tempfile.NamedTemporaryFile('w', dir=os.getcwd(), prefix=f"{args.config}.", delete=False) # pylint: disable=consider-using-with
for line in input_file:
parsed = parse_line(line)
if parsed is None:
print(line, end="", file=output_file)
else:
print(auth_add_p_line_from_pw(*parsed), file=output_file)
for auth_tuple in args.new:
print(auth_add_p_line_from_pw(*auth_tuple), file=output_file)
if not use_stdio:
input_file.close()
output_filename = output_file.name
output_file.close()
os.rename(output_filename, args.config)
if __name__ == '__main__':
main()

84
scripts/import_file_score.py Executable file

@ -0,0 +1,84 @@
#!/usr/bin/env python3
from collections import namedtuple
from decimal import Decimal
import argparse
import os.path
import re
import sqlite3
import sys
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
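# A _record.dtb text database stores one record per three lines: player name,
# total time, and 25 space-separated checkpoint times with a trailing space.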
class Record(namedtuple('Record', 'name time checkpoints')):
@staticmethod
def parse(lines):
if len(lines) != 3:
raise ValueError("wrong amount of lines for record")
name = lines[0]
time = Decimal(lines[1])
checkpoints_str = lines[2].split(' ')
if len(checkpoints_str) != 26 or checkpoints_str[25] != "":
raise ValueError(f"wrong amount of checkpoint times: {len(checkpoints_str)}")
checkpoints_str = checkpoints_str[:25]
checkpoints = tuple(Decimal(c) for c in checkpoints_str)
return Record(name=name, time=time, checkpoints=checkpoints)
def unparse(self):
return "\n".join([self.name, str(self.time), " ".join([str(cp) for cp in self.checkpoints] + [""]), ""])
def read_records(file):
contents = file.read().splitlines()
return [Record.parse(c) for c in chunks(contents, 3)]
MAP_RE=re.compile(r"^(?P<map>.*)_record\.dtb$")
def main():
p = argparse.ArgumentParser(description="Merge multiple DDNet race database files", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
p.add_argument("--out", default="ddnet-server.sqlite", help="Output SQLite database")
p.add_argument("in_", metavar="IN", nargs='+', help="Text score databases to import; must have the format MAPNAME_record.dtb")
p.add_argument("--dry-run", "-n", action='store_true', help="Don't write out the resulting SQLite database")
p.add_argument("--stats", action='store_true', help="Display some stats at the end of the import process")
args = p.parse_args()
records = {}
for in_ in args.in_:
match = MAP_RE.match(os.path.basename(in_))
if not match:
raise ValueError(f"Invalid text score database name, does not end in '_record.dtb': {in_}")
m = match.group("map")
if m in records:
raise ValueError(f"Two text score databases refer to the same map: {in_}")
with open(in_, encoding="utf-8") as f:
records[m] = read_records(f)
if not args.dry_run:
conn = sqlite3.connect(args.out)
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS record_race ("
"Map VARCHAR(128) COLLATE BINARY NOT NULL, "
"Name VARCHAR(16) COLLATE BINARY NOT NULL, "
"Timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, "
"Time FLOAT DEFAULT 0, "
"Server CHAR(4), " +
"".join(f"cp{i + 1} FLOAT DEFAULT 0, " for i in range(25)) +
"GameID VARCHAR(64), "
"DDNet7 BOOL DEFAULT FALSE"
");")
c.executemany(
"INSERT INTO record_race (Map, Name, Time, Server, " +
"".join(f"cp{i + 1}, " for i in range(25)) +
"GameID, DDNet7) " +
f"VALUES ({','.join('?' * 31)})",
[(map, r.name, float(r.time), "TEXT", *[float(c) for c in r.checkpoints], None, False) for map, record in records.items() for r in record]
)
conn.commit()
conn.close()
if args.stats:
print(f"Number of imported text databases: {len(records)}", file=sys.stderr)
print(f"Number of imported ranks: {sum(len(r) for r in records.values())}", file=sys.stderr)
if __name__ == '__main__':
sys.exit(main())

373
scripts/integration_test.sh Executable file

@ -0,0 +1,373 @@
#!/bin/bash
arg_verbose=0
arg_valgrind_memcheck=0
for arg in "$@"
do
if [ "$arg" == "-h" ] || [ "$arg" == "--help" ]
then
echo "usage: $(basename "$0") [OPTION..] [build dir]"
echo "description:"
echo " Runs a simple integration test of the client and server"
echo " binaries from the given build dir"
echo "options:"
echo " --help|-h show this help"
echo " --verbose|-v verbose output"
echo " --valgrind-memcheck use valgrind's memcheck to run server and client"
exit 0
elif [ "$arg" == "-v" ] || [ "$arg" == "--verbose" ]
then
arg_verbose=1
elif [ "$arg" == "--valgrind-memcheck" ]
then
arg_valgrind_memcheck=1
else
echo "Error: unknown arg '$arg'"
exit 1
fi
done
if [ ! -f DDNet ]
then
echo "Error: client binary not found DDNet' not found"
exit 1
fi
if [ ! -f DDNet-Server ]
then
echo "Error: server binary not found DDNet-Server' not found"
exit 1
fi
got_killed=0
function kill_all() {
# needed so that the fifos do not hang when Ctrl+C is pressed again
if [ "$got_killed" == "1" ]
then
exit
fi
got_killed=1
if [ "$arg_verbose" == "1" ]
then
echo "[*] shutting down test clients and server ..."
fi
sleep 1
if [[ ! -f fail_server.txt ]]
then
echo "[*] shutting down server"
echo "shutdown" > server.fifo
fi
local i
for ((i=1;i<3;i++))
do
if [[ ! -f fail_client$i.txt ]]
then
echo "[*] shutting down client$i"
echo "quit" > "client$i.fifo"
fi
done
}
function cleanup() {
kill_all
}
trap cleanup EXIT
function fail()
{
sleep 1
tail -n2 "$1".log > fail_"$1".txt
echo "$1 exited with code $2" >> fail_"$1".txt
echo "[-] $1 exited with code $2"
}
# TODO: check for open ports instead
port=17822
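# LeakSanitizer is not supported on macOS, so leak detection is only enabled on other platforms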
if [[ $OSTYPE == 'darwin'* ]]; then
DETECT_LEAKS=0
else
DETECT_LEAKS=1
fi
export UBSAN_OPTIONS=suppressions=../ubsan.supp:log_path=./SAN:print_stacktrace=1:halt_on_errors=0
export ASAN_OPTIONS=log_path=./SAN:print_stacktrace=1:check_initialization_order=1:detect_leaks=$DETECT_LEAKS:halt_on_errors=0
export LSAN_OPTIONS=suppressions=../lsan.supp:print_suppressions=0
function print_results() {
if [ "$arg_valgrind_memcheck" == "1" ]; then
if grep "ERROR SUMMARY" server.log client1.log client2.log | grep -q -v "ERROR SUMMARY: 0"; then
grep "^==" server.log client1.log client2.log
return 1
fi
else
if test -n "$(find . -maxdepth 1 -name 'SAN.*' -print -quit)"
then
cat SAN.*
return 1
fi
fi
return 0
}
rm -rf integration_test
mkdir -p integration_test/data/maps
cp data/maps/coverage.map integration_test/data/maps
cp data/maps/Tutorial.map integration_test/data/maps
cd integration_test || exit 1
{
echo $'add_path $CURRENTDIR'
echo $'add_path $USERDIR'
echo $'add_path $DATADIR'
echo $'add_path ../data'
} > storage.cfg
if [ "$arg_valgrind_memcheck" == "1" ]; then
tool="valgrind --tool=memcheck --gen-suppressions=all --suppressions=../memcheck.supp --track-origins=yes"
client_args="cl_menu_map \"\";"
else
tool=""
client_args=""
fi
function wait_for_fifo() {
local fifo="$1"
local tries="$2"
local fails=0
# give the client time to launch and create the fifo file
# but assume after X secs that the client crashed before
# being able to create the file
while [[ ! -p "$fifo" ]]
do
fails="$((fails+1))"
if [ "$arg_verbose" == "1" ]
then
echo "[!] client fifos not found (attempts $fails/$tries)"
fi
if [ "$fails" -gt "$tries" ]
then
print_results
echo "[-] Error: client possibly crashed on launch"
exit 1
fi
sleep 1
done
}
echo "[*] launch server"
$tool ../DDNet-Server \
"sv_input_fifo server.fifo;
sv_rcon_password rcon;
sv_map coverage;
sv_sqlite_file ddnet-server.sqlite;
logfile server.log;
sv_register 0;
sv_port $port" > stdout_server.txt 2> stderr_server.txt || fail server "$?" &
echo "[*] launch client 1"
$tool ../DDNet \
"cl_input_fifo client1.fifo;
player_name client1;
cl_download_skins 0;
gfx_fullscreen 0;
logfile client1.log;
$client_args
connect localhost:$port" > stdout_client1.txt 2> stderr_client1.txt || fail client1 "$?" &
if [ "$arg_valgrind_memcheck" == "1" ]; then
wait_for_fifo client1.fifo 120
sleep 20
else
wait_for_fifo client1.fifo 50
sleep 1
fi
echo "[*] start demo recording"
echo "record server" > server.fifo
echo "record client1" > client1.fifo
sleep 1
echo "[*] launch client 2"
$tool ../DDNet \
"cl_input_fifo client2.fifo;
player_name client2;
cl_download_skins 0;
gfx_fullscreen 0;
logfile client2.log;
$client_args
connect localhost:$port" > stdout_client2.txt 2> stderr_client2.txt || fail client2 "$?" &
if [ "$arg_valgrind_memcheck" == "1" ]; then
wait_for_fifo client2.fifo 120
sleep 20
else
wait_for_fifo client2.fifo 50
sleep 2
fi
echo "[*] test chat and chat commands"
echo "say hello world" > client1.fifo
echo "rcon_auth rcon" > client1.fifo
sleep 1
tr -d '\n' > client1.fifo << EOF
say "/mc
;top5
;rank
;team 512
;emote happy -999
;pause
;points
;mapinfo
;list
;whisper client2 hi
;kill
;settings cheats
;timeout 123
;timer broadcast
;cmdlist
;saytime"
EOF
sleep 1
echo "[*] test rcon commands"
tr -d '\n' > client1.fifo << EOF
rcon say hello from admin;
rcon broadcast test;
rcon status;
rcon echo test;
muteid 1 900 spam;
unban_all;
EOF
sleep 1
echo "[*] stop demo recording"
echo "stoprecord" > server.fifo
echo "stoprecord" > client1.fifo
sleep 1
echo "[*] test map change"
echo "rcon sv_map Tutorial" > client1.fifo
if [ "$arg_valgrind_memcheck" == "1" ]; then
sleep 30
else
sleep 15
fi
echo "[*] play demos"
echo "play demos/server.demo" > client1.fifo
echo "play demos/client1.demo" > client2.fifo
if [ "$arg_valgrind_memcheck" == "1" ]; then
sleep 20
else
sleep 5
fi
# Kill all processes first so all outputs are fully written
kill_all
wait
sleep 1
if ! grep -qE '^[0-9]{4}-[0-9]{2}-[0-9]{2} ([0-9]{2}:){2}[0-9]{2} I chat: 0:-2:client1: hello world$' server.log
then
touch fail_chat.txt
echo "[-] Error: chat message not found in server log"
fi
if ! grep -q 'cmdlist' client1.log || \
! grep -q 'pause' client1.log || \
! grep -q 'rank' client1.log || \
! grep -q 'points' client1.log
then
touch fail_chatcommand.txt
echo "[-] Error: did not find output of /cmdlist command"
fi
if ! grep -q "hello from admin" server.log
then
touch fail_rcon.txt
echo "[-] Error: admin message not found in server log"
fi
if ! grep -q "demo_player: Stopped playback" client1.log
then
touch fail_demo_server.txt
echo "[-] Error: demo playback of server demo in client 1 was not started/finished"
fi
if ! grep -q "demo_player: Stopped playback" client2.log
then
touch fail_demo_client.txt
echo "[-] Error: demo playback of client demo in client 2 was not started/finished"
fi
ranks="$(sqlite3 ddnet-server.sqlite < <(echo "select * from record_race;"))"
num_ranks="$(echo "$ranks" | wc -l | xargs)"
if [ "$ranks" == "" ]
then
touch fail_ranks.txt
echo "[-] Error: no ranks found in database"
elif [ "$num_ranks" != "1" ]
then
touch fail_ranks.txt
echo "[-] Error: expected 1 rank got $num_ranks"
elif ! echo "$ranks" | grep -q client1
then
touch fail_ranks.txt
echo "[-] Error: expected a rank from client1 instead got:"
echo " $ranks"
fi
for logfile in client1.log client2.log server.log
do
if [ "$arg_valgrind_memcheck" == "1" ]
then
break
fi
if [ ! -f "$logfile" ]
then
echo "[-] Error: logfile '$logfile' not found."
touch fail_logs.txt
continue
fi
logdiff="$(diff -u <(sort "$logfile") <(sort "stdout_$(basename "$logfile" .log).txt"))"
if [ "$logdiff" != "" ]
then
echo "[-] Error: logfile '$logfile' differs from stdout"
echo "$logdiff"
echo "[-] Error: logfile '$logfile' differs from stdout" >> fail_logs.txt
echo "$logdiff" >> fail_logs.txt
fi
done
for stderr in ./stderr_*.txt
do
if [ ! -f "$stderr" ]
then
continue
fi
if [ "$(cat "$stderr")" == "" ]
then
continue
fi
echo "[!] Warning: $stderr"
cat "$stderr"
done
if test -n "$(find . -maxdepth 1 -name 'fail_*' -print -quit)"
then
for fail in fail_*
do
cat "$fail"
done
print_results
echo "[-] Test failed. See errors above."
exit 1
else
echo "[*] all tests passed"
fi
print_results || exit 1

35
scripts/languages/analyze.py Executable file

@ -0,0 +1,35 @@
#!/usr/bin/env python3
import os
import sys
import twlang
os.chdir(os.path.dirname(__file__) + "/../..")
if len(sys.argv) > 1:
langs = sys.argv[1:]
else:
langs = twlang.languages()
local = twlang.localizes()
table = []
for lang in langs:
trans = twlang.translations(lang)
empty = 0
supported = 0
unused = 0
for tran, (_, expr, _) in trans.items():
if not expr:
empty += 1
else:
if tran in local:
supported += 1
else:
unused += 1
table.append([lang, len(trans), empty, len(local)-supported, unused])
table.sort(key=lambda l: l[3])
table = [["filename", "total", "empty", "missing", "unused"]] + table
s = [[str(e) for e in row] for row in table]
lens = [max(map(len, col)) for col in zip(*s)]
fmt = " ".join(f"{{:{x}}}" for x in lens)
t = [fmt.format(*row) for row in s]
print("\n".join(t))

68
scripts/languages/copy_fix.py Executable file

@ -0,0 +1,68 @@
#!/usr/bin/env python3
import os
import sys
import twlang
def copy_fix(infile, delete_unused, append_missing, delete_empty):
with open(infile, encoding="utf-8") as f:
content = f.readlines()
trans = twlang.translations(infile)
if delete_unused or append_missing:
local = twlang.localizes()
if append_missing:
supported = []
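# Passes over each existing translation: unused entries are dropped
# (--delete-unused); empty entries are dropped for good with --delete-empty,
# otherwise rewritten at the end as missing phrases when --append-missing is given.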
for tran, (start, expr, end) in trans.items():
if delete_unused and tran not in local:
content[start:end] = [None]*(end-start)
if append_missing and tran in local:
if expr or delete_empty:
supported.append(local.index(tran))
else:
content[start:end] = [None]*(end-start)
if delete_empty and not expr:
content[start:end] = [None]*(end-start)
content = [line for line in content if line is not None]
if append_missing:
missing = [index for index in range(len(local)) if index not in supported]
if missing:
if content[-1] != "\n":
content.append("\n")
for miss in missing:
if local[miss][1] != "":
content.append("["+local[miss][1]+"]\n")
content.append(local[miss][0]+"\n== \n\n")
content[-1] = content[-1][:-1]
return "".join(content)
def main(argv):
os.chdir(os.path.dirname(__file__) + "/../..")
if len(argv) < 3:
print("usage: python copy_fix.py <infile> <outfile> [--delete-unused] [--append-missing] [--delete-empty]")
sys.exit()
infile = argv[1]
outfile = argv[2]
args = argv[3:]
delete_unused = False
append_missing = False
delete_empty = False
for arg in args:
if arg == "--delete-unused":
delete_unused = True
elif arg == "--append-missing":
append_missing = True
elif arg == "--delete-empty":
delete_empty = True
else:
print("No such argument '"+arg+"'.")
sys.exit()
content = copy_fix(infile, delete_unused, append_missing, delete_empty)
with open(outfile, "w", encoding="utf-8") as f:
f.write(content)
print("Successfully created '" + outfile + "'.")
if __name__ == '__main__':
main(sys.argv)

16
scripts/languages/find_unchanged.py Executable file

@ -0,0 +1,16 @@
#!/usr/bin/env python3
import os
import sys
import twlang
os.chdir(os.path.dirname(__file__) + "/../..")
if len(sys.argv) < 2:
print("usage: python find_unchanged.py <file>")
sys.exit()
infile = sys.argv[1]
trans = twlang.translations(infile)
for tran, (_, expr, _) in trans.items():
if tran == expr:
print(tran)


@ -0,0 +1,45 @@
Teeworlds language scripts by timakro
These scripts can help you to improve the language files. This is a short tutorial
about how to use the scripts.
You can start by analyzing the current languages using analyze.py
$ ./analyze.py
analyze.py outputs several columns. The column 'total' is the number of
translations in the file. 'empty' is the number of untranslated phrases in the
file. The 'missing' and 'unused' columns are calculated by looking into the source. The script
searches through the source for all phrases. 'missing' is the number of phrases in
the source but not in the file. 'unused' is the number of phrases in the file but
not in the source.
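The output is a plain table, for example (with made-up numbers):
$ ./analyze.py
filename                   total empty missing unused
data/languages/spanish.txt 623   17    42      5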
To update a language you should first copy it using the copy_fix.py script.
$ ./copy_fix.py ../spanish.txt newspanish.txt --delete-unused --append-missing
Then you can start to modify newspanish.txt. Search in the file for the following
line and remove it:
##### generated by copy_fix.py, please translate this #####
Every phrase below this line should be empty. Now you can fill in the translations
you know. It's also okay to write just the same phrase as in English for words like
'Hookcollision' or 'Screenshot'. If you don't know how to translate something just
remove it.
That's all, you improved the translation. If you want you can add your name at the
start of the file. Now you can move the file back to its original location.
$ mv newspanish.txt ../spanish.txt
Often people know better phrases for translations where the English phrase was just
copied. To find these you can use find_unchanged.py
$ ./find_unchanged.py ../spanish.txt
To update all languages:
$ ./update_all.py
To get a statistic of how complete the translation is:
$ for i in data/languages/*.txt; do COVERED=$(cat $i|grep "^== "|grep -v "^== $"|wc -l); TOTAL=$(cat $i|grep "^== "|wc -l); [ "${i:t}" != "license.txt" ] && [ "${i:t}" != "index.txt" ] && echo "$(($COVERED*100/$TOTAL))% ${i:r:t}"; done | sort -n -r

106
scripts/languages/twlang.py Normal file

@ -0,0 +1,106 @@
import os
import re
from collections import OrderedDict
class LanguageDecodeError(Exception):
def __init__(self, message, filename, line):
error = f"File \"{filename}\", line {line+1}: {message}"
super().__init__(error)
# Taken from https://stackoverflow.com/questions/30011379/how-can-i-parse-a-c-format-string-in-python
cfmt = r'''\
( # start of capture group 1
% # literal "%"
(?: # first option
(?:[-+0 #]{0,5}) # optional flags
(?:\d+|\*)? # width
(?:\.(?:\d+|\*))? # precision
(?:h|l|ll|w|I|I32|I64)? # size
[cCdiouxXeEfgGaAnpsSZ] # type
) | # OR
%%) # literal "%%"
'''
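# Language files contain one key phrase per line, optionally preceded by a
# "[context]" line and followed by "== " lines holding the translation (or,
# for index.txt, several metadata elements). Empty lines and lines starting
# with "#" reset the current context.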
def decode(fileobj, elements_per_key):
data = {}
current_context = ""
current_key = None
index = -1
for index, line in enumerate(fileobj):
line = line.encode("utf-8").decode("utf-8-sig")
line = line[:-1]
if line and line[-1] == "\r":
line = line[:-1]
if not line or line[:1] == "#":
current_context = ""
continue
if line[0] == "[":
if line[-1] != "]":
raise LanguageDecodeError("Invalid context string", fileobj.name, index)
current_context = line[1:-1]
elif line[:3] == "== ":
if not current_key:
raise LanguageDecodeError("Element before key given", fileobj.name, index)
if len(data[current_key]) >= 1+elements_per_key:
raise LanguageDecodeError("Wrong number of elements per key", fileobj.name, index)
original = current_key[0]
translation = line[3:]
if translation and [m.group(1) for m in re.finditer(cfmt, original, flags=re.X)] != [m.group(1) for m in re.finditer(cfmt, translation, flags=re.X)]:
raise LanguageDecodeError("Non-matching formatting string", fileobj.name, index)
data[current_key].append(translation)
else:
if current_key:
if len(data[current_key]) != 1+elements_per_key:
raise LanguageDecodeError("Wrong number of elements per key", fileobj.name, index)
data[current_key].append(index)
if (line, current_context) in data:
raise LanguageDecodeError("Key defined multiple times: " + line, fileobj.name, index)
data[(line, current_context)] = [index - 1 if current_context else index]
current_key = (line, current_context)
if current_key:
if len(data[current_key]) != 1+elements_per_key:
raise LanguageDecodeError("Wrong number of elements per key", fileobj.name, index)
data[current_key].append(index+1)
new_data = {}
for key, value in data.items():
if key[0]:
new_data[key] = value
return new_data
def check_file(path):
with open(path, encoding="utf-8") as fileobj:
matches = re.findall(r"(Localize|Localizable)\s*\(\s*\"([^\"]+)\"(?:\s*,\s*\"([^\"]+)\")?\s*\)", fileobj.read())
return matches
def check_folder(path):
englishlist = OrderedDict()
for path2, dirs, files in os.walk(path):
dirs.sort()
for f in sorted(files):
if not any(f.endswith(x) for x in [".cpp", ".c", ".h"]):
continue
for sentence in check_file(os.path.join(path2, f)):
englishlist[sentence[1:]] = None
return englishlist
def languages():
with open("data/languages/index.txt", encoding="utf-8") as f:
index = decode(f, 3)
langs = {"data/languages/"+key[0]+".txt" : [key[0]]+elements for key, elements in index.items()}
return langs
def translations(filename):
with open(filename, encoding="utf-8") as f:
return decode(f, 1)
def localizes():
englishlist = list(check_folder("src"))
return englishlist

11
scripts/languages/update_all.py Executable file

@ -0,0 +1,11 @@
#!/usr/bin/env python3
import os
from copy_fix import copy_fix
import twlang
os.chdir(os.path.dirname(__file__) + "/../..")
for lang in twlang.languages():
content = copy_fix(lang, delete_unused=True, append_missing=True, delete_empty=False)
with open(lang, "w", encoding="utf-8") as f:
f.write(content)

136
scripts/move_sqlite.py Executable file

@ -0,0 +1,136 @@
#!/usr/bin/env python3
# This script is intended to be called automatically every day (e.g. via cron).
# It only outputs something if new ranks have to be inserted. The output can
# therefore be redirected to email as a notification that manual action is
# required to transfer the ranks to MySQL.
#
# Configure cron as the user running the DDNet-Server processes
#
# $ crontab -e
# 30 5 * * * /path/to/this/script/move_sqlite.py --from /path/to/ddnet-server.sqlite
#
# Afterwards configure a MTA (e.g. postfix) and the users email address.
import sqlite3
import argparse
from time import strftime
import os
from datetime import datetime, timedelta
TABLES = ['record_race', 'record_teamrace', 'record_saves']
def sqlite_table_exists(cursor, table):
cursor.execute(f"SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='{table}'")
return cursor.fetchone()[0] != 0
def sqlite_num_transfer(conn, table, date):
c = conn.cursor()
if not sqlite_table_exists(c, table):
return 0
query = f'SELECT COUNT(*) FROM {table}'
if date is not None:
query += f' WHERE Timestamp < DATETIME("{date}", "utc")'
c.execute(query)
num = c.fetchone()[0]
return num
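# transfer() copies the whole source database into the target via iterdump(),
# additionally folds backup rows older than `date` into the main tables, then
# empties the source (main tables completely, backup tables up to `date`) and
# finally drops all remaining backup rows from the target.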
def transfer(file_from, file_to, date, keep_timestamp_utc):
conn_to = sqlite3.connect(file_to, isolation_level='EXCLUSIVE')
cursor_to = conn_to.cursor()
conn_from = sqlite3.connect(file_from, isolation_level='EXCLUSIVE')
conn_from.text_factory = lambda b: b.decode(errors = 'ignore').rstrip()
for line in conn_from.iterdump():
cursor_to.execute(line)
for table in TABLES:
if sqlite_table_exists(cursor_to, f'{table}_backup'):
cursor_to.execute(f'INSERT INTO {table} SELECT * FROM {table}_backup WHERE Timestamp < DATETIME("{date}", "utc")')
cursor_to.close()
conn_to.commit()
cursor_from = conn_from.cursor()
for table in TABLES:
if sqlite_table_exists(cursor_from, table):
cursor_from.execute(f'DELETE FROM {table}')
backup_table = f'{table}_backup'
if sqlite_table_exists(cursor_from, backup_table):
cursor_from.execute(f'DELETE FROM {backup_table} WHERE Timestamp < DATETIME("{date}", "utc")')
cursor_from.close()
conn_from.commit()
conn_from.close()
cursor_to = conn_to.cursor()
# delete non-moved backup-rows:
for table in TABLES:
backup_table = f'{table}_backup'
if sqlite_table_exists(cursor_to, backup_table):
cursor_to.execute(f'DELETE FROM {backup_table}')
if not keep_timestamp_utc:
# change date from utc to wanted current timezone for mysql https://github.com/ddnet/ddnet/issues/6105
for table in TABLES:
cursor_to.execute(f'''
UPDATE {table}
SET Timestamp = DATETIME(original.Timestamp, "localtime")
FROM (
SELECT rowid, Timestamp FROM {table}
) as original
WHERE {table}.rowid = original.rowid''')
cursor_to.close()
conn_to.commit()
for line in conn_to.iterdump():
print(line)
conn_to.close()
def main():
default_output = 'ddnet-server-' + strftime('%Y-%m-%dT%H:%M:%S') + '.sqlite'
parser = argparse.ArgumentParser(
description='Move DDNet ranks, teamranks and saves from a possible active SQLite3 to a new one',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--from', '-f', dest='f',
default='ddnet-server.sqlite',
help='Input file where ranks are deleted from when moved successfully (default: ddnet-server.sqlite)')
parser.add_argument('--to', '-t',
default=default_output,
help='Output file where ranks are saved adds current date by default')
parser.add_argument('--backup-timeout',
default=60,
type=int,
help='Time in minutes until when a rank is moved from the _backup tables')
parser.add_argument('--keep-timestamp-utc',
default=False,
action="store_true",
help='Timestamps are converted to localtime by default. To keep them utc set this config option')
args = parser.parse_args()
if not os.path.exists(args.f):
print(f"Warning: '{args.f}' does not exist (yet). Is the path specified correctly?")
return
date = (datetime.now() - timedelta(minutes=args.backup_timeout)).strftime('%Y-%m-%d %H:%M:%S')
conn = sqlite3.connect(args.f)
num = {}
for table in TABLES:
num[table] = sqlite_num_transfer(conn, table, None)
num[table] += sqlite_num_transfer(conn, f'{table}_backup', date)
conn.close()
if sum(num.values()) == 0:
return
print(f'{sum(num.values())} new entries in backup database found ({num["record_race"]} ranks, {num["record_teamrace"]} teamranks, {num["record_saves"]} saves)')
print(f'Moving entries from {os.path.abspath(args.f)} to {os.path.abspath(args.to)}')
print("You can use the following commands to import the entries to MySQL (using https://github.com/techouse/sqlite3-to-mysql/):")
print()
print(f"sqlite3mysql --sqlite-file {os.path.abspath(args.to)} --ignore-duplicate-keys --mysql-insert-method IGNORE --sqlite-tables record_race record_teamrace record_saves --mysql-password 'PW2' --mysql-host 'host' --mysql-database teeworlds --mysql-user teeworlds")
print(f"When the ranks are transferred successfully to mysql, {os.path.abspath(args.to)} can be removed")
print()
print("Log of the transfer:")
print()
transfer(args.f, args.to, date, args.keep_timestamp_utc)
if __name__ == '__main__':
main()

50
scripts/parse_drmingw.sh Executable file

@ -0,0 +1,50 @@
#!/bin/bash
if [ -z ${1+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass executable file (full path)"
printf "\e[31m%s\e[30m\n" "Usage: $0 <executable> <crash_log>"
echo -en "\e[0m"
exit 1
fi
if [ -z ${2+x} ]; then
printf "\e[31m%s\e[30m\n" "Did not pass crash log file (full path)"
printf "\e[31m%s\e[30m\n" "Usage: $0 <executable> <crash_log>"
echo -en "\e[0m"
exit 1
fi
TMP_OFFSET=$(grep -E -o "\(with offset [0-9A-Fa-f]+\)" "$2" | grep -E -o "[0-9A-Fa-f]*")
if [ -z "$TMP_OFFSET" ]; then
TMP_OFFSET=$(grep -E -o "^[0-9A-Fa-f]+-[0-9A-Fa-f]+ \S+\.exe" "$2" | grep -E -o "^[0-9A-Fa-f]+")
if [ -z "$TMP_OFFSET" ]; then
TMP_OFFSET="0"
if ! grep -q -E -o "\s\S+\.exe!" "$2"; then
printf "\e[31m%s\e[30m\n" "Module offset not found; addresses will be absolute"
echo -en "\e[0m"
fi
fi
fi
if [ "$TMP_OFFSET" != "0" ]; then
echo "Module offset: 0x$TMP_OFFSET"
fi
ADDR_BASE=$(winedump -f "$1" | grep -E -o "image base[ ]*0x[0-9A-Fa-f]*" | grep -E -o "0x[0-9A-Fa-f]+" | tail -1)
echo "Image base: $ADDR_BASE"
ADDR_PC_REGEX='[0-9A-Fa-f]+ [0-9A-Fa-f]+ [0-9A-Fa-f]+ [0-9A-Fa-f]+'
while read -r line
do
if [[ $line =~ $ADDR_PC_REGEX ]]
then
RELATIVE_ADDR=$(echo "$line" | grep -E -o -m 1 "\s\S+\.exe!.*0x([0-9A-Fa-f]+)" | grep -E -o "0x[0-9A-Fa-f]+" | head -1)
if [ -z "$RELATIVE_ADDR" ]; then
TMP_ADDR=$(echo "$line" | grep -E -o -m 1 "[0-9A-Fa-f]+ " | head -1)
REAL_ADDR=$(printf '0x%X\n' "$(((0x$TMP_ADDR-0x$TMP_OFFSET)+ADDR_BASE))")
else
REAL_ADDR=$(printf '0x%X\n' "$((RELATIVE_ADDR+ADDR_BASE))")
fi
addr2line -e "$1" -a -p -f -C -i "$REAL_ADDR" | sed 's/ [^ ]*\/src\// src\//g'
fi
done < "$2"

33
scripts/send_named_pipe.ps1 Normal file

@ -0,0 +1,33 @@
# This PowerShell script connects to a Named Pipe server,
# sends one or more messages and then disconnects again.
# The first argument is the name of the pipe.
# The following arguments are the messages to send.
if ($args.length -lt 2) {
Write-Output "Usage: ./send_named_pipe.ps1 <pipename> <message> [message] ... [message]"
return
}
$Wrapper = [pscustomobject]@{
Pipe = new-object System.IO.Pipes.NamedPipeClientStream(
".",
$args[0],
[System.IO.Pipes.PipeDirection]::InOut,
[System.IO.Pipes.PipeOptions]::None,
[System.Security.Principal.TokenImpersonationLevel]::Impersonation
)
Reader = $null
Writer = $null
}
$Wrapper.Pipe.Connect(5000)
if (!$?) {
return
}
$Wrapper.Reader = New-Object System.IO.StreamReader($Wrapper.Pipe)
$Wrapper.Writer = New-Object System.IO.StreamWriter($Wrapper.Pipe)
$Wrapper.Writer.AutoFlush = $true
for ($i = 1; $i -lt $args.length; $i++) {
$Wrapper.Writer.WriteLine($args[$i])
}
# We need to wait because the lines will not be written if we close the pipe immediately
Start-Sleep -Seconds 1.5
$Wrapper.Pipe.Close()

270
scripts/tw_api.py Normal file

@ -0,0 +1,270 @@
# coding: utf-8
# pylint: skip-file
from socket import socket, AF_INET, SOCK_DGRAM
import sys
import threading
import time
NUM_MASTERSERVERS = 4
MASTERSERVER_PORT = 8300
TIMEOUT = 2
SERVERTYPE_NORMAL = 0
SERVERTYPE_LEGACY = 1
PACKET_GETLIST = "\x20\x00\x00\x00\x00\x00\xff\xff\xff\xffreqt"
PACKET_GETLIST2 = "\x20\x00\x00\x00\x00\x00\xff\xff\xff\xffreq2"
PACKET_GETINFO = "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xffgief"
PACKET_GETINFO2 = "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xffgie2" + "\x00"
PACKET_GETINFO3 = "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xffgie3" + "\x00"
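# Note: this script is written for Python 2: the packets above are byte
# strings and individual bytes are read back with ord() below.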
class Server_Info(threading.Thread):
def __init__(self, address, typ):
self.address = address
self.type = typ
self.finished = False
threading.Thread.__init__(self, target = self.run)
def run(self):
self.info = None
if self.type == SERVERTYPE_NORMAL:
self.info = get_server_info3(self.address)
elif self.type == SERVERTYPE_LEGACY:
self.info = get_server_info(self.address)
if self.info:
self.info = get_server_info2(self.address)
self.finished = True
def get_server_info(address):
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(TIMEOUT)
sock.sendto(PACKET_GETINFO, address)
data, _addr = sock.recvfrom(1024)
sock.close()
data = data[14:] # skip header
slots = data.split("\x00")
server_info = {}
server_info["version"] = slots[0]
server_info["name"] = slots[1]
server_info["map"] = slots[2]
server_info["gametype"] = slots[3]
server_info["flags"] = int(slots[4])
server_info["progression"] = int(slots[5])
server_info["num_players"] = int(slots[6])
server_info["max_players"] = int(slots[7])
server_info["players"] = []
for i in range(0, server_info["num_players"]):
player = {}
player["name"] = slots[8+i*2]
player["score"] = int(slots[8+i*2+1])
server_info["players"].append(player)
return server_info
except:
sock.close()
return None
def get_server_info2(address):
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(TIMEOUT)
sock.sendto(PACKET_GETINFO2, address)
data, _addr = sock.recvfrom(1024)
sock.close()
data = data[14:] # skip header
slots = data.split("\x00")
server_info = {}
server_info["token"] = slots[0]
server_info["version"] = slots[1]
server_info["name"] = slots[2]
server_info["map"] = slots[3]
server_info["gametype"] = slots[4]
server_info["flags"] = int(slots[5])
server_info["progression"] = int(slots[6])
server_info["num_players"] = int(slots[7])
server_info["max_players"] = int(slots[8])
server_info["players"] = []
for i in range(0, server_info["num_players"]):
player = {}
player["name"] = slots[9+i*2]
player["score"] = int(slots[9+i*2+1])
server_info["players"].append(player)
return server_info
except:
sock.close()
return None
def get_server_info3(address):
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(TIMEOUT)
sock.sendto(PACKET_GETINFO3, address)
data, _addr = sock.recvfrom(1400)
sock.close()
data = data[14:] # skip header
slots = data.split("\x00")
server_info = {}
server_info["token"] = slots[0]
server_info["version"] = slots[1]
server_info["name"] = slots[2]
server_info["map"] = slots[3]
server_info["gametype"] = slots[4]
server_info["flags"] = int(slots[5])
server_info["num_players"] = int(slots[6])
server_info["max_players"] = int(slots[7])
server_info["num_clients"] = int(slots[8])
server_info["max_clients"] = int(slots[9])
server_info["players"] = []
for i in range(0, server_info["num_clients"]):
player = {}
player["name"] = slots[10+i*5]
player["clan"] = slots[10+i*5+1]
player["country"] = int(slots[10+i*5+2])
player["score"] = int(slots[10+i*5+3])
if int(slots[10+i*5+4]):
player["player"] = True
else:
player["player"] = False
server_info["players"].append(player)
return server_info
except:
sock.close()
return None
class Master_Server_Info(threading.Thread):
def __init__(self, address):
self.address = address
self.finished = False
threading.Thread.__init__(self, target = self.run)
def run(self):
self.servers = get_list(self.address) + get_list2(self.address)
self.finished = True
def get_list(address):
servers = []
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(TIMEOUT)
sock.sendto(PACKET_GETLIST, address)
while 1:
data, _addr = sock.recvfrom(1024)
data = data[14:]
num_servers = len(data) // 6
for n in range(0, num_servers):
ip = ".".join(map(str, map(ord, data[n*6:n*6+4])))
port = ord(data[n*6+5]) * 256 + ord(data[n*6+4])
servers += [[(ip, port), SERVERTYPE_LEGACY]]
except:
sock.close()
return servers
def get_list2(address):
servers = []
try:
sock = socket(AF_INET, SOCK_DGRAM)
sock.settimeout(TIMEOUT)
sock.sendto(PACKET_GETLIST2, address)
while 1:
data, _addr = sock.recvfrom(1400)
data = data[14:]
num_servers = len(data) // 18
for n in range(0, num_servers):
if data[n*18:n*18+12] == "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff":
ip = ".".join(map(str, map(ord, data[n*18+12:n*18+16])))
else:
ip = ":".join(map(str, map(ord, data[n*18:n*18+16])))
port = (ord(data[n*18+16])<<8) + ord(data[n*18+17])
servers += [[(ip, port), SERVERTYPE_NORMAL]]
except:
sock.close()
return servers
master_servers = []
for i in range(1, NUM_MASTERSERVERS+1):
m = Master_Server_Info((f"master{int(i)}.teeworlds.com", MASTERSERVER_PORT))
master_servers.append(m)
m.start()
time.sleep(0.001) # avoid issues
servers = []
while len(master_servers) != 0:
if master_servers[0].finished:
if master_servers[0].servers:
servers += master_servers[0].servers
del master_servers[0]
time.sleep(0.001) # be nice
servers_info = []
print(str(len(servers)) + " servers")
for server in servers:
s = Server_Info(server[0], server[1])
servers_info.append(s)
s.start()
time.sleep(0.001) # avoid issues
num_players = 0
num_clients = 0
while len(servers_info) != 0:
if servers_info[0].finished:
if servers_info[0].info:
num_players += servers_info[0].info["num_players"]
if servers_info[0].type == SERVERTYPE_NORMAL:
num_clients += servers_info[0].info["num_clients"]
else:
num_clients += servers_info[0].info["num_players"]
del servers_info[0]
time.sleep(0.001) # be nice
print(str(num_players) + " players and " + str(num_clients-num_players) + " spectators")

35
scripts/unicode.py Normal file

@ -0,0 +1,35 @@
import csv
def confusables():
with open('confusables.txt', encoding='utf-8-sig') as f:
# Filter comments
f = map(lambda line: line.split('#')[0], f)
return list(csv.DictReader(f, fieldnames=['Value', 'Target', 'Category'], delimiter=';'))
UNICODEDATA_FIELDS = (
"Value",
"Name",
"General_Category",
"Canonical_Combining_Class",
"Bidi_Class",
"Decomposition_Type",
"Decomposition_Mapping",
"Numeric_Type",
"Numeric_Mapping",
"Bidi_Mirrored",
"Unicode_1_Name",
"ISO_Comment",
"Simple_Uppercase_Mapping",
"Simple_Lowercase_Mapping",
"Simple_Titlecase_Mapping",
)
def data():
with open('UnicodeData.txt', encoding='utf-8') as f:
return list(csv.DictReader(f, fieldnames=UNICODEDATA_FIELDS, delimiter=';'))
def unhex(s):
return int(s, 16)
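# Decomposition mappings carrying a "<type>" tag (e.g. "<compat>") are not
# canonical mappings and are reported as missing (None).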
def unhex_sequence(s):
return [unhex(x) for x in s.split()] if '<' not in s else None

9
scripts/wordlist.py Normal file

@ -0,0 +1,9 @@
print("#ifndef GENERATED_WORDLIST_H")
print("#define GENERATED_WORDLIST_H")
print("const char g_aFallbackWordlist[][32] = {")
with open("data/wordlist.txt", encoding="utf-8") as f:
for line in f:
word = line.strip().split("\t")[1]
print(f"\t\"{word}\", ")
print("};")
print("#endif // GENERATED_WORDLIST_H")