summaryrefslogtreecommitdiff
path: root/archived/projt-launcher/scripts
diff options
context:
space:
mode:
authorMehmet Samet Duman <yongdohyun@projecttick.org>2026-04-02 18:51:45 +0300
committerMehmet Samet Duman <yongdohyun@projecttick.org>2026-04-02 18:51:45 +0300
commitd3261e64152397db2dca4d691a990c6bc2a6f4dd (patch)
treefac2f7be638651181a72453d714f0f96675c2b8b /archived/projt-launcher/scripts
parent31b9a8949ed0a288143e23bf739f2eb64fdc63be (diff)
downloadProject-Tick-d3261e64152397db2dca4d691a990c6bc2a6f4dd.tar.gz
Project-Tick-d3261e64152397db2dca4d691a990c6bc2a6f4dd.zip
NOISSUE add archived projects
Signed-off-by: Mehmet Samet Duman <yongdohyun@projecttick.org>
Diffstat (limited to 'archived/projt-launcher/scripts')
-rw-r--r--archived/projt-launcher/scripts/build-cef-from-source.sh139
-rwxr-xr-xarchived/projt-launcher/scripts/build-tomlplusplus.mjs110
-rwxr-xr-xarchived/projt-launcher/scripts/compress_images.sh9
-rwxr-xr-xarchived/projt-launcher/scripts/gen-cmark-config.sh21
-rwxr-xr-xarchived/projt-launcher/scripts/gen-cmark-export.sh30
-rwxr-xr-xarchived/projt-launcher/scripts/gen-cmark-version.sh12
-rwxr-xr-xarchived/projt-launcher/scripts/gen-nbt-export.sh46
-rwxr-xr-xarchived/projt-launcher/scripts/gen-qrencode-config.sh28
-rwxr-xr-xarchived/projt-launcher/scripts/patch_maintainer_emails.py486
-rwxr-xr-xarchived/projt-launcher/scripts/syncconfig.sh122
-rwxr-xr-xarchived/projt-launcher/scripts/update-qt-version.sh61
-rwxr-xr-xarchived/projt-launcher/scripts/update-subtrees.sh55
12 files changed, 1119 insertions, 0 deletions
diff --git a/archived/projt-launcher/scripts/build-cef-from-source.sh b/archived/projt-launcher/scripts/build-cef-from-source.sh
new file mode 100644
index 0000000000..e3c5b9da64
--- /dev/null
+++ b/archived/projt-launcher/scripts/build-cef-from-source.sh
@@ -0,0 +1,139 @@
#!/usr/bin/env bash

set -euo pipefail

# Interpret a truthy flag value from the environment.
# Returns 0 (true) for 1/on/true/yes in any letter case, 1 otherwise.
# A missing or empty argument is treated as false.
normalize_bool() {
  local value="${1:-}"
  case "${value,,}" in
    1|on|true|yes) return 0 ;;
    *) return 1 ;;
  esac
}
+
+CEF_VERSION="${LAUNCHER_CEF_VERSION:-${CEF_VERSION:-}}"
+CEF_DISTRIBUTION="${LAUNCHER_CEF_DISTRIBUTION:-${CEF_DISTRIBUTION:-minimal}}"
+CEF_BRANCH="${LAUNCHER_CEF_SOURCE_BRANCH:-${CEF_SOURCE_BRANCH:-}}"
+CEF_TARGET_ARCH="${LAUNCHER_CEF_TARGET_ARCH:-${CEF_TARGET_ARCH:-}}"
+CEF_BINARY_PLATFORM="${LAUNCHER_CEF_BINARY_PLATFORM:-${CEF_BINARY_PLATFORM:-}}"
+CEF_DOWNLOAD_ROOT="${LAUNCHER_CEF_SOURCE_DOWNLOAD_DIR:-${CEF_SOURCE_DOWNLOAD_DIR:-${PWD}/.cache/cef-source}}"
+CEF_DEPOT_TOOLS_DIR="${LAUNCHER_CEF_DEPOT_TOOLS_DIR:-${CEF_DEPOT_TOOLS_DIR:-${CEF_DOWNLOAD_ROOT}/depot_tools}}"
+CEF_OUT_FILE="${LAUNCHER_CEF_SOURCE_OUT_FILE:-}"
+CEF_INSTALL_BUILD_DEPS="${LAUNCHER_CEF_INSTALL_BUILD_DEPS:-0}"
+CEF_FORCE_CLEAN="${LAUNCHER_CEF_SOURCE_FORCE_CLEAN:-0}"
+CEF_GN_DEFINES="${LAUNCHER_CEF_GN_DEFINES:-${CEF_GN_DEFINES:-is_official_build=true use_sysroot=true symbol_level=0}}"
+
# Default the target architecture from the build host when not pinned.
if [[ -z "${CEF_TARGET_ARCH}" ]]; then
  case "$(uname -m)" in
    x86_64|amd64) CEF_TARGET_ARCH="x64" ;;
    aarch64|arm64) CEF_TARGET_ARCH="arm64" ;;
  esac
fi

# Map the architecture onto CEF's binary-distribution platform label,
# normalizing aarch64 -> arm64 along the way.
if [[ -z "${CEF_BINARY_PLATFORM}" ]]; then
  case "${CEF_TARGET_ARCH}" in
    x64) CEF_BINARY_PLATFORM="linux64" ;;
    arm64|aarch64) CEF_TARGET_ARCH="arm64"; CEF_BINARY_PLATFORM="linuxarm64" ;;
    *)
      echo "Unsupported Linux CEF target architecture: ${CEF_TARGET_ARCH:-unknown}" >&2
      exit 1
      ;;
  esac
fi

# automate-git.py flag selecting the build architecture.
case "${CEF_TARGET_ARCH}" in
  x64) CEF_BUILD_ARG="--x64-build" ;;
  arm64) CEF_BUILD_ARG="--arm64-build" ;;
  *)
    echo "Unsupported Linux CEF target architecture: ${CEF_TARGET_ARCH}" >&2
    exit 1
    ;;
esac

# Derive the CEF branch from a pinned version string such as
# "...chromium-120.0.6099.129" (the branch is the third numeric field).
if [[ -z "${CEF_BRANCH}" && -n "${CEF_VERSION}" && "${CEF_VERSION}" =~ chromium-[0-9]+\.0\.([0-9]+)\.[0-9]+ ]]; then
  CEF_BRANCH="${BASH_REMATCH[1]}"
fi

if [[ -z "${CEF_BRANCH}" ]]; then
  echo "LAUNCHER_CEF_SOURCE_BRANCH or CEF_SOURCE_BRANCH must be set for CEF source builds." >&2
  exit 1
fi
+
# Working directories under the download root.
DOWNLOAD_DIR="${CEF_DOWNLOAD_ROOT}/chromium_git"    # chromium + cef checkout
TOOLS_DIR="${CEF_DOWNLOAD_ROOT}/cef-tools"          # CEF repo (ships automate-git.py)
AUTOMATE_SCRIPT="${TOOLS_DIR}/tools/automate/automate-git.py"
DISTRIB_PARENT="${DOWNLOAD_DIR}/chromium/src/cef/binary_distrib"

mkdir -p "${CEF_DOWNLOAD_ROOT}"

# Shallow-clone (or fast-forward) the CEF repo that provides the automate
# tooling; blob filtering keeps the download small.
if [[ ! -d "${TOOLS_DIR}/.git" ]]; then
  git clone --filter=blob:none --depth 1 https://github.com/chromiumembedded/cef.git "${TOOLS_DIR}"
else
  git -C "${TOOLS_DIR}" fetch --depth 1 origin HEAD
  git -C "${TOOLS_DIR}" reset --hard FETCH_HEAD
fi

# Optionally discard previously built distributions to force a rebuild.
if normalize_bool "${CEF_FORCE_CLEAN}"; then
  rm -rf "${DISTRIB_PARENT}"
fi
+
# Locate an existing CEF binary-distribution directory, if any.
# Reads: DISTRIB_PARENT, CEF_BINARY_PLATFORM, CEF_VERSION (optional pin).
# Prints the best (highest version-sorted) match, or nothing when absent.
find_existing_distrib() {
  local pattern="cef_binary_*_${CEF_BINARY_PLATFORM}*"
  if [[ -n "${CEF_VERSION}" ]]; then
    pattern="cef_binary_${CEF_VERSION}_${CEF_BINARY_PLATFORM}*"
  fi
  # Before the first build the distrib dir does not exist yet; without
  # this guard, find prints an error and returns non-zero.
  [[ -d "${DISTRIB_PARENT}" ]] || return 0
  # sort -V so e.g. 10.x sorts after 9.x (plain sort would misorder).
  find "${DISTRIB_PARENT}" -maxdepth 1 -mindepth 1 -type d -name "${pattern}" | sort -V | tail -n 1
}
+
# Reuse a previously built distribution when it looks complete (CMake
# module + release library present); otherwise build from source.
CEF_DISTRIB_ROOT="$(find_existing_distrib || true)"
if [[ -n "${CEF_DISTRIB_ROOT}" && -f "${CEF_DISTRIB_ROOT}/cmake/FindCEF.cmake" && -f "${CEF_DISTRIB_ROOT}/Release/libcef.so" ]]; then
  echo "Reusing existing CEF binary distribution at ${CEF_DISTRIB_ROOT}"
else
  # automate-git.py reads these from the environment.
  export GN_DEFINES="${CEF_GN_DEFINES}"
  export CEF_USE_GN=1

  base_args=(
    --download-dir="${DOWNLOAD_DIR}"
    --depot-tools-dir="${CEF_DEPOT_TOOLS_DIR}"
    --branch="${CEF_BRANCH}"
    "${CEF_BUILD_ARG}"
  )

  # Optionally install Chromium's build dependencies first. The deps
  # script lives inside the source checkout, so run automate-git once
  # with --no-build to fetch sources before invoking it.
  if normalize_bool "${CEF_INSTALL_BUILD_DEPS}"; then
    install_build_deps_args=(
      --no-prompt
      --no-chromeos-fonts
    )
    if [[ "${CEF_TARGET_ARCH}" == "x64" ]]; then
      install_build_deps_args+=(--no-arm)
    fi
    python3 "${AUTOMATE_SCRIPT}" "${base_args[@]}" --no-build
    "${DOWNLOAD_DIR}/chromium/src/build/install-build-deps.sh" \
      "${install_build_deps_args[@]}"
  fi

  # Build only the minimal release distribution.
  build_args=(
    --minimal-distrib
    --minimal-distrib-only
    --no-debug-build
    --force-distrib
  )
  if normalize_bool "${CEF_FORCE_CLEAN}"; then
    build_args+=(
      --force-clean
      --force-update
      --force-build
    )
  fi

  python3 "${AUTOMATE_SCRIPT}" "${base_args[@]}" "${build_args[@]}"
  CEF_DISTRIB_ROOT="$(find_existing_distrib || true)"
fi

# Final sanity check: the build must have produced a usable distribution.
if [[ -z "${CEF_DISTRIB_ROOT}" || ! -f "${CEF_DISTRIB_ROOT}/cmake/FindCEF.cmake" || ! -f "${CEF_DISTRIB_ROOT}/Release/libcef.so" ]]; then
  echo "CEF source build did not produce a usable Linux binary distribution." >&2
  exit 1
fi

echo "Using CEF binary distribution from source build: ${CEF_DISTRIB_ROOT}"
# Publish the resulting path for the caller (e.g. the build system).
if [[ -n "${CEF_OUT_FILE}" ]]; then
  printf '%s\n' "${CEF_DISTRIB_ROOT}" > "${CEF_OUT_FILE}"
fi
diff --git a/archived/projt-launcher/scripts/build-tomlplusplus.mjs b/archived/projt-launcher/scripts/build-tomlplusplus.mjs
new file mode 100755
index 0000000000..e294fe4776
--- /dev/null
+++ b/archived/projt-launcher/scripts/build-tomlplusplus.mjs
@@ -0,0 +1,110 @@
#!/usr/bin/env node
/**
 * Script to build toml++ documentation using Poxy
 * This generates HTML documentation from the Doxygen-formatted markdown files
 */

import { execSync } from 'child_process';
import { existsSync, mkdirSync } from 'fs'; // NOTE(review): mkdirSync appears unused here - confirm before removing
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';

// ESM has no __filename/__dirname; reconstruct them from import.meta.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// This script lives in scripts/, so the project root is one level up.
const projectRoot = join(__dirname, '..');

// ANSI escape sequences for colored console output.
const colors = {
  reset: '\x1b[0m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  red: '\x1b[31m',
  blue: '\x1b[34m',
};

/**
 * Print a message wrapped in an ANSI color code.
 * @param {string} message - text to print
 * @param {string} [color] - one of the `colors` values
 */
function log(message, color = colors.reset) {
  console.log(`${color}${message}${colors.reset}`);
}
+
/**
 * Check whether an executable can be resolved on the current PATH.
 *
 * Uses POSIX `command -v` (portable, unlike `which`) and single-quote
 * escapes the name so spaces or shell metacharacters in the argument
 * cannot break or inject into the shell command.
 *
 * @param {string} command - executable name or absolute path
 * @returns {boolean} true when the command is resolvable
 */
function commandExists(command) {
  // 'abc'\''def' form: close quote, escaped literal quote, reopen.
  const quoted = `'${String(command).replace(/'/g, `'\\''`)}'`;
  try {
    execSync(`command -v ${quoted}`, { stdio: 'ignore' });
    return true;
  } catch {
    return false;
  }
}
+
/**
 * Build the toml++ documentation with Poxy.
 *
 * Verifies doxygen and poxy are available, then runs poxy inside
 * website/tomlplusplus (poxy reads its poxy.toml there). Exits the
 * process with code 1 on a missing tool or build failure.
 */
async function buildTomlPlusPlusDocs() {
  log('\nšŸ”Ø Building toml++ documentation...', colors.blue);

  // Check if dependencies exist
  if (!commandExists('doxygen')) {
    log('āŒ Doxygen is not installed!', colors.red);
    log('Run: npm run setup:doxygen', colors.yellow);
    process.exit(1);
  }

  // Check for poxy in common locations: PATH first, then the
  // pip --user install directory.
  const poxyPaths = [
    'poxy',
    `${process.env.HOME}/.local/bin/poxy`,
  ];

  let poxyCommand = null;
  for (const path of poxyPaths) {
    if (commandExists(path)) {
      poxyCommand = path;
      break;
    }
  }

  if (!poxyCommand) {
    log('āŒ Poxy is not installed or not in PATH!', colors.red);
    log('Run: npm run setup:doxygen', colors.yellow);
    log('Or add to PATH: export PATH="$HOME/.local/bin:$PATH"', colors.yellow);
    process.exit(1);
  }

  const tomlplusplusDir = join(projectRoot, 'website', 'tomlplusplus');

  if (!existsSync(tomlplusplusDir)) {
    log(`āŒ toml++ source directory not found: ${tomlplusplusDir}`, colors.red);
    process.exit(1);
  }

  try {
    log('šŸ“ Running Poxy to generate documentation...', colors.yellow);

    // Change to tomlplusplus directory and run poxy
    process.chdir(tomlplusplusDir);

    // Run poxy - it will use the poxy.toml config file.
    // Fix PATH to use Homebrew Python instead of Nix Python (if present).
    const fixedPath = `/opt/homebrew/bin:${process.env.HOME}/.local/bin:${process.env.PATH}`;
    execSync(poxyCommand, {
      stdio: 'inherit',
      env: { ...process.env, PATH: fixedPath }
    });

    const outputDir = join(tomlplusplusDir, 'html');
    log('āœ… toml++ documentation built successfully!', colors.green);
    log(`šŸ“ Output: ${outputDir}`, colors.blue);
    log('šŸ“ Will be copied to: _site/tomlplusplus/', colors.blue);

  } catch (error) {
    log(`āŒ Failed to build documentation: ${error.message}`, colors.red);
    log('\nšŸ’” Tips:', colors.yellow);
    log(' - Make sure poxy.toml exists in website/tomlplusplus/', colors.yellow);
    log(' - Check that all source files are present', colors.yellow);
    log(' - Try running manually: cd website/tomlplusplus && poxy', colors.yellow);
    process.exit(1);
  } finally {
    // Return to project root even when the build failed.
    process.chdir(projectRoot);
  }
}
+
// Entry point: run the build and convert any unexpected rejection into
// a non-zero exit code so CI fails visibly.
buildTomlPlusPlusDocs().catch(error => {
  log(`\nāŒ Unexpected error: ${error.message}`, colors.red);
  process.exit(1);
});
diff --git a/archived/projt-launcher/scripts/compress_images.sh b/archived/projt-launcher/scripts/compress_images.sh
new file mode 100755
index 0000000000..1eef9f1c46
--- /dev/null
+++ b/archived/projt-launcher/scripts/compress_images.sh
@@ -0,0 +1,9 @@
#!/bin/sh
# Losslessly recompress every tracked PNG with oxipng.
# Must be invoked from the repository root (the directory that contains
# the scripts/ subdirectory).

## If current working directory is ./scripts (or anywhere without a
## scripts/ subdirectory), ask the user to invoke from the project root.
if [ ! -d "scripts" ]; then
  echo "Please run this script from the root directory of the project"
  exit 1
fi

# "-exec ... {} +" batches many files per oxipng invocation instead of
# forking one process per file (the "\;" form) - much faster on big trees.
find . -type f -name '*.png' -not -path '*/libraries/*' -exec oxipng --opt max --strip all --alpha --interlace 0 {} +
diff --git a/archived/projt-launcher/scripts/gen-cmark-config.sh b/archived/projt-launcher/scripts/gen-cmark-config.sh
new file mode 100755
index 0000000000..21279c1032
--- /dev/null
+++ b/archived/projt-launcher/scripts/gen-cmark-config.sh
@@ -0,0 +1,21 @@
#!/bin/sh
# Generate cmark_config.h
#
# Usage: gen-cmark-config.sh <output-file>
# Emits the fixed feature-detection header normally produced by cmark's
# CMake configure step, for builds that skip that step.

# Fail with a usage message instead of a cryptic cat error when the
# output path argument is missing.
: "${1:?usage: $0 <output-file>}"

cat > "$1" << 'EOF'
#ifndef CMARK_CONFIG_H
#define CMARK_CONFIG_H

#ifdef __cplusplus
extern "C" {
#endif

#define HAVE_STDBOOL_H 1
#define HAVE___BUILTIN_EXPECT 1
#define HAVE___ATTRIBUTE__ 1

#ifdef __cplusplus
}
#endif

#endif /* CMARK_CONFIG_H */
EOF
diff --git a/archived/projt-launcher/scripts/gen-cmark-export.sh b/archived/projt-launcher/scripts/gen-cmark-export.sh
new file mode 100755
index 0000000000..2aae7b607d
--- /dev/null
+++ b/archived/projt-launcher/scripts/gen-cmark-export.sh
@@ -0,0 +1,30 @@
#!/bin/sh
# Generate cmark_export.h
#
# Usage: gen-cmark-export.sh <output-file>
# Mirrors the export header CMake's GenerateExportHeader would create.

# Fail with a usage message instead of a cryptic cat error when the
# output path argument is missing.
: "${1:?usage: $0 <output-file>}"

cat > "$1" << 'EOF'
#ifndef CMARK_EXPORT_H
#define CMARK_EXPORT_H

#ifdef CMARK_STATIC_DEFINE
#  define CMARK_EXPORT
#  define CMARK_NO_EXPORT
#else
#  if defined(_WIN32) || defined(__CYGWIN__)
#    ifdef cmark_EXPORTS
#      define CMARK_EXPORT __declspec(dllexport)
#    else
#      define CMARK_EXPORT __declspec(dllimport)
#    endif
#    define CMARK_NO_EXPORT
#  else
#    ifndef CMARK_EXPORT
#      define CMARK_EXPORT __attribute__((visibility("default")))
#    endif
#    ifndef CMARK_NO_EXPORT
#      define CMARK_NO_EXPORT __attribute__((visibility("hidden")))
#    endif
#  endif
#endif

#endif /* CMARK_EXPORT_H */
EOF
diff --git a/archived/projt-launcher/scripts/gen-cmark-version.sh b/archived/projt-launcher/scripts/gen-cmark-version.sh
new file mode 100755
index 0000000000..158c203267
--- /dev/null
+++ b/archived/projt-launcher/scripts/gen-cmark-version.sh
@@ -0,0 +1,12 @@
#!/bin/sh
# Generate cmark_version.h
#
# Usage: gen-cmark-version.sh <output-file>
# Hard-codes the bundled cmark version (0.31.1); keep the packed
# CMARK_VERSION integer in sync with CMARK_VERSION_STRING when bumping.

# Fail with a usage message instead of a cryptic cat error when the
# output path argument is missing.
: "${1:?usage: $0 <output-file>}"

cat > "$1" << 'EOF'
#ifndef CMARK_VERSION_H
#define CMARK_VERSION_H

#define CMARK_VERSION ((0 << 24) | (31 << 16) | (1 << 8) | 0)
#define CMARK_VERSION_STRING "0.31.1"

#endif /* CMARK_VERSION_H */
EOF
diff --git a/archived/projt-launcher/scripts/gen-nbt-export.sh b/archived/projt-launcher/scripts/gen-nbt-export.sh
new file mode 100755
index 0000000000..4926290429
--- /dev/null
+++ b/archived/projt-launcher/scripts/gen-nbt-export.sh
@@ -0,0 +1,46 @@
#!/bin/sh
# Generate nbt_export.h for libnbtplusplus
#
# Usage: gen-nbt-export.sh <output-file>
# Mirrors the header CMake's GenerateExportHeader would create.
# NOTE(review): both arms of the nbt___EXPORTS ifdef below expand to the
# same value; kept verbatim to match the CMake-generated original.

# Fail with a usage message instead of a cryptic cat error when the
# output path argument is missing.
: "${1:?usage: $0 <output-file>}"

cat > "$1" << 'EOF'
#ifndef NBT_EXPORT_H
#define NBT_EXPORT_H

#ifdef NBT_STATIC_DEFINE
#  define NBT_EXPORT
#  define NBT_NO_EXPORT
#else
#  if defined(_WIN32) || defined(__CYGWIN__)
#    define NBT_EXPORT
#    define NBT_NO_EXPORT
#  else
#    ifndef NBT_EXPORT
#      ifdef nbt___EXPORTS
#        define NBT_EXPORT __attribute__((visibility("default")))
#      else
#        define NBT_EXPORT __attribute__((visibility("default")))
#      endif
#    endif
#    ifndef NBT_NO_EXPORT
#      define NBT_NO_EXPORT __attribute__((visibility("hidden")))
#    endif
#  endif
#endif

#ifndef NBT_DEPRECATED
#  if defined(_MSC_VER)
#    define NBT_DEPRECATED __declspec(deprecated)
#  else
#    define NBT_DEPRECATED __attribute__((__deprecated__))
#  endif
#endif

#ifndef NBT_DEPRECATED_EXPORT
#  define NBT_DEPRECATED_EXPORT NBT_EXPORT NBT_DEPRECATED
#endif

#ifndef NBT_DEPRECATED_NO_EXPORT
#  define NBT_DEPRECATED_NO_EXPORT NBT_NO_EXPORT NBT_DEPRECATED
#endif

#endif /* NBT_EXPORT_H */
EOF
diff --git a/archived/projt-launcher/scripts/gen-qrencode-config.sh b/archived/projt-launcher/scripts/gen-qrencode-config.sh
new file mode 100755
index 0000000000..2168233acd
--- /dev/null
+++ b/archived/projt-launcher/scripts/gen-qrencode-config.sh
@@ -0,0 +1,28 @@
#!/bin/sh
# Generate qrencode_config.h for libqrencode
#
# Usage: gen-qrencode-config.sh <output-file>
# Emits the config header qrencode's autotools configure would create,
# pinned to the bundled 4.1.1 sources.

# Fail with a usage message instead of a cryptic cat error when the
# output path argument is missing.
: "${1:?usage: $0 <output-file>}"

cat > "$1" << 'EOF'
#ifndef QRENCODE_CONFIG_H
#define QRENCODE_CONFIG_H

#define HAVE_CONFIG_H 1
#define HAVE_LIBPNG 1
#define HAVE_PNG_H 1
#define HAVE_STRDUP 1
#define MAJOR_VERSION 4
#define MICRO_VERSION 1
#define MINOR_VERSION 1
#define PACKAGE "qrencode"
#define PACKAGE_BUGREPORT ""
#define PACKAGE_NAME "qrencode"
#define PACKAGE_STRING "qrencode 4.1.1"
#define PACKAGE_TARNAME "qrencode"
#define PACKAGE_URL ""
#define PACKAGE_VERSION "4.1.1"
#define STATIC_IN_RELEASE static
#define STDC_HEADERS 1
#define VERSION "4.1.1"
#define HAVE_PTHREAD 1

#endif /* QRENCODE_CONFIG_H */
EOF
diff --git a/archived/projt-launcher/scripts/patch_maintainer_emails.py b/archived/projt-launcher/scripts/patch_maintainer_emails.py
new file mode 100755
index 0000000000..a86f2e9e7b
--- /dev/null
+++ b/archived/projt-launcher/scripts/patch_maintainer_emails.py
@@ -0,0 +1,486 @@
#!/usr/bin/env python3
"""Resolve maintainer email addresses impacted by a patch.

Matches changed files against CODEOWNERS-style rules (or rules derived
from a MAINTAINERS file) and maps owner handles to email addresses.
"""
import argparse
import re
import shlex
import subprocess
import sys
from dataclasses import dataclass
from pathlib import Path

# Intentionally permissive RFC-ish email matcher.
EMAIL_RE = re.compile(r"[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}")
# "diff --git a/... b/..." header line.
DIFF_GIT_RE = re.compile(r"^diff --git\s+(\S+)\s+(\S+)$")
# "+++ b/<path>" header line.
PLUS_RE = re.compile(r"^\+\+\+\s+b/(.+)$")


@dataclass
class CodeownersRule:
    """One ownership rule: a glob pattern plus its owner tokens."""
    pattern: str  # original pattern text as written in the rules file
    owners: list[str]  # owner tokens (@handle, @org/team, or bare email)
    matcher_kind: str  # "path" (full-path match) or "component" (any segment)
    matcher: re.Pattern[str]  # regex compiled from the glob


@dataclass
class MaintainerRecord:
    """Aggregated data for one section of a MAINTAINERS file."""
    aliases: set[str]  # normalized handles for this maintainer
    emails: set[str]  # lowercased email addresses
    paths: list[str]  # glob patterns this maintainer owns


# "key:" names in MAINTAINERS entries whose values may carry handles.
ALIAS_KEYS = {"github", "user", "username", "nick", "alias", "handle", "name"}
+
+
def normalize_path(path: str) -> str:
    """Normalize a path token from a diff or rules file.

    Trims surrounding whitespace, removes one pair of enclosing double
    quotes, and drops a leading "./" prefix.
    """
    cleaned = path.strip()
    if len(cleaned) >= 2 and cleaned[0] == '"' and cleaned[-1] == '"':
        cleaned = cleaned[1:-1]
    return cleaned[2:] if cleaned.startswith("./") else cleaned
+
+
def normalize_alias(token: str) -> str:
    """Canonicalize an owner handle.

    Lowercases, strips a leading "@", keeps only the part after the
    first "/" (team handles like @org/team), and removes every
    character outside [a-z0-9._-].
    """
    alias = token.strip().lower()
    if alias.startswith("@"):
        alias = alias[1:]
    head, sep, tail = alias.partition("/")
    if sep:
        alias = tail
    return re.sub(r"[^a-z0-9._-]+", "", alias)
+
+
def glob_to_regex(glob: str) -> str:
    """Translate a CODEOWNERS-style glob into a regex fragment.

    A run of two or more stars matches across "/" boundaries; a single
    "*" matches within one path segment; "?" matches one non-"/"
    character; everything else is escaped literally.
    """
    pieces: list[str] = []
    # Tokenize into star-runs, single stars, "?", or one literal char.
    for token_match in re.finditer(r"\*\*+|\*|\?|.", glob, flags=re.S):
        token = token_match.group()
        if token.startswith("**"):
            pieces.append(".*")
        elif token == "*":
            pieces.append("[^/]*")
        elif token == "?":
            pieces.append("[^/]")
        else:
            pieces.append(re.escape(token))
    return "".join(pieces)
+
+
+def parse_codeowners_line(line: str) -> tuple[str, list[str]] | None:
+ payload = line.split("#", 1)[0].strip()
+ if not payload:
+ return None
+
+ try:
+ tokens = shlex.split(payload)
+ except ValueError:
+ tokens = payload.split()
+
+ if len(tokens) < 2:
+ return None
+
+ return tokens[0], tokens[1:]
+
+
def compile_rule(pattern: str, owners: list[str]) -> CodeownersRule:
    """Compile a glob pattern into a CodeownersRule.

    A trailing "/" means "everything under this directory" (appends
    "**"); a leading "/" anchors to the repo root and is dropped.
    Patterns containing "/" match the full path; bare patterns match
    any single path component.
    """
    pat = pattern
    if pat.endswith("/"):
        pat += "**"
    if pat.startswith("/"):
        pat = pat[1:]

    # The compiled regex is identical for both kinds; only the matching
    # strategy differs, so build it once instead of per branch.
    kind = "path" if "/" in pat else "component"
    regex = re.compile(r"^" + glob_to_regex(pat) + r"$")
    return CodeownersRule(pattern=pattern, owners=owners, matcher_kind=kind, matcher=regex)
+
+
def parse_codeowners(path: Path) -> list[CodeownersRule]:
    """Read a CODEOWNERS file and compile every usable line into a rule.

    Undecodable bytes are ignored; blank, comment, and ownerless lines
    are filtered out by parse_codeowners_line.
    """
    rules: list[CodeownersRule] = []
    for raw_line in path.read_text(encoding="utf-8", errors="ignore").splitlines():
        parsed = parse_codeowners_line(raw_line)
        if not parsed:
            continue
        pattern, owners = parsed
        rules.append(compile_rule(pattern, owners))
    return rules
+
+
def match_rule(path: str, rule: CodeownersRule) -> bool:
    """Return True when the rule applies to this path.

    "path" rules must match the full path; "component" rules match if
    any single path segment matches.
    """
    if rule.matcher_kind == "path":
        return rule.matcher.fullmatch(path) is not None
    segments = (segment for segment in path.split("/") if segment)
    return any(rule.matcher.fullmatch(segment) for segment in segments)
+
+
def owners_for_file(path: str, rules: list[CodeownersRule]) -> list[str]:
    """Return owners from the LAST matching rule (CODEOWNERS semantics:
    later rules override earlier ones), or [] when nothing matches."""
    for rule in reversed(rules):
        if match_rule(path, rule):
            return rule.owners
    return []
+
+
def changed_files_from_patch_text(text: str) -> list[str]:
    """Extract changed file paths from unified-diff text.

    Prefers "diff --git" header lines (using the b/ side, falling back
    to the a/ side for deletions); also honors "+++ b/<path>" lines.
    First-seen order is preserved; duplicates are dropped by using a
    dict as an ordered set.
    """
    files: dict[str, None] = {}

    for line in text.splitlines():
        m = DIFF_GIT_RE.match(line)
        if m:
            left = m.group(1)
            right = m.group(2)
            # Strip git's a/ and b/ prefixes.
            if left.startswith("a/"):
                left = left[2:]
            if right.startswith("b/"):
                right = right[2:]
            # Deleted files have /dev/null on the right; use the old path.
            picked = right if right != "/dev/null" else left
            if picked and picked != "/dev/null":
                files[normalize_path(picked)] = None
            continue

        m = PLUS_RE.match(line)
        if m:
            picked = normalize_path(m.group(1))
            if picked and picked != "/dev/null":
                files[picked] = None

    return list(files.keys())
+
+
def changed_files_from_patch_file(patch_path: str) -> list[str]:
    """Extract changed paths from one patch file; "-" reads stdin."""
    if patch_path == "-":
        text = sys.stdin.read()
    else:
        text = Path(patch_path).read_text(encoding="utf-8", errors="ignore")
    return changed_files_from_patch_text(text)
+
+
def changed_files_from_patch_files(patch_paths: list[str]) -> list[str]:
    """Union of changed paths across patch files, first-seen order.

    Raises RuntimeError when "-" (stdin) is mixed with real files,
    since stdin can only be consumed once.
    """
    if not patch_paths:
        return []

    if "-" in patch_paths and len(patch_paths) > 1:
        raise RuntimeError("'-' stdin patch cannot be combined with other patch files")

    seen: dict[str, None] = {}
    for patch_path in patch_paths:
        seen.update(dict.fromkeys(changed_files_from_patch_file(patch_path)))
    return list(seen)
+
+
def changed_files_from_git_range(git_range: str) -> list[str]:
    """Run `git diff --name-only <range>` and return normalized paths.

    Raises RuntimeError carrying git's stderr when the command fails.
    """
    result = subprocess.run(
        ["git", "diff", "--name-only", git_range],
        check=False,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        raise RuntimeError(result.stderr.strip() or "git diff failed")

    files = [normalize_path(line) for line in result.stdout.splitlines() if line.strip()]
    return files
+
+
def aliases_from_maintainer_file(content: str, stem: str) -> set[str]:
    """Collect normalized aliases for one per-maintainer text file.

    The file stem (name without extension) is always an alias; values
    of alias-ish "key: value" lines contribute both whitespace/comma
    separated tokens and @mentions.
    """
    aliases: set[str] = set()
    base = normalize_alias(stem)
    if base:
        aliases.add(base)

    for line in content.splitlines():
        if ":" not in line:
            continue
        key, value = line.split(":", 1)
        key_norm = key.strip().lower()
        if key_norm not in ALIAS_KEYS:
            continue
        for token in re.split(r"[\s,]+", value.strip()):
            norm = normalize_alias(token)
            if norm:
                aliases.add(norm)
        for mention in re.findall(r"@[A-Za-z0-9._-]+", value):
            norm = normalize_alias(mention)
            if norm:
                aliases.add(norm)

    return aliases
+
+
def parse_maintainers_records(path: Path) -> list[MaintainerRecord]:
    """Parse an INI-ish MAINTAINERS file into per-maintainer records.

    Sections look like "[Name]"; body lines are "key: value" pairs.
    Emails are harvested from every value; "path(s):" keys collect glob
    patterns; ALIAS_KEYS values collect handles. Records without any
    email are discarded; records without paths default to ["**"].
    """
    records: list[MaintainerRecord] = []
    current_aliases: set[str] | None = None
    current_emails: set[str] | None = None
    current_paths: list[str] | None = None

    def flush_current() -> None:
        # Finalize the in-progress record (if any) and reset the state.
        nonlocal current_aliases, current_emails, current_paths
        if current_aliases is None or current_emails is None or current_paths is None:
            return
        if not current_emails:
            # A maintainer without any email cannot be notified - drop it.
            current_aliases = None
            current_emails = None
            current_paths = None
            return
        if not current_paths:
            current_paths = ["**"]
        records.append(
            MaintainerRecord(aliases=set(current_aliases), emails=set(current_emails), paths=list(current_paths))
        )
        current_aliases = None
        current_emails = None
        current_paths = None

    for raw_line in path.read_text(encoding="utf-8", errors="ignore").splitlines():
        line = raw_line.strip()
        if not line:
            continue

        # "[Section]" starts a new record; the section name itself
        # doubles as an alias.
        if line.startswith("[") and line.endswith("]"):
            flush_current()
            current_aliases = set()
            current_emails = set()
            current_paths = []
            section_name = line[1:-1].strip()
            section_alias = normalize_alias(section_name)
            if section_alias:
                current_aliases.add(section_alias)
            continue

        if line.startswith("#"):
            continue

        # Ignore body lines that appear before any section header.
        if current_aliases is None or current_emails is None or current_paths is None:
            continue

        # Strip trailing comments; only "key: value" lines are parsed.
        payload = line.split("#", 1)[0].strip()
        if ":" not in payload:
            continue

        key, value = payload.split(":", 1)
        key_norm = key.strip().lower()
        value = value.strip()
        if not value:
            continue

        # Any value may carry email addresses.
        for email in EMAIL_RE.findall(value):
            current_emails.add(email.lower())

        if key_norm in {"path", "paths"}:
            for token in re.split(r"[\s,]+", value):
                candidate = token.strip()
                if candidate:
                    current_paths.append(candidate)

        if key_norm in ALIAS_KEYS:
            # Collect handles both as bare tokens and as @mentions.
            for token in re.split(r"[\s,]+", value):
                alias = normalize_alias(token)
                if alias:
                    current_aliases.add(alias)
            for mention in re.findall(r"@[A-Za-z0-9._-]+", value):
                alias = normalize_alias(mention)
                if alias:
                    current_aliases.add(alias)

    flush_current()
    return records
+
+
def rules_from_maintainers_file(path: Path) -> list[CodeownersRule]:
    """Derive CODEOWNERS-style rules from a MAINTAINERS file.

    Owners are the record's aliases rendered as "@alias" tokens,
    falling back to the raw emails when a record has no alias.
    Returns [] when the path is missing or not a regular file.
    """
    if not path.exists() or not path.is_file():
        return []

    rules: list[CodeownersRule] = []
    for record in parse_maintainers_records(path):
        owners = [f"@{alias}" for alias in sorted(record.aliases)]
        if not owners:
            owners = sorted(record.emails)
        for pattern in record.paths:
            rules.append(compile_rule(pattern, owners))

    return rules
+
+
def build_alias_email_index(maintainers_source: Path) -> dict[str, set[str]]:
    """Build an alias -> {emails} index from a MAINTAINERS source.

    A regular file is parsed as structured records; a directory is
    scanned recursively for *.txt files whose content yields emails and
    aliases. A missing path produces an empty index.
    """
    index: dict[str, set[str]] = {}

    if not maintainers_source.exists():
        return index

    if maintainers_source.is_file():
        for record in parse_maintainers_records(maintainers_source):
            for alias in record.aliases:
                index.setdefault(alias, set()).update(record.emails)
        return index

    if not maintainers_source.is_dir():
        # Neither file nor directory (e.g. special file) - nothing to index.
        return index

    for entry in sorted(maintainers_source.rglob("*.txt")):
        text = entry.read_text(encoding="utf-8", errors="ignore")
        emails = {email.lower() for email in EMAIL_RE.findall(text)}
        if emails:
            aliases = aliases_from_maintainer_file(text, entry.stem)
            for alias in aliases:
                index.setdefault(alias, set()).update(emails)

    return index
+
+
def resolve_owner_emails(owner: str, alias_index: dict[str, set[str]]) -> set[str]:
    """Map one owner token to a set of email addresses.

    Tokens that already contain an email are returned directly.
    Otherwise the normalized alias is looked up in the index; for
    "@org/team" tokens the team tail is tried as well. Returns an empty
    set when nothing resolves.
    """
    direct = {email.lower() for email in EMAIL_RE.findall(owner)}
    if direct:
        return direct

    emails: set[str] = set()
    norm = normalize_alias(owner)
    if norm in alias_index:
        emails.update(alias_index[norm])

    if owner.startswith("@") and "/" in owner:
        tail_norm = normalize_alias(owner.split("/", 1)[1])
        if tail_norm in alias_index:
            emails.update(alias_index[tail_norm])

    return emails
+
+
+def choose_codeowners(explicit: str | None) -> Path | None:
+ if explicit:
+ return Path(explicit)
+
+ for candidate in (Path(".github/CODEOWNERS"), Path("CODEOWNERS")):
+ if candidate.exists():
+ return candidate
+
+ return None
+
+
def main(argv: list[str]) -> int:
    """CLI entry point.

    Exit codes: 0 on success (including an empty change set), 1 on
    usage/configuration errors, 2 when --strict-unresolved is set and
    some owner, file, or email could not be resolved.
    """
    parser = argparse.ArgumentParser(
        description="Find maintainer emails impacted by a patch using CODEOWNERS-like matching."
    )
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument(
        "--patch",
        nargs="+",
        default=None,
        help="One or more patch file paths. Use '-' to read patch from stdin.",
    )
    source_group.add_argument("--git-range", default=None, help="Git diff range, e.g. HEAD~1..HEAD")
    parser.add_argument("--codeowners", default=None, help="Path to CODEOWNERS file")
    parser.add_argument(
        "--maintainers-dir",
        default="MAINTAINERS",
        help="Maintainer source path (directory with *.txt files or a MAINTAINERS file)",
    )
    parser.add_argument("--show-details", action="store_true", help="Print per-file owner and email mapping")
    parser.add_argument(
        "--strict-unresolved",
        action="store_true",
        help="Exit with code 2 if at least one owner cannot be resolved to an email",
    )
    args = parser.parse_args(argv)

    # Ownership rules: prefer CODEOWNERS, fall back to MAINTAINERS.
    maintainers_source = Path(args.maintainers_dir)
    codeowners_path = choose_codeowners(args.codeowners)
    if codeowners_path is not None:
        if not codeowners_path.exists():
            print(f"error: CODEOWNERS file not found: {codeowners_path}", file=sys.stderr)
            return 1
        rules = parse_codeowners(codeowners_path)
        if not rules:
            print(f"error: no usable rules found in {codeowners_path}", file=sys.stderr)
            return 1
    else:
        rules = rules_from_maintainers_file(maintainers_source)
        if not rules:
            print(
                f"error: no ownership rules found. Provide --codeowners or a valid MAINTAINERS file: {maintainers_source}",
                file=sys.stderr,
            )
            return 1

    # Collect changed files from the requested source.
    try:
        if args.git_range:
            changed_files = changed_files_from_git_range(args.git_range)
        else:
            changed_files = changed_files_from_patch_files(args.patch)
    except Exception as exc:
        print(f"error: {exc}", file=sys.stderr)
        return 1

    if not changed_files:
        # An empty change set is not an error.
        return 0

    alias_index = build_alias_email_index(maintainers_source)

    # Resolve owners and emails per changed file.
    all_emails: set[str] = set()
    unresolved: set[str] = set()
    unowned_files: list[str] = []
    per_file: list[tuple[str, list[str], list[str]]] = []

    for changed in changed_files:
        owners = owners_for_file(changed, rules)
        file_emails: set[str] = set()
        if not owners:
            unowned_files.append(changed)

        for owner in owners:
            resolved = resolve_owner_emails(owner, alias_index)
            if resolved:
                file_emails.update(resolved)
            elif not EMAIL_RE.search(owner):
                # An alias with no known email; literal email owners are
                # never counted as unresolved.
                unresolved.add(owner)

        sorted_file_emails = sorted(file_emails)
        sorted_owners = sorted(set(owners))
        per_file.append((changed, sorted_owners, sorted_file_emails))
        all_emails.update(file_emails)

    if args.show_details:
        for changed, owners, emails in per_file:
            owners_text = ", ".join(owners) if owners else "-"
            emails_text = ", ".join(emails) if emails else "-"
            print(f"{changed}\towners={owners_text}\temails={emails_text}")

    # Primary output: one email per line on stdout.
    for email in sorted(all_emails):
        print(email)

    # Diagnostics go to stderr so stdout stays machine-readable.
    if unowned_files:
        preview = ", ".join(unowned_files[:5])
        if len(unowned_files) > 5:
            preview += ", ..."
        print(
            f"warning: no ownership rule match for {len(unowned_files)} changed file(s): {preview}",
            file=sys.stderr,
        )

    if not all_emails:
        print(
            "warning: no maintainer emails resolved. Check ownership rules and MAINTAINERS aliases.",
            file=sys.stderr,
        )

    if unresolved:
        print(
            "warning: unresolved owners: " + ", ".join(sorted(unresolved)),
            file=sys.stderr,
        )

    if args.strict_unresolved and (unresolved or unowned_files or not all_emails):
        return 2

    return 0


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))
diff --git a/archived/projt-launcher/scripts/syncconfig.sh b/archived/projt-launcher/scripts/syncconfig.sh
new file mode 100755
index 0000000000..9f070d6956
--- /dev/null
+++ b/archived/projt-launcher/scripts/syncconfig.sh
@@ -0,0 +1,122 @@
#!/bin/bash
# ============================================================================
# syncconfig.sh - Generate auto.conf and autoconf.h from .config
# ============================================================================
#
# This is a lightweight alternative to kconfig's syncconfig for platforms
# where kconfig tools cannot be built (Windows, macOS CI, etc.).
#
# It reads a .config file and generates:
#   - include/config/auto.conf     (Makefile-includable CONFIG_* variables)
#   - include/generated/autoconf.h (C header with #define CONFIG_*)
#
# Usage:
#   syncconfig.sh <build-dir>
#
# ============================================================================

set -e

# Force C locale for consistent regex matching across all locales.
# Turkish locale (tr_TR) breaks [A-Za-z] character ranges in grep.
export LC_ALL=C

# Build directory defaults to the current directory.
BUILD_DIR="${1:-.}"
CONFIG_FILE="${BUILD_DIR}/.config"
AUTO_CONF_DIR="${BUILD_DIR}/include/config"
AUTO_HEADER_DIR="${BUILD_DIR}/include/generated"
AUTO_CONF="${AUTO_CONF_DIR}/auto.conf"
AUTO_HEADER="${AUTO_HEADER_DIR}/autoconf.h"

if [ ! -f "$CONFIG_FILE" ]; then
  echo "error: $CONFIG_FILE not found" >&2
  exit 1
fi

# Create output directories
mkdir -p "$AUTO_CONF_DIR"
mkdir -p "$AUTO_HEADER_DIR"
+
+# ============================================================================
+# Generate auto.conf
+# ============================================================================
+
+{
+ echo "# Automatically generated - do not edit"
+ echo "# $(date)"
+ echo ""
+ # Extract all CONFIG_*=value lines (skip comments and blank lines)
+ grep -E '^CONFIG_[A-Za-z0-9_]+=' "$CONFIG_FILE" || true
+ # Also extract non-CONFIG lines that are make variable assignments
+ grep -E '^[A-Z_]+[[:space:]]*:?=' "$CONFIG_FILE" | grep -v '^CONFIG_' || true
+} > "$AUTO_CONF"
+
+echo " GEN $AUTO_CONF"
+
# ============================================================================
# Generate autoconf.h
# ============================================================================
# Translate .config lines into C preprocessor definitions. Lines that
# match none of the recognized forms (e.g. CONFIG_FOO=m, hex values) are
# silently dropped, as before.
#
# Uses bash pattern matching instead of spawning grep/cut/sed once per
# line - orders of magnitude faster on large .config files. Also fixes a
# bug in the previous version: the generic comment-skip ran before the
# "# CONFIG_* is not set" check, so those lines were always lost.

not_set_re='^# (CONFIG_[A-Za-z0-9_]+) is not set'
config_re='^(CONFIG_[A-Za-z0-9_]+)=(.*)$'

{
  echo "/* Automatically generated - do not edit */"
  echo "#ifndef __AUTOCONF_H__"
  echo "#define __AUTOCONF_H__"
  echo ""

  while IFS= read -r line; do
    # "# CONFIG_* is not set" must be recognized BEFORE the generic
    # comment skip, otherwise these lines are dropped entirely.
    if [[ "$line" =~ $not_set_re ]]; then
      echo "/* ${BASH_REMATCH[1]} is not set */"
      continue
    fi

    # Skip blank lines and all other comments.
    case "$line" in
      '#'*|'') continue ;;
    esac

    # Only CONFIG_<IDENT>=<value> lines are translated.
    [[ "$line" =~ $config_re ]] || continue
    key="${BASH_REMATCH[1]}"
    val="${BASH_REMATCH[2]}"

    if [[ "$val" == "y" ]]; then
      # CONFIG_*=y -> #define CONFIG_* 1
      echo "#define $key 1"
    elif [[ "$val" == "n" ]]; then
      # CONFIG_*=n -> /* CONFIG_* is not set */
      echo "/* $key is not set */"
    elif [[ "$val" == '""' ]]; then
      # CONFIG_*="" (empty string) -> #define CONFIG_* ""
      echo "#define $key \"\""
    elif [[ "$val" == \"*\" ]]; then
      # CONFIG_*="string" -> #define CONFIG_* "string" (quotes kept)
      echo "#define $key $val"
    elif [[ "$val" =~ ^[0-9]+$ ]]; then
      # CONFIG_*=number -> #define CONFIG_* number
      echo "#define $key $val"
    fi
  done < "$CONFIG_FILE"

  echo ""
  echo "#endif /* __AUTOCONF_H__ */"
} > "$AUTO_HEADER"

echo " GEN $AUTO_HEADER"
diff --git a/archived/projt-launcher/scripts/update-qt-version.sh b/archived/projt-launcher/scripts/update-qt-version.sh
new file mode 100755
index 0000000000..10d7c1778d
--- /dev/null
+++ b/archived/projt-launcher/scripts/update-qt-version.sh
@@ -0,0 +1,61 @@
#!/usr/bin/env bash
# Check download.qt.io for a newer Qt patch release on the currently
# pinned minor track and, when one exists, rewrite the pinned version in
# the CI action, workflow files, and docs. Emits qt_version_* step
# outputs when running under GitHub Actions (GITHUB_OUTPUT set).
set -euo pipefail

# Operate from the repository root (parent of this script's directory).
root_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$root_dir"

# Single source of truth for the pinned Qt version.
action_file=".github/actions/setup-dependencies/action.yml"
workflow_files=(
  ".gitlab-ci.yml"
  ".gitlab/ci/github-workflow-parity.yml"
  ".github/workflows/ci-launcher.yml"
)

doc_files=(
  "CONTRIBUTING.md"
  "docs/contributing/GETTING_STARTED.md"
)

# The first "default:" after the "qt-version:" key holds the version;
# tr strips a stray CR in case of a CRLF checkout.
current_version="$(awk '/qt-version:/{found=1} found && /default:/{print $2; exit}' "$action_file" | tr -d '\r')"
if [[ -z "$current_version" ]]; then
  echo "Failed to locate current Qt version in $action_file" >&2
  exit 1
fi

# e.g. 6.7.2 -> track 6.7; release listings live under .../qt/<track>/.
track="${current_version%.*}"
base_url="https://download.qt.io/official_releases/qt/${track}/"

# Scrape the directory listing for "<track>.<patch>/" entries and pick
# the highest by version sort.
html="$(curl -fsSL "$base_url")"
latest_version="$(echo "$html" | grep -oE "${track}\\.[0-9]+/" | tr -d '/' | sort -V | tail -1)"
if [[ -z "$latest_version" ]]; then
  echo "Failed to resolve latest Qt version from $base_url" >&2
  exit 1
fi

changed=false
if [[ "$latest_version" != "$current_version" ]]; then
  # Update the action default. -0777 slurps the whole file so the match
  # may span lines; \Q...\E quotes the version so dots stay literal.
  perl -0777 -i -pe "s/(qt-version:[\s\S]*?default: )\Q$current_version\E/\1$latest_version/" "$action_file"

  # Update qt-version / QT_VERSION / qt_version pins in CI files.
  for file in "${workflow_files[@]}"; do
    if [[ -f "$file" ]]; then
      perl -i -pe "s/(qt-version:\\s*)\\Q$current_version\\E/\\1$latest_version/g; s/(QT_VERSION:\\s*\\\")\\Q$current_version\\E(\\\")/\\1$latest_version\\2/g; s/(qt_version:\\s*\\\")\\Q$current_version\\E(\\\")/\\1$latest_version\\2/g" "$file"
    fi
  done

  # Docs: replace the bare version anywhere it appears as a whole word.
  # NOTE(review): assumes the docs never mention the old version string
  # in an unrelated context - confirm before widening doc_files.
  for file in "${doc_files[@]}"; do
    if [[ -f "$file" ]]; then
      perl -i -pe "s/\\b\\Q$current_version\\E\\b/$latest_version/g" "$file"
    fi
  done
  changed=true
fi

# Expose the result to subsequent GitHub Actions steps.
if [[ -n "${GITHUB_OUTPUT:-}" ]]; then
  {
    echo "qt_version_current=$current_version"
    echo "qt_version_latest=$latest_version"
    echo "qt_version_changed=$changed"
  } >> "$GITHUB_OUTPUT"
fi

echo "Qt version: ${current_version} -> ${latest_version} (changed=${changed})"
diff --git a/archived/projt-launcher/scripts/update-subtrees.sh b/archived/projt-launcher/scripts/update-subtrees.sh
new file mode 100755
index 0000000000..c5601b29b7
--- /dev/null
+++ b/archived/projt-launcher/scripts/update-subtrees.sh
@@ -0,0 +1,55 @@
#!/usr/bin/env bash
# Update the vendored git subtrees listed in $SUBTREES_FILE and push the
# result to an update branch.
#
# Env overrides: REMOTE, BASE_BRANCH, UPDATE_BRANCH, SUBTREES_FILE,
# GIT_USER_NAME, GIT_USER_EMAIL.
set -euo pipefail

ROOT="$(git rev-parse --show-toplevel)"
cd "$ROOT"

REMOTE="${REMOTE:-origin}"
BASE_BRANCH="${BASE_BRANCH:-main}"
UPDATE_BRANCH="${UPDATE_BRANCH:-chore/update-subtrees}"
SUBTREES_FILE="${SUBTREES_FILE:-.github/subtrees.txt}"

GIT_USER_NAME="${GIT_USER_NAME:-YongDo-Hyun}"
GIT_USER_EMAIL="${GIT_USER_EMAIL:-froster12@naver.com}"

git config user.name "$GIT_USER_NAME"
git config user.email "$GIT_USER_EMAIL"

if [ ! -f "$SUBTREES_FILE" ]; then
  echo "Missing $SUBTREES_FILE" >&2
  exit 1
fi

git fetch "$REMOTE" "$BASE_BRANCH"
git switch -C "$UPDATE_BRANCH" "$REMOTE/$BASE_BRANCH"

# File format: "<name> <path> <repo> [branch]" per line; '#' comments.
# '|| [ -n ... ]' also processes a final line without a trailing newline.
while read -r name path repo branch || [ -n "${name:-}" ]; do
  [ -z "${name:-}" ] && continue
  case "$name" in
    \#*) continue ;;
  esac
  if [ -z "${branch:-}" ]; then
    branch="main"
  fi

  # LLVM needs --squash due to its massive unrelated history.
  # An array keeps the optional flag safe under quoting (the old
  # unquoted $squash_flag relied on word splitting).
  squash_args=()
  if [ "$name" = "llvm" ]; then
    squash_args=(--squash)
  fi

  echo "Updating $name ($path) from $repo ($branch)${squash_args[0]:+ [squash]}"
  # </dev/null stops git subtree (or its hooks) from consuming the
  # subtrees file we are reading on stdin.
  if [ -d "$path" ]; then
    git subtree pull --prefix="$path" "$repo" "$branch" "${squash_args[@]}" -m "chore(subtrees): update $name" < /dev/null
  else
    git subtree add --prefix="$path" "$repo" "$branch" "${squash_args[@]}" -m "chore(subtrees): add $name" < /dev/null
  fi
done < "$SUBTREES_FILE"

# Nothing to push when the subtree pulls produced no new commits.
if git diff --quiet "$REMOTE/$BASE_BRANCH"..HEAD --; then
  echo "No subtree changes detected; not pushing."
  exit 0
fi

git push "$REMOTE" "$UPDATE_BRANCH"
echo "Pushed updates to $REMOTE/$UPDATE_BRANCH"