summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--meta/COPYING.md55
-rw-r--r--meta/LICENSE52
-rw-r--r--meta/flake.nix2
-rw-r--r--meta/fuzz/meta/fuzz_meta_models.py45
-rw-r--r--meta/init.sh2
-rw-r--r--meta/meta/common/__init__.py7
-rw-r--r--meta/meta/common/modloadermp.py5
-rw-r--r--meta/meta/common/mojang-library-patches.json26
-rw-r--r--meta/meta/common/optifine.py10
-rw-r--r--meta/meta/common/risugami.py5
-rw-r--r--meta/meta/common/stationloader.py5
-rw-r--r--meta/meta/model/__init__.py1
-rw-r--r--meta/meta/model/java.py2
-rw-r--r--meta/meta/model/mojang.py12
-rwxr-xr-xmeta/meta/run/generate_forge.py40
-rw-r--r--meta/meta/run/generate_java.py5
-rw-r--r--meta/meta/run/generate_modloadermp.py24
-rwxr-xr-xmeta/meta/run/generate_mojang.py14
-rw-r--r--meta/meta/run/generate_neoforge.py40
-rw-r--r--meta/meta/run/generate_optifine.py110
-rw-r--r--meta/meta/run/generate_risugami.py26
-rw-r--r--meta/meta/run/generate_stationloader.py24
-rwxr-xr-xmeta/meta/run/index.py25
-rwxr-xr-xmeta/meta/run/update_forge.py28
-rw-r--r--meta/meta/run/update_java.py131
-rw-r--r--meta/meta/run/update_modloadermp.py24
-rw-r--r--meta/meta/run/update_neoforge.py54
-rw-r--r--meta/meta/run/update_optifine.py456
-rw-r--r--meta/meta/run/update_risugami.py25
-rw-r--r--meta/meta/run/update_stationloader.py24
-rw-r--r--meta/nix/pkgs/blockgame-meta.nix2
-rw-r--r--meta/poetry.lock124
-rw-r--r--meta/pyproject.toml18
-rw-r--r--meta/requirements.txt13
-rwxr-xr-xmeta/update.sh17
35 files changed, 1289 insertions, 164 deletions
diff --git a/meta/COPYING.md b/meta/COPYING.md
new file mode 100644
index 0000000000..b5d814c02c
--- /dev/null
+++ b/meta/COPYING.md
@@ -0,0 +1,55 @@
+meta
+====
+
+ Microsoft Public License (Ms-PL)
+
+ This license governs use of the accompanying software. If you use the
+ software, you accept this license. If you do not accept the license, do not
+ use the software.
+
+ 1. Definitions
+ The terms "reproduce," "reproduction," "derivative works," and "distribution"
+ have the same meaning here as under U.S. copyright law. A "contribution" is
+ the original software, or any additions or changes to the software. A
+ "contributor" is any person that distributes its contribution under this
+ license. "Licensed patents" are a contributor's patent claims that read
+ directly on its contribution.
+
+ 2. Grant of Rights
+ (A) Copyright Grant- Subject to the terms of this license, including the
+ license conditions and limitations in section 3, each contributor grants
+ you a non-exclusive, worldwide, royalty-free copyright license to
+ reproduce its contribution, prepare derivative works of its contribution,
+ and distribute its contribution or any derivative works that you create.
+
+ (B) Patent Grant- Subject to the terms of this license, including the
+ license conditions and limitations in section 3, each contributor grants
+ you a non-exclusive, worldwide, royalty-free license under its licensed
+ patents to make, have made, use, sell, offer for sale, import, and/or
+ otherwise dispose of its contribution in the software or derivative works
+ of the contribution in the software.
+
+ 3. Conditions and Limitations
+ (A) No Trademark License- This license does not grant you rights to use
+ any contributors' name, logo, or trademarks.
+
+ (B) If you bring a patent claim against any contributor over patents that
+ you claim are infringed by the software, your patent license from such
+ contributor to the software ends automatically.
+
+ (C) If you distribute any portion of the software, you must retain all
+ copyright, patent, trademark, and attribution notices that are present in
+ the software.
+
+ (D) If you distribute any portion of the software in source code form,
+ you may do so only under this license by including a complete copy of
+ this license with your distribution. If you distribute any portion of the
+ software in compiled or object code form, you may only do so under a
+ license that complies with this license.
+
+ (E) The software is licensed "as-is." You bear the risk of using it. The
+ contributors give no express warranties, guarantees, or conditions. You
+ may have additional consumer rights under your local laws which this
+ license cannot change. To the extent permitted under your local laws, the
+ contributors exclude the implied warranties of merchantability, fitness
+ for a particular purpose and non-infringement.
diff --git a/meta/LICENSE b/meta/LICENSE
new file mode 100644
index 0000000000..2caafaccf8
--- /dev/null
+++ b/meta/LICENSE
@@ -0,0 +1,52 @@
+Microsoft Public License (Ms-PL)
+
+This license governs use of the accompanying software. If you use the
+software, you accept this license. If you do not accept the license, do not
+use the software.
+
+1. Definitions
+The terms "reproduce," "reproduction," "derivative works," and "distribution"
+have the same meaning here as under U.S. copyright law. A "contribution" is
+the original software, or any additions or changes to the software. A
+"contributor" is any person that distributes its contribution under this
+license. "Licensed patents" are a contributor's patent claims that read
+directly on its contribution.
+
+2. Grant of Rights
+ (A) Copyright Grant- Subject to the terms of this license, including the
+ license conditions and limitations in section 3, each contributor grants
+ you a non-exclusive, worldwide, royalty-free copyright license to
+ reproduce its contribution, prepare derivative works of its contribution,
+ and distribute its contribution or any derivative works that you create.
+
+ (B) Patent Grant- Subject to the terms of this license, including the
+ license conditions and limitations in section 3, each contributor grants
+ you a non-exclusive, worldwide, royalty-free license under its licensed
+ patents to make, have made, use, sell, offer for sale, import, and/or
+ otherwise dispose of its contribution in the software or derivative works
+ of the contribution in the software.
+
+3. Conditions and Limitations
+ (A) No Trademark License- This license does not grant you rights to use
+ any contributors' name, logo, or trademarks.
+
+ (B) If you bring a patent claim against any contributor over patents that
+ you claim are infringed by the software, your patent license from such
+ contributor to the software ends automatically.
+
+ (C) If you distribute any portion of the software, you must retain all
+ copyright, patent, trademark, and attribution notices that are present in
+ the software.
+
+ (D) If you distribute any portion of the software in source code form,
+ you may do so only under this license by including a complete copy of
+ this license with your distribution. If you distribute any portion of the
+ software in compiled or object code form, you may only do so under a
+ license that complies with this license.
+
+ (E) The software is licensed "as-is." You bear the risk of using it. The
+ contributors give no express warranties, guarantees, or conditions. You
+ may have additional consumer rights under your local laws which this
+ license cannot change. To the extent permitted under your local laws, the
+ contributors exclude the implied warranties of merchantability, fitness
+ for a particular purpose and non-infringement.
diff --git a/meta/flake.nix b/meta/flake.nix
index aaacaa58e5..6fd0c6d82b 100644
--- a/meta/flake.nix
+++ b/meta/flake.nix
@@ -1,5 +1,5 @@
{
- description = "Prism Launcher Metadata generation scripts";
+ description = "ProjT Launcher Metadata generation scripts";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable";
diff --git a/meta/fuzz/meta/fuzz_meta_models.py b/meta/fuzz/meta/fuzz_meta_models.py
new file mode 100644
index 0000000000..c443a9a538
--- /dev/null
+++ b/meta/fuzz/meta/fuzz_meta_models.py
@@ -0,0 +1,45 @@
+import json
+import os
+import sys
+
+import atheris
+
+REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
+if REPO_ROOT not in sys.path:
+ sys.path.insert(0, REPO_ROOT)
+
+from meta.model import MetaPackage, MetaVersion # noqa: E402
+
+
+def test_one_input(data: bytes) -> None:
+ try:
+ text = data.decode("utf-8", errors="ignore")
+ except Exception:
+ return
+
+ if not text:
+ return
+
+ try:
+ json.loads(text)
+ except Exception:
+ return
+
+ try:
+ MetaPackage.parse_raw(text)
+ except Exception:
+ pass
+
+ try:
+ MetaVersion.parse_raw(text)
+ except Exception:
+ pass
+
+
+def main() -> None:
+ atheris.Setup(sys.argv, test_one_input)
+ atheris.Fuzz()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/init.sh b/meta/init.sh
index c9bfa8133f..69a4704d7d 100644
--- a/meta/init.sh
+++ b/meta/init.sh
@@ -8,7 +8,7 @@ fi
export META_CACHE_DIR=${CACHE_DIRECTORY:-./caches}
export META_UPSTREAM_DIR=${META_UPSTREAM_DIR:-${STATE_DIRECTORY:-.}/upstream}
-export META_LAUNCHER_DIR=${META_LAUNCHER_DIR:-${STATE_DIRECTORY:-.}/launcher}
+export META_LAUNCHER_DIR=${META_LAUNCHER_DIR:-${STATE_DIRECTORY:-.}/metalauncher}
function init_repo {
# no op if target already exists
diff --git a/meta/meta/common/__init__.py b/meta/meta/common/__init__.py
index 04e3bdfbb0..a1df8c675f 100644
--- a/meta/meta/common/__init__.py
+++ b/meta/meta/common/__init__.py
@@ -10,7 +10,7 @@ import requests
from cachecontrol import CacheControl # type: ignore
from cachecontrol.caches import FileCache # type: ignore
-LAUNCHER_MAVEN = "https://files.prismlauncher.org/maven/%s"
+LAUNCHER_MAVEN = "https://files.projecttick.org/maven/%s"
def serialize_datetime(dt: datetime.datetime):
@@ -85,7 +85,7 @@ def default_session():
cache = FileCache(os.path.join(cache_path(), "http_cache"))
sess = CacheControl(requests.Session(), cache)
- sess.headers.update({"User-Agent": "PrismLauncherMeta/1.0"})
+ sess.headers.update({"User-Agent": "ProjectTickMeta/1.0"})
return sess
@@ -117,8 +117,7 @@ def get_file_sha1_from_file(file_name: str, sha1_file: str) -> Optional[str]:
if os.path.isfile(sha1_file):
with open(sha1_file, "r") as file:
return file.read()
- if not os.path.isfile(file_name):
- return None
+    if not os.path.isfile(file_name): return None
new_sha1 = file_hash(file_name, hashlib.sha1)
with open(sha1_file, "w") as file:
file.write(new_sha1)
diff --git a/meta/meta/common/modloadermp.py b/meta/meta/common/modloadermp.py
new file mode 100644
index 0000000000..1c71b94614
--- /dev/null
+++ b/meta/meta/common/modloadermp.py
@@ -0,0 +1,5 @@
+BASE_DIR = "modloadermp"
+
+MODLOADERMP_COMPONENT = "modloadermp"
+
+VERSIONS_FILE = "modloadermp/versions.json"
diff --git a/meta/meta/common/mojang-library-patches.json b/meta/meta/common/mojang-library-patches.json
index d6386746b6..ae521eeb03 100644
--- a/meta/meta/common/mojang-library-patches.json
+++ b/meta/meta/common/mojang-library-patches.json
@@ -2921,6 +2921,32 @@
{
"_comment": "Add linux-arm64 support for LWJGL 3.3.6",
"match": [
+ "org.lwjgl:lwjgl-freetype:3.3.6"
+ ],
+ "additionalLibraries": [
+ {
+ "downloads": {
+ "artifact": {
+ "sha1": "b5492439c7c9a596655d0d0e06801f93ec491e53",
+ "size": 1093516,
+ "url": "https://build.lwjgl.org/release/3.3.6/bin/lwjgl-freetype/lwjgl-freetype-natives-linux-arm64.jar"
+ }
+ },
+ "name": "org.lwjgl:lwjgl-freetype-natives-linux-arm64:3.3.6-lwjgl.1",
+ "rules": [
+ {
+ "action": "allow",
+ "os": {
+ "name": "linux-arm64"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "_comment": "Add linux-arm64 support for LWJGL 3.3.6",
+ "match": [
"org.lwjgl:lwjgl-glfw:3.3.6"
],
"additionalLibraries": [
diff --git a/meta/meta/common/optifine.py b/meta/meta/common/optifine.py
new file mode 100644
index 0000000000..2fbb854c66
--- /dev/null
+++ b/meta/meta/common/optifine.py
@@ -0,0 +1,10 @@
+# upstream directory name where OptiFine pages/versions are stored
+BASE_DIR = "optifine"
+
+# launcher package uid (folder under `launcher/`) -- keep this as the canonical package id
+# Use 'net.optifine' so generated launcher metadata lives in `launcher/net.optifine/`
+OPTIFINE_COMPONENT = "net.optifine"
+
+# upstream component path (under `upstream/`) and combined versions file path
+OPTIFINE_UPSTREAM_DIR = "optifine"
+VERSIONS_FILE = "optifine/versions.json"
diff --git a/meta/meta/common/risugami.py b/meta/meta/common/risugami.py
new file mode 100644
index 0000000000..c04e8bc01b
--- /dev/null
+++ b/meta/meta/common/risugami.py
@@ -0,0 +1,5 @@
+BASE_DIR = "risugami"
+
+RISUGAMI_COMPONENT = "risugami"
+
+VERSIONS_FILE = "risugami/versions.json"
diff --git a/meta/meta/common/stationloader.py b/meta/meta/common/stationloader.py
new file mode 100644
index 0000000000..3ce88888c6
--- /dev/null
+++ b/meta/meta/common/stationloader.py
@@ -0,0 +1,5 @@
+BASE_DIR = "station-loader"
+
+STATIONLOADER_COMPONENT = "station-loader"
+
+VERSIONS_FILE = "station-loader/versions.json"
diff --git a/meta/meta/model/__init__.py b/meta/meta/model/__init__.py
index 203b367ed5..27dae8b181 100644
--- a/meta/meta/model/__init__.py
+++ b/meta/meta/model/__init__.py
@@ -206,6 +206,7 @@ class Versioned(MetaBase):
class MojangArtifactBase(MetaBase):
sha1: Optional[str]
+ sha256: Optional[str]
size: Optional[int]
url: str
diff --git a/meta/meta/model/java.py b/meta/meta/model/java.py
index c3efde9292..d8d5671474 100644
--- a/meta/meta/model/java.py
+++ b/meta/meta/model/java.py
@@ -200,7 +200,7 @@ class AdoptxOs(StrEnum):
ADOPTIUM_API_BASE = "https://api.adoptium.net"
-OPENJ9_API_BASE = " https://api.adoptopenjdk.net"
+OPENJ9_API_BASE = "https://api.adoptopenjdk.net"
ADOPTX_API_FEATURE_RELEASES = (
f"{{base_url}}/v3/assets/feature_releases/{{feature_version}}/{{release_type}}"
)
diff --git a/meta/meta/model/mojang.py b/meta/meta/model/mojang.py
index b3edfe21f9..72a5708017 100644
--- a/meta/meta/model/mojang.py
+++ b/meta/meta/model/mojang.py
@@ -191,9 +191,17 @@ class MojangJavaComponent(StrEnum):
GammaSnapshot = "java-runtime-gamma-snapshot"
Exe = "minecraft-java-exe"
Delta = "java-runtime-delta"
- Epsilon = "java-runtime-epsilon"
+ @classmethod
+ def _missing_(cls, value: str):
+ member = str.__new__(cls, value)
+ member._name_ = value.upper().replace("-", "_")
+ member._value_ = value
+ cls._value2member_map_[value] = member
+ cls._member_map_[member._name_] = member
+ return member
+
class JavaVersion(MetaBase):
component: MojangJavaComponent = MojangJavaComponent.JreLegacy
major_version: int = Field(8, alias="majorVersion")
@@ -340,5 +348,5 @@ class MojangVersion(MetaBase):
compatible_java_name=javaName,
additional_traits=addn_traits,
main_jar=main_jar,
- logging=(self.logging or {}).get("client"),
+ logging=(self.logging or {}).get("client")
)
diff --git a/meta/meta/run/generate_forge.py b/meta/meta/run/generate_forge.py
index a01f503ebe..ce1dcdea48 100755
--- a/meta/meta/run/generate_forge.py
+++ b/meta/meta/run/generate_forge.py
@@ -2,9 +2,11 @@ import os
import re
from packaging import version as pversion
from operator import attrgetter
-from typing import Collection
+from typing import Collection, Optional
+import hashlib
-from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint, default_session
from meta.common.forge import (
FORGE_COMPONENT,
INSTALLER_MANIFEST_DIR,
@@ -42,6 +44,36 @@ UPSTREAM_DIR = upstream_path()
ensure_component_dir(FORGE_COMPONENT)
+sess = default_session()
+
+def update_library_info(lib: Library):
+ if not lib.downloads:
+ lib.downloads = MojangLibraryDownloads()
+ if not lib.downloads.artifact:
+ url = lib.url
+ if not url and lib.name:
+ url = f"https://maven.minecraftforge.net/{lib.name.path()}"
+ if url:
+ lib.downloads.artifact = MojangArtifact(url=url, sha1=None, size=None)
+
+ art = lib.downloads.artifact
+ if art and art.url:
+ try:
+ # Check/Fetch SHA1
+ if not art.sha1:
+ r = sess.get(art.url + ".sha1")
+ if r.status_code == 200:
+ art.sha1 = r.text.strip()
+
+ # Check/Fetch Size
+ if not art.size:
+ r = sess.head(art.url)
+ if r.status_code == 200 and 'Content-Length' in r.headers:
+ art.size = int(r.headers['Content-Length'])
+ except Exception as e:
+ eprint(f"Failed to update info for {lib.name}: {e}")
+
+
# Construct a set of libraries out of a Minecraft version file, for filtering.
mc_version_cache = {}
@@ -241,7 +273,8 @@ def version_from_build_system_installer(
v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=version.mc_version_sane)]
v.main_class = "io.github.zekerzhayard.forgewrapper.installer.Main"
- # FIXME: Add the size and hash here
+ # size and sha1 for maven files are filled in by update_library_info below
+
v.maven_files = []
# load the locally cached installer file info and use it to add the installer entry in the json
@@ -273,6 +306,7 @@ def version_from_build_system_installer(
forge_lib.downloads.artifact.url = (
"https://maven.minecraftforge.net/%s" % forge_lib.name.path()
)
+ update_library_info(forge_lib)
v.maven_files.append(forge_lib)
v.libraries = []
diff --git a/meta/meta/run/generate_java.py b/meta/meta/run/generate_java.py
index 9f23e3a4d3..812569dd54 100644
--- a/meta/meta/run/generate_java.py
+++ b/meta/meta/run/generate_java.py
@@ -144,8 +144,6 @@ def mojang_component_to_major(mojang_component: MojangJavaComponent) -> int:
return 0
case MojangJavaComponent.Delta:
return 21
- case MojangJavaComponent.Epsilon:
- return 25
case _:
return 0
@@ -341,7 +339,7 @@ def main():
JavaRuntimeOS.LinuxArm32,
JavaRuntimeOS.LinuxArm64,
]
- and major in [8, 17, 21, 25]
+ and major in [8, 17, 21]
)
or (
runtime.runtime_os
@@ -523,7 +521,6 @@ def main():
MojangJavaComponent.Gamma,
MojangJavaComponent.GammaSnapshot,
MojangJavaComponent.Delta,
- MojangJavaComponent.Epsilon,
]:
runtime = get_mojang_extra_java(comp, java_os)
if runtime != None:
diff --git a/meta/meta/run/generate_modloadermp.py b/meta/meta/run/generate_modloadermp.py
new file mode 100644
index 0000000000..a1c6c20cbb
--- /dev/null
+++ b/meta/meta/run/generate_modloadermp.py
@@ -0,0 +1,24 @@
+import os
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.modloadermp import MODLOADERMP_COMPONENT, VERSIONS_FILE
+from meta.model import MetaPackage
+
+
+LAUNCHER_DIR = launcher_path()
+UPSTREAM_DIR = upstream_path()
+
+ensure_component_dir(MODLOADERMP_COMPONENT)
+
+
+def main():
+ package = MetaPackage(
+ uid=MODLOADERMP_COMPONENT,
+ name="ModLoaderMP",
+ description="ModLoaderMP metadata (auto-generated stub)",
+ )
+
+ package.write(os.path.join(LAUNCHER_DIR, MODLOADERMP_COMPONENT, "package.json"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/generate_mojang.py b/meta/meta/run/generate_mojang.py
index 9d2063263a..bb6f4e60b6 100755
--- a/meta/meta/run/generate_mojang.py
+++ b/meta/meta/run/generate_mojang.py
@@ -1,6 +1,5 @@
import copy
import hashlib
-import re
import os
from collections import defaultdict, namedtuple
from operator import attrgetter
@@ -52,7 +51,7 @@ ensure_component_dir(LWJGL3_COMPONENT)
def map_log4j_artifact(version):
x = pversion.parse(version)
if x <= pversion.parse("2.0"):
- return "2.0-beta9-fixed", "https://files.prismlauncher.org/maven/%s"
+ return "2.0-beta9-fixed", "https://files.projecttick.org/maven/%s"
if x <= pversion.parse("2.17.1"):
return (
"2.17.1",
@@ -91,7 +90,7 @@ LOG4J_HASHES = {
# We want versions that contain natives for all platforms. If there are multiple, pick the latest one
# LWJGL versions we want
PASS_VARIANTS = [
- "1fd0e4d1f0f7c97e8765a69d38225e1f27ee14ef", # 3.4.1 (2026-02-17 12:42:24+00:00)
+ "1fd0e4d1f0f7c97e8765a69d38225e1f27ee14ef", # 3.4.1
"2b00f31688148fc95dbc8c8ef37308942cf0dce0", # 3.3.6 (2025-10-21 11:38:51+00:00)
"73974b3af2afeb5b272ffbadcd7963014387c84f", # 3.3.3 (2024-05-22 16:25:41+00:00)
"765b4ab443051d286bdbb1c19cd7dc86b0792dce", # 3.3.2 (2024-01-17 13:19:20+00:00)
@@ -514,15 +513,6 @@ def main():
if v.additional_traits == None:
v.additional_traits = []
v.additional_traits.append("legacyServices")
-
- # 13w16a-13w23a require legacyLaunch and those + 13w23b require texturepacks
- if re.match(r"13w[1,2]\d[a-c]", v.version) and 16 <= int(v.version[3:-1]) <= 23:
- if v.additional_traits == None:
- v.additional_traits = []
- if v.version != "13w23b":
- v.additional_traits.append("legacyLaunch")
- v.additional_traits.append("texturepacks")
-
v.write(out_filename)
for lwjglVersionVariant in lwjglVersionVariants:
diff --git a/meta/meta/run/generate_neoforge.py b/meta/meta/run/generate_neoforge.py
index de32256869..d3eac3f1de 100644
--- a/meta/meta/run/generate_neoforge.py
+++ b/meta/meta/run/generate_neoforge.py
@@ -2,9 +2,11 @@ from copy import deepcopy
import os
import re
from operator import attrgetter
-from typing import Collection
+from typing import Collection, Optional
+import hashlib
-from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint, default_session
from meta.common.neoforge import (
NEOFORGE_COMPONENT,
INSTALLER_MANIFEST_DIR,
@@ -36,6 +38,36 @@ UPSTREAM_DIR = upstream_path()
ensure_component_dir(NEOFORGE_COMPONENT)
+sess = default_session()
+
+def update_library_info(lib: Library):
+ if not lib.downloads:
+ lib.downloads = MojangLibraryDownloads()
+ if not lib.downloads.artifact:
+ url = lib.url
+ if not url and lib.name:
+ url = f"https://maven.neoforged.net/releases/{lib.name.path()}"
+ if url:
+ lib.downloads.artifact = MojangArtifact(url=url, sha1=None, size=None)
+
+ art = lib.downloads.artifact
+ if art and art.url:
+ try:
+ # Check/Fetch SHA1
+ if not art.sha1:
+ r = sess.get(art.url + ".sha1")
+ if r.status_code == 200:
+ art.sha1 = r.text.strip()
+
+ # Check/Fetch Size
+ if not art.size:
+ r = sess.head(art.url)
+ if r.status_code == 200 and 'Content-Length' in r.headers:
+ art.size = int(r.headers['Content-Length'])
+ except Exception as e:
+ eprint(f"Failed to update info for {lib.name}: {e}")
+
+
def version_from_build_system_installer(
installer: MojangVersion,
@@ -45,7 +77,8 @@ def version_from_build_system_installer(
v = MetaVersion(name="NeoForge", version=version.rawVersion, uid=NEOFORGE_COMPONENT)
v.main_class = "io.github.zekerzhayard.forgewrapper.installer.Main"
- # FIXME: Add the size and hash here
+ # size and sha1 for maven files are filled in by update_library_info below
+
v.maven_files = []
# load the locally cached installer file info and use it to add the installer entry in the json
@@ -69,6 +102,7 @@ def version_from_build_system_installer(
if forge_lib.name.is_log4j():
continue
+ update_library_info(forge_lib)
v.maven_files.append(forge_lib)
v.libraries = []
diff --git a/meta/meta/run/generate_optifine.py b/meta/meta/run/generate_optifine.py
new file mode 100644
index 0000000000..802d3af530
--- /dev/null
+++ b/meta/meta/run/generate_optifine.py
@@ -0,0 +1,110 @@
+import os
+import json
+from datetime import datetime
+from typing import List
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.optifine import OPTIFINE_COMPONENT, VERSIONS_FILE, OPTIFINE_UPSTREAM_DIR
+from meta.model import MetaPackage, MetaVersion, Library, MojangLibraryDownloads, MojangArtifact
+
+
+LAUNCHER_DIR = launcher_path()
+UPSTREAM_DIR = upstream_path()
+
+ensure_component_dir(OPTIFINE_COMPONENT)
+
+
+def _parse_date(d: str):
+ # dates on the site are like DD.MM.YYYY
+ try:
+ return datetime.strptime(d, "%d.%m.%Y")
+ except Exception:
+ return None
+
+
+def main():
+ # Prefer per-version files in the upstream component directory, fallback to combined versions.json
+ # upstream files live under `upstream/optifine`, launcher metadata should go under `launcher/net.optifine`
+ comp_dir = os.path.join(UPSTREAM_DIR, OPTIFINE_UPSTREAM_DIR)
+ entries = {}
+ if os.path.isdir(comp_dir):
+ files = [f for f in os.listdir(comp_dir) if f.endswith(".json")]
+ # If there are many per-version files (excluding the combined file), read them
+ per_files = [f for f in files if f != os.path.basename(VERSIONS_FILE)]
+ if per_files:
+ for fn in per_files:
+ path = os.path.join(comp_dir, fn)
+ try:
+ with open(path, "r", encoding="utf-8") as f:
+ data = json.load(f)
+ key = os.path.splitext(fn)[0]
+ entries[key] = data
+ except Exception:
+ print(f"Warning: failed to read upstream per-version file: {path}")
+ # fallback to combined index
+ if not entries:
+ src = os.path.join(UPSTREAM_DIR, VERSIONS_FILE)
+ if not os.path.exists(src):
+ print(f"Missing upstream file: {src}")
+ return
+ with open(src, "r", encoding="utf-8") as f:
+ entries = json.load(f)
+
+ versions: List[str] = []
+ parsed_versions = []
+
+ for key, data in entries.items():
+ # key already normalized by the updater
+ v = MetaVersion(name="OptiFine", uid=OPTIFINE_COMPONENT, version=key)
+ v.type = "release"
+ v.order = 10
+
+ filename = data.get("filename")
+ download_page = data.get("download_page")
+ resolved = data.get("resolved_url") or download_page
+ label = data.get("label")
+ changelog = data.get("changelog")
+ date = data.get("date")
+ size = data.get("size")
+ sha256 = data.get("sha256")
+
+ # attach jar mod as a simple artifact entry; prefer resolved_url and include sha256/size
+ lib = Library()
+ artifact_kwargs = {}
+ if resolved:
+ artifact_kwargs["url"] = resolved
+ else:
+ artifact_kwargs["url"] = download_page
+ if size is not None:
+ artifact_kwargs["size"] = size
+ if sha256 is not None:
+ artifact_kwargs["sha256"] = sha256
+
+ artifact = MojangArtifact(**artifact_kwargs)
+ lib.downloads = MojangLibraryDownloads(artifact=artifact)
+
+ v.jar_mods = [lib]
+ if label:
+ v.name = label
+
+ if date:
+ dt = _parse_date(date)
+ if dt:
+ v.release_time = dt
+
+ v.write(os.path.join(LAUNCHER_DIR, OPTIFINE_COMPONENT, f"{v.version}.json"))
+ parsed_versions.append((v.version, v.release_time))
+
+ # choose recommended: latest non-preview by release_time if available
+ parsed_versions.sort(key=lambda x: (x[1] or datetime.min), reverse=True)
+ recommended = [p[0] for p in parsed_versions[:3]]
+
+ package = MetaPackage(uid=OPTIFINE_COMPONENT, name="OptiFine")
+ package.recommended = recommended
+ package.description = "OptiFine installer and downloads"
+ package.project_url = "https://optifine.net"
+ package.write(os.path.join(LAUNCHER_DIR, OPTIFINE_COMPONENT, "package.json"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/generate_risugami.py b/meta/meta/run/generate_risugami.py
new file mode 100644
index 0000000000..07962f25e4
--- /dev/null
+++ b/meta/meta/run/generate_risugami.py
@@ -0,0 +1,26 @@
+import os
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.risugami import RISUGAMI_COMPONENT, VERSIONS_FILE
+from meta.model import MetaPackage
+
+
+LAUNCHER_DIR = launcher_path()
+UPSTREAM_DIR = upstream_path()
+
+ensure_component_dir(RISUGAMI_COMPONENT)
+
+
+def main():
+ # If an upstream versions file exists, we could parse it later.
+ # For now create a minimal package.json so the meta tooling recognizes the component.
+ package = MetaPackage(
+ uid=RISUGAMI_COMPONENT,
+ name="Risugami ModLoader",
+ description="Risugami ModLoader metadata (auto-generated stub)",
+ )
+
+ package.write(os.path.join(LAUNCHER_DIR, RISUGAMI_COMPONENT, "package.json"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/generate_stationloader.py b/meta/meta/run/generate_stationloader.py
new file mode 100644
index 0000000000..636772e600
--- /dev/null
+++ b/meta/meta/run/generate_stationloader.py
@@ -0,0 +1,24 @@
+import os
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.stationloader import STATIONLOADER_COMPONENT, VERSIONS_FILE
+from meta.model import MetaPackage
+
+
+LAUNCHER_DIR = launcher_path()
+UPSTREAM_DIR = upstream_path()
+
+ensure_component_dir(STATIONLOADER_COMPONENT)
+
+
+def main():
+ package = MetaPackage(
+ uid=STATIONLOADER_COMPONENT,
+ name="Station Loader",
+ description="Station Loader metadata (auto-generated stub)",
+ )
+
+ package.write(os.path.join(LAUNCHER_DIR, STATIONLOADER_COMPONENT, "package.json"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/index.py b/meta/meta/run/index.py
index 22abea8457..3b617886c3 100755
--- a/meta/meta/run/index.py
+++ b/meta/meta/run/index.py
@@ -1,5 +1,6 @@
import hashlib
import os
+import sys
from operator import attrgetter
from meta.common import launcher_path, file_hash
@@ -27,9 +28,19 @@ for package in sorted(os.listdir(LAUNCHER_DIR)):
if package in ignore:
continue
- sharedData = MetaPackage.parse_file(
- os.path.join(LAUNCHER_DIR, package, "package.json")
- )
+ package_path = os.path.join(LAUNCHER_DIR, package)
+ if not os.path.isdir(package_path):
+ continue
+
+ package_json_path = os.path.join(package_path, "package.json")
+ if not os.path.isfile(package_json_path):
+ print(
+ f"Skipping '{package}': no package.json found in {package_path}",
+ file=sys.stderr,
+ )
+ continue
+
+ sharedData = MetaPackage.parse_file(package_json_path)
recommendedVersions = set()
if sharedData.recommended:
recommendedVersions = set(sharedData.recommended)
@@ -38,11 +49,13 @@ for package in sorted(os.listdir(LAUNCHER_DIR)):
versionList = MetaVersionIndex(uid=package, name=sharedData.name)
# walk through all the versions of the package
- for filename in os.listdir(LAUNCHER_DIR + "/%s" % package):
+ for filename in os.listdir(package_path):
if filename in ignore:
continue
# parse and hash the version file
- filepath = LAUNCHER_DIR + "/%s/%s" % (package, filename)
+ filepath = os.path.join(package_path, filename)
+ if not os.path.isfile(filepath):
+ continue
filehash = file_hash(filepath, hashlib.sha256)
versionFile = MetaVersion.parse_file(filepath)
is_recommended = versionFile.version in recommendedVersions
@@ -59,7 +72,7 @@ for package in sorted(os.listdir(LAUNCHER_DIR)):
)
# write the version index for the package
- outFilePath = LAUNCHER_DIR + "/%s/index.json" % package
+ outFilePath = os.path.join(package_path, "index.json")
versionList.write(outFilePath)
# insert entry into the package index
diff --git a/meta/meta/run/update_forge.py b/meta/meta/run/update_forge.py
index c74865b31f..42a294b67c 100755
--- a/meta/meta/run/update_forge.py
+++ b/meta/meta/run/update_forge.py
@@ -150,16 +150,19 @@ def process_forge_version(version, jar_path):
new_sha1 = None
sha1_file = jar_path + ".sha1"
- fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
- try:
- rfile = sess.get(version.url() + ".sha1")
- rfile.raise_for_status()
- new_sha1 = rfile.text.strip()
- if fileSha1 != new_sha1:
- remove_files([jar_path, profile_path, installer_info_path, sha1_file])
- except Exception as e:
- eprint("Failed to check sha1 %s" % version.url())
- eprint("Error is %s" % e)
+ if not os.path.isfile(jar_path):
+ remove_files([profile_path, installer_info_path])
+ else:
+ fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
+ try:
+ rfile = sess.get(version.url() + ".sha1")
+ rfile.raise_for_status()
+ new_sha1 = rfile.text.strip()
+ if fileSha1 != new_sha1:
+ remove_files([jar_path, profile_path, installer_info_path, sha1_file])
+ except Exception as e:
+ eprint("Failed to check sha1 %s" % version.url())
+ eprint("Error is %s" % e)
installer_refresh_required = not os.path.isfile(profile_path) or not os.path.isfile(
installer_info_path
@@ -251,7 +254,9 @@ def main():
)
r.raise_for_status()
promotions_json = r.json()
- assert type(promotions_json) == dict
+ # validate the parsed structure explicitly instead of a bare assert
+ if not isinstance(promotions_json, dict) or "promos" not in promotions_json or not isinstance(promotions_json["promos"], dict):
+ raise ValueError("Invalid promotions_slim.json format: expected a dict with 'promos' object")
promoted_key_expression = re.compile(
"(?P<mc>[^-]+)-(?P<promotion>(latest)|(recommended))(-(?P<branch>[a-zA-Z0-9\\.]+))?"
@@ -261,7 +266,6 @@ def main():
new_index = DerivedForgeIndex()
- # FIXME: does not fully validate that the file has not changed format
# NOTE: For some insane reason, the format of the versions here is special. It having a branch at the end means it
# affects that particular branch.
# We don't care about Forge having branches.
diff --git a/meta/meta/run/update_java.py b/meta/meta/run/update_java.py
index 80ca7b4a70..0715133514 100644
--- a/meta/meta/run/update_java.py
+++ b/meta/meta/run/update_java.py
@@ -1,4 +1,5 @@
import os
+import time
from meta.common import upstream_path, ensure_upstream_dir, default_session
from meta.common.java import (
@@ -14,7 +15,6 @@ from meta.model.java import (
ADOPTIUM_API_BASE,
OPENJ9_API_BASE,
ADOPTX_API_AVAILABLE_RELEASES,
- adoptxAPIFeatureReleasesUrl,
adoptiumAPIFeatureReleasesUrl,
openj9APIFeatureReleasesUrl,
AdoptxJvmImpl,
@@ -51,17 +51,43 @@ ensure_upstream_dir(AZUL_VERSIONS_DIR)
sess = default_session()
+def filtered_available_releases(
+ available: AdoptxAvailableReleases, present_features: list[int]
+) -> AdoptxAvailableReleases:
+ filtered_features = sorted(set(present_features))
+ filtered_lts = [feature for feature in available.available_lts_releases if feature in filtered_features]
+ newest_feature = filtered_features[-1] if filtered_features else None
+ newest_lts = filtered_lts[-1] if filtered_lts else None
+
+ return AdoptxAvailableReleases(
+ available_releases=filtered_features,
+ available_lts_releases=filtered_lts,
+ most_recent_lts=newest_lts,
+ most_recent_feature_release=newest_feature,
+ most_recent_feature_version=newest_feature,
+ tip_version=newest_feature,
+ )
+
+
def main():
print("Getting Adoptium Release Manifests ")
- r = sess.get(ADOPTX_API_AVAILABLE_RELEASES.format(base_url=ADOPTIUM_API_BASE))
- r.raise_for_status()
+ for attempt in range(3):
+ r = sess.get(ADOPTX_API_AVAILABLE_RELEASES.format(base_url=ADOPTIUM_API_BASE))
+ if r.status_code >= 500:
+ if attempt < 2:
+ time.sleep(1 * (attempt + 1))
+ continue
+ else:
+ r.raise_for_status()
+ else:
+ r.raise_for_status()
+ break
+ else:
+ r.raise_for_status()
available = AdoptxAvailableReleases(**r.json())
- available_releases_file = os.path.join(
- UPSTREAM_DIR, ADOPTIUM_DIR, "available_releases.json"
- )
- available.write(available_releases_file)
+ present_adoptium_features: list[int] = []
for feature in available.available_releases:
print("Getting Manifests for Adoptium feature release:", feature)
@@ -80,12 +106,26 @@ def main():
)
api_call = adoptiumAPIFeatureReleasesUrl(feature, query=query)
print("Fetching JRE Page:", page, api_call)
- r_rls = sess.get(api_call)
- if r_rls.status_code == 404:
- break
+ for attempt in range(3):
+ r_rls = sess.get(api_call)
+ if r_rls.status_code == 404:
+ break
+ elif r_rls.status_code >= 500:
+ if attempt < 2:
+ time.sleep(1 * (attempt + 1))
+ continue
+ else:
+ r_rls.raise_for_status()
+ else:
+ r_rls.raise_for_status()
+ break
else:
+ # If all attempts failed
r_rls.raise_for_status()
+ if r_rls.status_code == 404:
+ break
+
releases = list(AdoptxRelease(**rls) for rls in r_rls.json())
releases_for_feature.extend(releases)
@@ -94,11 +134,22 @@ def main():
page += 1
print("Total Adoptium releases for feature:", len(releases_for_feature))
- releases = AdoptxReleases(__root__=releases_for_feature)
feature_file = os.path.join(
UPSTREAM_DIR, ADOPTIUM_VERSIONS_DIR, f"java{feature}.json"
)
- releases.write(feature_file)
+ if releases_for_feature:
+ releases = AdoptxReleases(__root__=releases_for_feature)
+ releases.write(feature_file)
+ present_adoptium_features.append(feature)
+ elif os.path.exists(feature_file):
+ os.remove(feature_file)
+
+ available_releases_file = os.path.join(
+ UPSTREAM_DIR, ADOPTIUM_DIR, "available_releases.json"
+ )
+ filtered_available_releases(
+ available, present_adoptium_features
+ ).write(available_releases_file)
print("Getting OpenJ9 Release Manifests ")
r = sess.get(ADOPTX_API_AVAILABLE_RELEASES.format(base_url=OPENJ9_API_BASE))
@@ -106,10 +157,7 @@ def main():
available = AdoptxAvailableReleases(**r.json())
- available_releases_file = os.path.join(
- UPSTREAM_DIR, OPENJ9_DIR, "available_releases.json"
- )
- available.write(available_releases_file)
+ present_openj9_features: list[int] = []
for feature in available.available_releases:
print("Getting Manifests for OpenJ9 feature release:", feature)
@@ -142,13 +190,22 @@ def main():
page += 1
print("Total OpenJ9 releases for feature:", len(releases_for_feature))
- releases = AdoptxReleases(__root__=releases_for_feature)
- if len(releases_for_feature) == 0:
- continue
feature_file = os.path.join(
UPSTREAM_DIR, OPENJ9_VERSIONS_DIR, f"java{feature}.json"
)
- releases.write(feature_file)
+ if releases_for_feature:
+ releases = AdoptxReleases(__root__=releases_for_feature)
+ releases.write(feature_file)
+ present_openj9_features.append(feature)
+ elif os.path.exists(feature_file):
+ os.remove(feature_file)
+
+ available_releases_file = os.path.join(
+ UPSTREAM_DIR, OPENJ9_DIR, "available_releases.json"
+ )
+ filtered_available_releases(
+ available, present_openj9_features
+ ).write(available_releases_file)
print("Getting Azul Release Manifests")
zulu_packages: list[ZuluPackage] = []
@@ -170,12 +227,25 @@ def main():
print("Processing Page:", page, api_call)
- r = sess.get(api_call)
- if r.status_code == 404:
- break
+ for attempt in range(3):
+ r = sess.get(api_call)
+ if r.status_code == 404:
+ break
+ elif r.status_code >= 500:
+ if attempt < 2:
+ time.sleep(1 * (attempt + 1))
+ continue
+ else:
+ r.raise_for_status()
+ else:
+ r.raise_for_status()
+ break
else:
r.raise_for_status()
+ if r.status_code == 404:
+ break
+
packages = list(ZuluPackage(**pkg) for pkg in r.json())
zulu_packages.extend(packages)
if len(packages) < page_size:
@@ -205,8 +275,19 @@ def main():
api_call = azulApiPackageDetailUrl(pkg.package_uuid)
print("Fetching Azul package manifest:", pkg.package_uuid)
- r_pkg = sess.get(api_call)
- r_pkg.raise_for_status()
+ for attempt in range(3):
+ r_pkg = sess.get(api_call)
+ if r_pkg.status_code >= 500:
+ if attempt < 2:
+ time.sleep(1 * (attempt + 1))
+ continue
+ else:
+ r_pkg.raise_for_status()
+ else:
+ r_pkg.raise_for_status()
+ break
+ else:
+ r_pkg.raise_for_status()
pkg_detail = ZuluPackageDetail(**r_pkg.json())
pkg_detail.write(pkg_file)
diff --git a/meta/meta/run/update_modloadermp.py b/meta/meta/run/update_modloadermp.py
new file mode 100644
index 0000000000..05d6bed6d8
--- /dev/null
+++ b/meta/meta/run/update_modloadermp.py
@@ -0,0 +1,24 @@
+import json
+import os
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.modloadermp import VERSIONS_FILE, BASE_DIR
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+
+sess = default_session()
+
+
+def main():
+ # Placeholder updater: upstream source not implemented yet.
+ out_path = os.path.join(UPSTREAM_DIR, VERSIONS_FILE)
+ with open(out_path, "w") as f:
+ json.dump({}, f, indent=4)
+
+ print(f"Wrote placeholder upstream file: {out_path}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/update_neoforge.py b/meta/meta/run/update_neoforge.py
index 9957aad1bd..ae0bc49dc7 100644
--- a/meta/meta/run/update_neoforge.py
+++ b/meta/meta/run/update_neoforge.py
@@ -141,16 +141,19 @@ def process_neoforge_version(key, entry):
new_sha1 = None
sha1_file = jar_path + ".sha1"
- fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
- try:
- rfile = sess.get(version.url() + ".sha1")
- rfile.raise_for_status()
- new_sha1 = rfile.text.strip()
- if fileSha1 != new_sha1:
- remove_files([jar_path, profile_path, installer_info_path, sha1_file])
- except Exception as e:
- eprint("Failed to check sha1 %s" % version.url())
- eprint("Error is %s" % e)
+ if not os.path.isfile(jar_path):
+ remove_files([profile_path, installer_info_path])
+ else:
+ fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
+ try:
+ rfile = sess.get(version.url() + ".sha1")
+ rfile.raise_for_status()
+ new_sha1 = rfile.text.strip()
+ if fileSha1 != new_sha1:
+ remove_files([jar_path, profile_path, installer_info_path, sha1_file])
+ except Exception as e:
+ eprint("Failed to check sha1 %s" % version.url())
+ eprint("Error is %s" % e)
installer_refresh_required = not os.path.isfile(profile_path) or not os.path.isfile(
installer_info_path
@@ -256,20 +259,41 @@ def main():
version_expression = re.compile(
r"^(?P<mc>[0-9a-zA-Z_\.]+)-(?P<ver>[0-9\.]+\.(?P<build>[0-9]+))(-(?P<branch>[a-zA-Z0-9\.]+))?$"
)
+ neoforge_version_re = re.compile(
+ r"^(?P<mcminor>\d+)\.(?:(?P<mcpatch>\d+)|(?P<snapshot>[0-9a-z]+))\.(?P<number>\d+)(?:\.(?P<build>\d+))?(?:-(?P<tag>[0-9A-Za-z][0-9A-Za-z.+-]*))?$"
+ )
print("")
print("Processing versions:")
for long_version in main_json:
assert type(long_version) == str
- legacyMatch = version_expression.match(long_version)
- if legacyMatch:
- version = legacyMatch.group("ver")
+ match = version_expression.match(long_version)
+ if match:
+ mc_version = match.group("mc")
+ build = int(match.group("build"))
+ version = match.group("ver")
+ branch = match.group("branch")
artifact = "forge"
- else:
- version = long_version
+
+ match_nf = neoforge_version_re.match(long_version)
+ if match_nf:
+ mc_version = match_nf.group("snapshot")
+ if not mc_version:
+ mc_version = f"1.{match_nf.group('mcminor')}"
+ if match_nf.group("mcpatch") != "0":
+ mc_version += f".{match_nf.group('mcpatch')}"
+ build_str = match_nf.group("build") or match_nf.group("number")
+ build = int(build_str)
+ version = build_str
+ branch = match_nf.group("tag")
+ match = match_nf
artifact = "neoforge"
+ if not match and not match_nf:
+ print(f"Skipping {long_version} as it does not match regex")
+ continue
+
try:
files = get_single_forge_files_manifest(long_version, artifact)
except:
diff --git a/meta/meta/run/update_optifine.py b/meta/meta/run/update_optifine.py
new file mode 100644
index 0000000000..833e08e263
--- /dev/null
+++ b/meta/meta/run/update_optifine.py
@@ -0,0 +1,456 @@
+import json
+import os
+import re
+from urllib.parse import urljoin, urlparse, parse_qs
+import concurrent.futures
+import threading
+
+try:
+ from meta.common import upstream_path, ensure_upstream_dir, default_session
+ from meta.common.optifine import VERSIONS_FILE, BASE_DIR
+ HAVE_META = True
+except Exception:
+ # meta.common or its dependencies (requests) may not be available in this environment.
+ HAVE_META = False
+ def upstream_path():
+ return "upstream"
+
+ def ensure_upstream_dir(path):
+ path = os.path.join(upstream_path(), path)
+ if not os.path.exists(path):
+ os.makedirs(path, exist_ok=True)
+
+ def default_session():
+ raise RuntimeError("HTTP session unavailable: install 'requests' and 'cachecontrol'")
+
+ VERSIONS_FILE = "versions.json"
+ BASE_DIR = "optifine"
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+
+sess = None
+if HAVE_META:
+ sess = default_session()
+
+
+def _resolve_href(href: str):
+ """Return (filename, resolved_href).
+
+ Handles cases where href is a redirect wrapper (e.g., adfoc.us with an inner
+ 'url=' parameter) or where the 'f' query parameter is present.
+ """
+ parsed = urlparse(href)
+ q = parse_qs(parsed.query)
+
+ # Direct f parameter
+ f = q.get("f")
+ if f:
+ return f[0], href
+
+ # Some wrappers embed an inner url parameter that contains the real target
+ inner = q.get("url")
+ if inner:
+ # inner may be a list; pick first
+ inner_url = inner[0]
+ inner_parsed = urlparse(inner_url)
+ inner_q = parse_qs(inner_parsed.query)
+ inner_f = inner_q.get("f")
+ if inner_f:
+ return inner_f[0], inner_url
+
+ # fallback: last path component
+ return os.path.basename(parsed.path), href
+
+
+def _clean_key(filename: str) -> str:
+ # Remove OptiFine prefix, any trailing ad-wrapper segments, and the .jar suffix
+ key = re.sub(r"^OptiFine[_-]", "", filename, flags=re.IGNORECASE)
+ key = re.sub(r"\.jar$", "", key, flags=re.IGNORECASE)
+ # Strip trailing ad/adload/adloadx wrapper fragments that appear in some links
+ key = re.sub(r"[_-]ad[a-z0-9_-]*$", "", key, flags=re.IGNORECASE)
+ return key
+
+
+def _strip_ad_wrapper(filename: str) -> str:
+ """Remove trailing ad/adload/adloadx wrapper fragments from a filename.
+
+ Example: OptiFine_1.20.1_HD_U_H7_adloadx.jar -> OptiFine_1.20.1_HD_U_H7.jar
+ """
+ if not filename:
+ return filename
+ root, ext = os.path.splitext(filename)
+ # remove trailing segments that start with _ad or -ad
+ root = re.sub(r"[_-]ad[a-z0-9_-]*$", "", root, flags=re.IGNORECASE)
+ return root + ext
+
+
+def _guess_platforms(filename: str, label: str = None, changelog: str = None):
+ """Heuristically guess platform compatibility tags for an OptiFine build.
+
+ Returns a list like ['mojang', 'neoforge', 'fabric'] based on keywords.
+ """
+ text = " ".join(filter(None, [filename or "", label or "", changelog or ""]))
+ tl = text.lower()
+ platforms = []
+ # OptiFine always targets vanilla (Mojang) builds
+ platforms.append("mojang")
+ # Forge / NeoForge variants
+ if "neoforge" in tl or "neo-forge" in tl or "forge" in tl:
+ platforms.append("neoforge")
+ # Fabric
+ if "fabric" in tl:
+ platforms.append("fabric")
+ # Quilt
+ if "quilt" in tl:
+ platforms.append("quilt")
+ # LiteLoader / older loaders
+ if "liteloader" in tl:
+ platforms.append("liteloader")
+
+ # Deduplicate while preserving order
+ seen = set()
+ out = []
+ for p in platforms:
+ if p not in seen:
+ seen.add(p)
+ out.append(p)
+ return out
+
+
+def _score_entry(entry: dict) -> int:
+ url = (entry.get("download_page") or "").lower()
+ s = 0
+ if "optifine.net/adloadx" in url or "optifine.net/adload" in url or "optifine.net/download" in url:
+ s += 10
+ if url.endswith(".jar") or entry.get("filename", "").lower().endswith(".jar"):
+ s += 5
+ if "preview" in (entry.get("filename") or "").lower():
+ s -= 2
+ return s
+
+
+def main():
+ url = "https://optifine.net/downloads"
+ print(f"Fetching OptiFine downloads page: {url}")
+ # configurable timeouts (seconds)
+ default_timeout = float(os.environ.get("OPTIFINE_TIMEOUT", "10"))
+
+ try:
+ r = sess.get(url, timeout=default_timeout)
+ r.raise_for_status()
+ html = r.text
+ except Exception as e:
+ print(f"Error fetching downloads page: {e}")
+ html = ""
+
+ versions = {}
+
+ # Try parsing with BeautifulSoup if available; be permissive about href forms
+ try:
+ from bs4 import BeautifulSoup
+
+ soup = BeautifulSoup(html, "html.parser")
+ anchors = soup.find_all("a", href=True)
+ inspected = 0
+ matched = 0
+ for a in anchors:
+ inspected += 1
+ href = a["href"]
+ href_l = href.lower()
+
+ # Accept several formats: any URL containing '?f=' (adload/adloadx/download), or direct .jar links
+ if "?f=" not in href_l and not href_l.endswith(".jar"):
+ continue
+
+ matched += 1
+ filename, resolved = _resolve_href(href)
+ # strip ad/adload/adloadx wrapper parts from filename
+ filename = _strip_ad_wrapper(filename)
+
+ # Try to get version text from the same table row or nearby text
+ ver_text = None
+ changelog = None
+ date = None
+ tr = a.find_parent("tr")
+ if tr:
+ tds = tr.find_all("td")
+ if tds:
+ ver_text = tds[0].get_text(strip=True)
+ # find changelog link in the row
+ ch = tr.find("a", href=lambda h: h and "changelog" in h)
+ if ch:
+ changelog = ch.get("href")
+ # find date cell
+ date_td = tr.find("td", class_=lambda c: c and "colDate" in c)
+ if date_td:
+ date = date_td.get_text(strip=True)
+
+ if not ver_text:
+ # fallback: anchor text or nearby text nodes
+ if a.string and a.string.strip():
+ ver_text = a.string.strip()
+ else:
+ prev = a.find_previous(string=True)
+ if prev:
+ ver_text = prev.strip()
+
+ key = _clean_key(filename)
+ data = {
+ "filename": filename,
+ "download_page": urljoin(url, resolved),
+ "label": ver_text or filename,
+ "changelog": changelog,
+ "date": date,
+ }
+
+ existing = versions.get(key)
+ if existing is None or _score_entry(data) > _score_entry(existing):
+ versions[key] = data
+ platforms = _guess_platforms(data.get("filename"), data.get("label"), data.get("changelog"))
+ print(f"Added {key}: platforms: {', '.join(platforms)}")
+
+ print(f"Inspected {inspected} anchors, matched {matched} potential downloads")
+ except Exception:
+ # Fallback: regex parse (case-insensitive)
+ print("BeautifulSoup not available or parsing failed, falling back to regex parse")
+ for match in re.finditer(r'href="([^"]*\?f=[^"\s]+)"', html, flags=re.IGNORECASE):
+ href = match.group(1)
+ filename, resolved = _resolve_href(href)
+ filename = _strip_ad_wrapper(filename)
+ key = _clean_key(filename)
+ data = {
+ "filename": filename,
+ "download_page": urljoin(url, resolved),
+ "label": filename,
+ }
+ existing = versions.get(key)
+ if existing is None or _score_entry(data) > _score_entry(existing):
+ versions[key] = data
+ platforms = _guess_platforms(data.get("filename"), data.get("label"), data.get("changelog"))
+ print(f"Added {key}: platforms: {', '.join(platforms)}")
+
+ # Determine base output directory. Some upstream implementations return a
+ # path that already includes BASE_DIR, avoid duplicating it.
+ if UPSTREAM_DIR.endswith(BASE_DIR):
+ base_out_dir = UPSTREAM_DIR
+ else:
+ base_out_dir = os.path.join(UPSTREAM_DIR, BASE_DIR)
+
+ # Ensure output directory exists (defensive: collapse duplicate trailing BASE_DIR segments)
+ parts = base_out_dir.split(os.sep)
+ while len(parts) >= 2 and parts[-1] == BASE_DIR and parts[-2] == BASE_DIR:
+ parts.pop(-1)
+ base_out_dir = os.sep.join(parts)
+ os.makedirs(base_out_dir, exist_ok=True)
+
+ out_path = os.path.join(base_out_dir, VERSIONS_FILE)
+ # Attempt to resolve final download URLs and optionally compute hashes
+ # Default to computing SHA256 for each resolved file unless explicitly disabled
+ compute_hash = os.environ.get("OPTIFINE_COMPUTE_HASH", "1").lower() in ("1", "true", "yes")
+ resolved_count = 0
+ hashed_count = 0
+
+ if HAVE_META and sess is not None:
+ try:
+ # Use a ThreadPoolExecutor to parallelize network I/O for resolving URLs
+ concurrency = int(os.environ.get("OPTIFINE_CONCURRENCY", "8"))
+ if concurrency < 1:
+ concurrency = 1
+
+ total = len(versions)
+ counter = {"idx": 0}
+ counter_lock = threading.Lock()
+
+ def _process_item(item):
+ key, data = item
+ with counter_lock:
+ counter["idx"] += 1
+ idx = counter["idx"]
+
+ dp = data.get("download_page")
+ if not dp:
+ return key, data, False, False
+
+ print(f"[{idx}/{total}] Resolving {key} ({data.get('filename')}) -> {dp}")
+
+ # Each worker creates its own session to avoid any session thread-safety issues
+ sess_local = None
+ if HAVE_META:
+ try:
+ sess_local = default_session()
+ except Exception:
+ sess_local = None
+
+ # Fallback to global sess if default_session unavailable
+ if sess_local is None:
+ sess_local = sess
+
+ final_url = None
+ try:
+ # Try HEAD first
+ try:
+ resp = sess_local.head(dp, allow_redirects=True, timeout=default_timeout)
+ except Exception as e_head:
+ # Try GET as fallback for hosts that block HEAD
+ try:
+ resp = sess_local.get(dp, allow_redirects=True, timeout=default_timeout)
+ except Exception:
+ resp = None
+
+ if resp is not None:
+ final_url = getattr(resp, "url", None)
+
+ # Try to extract downloadx link from page HTML (short GET if needed)
+ page_text = None
+ if resp is not None and hasattr(resp, "text") and resp.text:
+ page_text = resp.text
+ else:
+ try:
+ rtmp = sess_local.get(dp, allow_redirects=True, timeout=5)
+ page_text = getattr(rtmp, "text", None)
+ final_url = getattr(rtmp, "url", final_url)
+ except Exception:
+ page_text = None
+
+ if page_text:
+ m = re.search(r"(downloadx\?f=[^\"'\s>]+)", page_text, flags=re.IGNORECASE)
+ if m:
+ candidate = m.group(1)
+ base_for_join = final_url or dp
+ final_url = urljoin(base_for_join, candidate)
+ print(f" Extracted downloadx link for {key}: {final_url}")
+
+ # If still not a .jar/f param, do a full GET and inspect final URL
+ if not final_url or (".jar" not in final_url and "?f=" not in final_url):
+ try:
+ resp2 = sess_local.get(dp, allow_redirects=True, timeout=30)
+ final_url = getattr(resp2, "url", final_url)
+ except Exception:
+ pass
+
+ hashed = False
+ if final_url:
+ data["resolved_url"] = final_url
+ print(f" Resolved {key} -> {final_url}")
+
+ if compute_hash:
+ try:
+ import hashlib
+
+ print(f" Hashing {key} from {final_url} ...")
+ h = hashlib.sha256()
+ size = 0
+ hash_timeout = float(os.environ.get("OPTIFINE_HASH_TIMEOUT", "120"))
+ r2 = sess_local.get(final_url, stream=True, timeout=hash_timeout)
+ r2.raise_for_status()
+ for chunk in r2.iter_content(8192):
+ if not chunk:
+ continue
+ h.update(chunk)
+ size += len(chunk)
+ data["sha256"] = h.hexdigest()
+ data["size"] = size
+ hashed = True
+ print(f" Hashed {key}: sha256={data['sha256']} size={data['size']}")
+ except Exception as e_hash:
+ print(f" Warning: failed to hash {final_url}: {e_hash}")
+
+ return key, data, bool(final_url), hashed
+ except Exception as e:
+ print(f" Error processing {key}: {e}")
+ return key, data, False, False
+
+ items = list(versions.items())
+ if concurrency == 1:
+ # run serially
+ results = map(_process_item, items)
+ else:
+ with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency) as ex:
+ results = ex.map(_process_item, items)
+
+ # Collect results and write per-version files as each item completes
+ for key, data, resolved_flag, hashed_flag in results:
+ versions[key] = data
+ # Ensure per-version dir exists
+ try:
+ os.makedirs(base_out_dir, exist_ok=True)
+ per_path = os.path.join(base_out_dir, f"{key}.json")
+ with open(per_path, "w") as pf:
+ json.dump(data, pf, indent=4)
+ print(f"Wrote per-version file: {per_path}")
+ except Exception as e:
+ print(f"Warning: failed to write per-version file for {key}: {e}")
+
+ if resolved_flag:
+ resolved_count += 1
+ if hashed_flag:
+ hashed_count += 1
+ except KeyboardInterrupt:
+ print("Interrupted by user (KeyboardInterrupt). Writing partial results...")
+
+ # Write combined index (ensure parent exists)
+ os.makedirs(os.path.dirname(out_path) or ".", exist_ok=True)
+ with open(out_path, "w") as f:
+ json.dump(versions, f, indent=4)
+
+ # Also write per-version JSON files under the upstream component directory
+ try:
+ for key, data in versions.items():
+ per_path = os.path.join(base_out_dir, f"{key}.json")
+ with open(per_path, "w") as pf:
+ json.dump(data, pf, indent=4)
+ print(f"Wrote per-version file: {per_path}")
+ except Exception as e:
+ print(f"Warning: failed to write per-version files: {e}")
+
+ print(f"Wrote {len(versions)} OptiFine entries to {out_path}")
+ if HAVE_META and sess is not None:
+ print(f"Resolved {resolved_count} final URLs")
+ if compute_hash:
+ print(f"Computed {hashed_count} SHA256 hashes (OPTIFINE_COMPUTE_HASH=1)")
+ # If some entries are missing sha256 (e.g., were written before hashing completed),
+ # compute them now in parallel and update files.
+ missing = [ (k,v) for k,v in versions.items() if v.get("resolved_url") and not v.get("sha256") ]
+ if missing:
+ print(f"Computing missing SHA256 for {len(missing)} entries...")
+ def _compute_and_write(item):
+ k, v = item
+ url_final = v.get("resolved_url")
+ try:
+ import hashlib
+ hash_timeout = float(os.environ.get("OPTIFINE_HASH_TIMEOUT", "120"))
+ h = hashlib.sha256()
+ size = 0
+ r = sess.get(url_final, stream=True, timeout=hash_timeout)
+ r.raise_for_status()
+ for chunk in r.iter_content(8192):
+ if not chunk:
+ continue
+ h.update(chunk)
+ size += len(chunk)
+ v["sha256"] = h.hexdigest()
+ v["size"] = size
+ per_path = os.path.join(base_out_dir, f"{k}.json")
+ with open(per_path, "w") as pf:
+ json.dump(v, pf, indent=4)
+ print(f" Hashed {k}: {v['sha256']} size={v['size']}")
+ return True
+ except Exception as e:
+ print(f" Warning: failed to compute hash for {k}: {e}")
+ return False
+
+ concurrency = int(os.environ.get("OPTIFINE_CONCURRENCY", "8"))
+ if concurrency < 1:
+ concurrency = 1
+ completed = 0
+ with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency) as ex:
+ for ok in ex.map(_compute_and_write, missing):
+ if ok:
+ completed += 1
+ print(f"Completed extra hashing: {completed}/{len(missing)}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/update_risugami.py b/meta/meta/run/update_risugami.py
new file mode 100644
index 0000000000..66bd59eff8
--- /dev/null
+++ b/meta/meta/run/update_risugami.py
@@ -0,0 +1,25 @@
+import json
+import os
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.risugami import VERSIONS_FILE, BASE_DIR
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+
+sess = default_session()
+
+
+def main():
+ # Placeholder updater: upstream source not implemented yet.
+ # Create an empty versions file so the meta pipeline can proceed.
+ out_path = os.path.join(UPSTREAM_DIR, VERSIONS_FILE)
+ with open(out_path, "w") as f:
+ json.dump({}, f, indent=4)
+
+ print(f"Wrote placeholder upstream file: {out_path}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/meta/run/update_stationloader.py b/meta/meta/run/update_stationloader.py
new file mode 100644
index 0000000000..3e0a4b34bb
--- /dev/null
+++ b/meta/meta/run/update_stationloader.py
@@ -0,0 +1,24 @@
+import json
+import os
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.stationloader import VERSIONS_FILE, BASE_DIR
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+
+sess = default_session()
+
+
+def main():
+ # Placeholder updater: upstream source not implemented yet.
+ out_path = os.path.join(UPSTREAM_DIR, VERSIONS_FILE)
+ with open(out_path, "w") as f:
+ json.dump({}, f, indent=4)
+
+ print(f"Wrote placeholder upstream file: {out_path}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/nix/pkgs/blockgame-meta.nix b/meta/nix/pkgs/blockgame-meta.nix
index 8409c37cb4..fe95ffe67e 100644
--- a/meta/nix/pkgs/blockgame-meta.nix
+++ b/meta/nix/pkgs/blockgame-meta.nix
@@ -64,7 +64,7 @@ buildPythonApplication {
description = "Metadata generator for blockgame launcher.";
platforms = platforms.linux;
license = licenses.mspl;
- maintainers = with maintainers; [Scrumplex];
+ maintainers = with maintainers; [yongdohyun];
mainProgram = "update";
};
}
diff --git a/meta/poetry.lock b/meta/poetry.lock
index 8bfa4c06b8..77f97fe050 100644
--- a/meta/poetry.lock
+++ b/meta/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
[[package]]
name = "cachecontrol"
@@ -23,14 +23,14 @@ redis = ["redis (>=2.10.5)"]
[[package]]
name = "certifi"
-version = "2024.2.2"
+version = "2024.7.4"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
- {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
- {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
]
[[package]]
@@ -135,21 +135,16 @@ files = [
[[package]]
name = "filelock"
-version = "3.16.1"
+version = "3.20.3"
description = "A platform independent file lock."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.10"
groups = ["main"]
files = [
- {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
- {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
+ {file = "filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1"},
+ {file = "filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1"},
]
-[package.extras]
-docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
-typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
-
[[package]]
name = "idna"
version = "3.7"
@@ -242,49 +237,62 @@ files = [
[[package]]
name = "pydantic"
-version = "1.10.26"
+version = "1.10.24"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
- {file = "pydantic-1.10.26-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7ae36fa0ecef8d39884120f212e16c06bb096a38f523421278e2f39c1784546"},
- {file = "pydantic-1.10.26-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d95a76cf503f0f72ed7812a91de948440b2bf564269975738a4751e4fadeb572"},
- {file = "pydantic-1.10.26-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a943ce8e00ad708ed06a1d9df5b4fd28f5635a003b82a4908ece6f24c0b18464"},
- {file = "pydantic-1.10.26-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:465ad8edb29b15c10b779b16431fe8e77c380098badf6db367b7a1d3e572cf53"},
- {file = "pydantic-1.10.26-cp310-cp310-win_amd64.whl", hash = "sha256:80e6be6272839c8a7641d26ad569ab77772809dd78f91d0068dc0fc97f071945"},
- {file = "pydantic-1.10.26-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:116233e53889bcc536f617e38c1b8337d7fa9c280f0fd7a4045947515a785637"},
- {file = "pydantic-1.10.26-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3cfdd361addb6eb64ccd26ac356ad6514cee06a61ab26b27e16b5ed53108f77"},
- {file = "pydantic-1.10.26-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0e4451951a9a93bf9a90576f3e25240b47ee49ab5236adccb8eff6ac943adf0f"},
- {file = "pydantic-1.10.26-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9858ed44c6bea5f29ffe95308db9e62060791c877766c67dd5f55d072c8612b5"},
- {file = "pydantic-1.10.26-cp311-cp311-win_amd64.whl", hash = "sha256:ac1089f723e2106ebde434377d31239e00870a7563245072968e5af5cc4d33df"},
- {file = "pydantic-1.10.26-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:468d5b9cacfcaadc76ed0a4645354ab6f263ec01a63fb6d05630ea1df6ae453f"},
- {file = "pydantic-1.10.26-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2c1b0b914be31671000ca25cf7ea17fcaaa68cfeadf6924529c5c5aa24b7ab1f"},
- {file = "pydantic-1.10.26-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15b13b9f8ba8867095769e1156e0d7fbafa1f65b898dd40fd1c02e34430973cb"},
- {file = "pydantic-1.10.26-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad7025ca324ae263d4313998e25078dcaec5f9ed0392c06dedb57e053cc8086b"},
- {file = "pydantic-1.10.26-cp312-cp312-win_amd64.whl", hash = "sha256:4482b299874dabb88a6c3759e3d85c6557c407c3b586891f7d808d8a38b66b9c"},
- {file = "pydantic-1.10.26-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1ae7913bb40a96c87e3d3f6fe4e918ef53bf181583de4e71824360a9b11aef1c"},
- {file = "pydantic-1.10.26-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8154c13f58d4de5d3a856bb6c909c7370f41fb876a5952a503af6b975265f4ba"},
- {file = "pydantic-1.10.26-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f8af0507bf6118b054a9765fb2e402f18a8b70c964f420d95b525eb711122d62"},
- {file = "pydantic-1.10.26-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dcb5a7318fb43189fde6af6f21ac7149c4bcbcfffc54bc87b5becddc46084847"},
- {file = "pydantic-1.10.26-cp313-cp313-win_amd64.whl", hash = "sha256:71cde228bc0600cf8619f0ee62db050d1880dcc477eba0e90b23011b4ee0f314"},
- {file = "pydantic-1.10.26-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6b40730cc81d53d515dc0b8bb5c9b43fadb9bed46de4a3c03bd95e8571616dba"},
- {file = "pydantic-1.10.26-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c3bbb9c0eecdf599e4db9b372fa9cc55be12e80a0d9c6d307950a39050cb0e37"},
- {file = "pydantic-1.10.26-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc2e3fe7bc4993626ef6b6fa855defafa1d6f8996aa1caef2deb83c5ac4d043a"},
- {file = "pydantic-1.10.26-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:36d9e46b588aaeb1dcd2409fa4c467fe0b331f3cc9f227b03a7a00643704e962"},
- {file = "pydantic-1.10.26-cp314-cp314-win_amd64.whl", hash = "sha256:81ce3c8616d12a7be31b4aadfd3434f78f6b44b75adbfaec2fe1ad4f7f999b8c"},
- {file = "pydantic-1.10.26-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc5c91a3b3106caf07ac6735ec6efad8ba37b860b9eb569923386debe65039ad"},
- {file = "pydantic-1.10.26-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dde599e0388e04778480d57f49355c9cc7916de818bf674de5d5429f2feebfb6"},
- {file = "pydantic-1.10.26-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8be08b5cfe88e58198722861c7aab737c978423c3a27300911767931e5311d0d"},
- {file = "pydantic-1.10.26-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0141f4bafe5eda539d98c9755128a9ea933654c6ca4306b5059fc87a01a38573"},
- {file = "pydantic-1.10.26-cp38-cp38-win_amd64.whl", hash = "sha256:eb664305ffca8a9766a8629303bb596607d77eae35bb5f32ff9245984881b638"},
- {file = "pydantic-1.10.26-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:502b9d30d18a2dfaf81b7302f6ba0e5853474b1c96212449eb4db912cb604b7d"},
- {file = "pydantic-1.10.26-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d8f6087bf697dec3bf7ffcd7fe8362674f16519f3151789f33cbe8f1d19fc15"},
- {file = "pydantic-1.10.26-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd40a99c358419910c85e6f5d22f9c56684c25b5e7abc40879b3b4a52f34ae90"},
- {file = "pydantic-1.10.26-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ce3293b86ca9f4125df02ff0a70be91bc7946522467cbd98e7f1493f340616ba"},
- {file = "pydantic-1.10.26-cp39-cp39-win_amd64.whl", hash = "sha256:1a4e3062b71ab1d5df339ba12c48f9ed5817c5de6cb92a961dd5c64bb32e7b96"},
- {file = "pydantic-1.10.26-py3-none-any.whl", hash = "sha256:c43ad70dc3ce7787543d563792426a16fd7895e14be4b194b5665e36459dd917"},
- {file = "pydantic-1.10.26.tar.gz", hash = "sha256:8c6aa39b494c5af092e690127c283d84f363ac36017106a9e66cb33a22ac412e"},
+ {file = "pydantic-1.10.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef07ea2fba12f9188cfa2c50cb3eaa6516b56c33e2a8cc3cd288b4190ee6c0c"},
+ {file = "pydantic-1.10.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a42033fac69b9f1f867ecc3a2159f0e94dceb1abfc509ad57e9e88d49774683"},
+ {file = "pydantic-1.10.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c626596c1b95dc6d45f7129f10b6743fbb50f29d942d25a22b2ceead670c067d"},
+ {file = "pydantic-1.10.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8057172868b0d98f95e6fcddcc5f75d01570e85c6308702dd2c50ea673bc197b"},
+ {file = "pydantic-1.10.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:82f951210ebcdb778b1d93075af43adcd04e9ebfd4f44b1baa8eeb21fbd71e36"},
+ {file = "pydantic-1.10.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b66e4892d8ae005f436a5c5f1519ecf837574d8414b1c93860fb3c13943d9b37"},
+ {file = "pydantic-1.10.24-cp310-cp310-win_amd64.whl", hash = "sha256:50d9f8a207c07f347d4b34806dc576872000d9a60fd481ed9eb78ea8512e0666"},
+ {file = "pydantic-1.10.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:70152291488f8d2bbcf2027b5c28c27724c78a7949c91b466d28ad75d6d12702"},
+ {file = "pydantic-1.10.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:956b30638272c51c85caaff76851b60db4b339022c0ee6eca677c41e3646255b"},
+ {file = "pydantic-1.10.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bed9d6eea5fabbc6978c42e947190c7bd628ddaff3b56fc963fe696c3710ccd6"},
+ {file = "pydantic-1.10.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af8e2b3648128b8cadb1a71e2f8092a6f42d4ca123fad7a8d7ce6db8938b1db3"},
+ {file = "pydantic-1.10.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:076fff9da02ca716e4c8299c68512fdfbeac32fdefc9c160e6f80bdadca0993d"},
+ {file = "pydantic-1.10.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8f2447ca88a7e14fd4d268857521fb37535c53a367b594fa2d7c2551af905993"},
+ {file = "pydantic-1.10.24-cp311-cp311-win_amd64.whl", hash = "sha256:58d42a7c344882c00e3bb7c6c8c6f62db2e3aafa671f307271c45ad96e8ccf7a"},
+ {file = "pydantic-1.10.24-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:17e7610119483f03954569c18d4de16f4e92f1585f20975414033ac2d4a96624"},
+ {file = "pydantic-1.10.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e24435a9970dcb2b35648f2cf57505d4bd414fcca1a404c82e28d948183fe0a6"},
+ {file = "pydantic-1.10.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a9e92b9c78d7f3cfa085c21c110e7000894446e24a836d006aabfc6ae3f1813"},
+ {file = "pydantic-1.10.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef14dfa7c98b314a3e449e92df6f1479cafe74c626952f353ff0176b075070de"},
+ {file = "pydantic-1.10.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52219b4e70c1db185cfd103a804e416384e1c8950168a2d4f385664c7c35d21a"},
+ {file = "pydantic-1.10.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ce0986799248082e9a5a026c9b5d2f9fa2e24d2afb9b0eace9104334a58fdc1"},
+ {file = "pydantic-1.10.24-cp312-cp312-win_amd64.whl", hash = "sha256:874a78e4ed821258295a472e325eee7de3d91ba7a61d0639ce1b0367a3c63d4c"},
+ {file = "pydantic-1.10.24-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:265788a1120285c4955f8b3d52b3ea6a52c7a74db097c4c13a4d3567f0c6df3c"},
+ {file = "pydantic-1.10.24-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d255bebd927e5f1e026b32605684f7b6fc36a13e62b07cb97b29027b91657def"},
+ {file = "pydantic-1.10.24-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e45dbc79a44e34c2c83ef1fcb56ff663040474dcf4dfc452db24a1de0f7574"},
+ {file = "pydantic-1.10.24-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af31565b12a7db5bfa5fe8c3a4f8fda4d32f5c2929998b1b241f1c22e9ab6e69"},
+ {file = "pydantic-1.10.24-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9c377fc30d9ca40dbff5fd79c5a5e1f0d6fff040fa47a18851bb6b0bd040a5d8"},
+ {file = "pydantic-1.10.24-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b644d6f14b2ce617d6def21622f9ba73961a16b7dffdba7f6692e2f66fa05d00"},
+ {file = "pydantic-1.10.24-cp313-cp313-win_amd64.whl", hash = "sha256:0cbbf306124ae41cc153fdc2559b37faa1bec9a23ef7b082c1756d1315ceffe6"},
+ {file = "pydantic-1.10.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7c8bbad6037a87effe9f3739bdf39851add6e0f7e101d103a601c504892ffa70"},
+ {file = "pydantic-1.10.24-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f154a8a46a0d950c055254f8f010ba07e742ac4404a3b6e281a31913ac45ccd0"},
+ {file = "pydantic-1.10.24-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f25d2f792afcd874cc8339c1da1cc52739f4f3d52993ed1f6c263ef2afadc47"},
+ {file = "pydantic-1.10.24-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:49a6f0178063f15eaea6cbcb2dba04db0b73db9834bc7b1e1c4dbea28c7cd22f"},
+ {file = "pydantic-1.10.24-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:bb3df10be3c7d264947180615819aeec0916f19650f2ba7309ed1fe546ead0d2"},
+ {file = "pydantic-1.10.24-cp37-cp37m-win_amd64.whl", hash = "sha256:fa0ebefc169439267e4b4147c7d458908788367640509ed32c90a91a63ebb579"},
+ {file = "pydantic-1.10.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1a5ef77efeb54def2695f2b8f4301aae8c7aa2b334bd15f61c18ef54317621"},
+ {file = "pydantic-1.10.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02f7a25e8949d8ca568e4bcef2ffed7881d7843286e7c3488bdd3b67f092059c"},
+ {file = "pydantic-1.10.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5da2775712dda8b89e701ed2a72d5d81d23dbc6af84089da8a0f61a0be439c8c"},
+ {file = "pydantic-1.10.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75259be0558ca3af09192ad7b18557f2e9033ad4cbd48c252131f5292f6374fd"},
+ {file = "pydantic-1.10.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1a1ae996daa3d43c530b8d0bacc7e2d9cb55e3991f0e6b7cc2cb61a0fb9f6667"},
+ {file = "pydantic-1.10.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:34109b0afa63b36eec2f2b115694e48ae5ee52f7d3c1baa0be36f80e586bda52"},
+ {file = "pydantic-1.10.24-cp38-cp38-win_amd64.whl", hash = "sha256:4d7336bfcdb8cb58411e6b498772ba2cff84a2ce92f389bae3a8f1bb2c840c49"},
+ {file = "pydantic-1.10.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25fb9a69a21d711deb5acefdab9ff8fb49e6cc77fdd46d38217d433bff2e3de2"},
+ {file = "pydantic-1.10.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6af36a8fb3072526b5b38d3f341b12d8f423188e7d185f130c0079fe02cdec7f"},
+ {file = "pydantic-1.10.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fc35569dfd15d3b3fc06a22abee0a45fdde0784be644e650a8769cd0b2abd94"},
+ {file = "pydantic-1.10.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fac7fbcb65171959973f3136d0792c3d1668bc01fd414738f0898b01f692f1b4"},
+ {file = "pydantic-1.10.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc3f4a6544517380658b63b144c7d43d5276a343012913b7e5d18d9fba2f12bb"},
+ {file = "pydantic-1.10.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:415c638ca5fd57b915a62dd38c18c8e0afe5adf5527be6f8ce16b4636b616816"},
+ {file = "pydantic-1.10.24-cp39-cp39-win_amd64.whl", hash = "sha256:a5bf94042efbc6ab56b18a5921f426ebbeefc04f554a911d76029e7be9057d01"},
+ {file = "pydantic-1.10.24-py3-none-any.whl", hash = "sha256:093768eba26db55a88b12f3073017e3fdee319ef60d3aef5c6c04a4e484db193"},
+ {file = "pydantic-1.10.24.tar.gz", hash = "sha256:7e6d1af1bd3d2312079f28c9baf2aafb4a452a06b50717526e5ac562e37baa53"},
]
[package.dependencies]
@@ -330,23 +338,23 @@ files = [
[[package]]
name = "urllib3"
-version = "2.2.1"
+version = "2.6.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
- {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
+ {file = "urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f"},
+ {file = "urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
-zstd = ["zstandard (>=0.18.0)"]
+zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
[metadata]
lock-version = "2.1"
python-versions = "^3.8"
-content-hash = "2ed3cbaf03c118578cad2e4ea78c7baefeb741fae05c96472df8a4a59d97a9ae"
+content-hash = "03993066d611a6cc272392e18020bc1ecb50242aaf20ee3ec824e7661b6fd34d"
diff --git a/meta/pyproject.toml b/meta/pyproject.toml
index 295d05a859..57d82fb0f3 100644
--- a/meta/pyproject.toml
+++ b/meta/pyproject.toml
@@ -1,13 +1,13 @@
[tool.poetry]
name = "meta"
-version = "0.0.1"
-description = "Prism Launcher meta generator"
-authors = ["Sefa Eyeoglu <contact@scrumplex.net>"]
+version = "0.0.5-1"
+description = "ProjT Launcher meta generator"
+authors = ["Yong Do Hyun <froster12@naver.com>"]
license = "MS-PL"
-readme = "README.md"
-repository = "https://github.com/PrismLauncher/meta"
-documentation = "https://github.com/PrismLauncher/meta"
-keywords = ["metadata", "prism", "launcher"]
+readme = "README"
+repository = "https://github.com/Project-Tick/meta"
+documentation = "https://github.com/Project-Tick/meta"
+keywords = ["metadata", "projt", "launcher"]
[tool.poetry.scripts]
generateFabric = "meta.run.generate_fabric:main"
@@ -27,10 +27,10 @@ updateJava = "meta.run.update_java:main"
index = "meta.run.index:main"
[tool.poetry.dependencies]
-python = "^3.8"
+python = ">=3.10,<4.0"
cachecontrol = "^0.14.0"
requests = "^2.31.0"
-filelock = "^3.13.1"
+filelock = "^3.20.3"
packaging = "^25.0"
pydantic = "^1.10.13"
diff --git a/meta/requirements.txt b/meta/requirements.txt
new file mode 100644
index 0000000000..e47926b82b
--- /dev/null
+++ b/meta/requirements.txt
@@ -0,0 +1,13 @@
+beautifulsoup4==4.14.3
+CacheControl==0.14.2
+certifi==2025.11.12
+charset-normalizer==3.4.4
+filelock==3.20.1
+idna==3.11
+msgpack==1.1.2
+packaging==25.0
+pydantic==1.10.24
+requests==2.32.4
+soupsieve==2.8.1
+typing_extensions==4.15.0
+urllib3==2.6.3
diff --git a/meta/update.sh b/meta/update.sh
index 15df54e2b0..28ac1095c4 100755
--- a/meta/update.sh
+++ b/meta/update.sh
@@ -8,7 +8,7 @@ fi
export META_CACHE_DIR=${CACHE_DIRECTORY:-./caches}
export META_UPSTREAM_DIR=${META_UPSTREAM_DIR:-${STATE_DIRECTORY:-.}/upstream}
-export META_LAUNCHER_DIR=${META_LAUNCHER_DIR:-${STATE_DIRECTORY:-.}/launcher}
+export META_LAUNCHER_DIR=${META_LAUNCHER_DIR:-${STATE_DIRECTORY:-.}/metalauncher}
function fail_in() {
upstream_git reset --hard HEAD
@@ -31,9 +31,11 @@ function launcher_git() {
# make sure we *could* push to our repo
currentDate=$(date -I)
+currentHour=$(date +"%H")
+currentMinute=$(date +"%M")
+currentSecond=$(date +"%S")
upstream_git reset --hard HEAD || exit 1
-upstream_git pull
python -m meta.run.update_mojang || fail_in
python -m meta.run.update_forge || fail_in
@@ -52,13 +54,12 @@ if [ "${DEPLOY_TO_GIT}" = true ]; then
upstream_git add liteloader/*.json || fail_in
upstream_git add java_runtime/adoptium/available_releases.json java_runtime/adoptium/versions/*.json java_runtime/azul/packages.json java_runtime/azul/versions/*.json java_runtime/ibm/available_releases.json java_runtime/ibm/versions/*.json || fail_in
if ! upstream_git diff --cached --exit-code; then
- upstream_git commit -a -m "Update ${currentDate}" || fail_in
+ upstream_git commit -a -m "Update Date ${currentDate} Time ${currentHour}:${currentMinute}:${currentSecond}" || fail_in
upstream_git push || exit 1
fi
fi
launcher_git reset --hard HEAD || exit 1
-launcher_git pull
python -m meta.run.generate_mojang || fail_out
python -m meta.run.generate_forge || fail_out
@@ -74,19 +75,21 @@ if [ "${DEPLOY_TO_GIT}" = true ]; then
launcher_git add net.minecraftforge/* || fail_out
launcher_git add net.neoforged/* || fail_out
launcher_git add net.fabricmc.fabric-loader/* net.fabricmc.intermediary/* || fail_out
- launcher_git add org.quiltmc.quilt-loader/* || fail_out # TODO: add Quilt hashed, once it is actually used
+ launcher_git add org.quiltmc.quilt-loader/* || fail_out
+ launcher_git add org.quiltmc.hashed/* 2>/dev/null || true
launcher_git add com.mumfrey.liteloader/* || fail_out
launcher_git add net.minecraft.java/* net.adoptium.java/* com.azul.java/* com.ibm.java/* || fail_out
if ! launcher_git diff --cached --exit-code; then
- launcher_git commit -a -m "Update ${currentDate}" || fail_out
+ launcher_git commit -a -m "Update Date ${currentDate} Time ${currentHour}:${currentMinute}:${currentSecond}" || fail_out
launcher_git push || exit 1
fi
fi
if [ "${DEPLOY_TO_FOLDER}" = true ]; then
echo "Deploying to ${DEPLOY_FOLDER}"
- rsync -rvog --chown="${DEPLOY_FOLDER_USER}:${DEPLOY_FOLDER_GROUP}" --exclude=.git "${META_LAUNCHER_DIR}/" "${DEPLOY_FOLDER}"
+ mkdir -p "${DEPLOY_FOLDER}"
+ rsync -av --exclude=.git "${META_LAUNCHER_DIR}/" "${DEPLOY_FOLDER}"
fi
exit 0