summaryrefslogtreecommitdiff
path: root/meta/run
diff options
context:
space:
mode:
authorTrial97 <alexandru.tripon97@gmail.com>2024-04-27 23:19:54 +0300
committerTrial97 <alexandru.tripon97@gmail.com>2024-04-27 23:19:54 +0300
commit2eecce37d3e8f16eeae1fbe51eb39f577e30b17b (patch)
treef48e24203446ada81c736d3eb7ebc6a54c2fb320 /meta/run
parentd996dc2a20c8664b03259cead7c888fd85d9848c (diff)
parentd215ec0383ce130d2244a3bfe4567d56f3982701 (diff)
downloadProject-Tick-2eecce37d3e8f16eeae1fbe51eb39f577e30b17b.tar.gz
Project-Tick-2eecce37d3e8f16eeae1fbe51eb39f577e30b17b.zip
Merge branch 'main' of https://github.com/PrismLauncher/meta into javas
Signed-off-by: Trial97 <alexandru.tripon97@gmail.com>
Diffstat (limited to 'meta/run')
-rw-r--r--meta/run/__init__.py1
-rwxr-xr-xmeta/run/generate_fabric.py143
-rwxr-xr-xmeta/run/generate_forge.py472
-rw-r--r--meta/run/generate_java.py407
-rwxr-xr-xmeta/run/generate_liteloader.py118
-rwxr-xr-xmeta/run/generate_mojang.py576
-rw-r--r--meta/run/generate_neoforge.py181
-rwxr-xr-xmeta/run/generate_quilt.py161
-rwxr-xr-xmeta/run/index.py78
-rwxr-xr-xmeta/run/update_fabric.py130
-rwxr-xr-xmeta/run/update_forge.py397
-rw-r--r--meta/run/update_java.py175
-rwxr-xr-xmeta/run/update_liteloader.py37
-rwxr-xr-xmeta/run/update_mojang.py180
-rw-r--r--meta/run/update_neoforge.py319
-rwxr-xr-xmeta/run/update_quilt.py122
16 files changed, 3497 insertions, 0 deletions
diff --git a/meta/run/__init__.py b/meta/run/__init__.py
new file mode 100644
index 0000000000..1bfe4f9ac8
--- /dev/null
+++ b/meta/run/__init__.py
@@ -0,0 +1 @@
+"""Main scripts"""
diff --git a/meta/run/generate_fabric.py b/meta/run/generate_fabric.py
new file mode 100755
index 0000000000..7ca50dd6aa
--- /dev/null
+++ b/meta/run/generate_fabric.py
@@ -0,0 +1,143 @@
+import json
+import os
+
+from meta.common import (
+ ensure_component_dir,
+ launcher_path,
+ upstream_path,
+ transform_maven_key,
+)
+from meta.common.fabric import (
+ JARS_DIR,
+ INSTALLER_INFO_DIR,
+ META_DIR,
+ INTERMEDIARY_COMPONENT,
+ LOADER_COMPONENT,
+)
+from meta.model import MetaVersion, Dependency, Library, MetaPackage, GradleSpecifier
+from meta.model.fabric import FabricJarInfo, FabricInstallerDataV1, FabricMainClasses
+
# Resolved launcher (output) and upstream (cached input) data directories.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Make sure both output component directories exist before anything is written.
ensure_component_dir(LOADER_COMPONENT)
ensure_component_dir(INTERMEDIARY_COMPONENT)
+
+
def load_jar_info(artifact_key) -> FabricJarInfo:
    """Read the cached jar metadata for a Fabric maven artifact key."""
    jar_info_file = os.path.join(UPSTREAM_DIR, JARS_DIR, f"{artifact_key}.json")
    return FabricJarInfo.parse_file(jar_info_file)
+
+
def load_installer_info(version) -> FabricInstallerDataV1:
    """Read the cached installer metadata for a Fabric Loader version."""
    installer_file = os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{version}.json")
    return FabricInstallerDataV1.parse_file(installer_file)
+
+
def process_loader_version(entry) -> MetaVersion:
    """Build the launcher meta version for one Fabric Loader index entry.

    ``entry`` is one object from the upstream ``loader.json`` index, carrying
    at least ``maven`` (gradle coordinate) and ``version`` keys.
    """
    jar_info = load_jar_info(transform_maven_key(entry["maven"]))
    installer_info = load_installer_info(entry["version"])

    v = MetaVersion(
        name="Fabric Loader", uid="net.fabricmc.fabric-loader", version=entry["version"]
    )
    v.release_time = jar_info.release_time
    # The loader always needs intermediary mappings alongside it.
    v.requires = [Dependency(uid="net.fabricmc.intermediary")]
    v.order = 10
    v.type = "release"
    # Newer installer data carries per-side main classes; older data is a single string.
    if isinstance(installer_info.main_class, FabricMainClasses):
        v.main_class = installer_info.main_class.client
    else:
        v.main_class = installer_info.main_class
    v.libraries = []
    v.libraries.extend(installer_info.libraries.common)
    v.libraries.extend(installer_info.libraries.client)
    # The loader jar itself is fetched as a plain maven library.
    loader_lib = Library(
        name=GradleSpecifier.from_string(entry["maven"]),
        url="https://maven.fabricmc.net",
    )
    v.libraries.append(loader_lib)
    return v
+
+
def process_intermediary_version(entry) -> MetaVersion:
    """Build the launcher meta version for one Intermediary mappings entry."""
    jar_info = load_jar_info(transform_maven_key(entry["maven"]))

    v = MetaVersion(
        name="Intermediary Mappings",
        uid="net.fabricmc.intermediary",
        version=entry["version"],
    )
    v.release_time = jar_info.release_time
    # Intermediary versions track Minecraft versions 1:1.
    v.requires = [Dependency(uid="net.minecraft", equals=entry["version"])]
    v.order = 11
    v.type = "release"
    v.libraries = []
    # NOTE(review): marked volatile - presumably so the launcher may swap this
    # component automatically; confirm against the launcher's component logic.
    v.volatile = True
    intermediary_lib = Library(
        name=GradleSpecifier.from_string(entry["maven"]),
        url="https://maven.fabricmc.net",
    )
    v.libraries.append(intermediary_lib)
    return v
+
+
def main():
    """Generate launcher components for Fabric Loader and Intermediary.

    Reads the cached upstream indexes, writes one version json per entry and
    a package.json for each of the two components.
    """
    recommended_loader_versions = []
    recommended_intermediary_versions = []

    with open(
        os.path.join(UPSTREAM_DIR, META_DIR, "loader.json"), "r", encoding="utf-8"
    ) as f:
        loader_version_index = json.load(f)
    for entry in loader_version_index:
        version = entry["version"]
        print(f"Processing loader {version}")

        v = process_loader_version(entry)

        # Fabric Meta has a separate "stable" field, let's use that
        # (only the first stable entry encountered is recommended - assumes
        # the index lists newest versions first; TODO confirm).
        if not recommended_loader_versions and entry["stable"]:
            recommended_loader_versions.append(version)

        v.write(os.path.join(LAUNCHER_DIR, LOADER_COMPONENT, f"{v.version}.json"))

    with open(
        os.path.join(UPSTREAM_DIR, META_DIR, "intermediary.json"), "r", encoding="utf-8"
    ) as f:
        intermediary_version_index = json.load(f)
    for entry in intermediary_version_index:
        version = entry["version"]
        print(f"Processing intermediary {version}")

        v = process_intermediary_version(entry)

        recommended_intermediary_versions.append(
            version
        )  # all intermediaries are recommended

        v.write(
            os.path.join(LAUNCHER_DIR, INTERMEDIARY_COMPONENT, f"{v.version}.json")
        )

    package = MetaPackage(uid=LOADER_COMPONENT, name="Fabric Loader")
    package.recommended = recommended_loader_versions
    package.description = (
        "Fabric Loader is a tool to load Fabric-compatible mods in game environments."
    )
    package.project_url = "https://fabricmc.net"
    package.authors = ["Fabric Developers"]
    package.write(os.path.join(LAUNCHER_DIR, LOADER_COMPONENT, "package.json"))

    package = MetaPackage(uid=INTERMEDIARY_COMPONENT, name="Intermediary Mappings")
    package.recommended = recommended_intermediary_versions
    package.description = "Intermediary mappings allow using Fabric Loader with mods for Minecraft in a more compatible manner."
    package.project_url = "https://fabricmc.net"
    package.authors = ["Fabric Developers"]
    package.write(os.path.join(LAUNCHER_DIR, INTERMEDIARY_COMPONENT, "package.json"))


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_forge.py b/meta/run/generate_forge.py
new file mode 100755
index 0000000000..95b5ac39cb
--- /dev/null
+++ b/meta/run/generate_forge.py
@@ -0,0 +1,472 @@
+import os
+import re
+import sys
+from packaging import version as pversion
+from operator import attrgetter
+from typing import Collection
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.forge import (
+ FORGE_COMPONENT,
+ INSTALLER_MANIFEST_DIR,
+ VERSION_MANIFEST_DIR,
+ DERIVED_INDEX_FILE,
+ LEGACYINFO_FILE,
+ INSTALLER_INFO_DIR,
+ BAD_VERSIONS,
+ FORGEWRAPPER_LIBRARY,
+)
+from meta.common.mojang import MINECRAFT_COMPONENT
+from meta.model import (
+ MetaVersion,
+ Dependency,
+ Library,
+ GradleSpecifier,
+ MojangLibraryDownloads,
+ MojangArtifact,
+ MetaPackage,
+)
+from meta.model.forge import (
+ ForgeVersion,
+ ForgeInstallerProfile,
+ ForgeLegacyInfo,
+ fml_libs_for_version,
+ ForgeInstallerProfileV2,
+ InstallerInfo,
+ DerivedForgeIndex,
+ ForgeLegacyInfoList,
+)
+from meta.model.mojang import MojangVersion
+
# Resolved launcher (output) and upstream (cached input) data directories.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Make sure the Forge component output directory exists before writing.
ensure_component_dir(FORGE_COMPONENT)
+
+
def eprint(*args, **kwargs):
    """Emit a diagnostic message on stderr; same signature as print()."""
    print(*args, file=sys.stderr, **kwargs)
+
+
# Construct a set of libraries out of a Minecraft version file, for filtering.
# Cache of MC version -> set of library names, so each version file is parsed
# at most once per run.
mc_version_cache = {}


def load_mc_version_filter(version: str):
    """Return the set of library names shipped by the given Minecraft version.

    Results are memoized in ``mc_version_cache``.
    """
    if version in mc_version_cache:
        return mc_version_cache[version]
    v = MetaVersion.parse_file(
        os.path.join(LAUNCHER_DIR, MINECRAFT_COMPONENT, f"{version}.json")
    )
    libs = set(map(attrgetter("name"), v.libraries))
    mc_version_cache[version] = libs
    return libs
+
+
def should_ignore_artifact(
    libs: "Collection[GradleSpecifier]", match: "GradleSpecifier"
):
    """Decide whether a Forge library duplicates one Minecraft already ships.

    Match a library coordinate against a set of library coordinates:
      * ignore (True) artifacts that match an entry exactly, or whose version
        is lower than the one already in the set;
      * keep (False) artifacts with a higher version (an upgrade), or with no
        matching group/artifact/classifier in the set at all.
    """
    for ver in libs:
        if (
            ver.group == match.group
            and ver.artifact == match.artifact
            and ver.classifier == match.classifier
        ):
            if ver.version == match.version:
                # Everything is matched perfectly - this one will be ignored
                return True
            elif pversion.parse(ver.version) > pversion.parse(match.version):
                # The set already carries a newer version - ignore this one.
                return True
            else:
                # Otherwise it did not match - new version is higher and this is an upgrade
                return False
    # No match found in the set - we need to keep this
    return False
+
+
def version_from_profile(
    profile: ForgeInstallerProfile, version: ForgeVersion
) -> MetaVersion:
    """Build a launcher version from a legacy (v1) Forge installer profile."""
    v = MetaVersion(name="Forge", version=version.rawVersion, uid=FORGE_COMPONENT)
    mc_version = profile.install.minecraft
    v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=mc_version)]
    v.main_class = profile.version_info.main_class
    v.release_time = profile.version_info.time

    # Pull every --tweakClass out of the argument string; the launcher
    # injects tweakers via additional_tweakers instead.
    args = profile.version_info.minecraft_arguments
    tweakers = []
    expression = re.compile(r"--tweakClass ([a-zA-Z0-9.]+)")
    match = expression.search(args)
    while match is not None:
        tweakers.append(match.group(1))
        args = args[: match.start()] + args[match.end() :]
        match = expression.search(args)
    if len(tweakers) > 0:
        args = args.strip()
        v.additional_tweakers = tweakers
    # v.minecraftArguments = args

    v.libraries = []
    # Drop libraries Minecraft itself already provides (plus lwjgl/log4j).
    mc_filter = load_mc_version_filter(mc_version)
    for forge_lib in profile.version_info.libraries:
        if (
            forge_lib.name.is_lwjgl()
            or forge_lib.name.is_log4j()
            or should_ignore_artifact(mc_filter, forge_lib.name)
        ):
            continue

        # Normalize old "minecraftforge" coordinates to the modern
        # net.minecraftforge:forge:<mc>-<forge>:universal form.
        overridden_name = forge_lib.name
        if overridden_name.group == "net.minecraftforge":
            if overridden_name.artifact == "minecraftforge":
                overridden_name.artifact = "forge"
                overridden_name.version = "%s-%s" % (
                    mc_version,
                    overridden_name.version,
                )

                overridden_name.classifier = "universal"
            elif overridden_name.artifact == "forge":
                overridden_name.classifier = "universal"

        overridden_lib = Library(name=overridden_name)
        # Prefer https for the Forge maven.
        if forge_lib.url == "http://maven.minecraftforge.net/":
            overridden_lib.url = "https://maven.minecraftforge.net/"
        else:
            overridden_lib.url = forge_lib.url
        # if forge_lib.checksums and len(forge_lib.checksums) == 2:
        #     overridden_lib.mmcHint = "forge-pack-xz"
        v.libraries.append(overridden_lib)

    v.order = 5
    return v
+
+
def version_from_modernized_installer(
    installer: MojangVersion, version: ForgeVersion
) -> MetaVersion:
    """Build a launcher version from a modernized installer for legacy MC versions.

    Same overall shape as ``version_from_profile``, but the input is a
    MojangVersion json rather than a v1 installer profile.
    """
    v = MetaVersion(name="Forge", version=version.rawVersion, uid=FORGE_COMPONENT)
    mc_version = version.mc_version
    v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=mc_version)]
    v.main_class = installer.main_class
    v.release_time = installer.release_time

    # Pull every --tweakClass out of the argument string; the launcher
    # injects tweakers via additional_tweakers instead.
    args = installer.minecraft_arguments
    tweakers = []
    expression = re.compile("--tweakClass ([a-zA-Z0-9.]+)")
    match = expression.search(args)
    while match is not None:
        tweakers.append(match.group(1))
        args = args[: match.start()] + args[match.end() :]
        match = expression.search(args)
    if len(tweakers) > 0:
        args = args.strip()
        v.additional_tweakers = tweakers
    # v.minecraftArguments = args

    v.libraries = []

    # Drop libraries Minecraft itself already provides (plus lwjgl/log4j).
    mc_filter = load_mc_version_filter(mc_version)
    for forge_lib in installer.libraries:
        if (
            forge_lib.name.is_lwjgl()
            or forge_lib.name.is_log4j()
            or should_ignore_artifact(mc_filter, forge_lib.name)
        ):
            continue

        # Point net.minecraftforge artifacts at the universal jar on the Forge
        # maven, normalizing old "minecraftforge" coordinates along the way.
        if forge_lib.name.group == "net.minecraftforge":
            if forge_lib.name.artifact == "forge":
                overridden_name = forge_lib.name
                overridden_name.classifier = "universal"
                forge_lib.downloads.artifact.path = overridden_name.path()
                forge_lib.downloads.artifact.url = (
                    "https://maven.minecraftforge.net/%s" % overridden_name.path()
                )
                forge_lib.name = overridden_name

            elif forge_lib.name.artifact == "minecraftforge":
                overridden_name = forge_lib.name
                overridden_name.artifact = "forge"
                overridden_name.classifier = "universal"
                overridden_name.version = "%s-%s" % (
                    mc_version,
                    overridden_name.version,
                )
                forge_lib.downloads.artifact.path = overridden_name.path()
                forge_lib.downloads.artifact.url = (
                    "https://maven.minecraftforge.net/%s" % overridden_name.path()
                )
                forge_lib.name = overridden_name

        v.libraries.append(forge_lib)

    v.order = 5
    return v
+
+
def version_from_legacy(info: ForgeLegacyInfo, version: ForgeVersion) -> MetaVersion:
    """Build a launcher version for ancient Forge builds with no installer.

    These builds are applied as jar mods on top of the Minecraft client jar.
    """
    v = MetaVersion(name="Forge", version=version.rawVersion, uid=FORGE_COMPONENT)
    mc_version = version.mc_version_sane
    v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=mc_version)]
    v.release_time = info.release_time
    v.order = 5
    if fml_libs_for_version(
        mc_version
    ):  # WHY, WHY DID I WASTE MY TIME REWRITING FMLLIBSMAPPING
        v.additional_traits = ["legacyFML"]

    # The download URL tells us whether this build ships a universal or client jar.
    classifier = "client"
    if "universal" in version.url():
        classifier = "universal"

    main_mod = Library(
        name=GradleSpecifier(
            "net.minecraftforge", "forge", version.long_version, classifier
        )
    )
    main_mod.downloads = MojangLibraryDownloads()
    main_mod.downloads.artifact = MojangArtifact(
        url=version.url(), sha1=info.sha1, size=info.size
    )
    main_mod.downloads.artifact.path = None
    v.jar_mods = [main_mod]
    return v
+
+
def version_from_build_system_installer(
    installer: MojangVersion, profile: ForgeInstallerProfileV2, version: ForgeVersion
) -> MetaVersion:
    """Build a launcher version for modern (v2 profile) Forge installers.

    These are launched through ForgeWrapper: the installer jar and the
    universal jar are recorded as maven files, and the game arguments get
    the --fml.* parameters the wrapper expects.
    """
    v = MetaVersion(name="Forge", version=version.rawVersion, uid=FORGE_COMPONENT)
    v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=version.mc_version_sane)]
    v.main_class = "io.github.zekerzhayard.forgewrapper.installer.Main"

    # FIXME: Add the size and hash here
    v.maven_files = []

    # load the locally cached installer file info and use it to add the installer entry in the json
    info = InstallerInfo.parse_file(
        os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{version.long_version}.json")
    )
    installer_lib = Library(
        name=GradleSpecifier(
            "net.minecraftforge", "forge", version.long_version, "installer"
        )
    )
    installer_lib.downloads = MojangLibraryDownloads()
    installer_lib.downloads.artifact = MojangArtifact(
        url="https://maven.minecraftforge.net/%s" % (installer_lib.name.path()),
        sha1=info.sha1hash,
        size=info.size,
    )
    v.maven_files.append(installer_lib)

    # Mirror the profile's libraries; the universal jar gets an explicit URL.
    for forge_lib in profile.libraries:
        if forge_lib.name.is_log4j():
            continue

        if (
            forge_lib.name.group == "net.minecraftforge"
            and forge_lib.name.artifact == "forge"
            and forge_lib.name.classifier == "universal"
        ):
            forge_lib.downloads.artifact.url = (
                "https://maven.minecraftforge.net/%s" % forge_lib.name.path()
            )
        v.maven_files.append(forge_lib)

    v.libraries = []

    v.libraries.append(FORGEWRAPPER_LIBRARY)

    for forge_lib in installer.libraries:
        if forge_lib.name.is_log4j():
            continue

        # The classifier-less forge artifact is really the launcher jar.
        if forge_lib.name.group == "net.minecraftforge":
            if forge_lib.name.artifact == "forge" and not forge_lib.name.classifier:
                forge_lib.name.classifier = "launcher"
                forge_lib.downloads.artifact.path = forge_lib.name.path()
                forge_lib.downloads.artifact.url = (
                    "https://maven.minecraftforge.net/%s" % forge_lib.name.path()
                )
        # net.minecraftforge.forge:client doesn't exist??? (49.0.x)
        if not len(forge_lib.downloads.artifact.url):
            continue
        v.libraries.append(forge_lib)

    v.release_time = installer.release_time
    v.order = 5
    mc_args = (
        "--username ${auth_player_name} --version ${version_name} --gameDir ${game_directory} "
        "--assetsDir ${assets_root} --assetIndex ${assets_index_name} --uuid ${auth_uuid} "
        "--accessToken ${auth_access_token} --userType ${user_type} --versionType ${version_type}"
    )
    for arg in installer.arguments.game:
        mc_args += f" {arg}"
    # Guarantee the FML parameters ForgeWrapper needs are always present.
    if "--fml.forgeGroup" not in installer.arguments.game:
        mc_args += " --fml.forgeGroup net.minecraftforge"
    if "--fml.forgeVersion" not in installer.arguments.game:
        mc_args += f" --fml.forgeVersion {version.rawVersion}"
    if "--fml.mcVersion" not in installer.arguments.game:
        mc_args += f" --fml.mcVersion {version.mc_version}"
    v.minecraft_arguments = mc_args
    return v
+
+
def main():
    """Generate Forge launcher components from the cached upstream metadata.

    Each remote version is routed to one of four converters: modernized
    installers (legacy MC versions), build-system (v2) installers, v1
    installer profiles, or legacy jar-mod builds.
    """
    # load the locally cached version list
    remote_versions = DerivedForgeIndex.parse_file(
        os.path.join(UPSTREAM_DIR, DERIVED_INDEX_FILE)
    )
    recommended_versions = []

    legacy_info_list = ForgeLegacyInfoList.parse_file(
        os.path.join(UPSTREAM_DIR, LEGACYINFO_FILE)
    )
    # MC versions whose installers go through the "modernized installer" path
    # rather than the v2 build-system path.
    legacy_versions = [
        "1.1",
        "1.2.3",
        "1.2.4",
        "1.2.5",
        "1.3.2",
        "1.4.1",
        "1.4.2",
        "1.4.3",
        "1.4.4",
        "1.4.5",
        "1.4.6",
        "1.4.7",
        "1.5",
        "1.5.1",
        "1.5.2",
        "1.6.1",
        "1.6.2",
        "1.6.3",
        "1.6.4",
        "1.7.10",
        "1.7.10-pre4",
        "1.7.2",
        "1.8",
        "1.8.8",
        "1.8.9",
        "1.9",
        "1.9.4",
        "1.10",
        "1.10.2",
        "1.11",
        "1.11.2",
        "1.12",
        "1.12.1",
        "1.12.2",
    ]

    for key, entry in remote_versions.versions.items():
        if entry.mc_version is None:
            eprint("Skipping %s with invalid MC version" % key)
            continue

        version = ForgeVersion(entry)

        if version.long_version in BAD_VERSIONS:
            # Known-broken versions (e.g. 1.12.2-14.23.5.2851 has broken
            # installer info) are skipped outright.
            eprint(f"Skipping bad version {version.long_version}")
            continue

        if version.url() is None:
            eprint("Skipping %s with no valid files" % key)
            continue
        eprint("Processing Forge %s" % version.rawVersion)
        version_elements = version.rawVersion.split(".")
        if len(version_elements) < 1:
            eprint("Skipping version %s with not enough version elements" % key)
            continue

        major_version_str = version_elements[0]
        if not major_version_str.isnumeric():
            eprint(
                "Skipping version %s with non-numeric major version %s"
                % (key, major_version_str)
            )
            continue

        if entry.recommended:
            recommended_versions.append(version.rawVersion)

        # If we do not have the corresponding Minecraft version, we ignore it
        if not os.path.isfile(
            os.path.join(
                LAUNCHER_DIR, MINECRAFT_COMPONENT, f"{version.mc_version_sane}.json"
            )
        ):
            eprint(
                "Skipping %s with no corresponding Minecraft version %s"
                % (key, version.mc_version_sane)
            )
            continue

        # Path for new-style build system based installers
        installer_version_filepath = os.path.join(
            UPSTREAM_DIR, VERSION_MANIFEST_DIR, f"{version.long_version}.json"
        )
        profile_filepath = os.path.join(
            UPSTREAM_DIR, INSTALLER_MANIFEST_DIR, f"{version.long_version}.json"
        )

        eprint(installer_version_filepath)
        if os.path.isfile(installer_version_filepath):
            installer = MojangVersion.parse_file(installer_version_filepath)
            if entry.mc_version in legacy_versions:
                v = version_from_modernized_installer(installer, version)
            else:
                profile = ForgeInstallerProfileV2.parse_file(profile_filepath)
                v = version_from_build_system_installer(installer, profile, version)
        else:
            if version.uses_installer():
                # If we do not have the Forge json, we ignore this version
                if not os.path.isfile(profile_filepath):
                    eprint("Skipping %s with missing profile json" % key)
                    continue
                profile = ForgeInstallerProfile.parse_file(profile_filepath)
                v = version_from_profile(profile, version)
            else:
                # Generate json for legacy here
                if version.mc_version_sane == "1.6.1":
                    continue
                build = version.build
                # NOTE(review): the encode/decode round-trip is a no-op on a
                # str - presumably a historical workaround; confirm before
                # simplifying to str(build).
                if (
                    str(build).encode("utf-8").decode("utf8")
                    not in legacy_info_list.number
                ):
                    eprint(
                        "Legacy build %d is missing in legacy info. Ignoring." % build
                    )
                    continue

                v = version_from_legacy(legacy_info_list.number[str(build)], version)

        v.write(os.path.join(LAUNCHER_DIR, FORGE_COMPONENT, f"{v.version}.json"))

    recommended_versions.sort()

    print("Recommended versions:", recommended_versions)

    package = MetaPackage(
        uid=FORGE_COMPONENT,
        name="Forge",
        project_url="https://www.minecraftforge.net/forum/",
    )
    package.recommended = recommended_versions
    package.write(os.path.join(LAUNCHER_DIR, FORGE_COMPONENT, "package.json"))


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_java.py b/meta/run/generate_java.py
new file mode 100644
index 0000000000..95803f21f1
--- /dev/null
+++ b/meta/run/generate_java.py
@@ -0,0 +1,407 @@
+import copy
+import datetime
+import os
+from typing import Optional
+from functools import reduce
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+
+from meta.common.java import (
+ JAVA_MINECRAFT_COMPONENT,
+ JAVA_ADOPTIUM_COMPONENT,
+ JAVA_AZUL_COMPONENT,
+ ADOPTIUM_DIR,
+ ADOPTIUM_VERSIONS_DIR,
+ AZUL_DIR,
+ AZUL_VERSIONS_DIR,
+)
+from meta.model import MetaPackage
+from meta.model.java import (
+ JavaRuntimeOS,
+ JavaRuntimeVersion,
+ JavaRuntimeMeta,
+ JavaVersionMeta,
+ JavaPackageType,
+ JavaChecksumMeta,
+ JavaChecksumType,
+ JavaRuntimeDownloadType,
+ AdoptiumAvailableReleases,
+ AdoptiumReleases,
+ AdoptiumRelease,
+ AdoptiumImageType,
+ AdoptiumBinary,
+ ZuluPackageList,
+ ZuluPackageDetail,
+ AzulJavaPackageType,
+ AzulArch,
+)
+
+from meta.common.mojang import (
+ JAVA_MANIFEST_FILE,
+)
+
+from meta.model.mojang import (
+ JavaIndex,
+ MojangJavaComponent,
+ MojangJavaOsName,
+ MojangJavaRuntime,
+)
+
# Resolved launcher (output) and upstream (cached input) data directories.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()
+
+
+MOJANG_OS_ARCHITECTURES = [
+ "x64",
+ "x86",
+ "arm64",
+ "arm32",
+]
+
+MOJANG_OS_ARCHITECTURE_TRANSLATIONS = {
+ 64: "x64",
+ 32: "x86",
+ "x32": "x86",
+ "i386": "x86",
+ "aarch64": "arm64",
+ "x86_64": "x64",
+ "arm": "arm32",
+}
+
+
+def translate_arch(arch: str | int):
+ if isinstance(arch, str):
+ arch = arch.lower()
+ if arch in MOJANG_OS_ARCHITECTURES:
+ return arch
+ elif arch in MOJANG_OS_ARCHITECTURE_TRANSLATIONS:
+ return MOJANG_OS_ARCHITECTURE_TRANSLATIONS[arch]
+ else:
+ return None
+
+
# Canonical OS names used in our java runtime metadata.
MOJANG_OS_NAMES = [
    "mac-os",
    "linux",
    "windows",
]

# Vendor-specific OS spellings -> canonical name.
MOJANG_OS_TRANSLATIONS = {
    "osx": "mac-os",
    "mac": "mac-os",
    "macos": "mac-os",
}


def translate_os(os: str):
    """Normalize an OS name to one of MOJANG_OS_NAMES (None if unknown)."""
    os = os.lower()
    if os in MOJANG_OS_NAMES:
        return os
    return MOJANG_OS_TRANSLATIONS.get(os)
+
+
def mojang_os_to_java_os(mojang_os: MojangJavaOsName) -> JavaRuntimeOS:
    """Map a Mojang java-manifest OS name onto our JavaRuntimeOS enum.

    Unrecognized names map to JavaRuntimeOS.Unknown.
    """
    translation = {
        MojangJavaOsName.Linux: JavaRuntimeOS.LinuxX64,
        MojangJavaOsName.Linuxi386: JavaRuntimeOS.LinuxX86,
        MojangJavaOsName.MacOs: JavaRuntimeOS.MacOsX64,
        MojangJavaOsName.MacOSArm64: JavaRuntimeOS.MacOsArm64,
        MojangJavaOsName.WindowsArm64: JavaRuntimeOS.WindowsArm64,
        MojangJavaOsName.WindowsX64: JavaRuntimeOS.WindowsX64,
        MojangJavaOsName.WindowsX86: JavaRuntimeOS.WindowsX86,
    }
    return translation.get(mojang_os, JavaRuntimeOS.Unknown)
+
+
def mojang_runtime_to_java_runtime(
    mojang_runtime: MojangJavaRuntime,
    mojang_component: MojangJavaComponent,
    runtime_os: JavaRuntimeOS,
) -> JavaRuntimeMeta:
    """Convert one Mojang runtime manifest entry into a JavaRuntimeMeta.

    Handles both legacy "8u51"-style version names and dotted
    "major.minor.security(.build)" names.
    """
    # "8u51" -> major "8", security "51"; dotted names leave security empty.
    major, _, security = mojang_runtime.version.name.partition("u")
    if major and security:
        version_parts = [int(major), 0, int(security)]
    else:
        version_parts = [int(part) for part in mojang_runtime.version.name.split(".")]

    # Pad to at least major.minor.security.
    while len(version_parts) < 3:
        version_parts.append(0)

    build = None
    if len(version_parts) >= 4:
        build = version_parts[3]

    version = JavaVersionMeta(
        major=version_parts[0],
        minor=version_parts[1],
        security=version_parts[2],
        build=build,
        name=mojang_runtime.version.name,
    )
    return JavaRuntimeMeta(
        name=mojang_component,
        vendor="mojang",
        url=mojang_runtime.manifest.url,
        releaseTime=mojang_runtime.version.released,
        checksum=JavaChecksumMeta(
            type=JavaChecksumType.Sha1, hash=mojang_runtime.manifest.sha1
        ),
        # Mojang runtimes are downloaded through their own file manifest.
        downloadType=JavaRuntimeDownloadType.Manifest,
        packageType=JavaPackageType.Jre,
        version=version,
        runtime_os=runtime_os,
    )
+
+
def adoptium_release_binary_to_java_runtime(
    rls: AdoptiumRelease,
    binary: AdoptiumBinary,
    runtime_os: JavaRuntimeOS,
) -> JavaRuntimeMeta:
    """Convert one Adoptium release binary into a JavaRuntimeMeta."""
    # Callers must have filtered out binaries without a package already.
    assert binary.package is not None

    checksum = None
    if binary.package.checksum is not None:
        checksum = JavaChecksumMeta(
            type=JavaChecksumType.Sha256, hash=binary.package.checksum
        )

    pkg_type = JavaPackageType(str(binary.image_type))

    # Missing version components default to 0.
    version = JavaVersionMeta(
        major=rls.version_data.major if rls.version_data.major is not None else 0,
        minor=rls.version_data.minor if rls.version_data.minor is not None else 0,
        security=(
            rls.version_data.security if rls.version_data.security is not None else 0
        ),
        build=rls.version_data.build,
    )
    rls_name = f"{rls.vendor}_temurin_{binary.image_type}{version}"
    return JavaRuntimeMeta(
        name=rls_name,
        vendor=rls.vendor,
        url=binary.package.link,
        releaseTime=rls.timestamp,
        checksum=checksum,
        # Adoptium binaries are plain downloadable archives.
        downloadType=JavaRuntimeDownloadType.Archive,
        packageType=pkg_type,
        version=version,
        runtime_os=runtime_os,
    )
+
+
def azul_package_to_java_runtime(
    pkg: ZuluPackageDetail, runtime_os: JavaRuntimeOS
) -> JavaRuntimeMeta:
    """Convert one Azul Zulu package detail into a JavaRuntimeMeta."""
    # Copy before padding so the original package data is left untouched.
    version_parts = copy.copy(pkg.java_version)

    build = None
    # Pad to at least major.minor.security.
    while len(version_parts) < 3:
        version_parts.append(0)

    if len(version_parts) >= 4:
        build = version_parts[3]

    version = JavaVersionMeta(
        major=version_parts[0],
        minor=version_parts[1],
        security=version_parts[2],
        build=build,
    )

    pkg_type = JavaPackageType(str(pkg.java_package_type))

    rls_name = f"azul_{pkg.product}_{pkg.java_package_type}{version}"

    checksum = None
    if pkg.sha256_hash is not None:
        checksum = JavaChecksumMeta(type=JavaChecksumType.Sha256, hash=pkg.sha256_hash)

    return JavaRuntimeMeta(
        name=rls_name,
        vendor="azul",
        url=pkg.download_url,
        releaseTime=pkg.build_date,
        checksum=checksum,
        # Azul packages are plain downloadable archives.
        downloadType=JavaRuntimeDownloadType.Archive,
        packageType=pkg_type,
        version=version,
        runtime_os=runtime_os,
    )
+
+
# Vendors in order of preference, most preferred first.
PREFERED_VENDOR_ORDER = ["mojang", "eclipse", "azul"]

# Reversed copy so that a higher list index means a more preferred vendor.
__PREFERED_VENDOR_ORDER = list(reversed(PREFERED_VENDOR_ORDER))


def vendor_priority(vendor: str) -> int:
    """Get a numeric priority for a given vendor

    Args:
        vendor (str): the vendor to check

    Returns:
        int: how preferable the vendor is, the higher the better
    """
    known = vendor in PREFERED_VENDOR_ORDER
    return __PREFERED_VENDOR_ORDER.index(vendor) if known else -1
+
+
def pkg_type_priority(pkg_type: JavaPackageType) -> int:
    """Rank package types: JRE (2) is preferred over JDK (1); others get -1."""
    if pkg_type == JavaPackageType.Jre:
        return 2
    if pkg_type == JavaPackageType.Jdk:
        return 1
    return -1
+
+
def writeJavas(javas: dict[int, list[JavaRuntimeMeta]], uid: str):
    """Write one version json per Java major plus a package.json for ``uid``."""

    def oldest_timestamp(a: datetime.datetime | None, b: datetime.datetime):
        # reduce() helper: keep the earliest timestamp seen so far.
        if a is None or a > b:
            return b
        return a

    ensure_component_dir(uid)

    # small hack to sort the versions after major
    javas = dict(sorted(javas.items(), key=lambda item: item[0]))
    timestamps: dict[int, datetime.datetime | None] = {}
    prevDate: datetime.datetime | None = None
    for major, runtimes in javas.items():
        # Use the oldest runtime release as the version timestamp...
        releaseTime = reduce(
            oldest_timestamp,
            (runtime.release_time for runtime in runtimes),
            None,
        )
        # ...but keep timestamps strictly increasing with the major version.
        if prevDate is not None and releaseTime < prevDate:
            releaseTime = prevDate + datetime.timedelta(seconds=1)
        prevDate = releaseTime
        timestamps[major] = releaseTime

    for major, runtimes in javas.items():
        version_file = os.path.join(LAUNCHER_DIR, uid, f"java{major}.json")
        java_version = JavaRuntimeVersion(
            name=f"Java {major}",
            uid=uid,
            version=f"java{major}",
            releaseTime=timestamps.get(major),
            runtimes=runtimes,
        )
        java_version.write(version_file)

    # No recommended runtimes - selection is left to the launcher.
    package = MetaPackage(uid=uid, name="Java Runtimes", recommended=[])
    package.write(os.path.join(LAUNCHER_DIR, uid, "package.json"))
+
+
def main():
    """Generate Java runtime components from cached Mojang, Adoptium and Azul data."""
    javas: dict[int, list[JavaRuntimeMeta]] = {}

    def add_java_runtime(runtime: JavaRuntimeMeta, major: int):
        # Group runtimes by their Java major version.
        if major not in javas:
            javas[major] = []
        print(f"Registering runtime: {runtime.name} for Java {major}")
        javas[major].append(runtime)

    print("Processing Mojang Javas")
    mojang_java_manifest = JavaIndex.parse_file(
        os.path.join(UPSTREAM_DIR, JAVA_MANIFEST_FILE)
    )
    for mojang_os_name in mojang_java_manifest:
        if mojang_os_name == MojangJavaOsName.Gamecore:
            continue  # empty
        java_os = mojang_os_to_java_os(mojang_os_name)
        for comp in mojang_java_manifest[mojang_os_name]:
            if comp == MojangJavaComponent.Exe:
                continue  # doesn't appear to be used and not marked with a full version so I don't trust it
            mojang_runtimes = mojang_java_manifest[mojang_os_name][comp]
            for mojang_runtime in mojang_runtimes:
                if comp == MojangJavaComponent.JreLegacy:
                    # jre-legacy carries no usable major in its name; it is Java 8
                    major = 8
                else:
                    major = int(mojang_runtime.version.name.partition(".")[0])
                runtime = mojang_runtime_to_java_runtime(mojang_runtime, comp, java_os)
                add_java_runtime(runtime, major)

    writeJavas(javas=javas, uid=JAVA_MINECRAFT_COMPONENT)
    javas = {}

    print("Processing Adoptium Releases")
    adoptium_path = os.path.join(UPSTREAM_DIR, ADOPTIUM_DIR, "available_releases.json")
    if os.path.exists(adoptium_path):
        adoptium_available_releases = AdoptiumAvailableReleases.parse_file(
            adoptium_path
        )
        for major in adoptium_available_releases.available_releases:
            adoptium_releases = AdoptiumReleases.parse_file(
                os.path.join(UPSTREAM_DIR, ADOPTIUM_VERSIONS_DIR, f"java{major}.json")
            )
            for _, rls in adoptium_releases:
                for binary in rls.binaries:
                    # Only JRE binaries that actually carry a package are usable.
                    if (
                        binary.package is None
                        or binary.image_type is not AdoptiumImageType.Jre
                    ):
                        continue
                    binary_arch = translate_arch(str(binary.architecture))
                    binary_os = translate_os(str(binary.os))
                    if binary_arch is None or binary_os is None:
                        print(f"Ignoring release for {binary.os} {binary.architecture}")
                        continue

                    java_os = JavaRuntimeOS(f"{binary_os}-{binary_arch}")
                    runtime = adoptium_release_binary_to_java_runtime(
                        rls, binary, java_os
                    )
                    add_java_runtime(runtime, major)

    writeJavas(javas=javas, uid=JAVA_ADOPTIUM_COMPONENT)
    javas = {}
    print("Processing Azul Packages")
    azul_path = os.path.join(UPSTREAM_DIR, AZUL_DIR, "packages.json")
    if os.path.exists(azul_path):
        azul_packages = ZuluPackageList.parse_file(azul_path)
        for _, pkg in azul_packages:
            pkg_detail = ZuluPackageDetail.parse_file(
                os.path.join(
                    UPSTREAM_DIR, AZUL_VERSIONS_DIR, f"{pkg.package_uuid}.json"
                )
            )
            major = pkg_detail.java_version[0]
            if major < 8 or pkg_detail.java_package_type is not AzulJavaPackageType.Jre:
                continue  # we will never need java versions less than 8

            pkg_os = translate_os(str(pkg_detail.os))
            # Azul reports arm/x86 plus a separate hardware bitness field.
            if pkg_detail.arch == AzulArch.Arm:
                pkg_arch = translate_arch(f"{pkg_detail.arch}{pkg_detail.hw_bitness}")
            elif pkg_detail.arch == AzulArch.X86:
                pkg_arch = translate_arch(int(pkg_detail.hw_bitness))
            else:
                pkg_arch = None
            if pkg_arch is None or pkg_os is None:
                print(
                    f"Ignoring release for {pkg_detail.os} {pkg_detail.arch}_{pkg_detail.hw_bitness}"
                )
                continue

            java_os = JavaRuntimeOS(f"{pkg_os}-{pkg_arch}")
            runtime = azul_package_to_java_runtime(pkg_detail, java_os)
            add_java_runtime(runtime, major)
    writeJavas(javas=javas, uid=JAVA_AZUL_COMPONENT)
    javas = {}


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_liteloader.py b/meta/run/generate_liteloader.py
new file mode 100755
index 0000000000..2fe95fcc42
--- /dev/null
+++ b/meta/run/generate_liteloader.py
@@ -0,0 +1,118 @@
+import os
+from datetime import datetime
+from typing import List, Tuple, Dict, Optional
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.liteloader import LITELOADER_COMPONENT, VERSIONS_FILE
+from meta.common.mojang import MINECRAFT_COMPONENT
+from meta.model import MetaVersion, GradleSpecifier, Library, MetaPackage, Dependency
+from meta.model.liteloader import LiteloaderIndex, LiteloaderArtefact
+
# Resolve the output (launcher) and input (upstream cache) directories once at
# import time; all paths below are built relative to these.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Make sure the LiteLoader output folder exists before any version is written.
ensure_component_dir(LITELOADER_COMPONENT)
+
+
def process_artefacts(
    mc_version: str, artefacts: Dict[str, LiteloaderArtefact], is_snapshot: bool
) -> Tuple[List[MetaVersion], Optional[MetaVersion]]:
    """Convert the LiteLoader artefacts of one Minecraft version to MetaVersions.

    The special "latest" key only marks which artefact version is the latest;
    it is not converted itself. Returns (versions, latest) where latest is
    None when the index carries no "latest" marker.
    """
    versions: List[MetaVersion] = []
    lookup: Dict[str, MetaVersion] = {}
    latest_version = None
    latest = None
    for x, artefact in artefacts.items():
        if x == "latest":
            latest_version = artefact.version
            continue
        v = MetaVersion(
            name="LiteLoader",
            uid=LITELOADER_COMPONENT,
            version=artefact.version,
            requires=[Dependency(uid=MINECRAFT_COMPONENT, equals=mc_version)],
            # upstream stores the release time as a Unix timestamp
            release_time=datetime.utcfromtimestamp(int(artefact.timestamp)),
            additional_tweakers=[artefact.tweakClass],
            main_class="net.minecraft.launchwrapper.Launch",
            order=10,
            libraries=artefact.libraries,
            type="release",
        )

        if is_snapshot:
            v.type = "snapshot"

        # hack to make broken liteloader versions work
        for lib in v.libraries:
            if lib.name == GradleSpecifier("org.ow2.asm", "asm-all", "5.0.3"):
                lib.url = "https://repo.maven.apache.org/maven2/"
            if lib.name == GradleSpecifier("org.ow2.asm", "asm-all", "5.2"):
                lib.url = "http://repo.liteloader.com/"

        # append the LiteLoader artifact itself as a library
        liteloader_lib = Library(
            name=GradleSpecifier("com.mumfrey", "liteloader", v.version),
            url="http://dl.liteloader.com/versions/",
        )
        if is_snapshot:
            # NOTE(review): "always-stale" presumably forces re-download of
            # snapshot builds republished under the same version — confirm.
            liteloader_lib.mmcHint = "always-stale"
        v.libraries.append(liteloader_lib)

        versions.append(v)
        lookup[v.version] = v

    if latest_version:
        latest = lookup[latest_version]
    return versions, latest
+
+
def process_versions(index: LiteloaderIndex) -> Tuple[List[MetaVersion], List[str]]:
    """Flatten the LiteLoader index into meta versions plus recommendations.

    Returns (all_versions, recommended): every release and snapshot version
    sorted newest-first, and the sorted list of "latest" release versions
    (at most one per Minecraft version) to mark as recommended.
    """
    all_versions: List[MetaVersion] = []
    recommended: List[str] = []
    for mc_version, version_object in index.versions.items():
        # ignore this for now. It should be a jar mod or something.
        if mc_version == "1.5.2":
            continue

        latest_release = None
        if version_object.artefacts:
            versions, latest_release = process_artefacts(
                mc_version, version_object.artefacts.liteloader, False
            )
            all_versions.extend(versions)
        if version_object.snapshots:
            # The snapshot "latest" marker is deliberately discarded — only
            # release versions feed the recommendation list.
            versions, _ = process_artefacts(
                mc_version, version_object.snapshots.liteloader, True
            )
            all_versions.extend(versions)

        if latest_release:
            recommended.append(latest_release.version)

    recommended.sort()

    all_versions.sort(key=lambda x: x.release_time, reverse=True)
    return all_versions, recommended
+
+
def main():
    """Generate LiteLoader component version files and the package index."""
    index = LiteloaderIndex.parse_file(os.path.join(UPSTREAM_DIR, VERSIONS_FILE))

    all_versions, recommended = process_versions(index)

    # One JSON file per LiteLoader version.
    for version in all_versions:
        version.write(
            os.path.join(LAUNCHER_DIR, LITELOADER_COMPONENT, f"{version.version}.json")
        )

    package = MetaPackage(
        uid=LITELOADER_COMPONENT,
        name="LiteLoader",
        description=index.meta.description,
        project_url=index.meta.url,
        # NOTE(review): meta.authors is wrapped in a single-element list here —
        # confirm upstream provides a single string rather than a list.
        authors=[index.meta.authors],
        recommended=recommended,
    )
    package.write(os.path.join(LAUNCHER_DIR, LITELOADER_COMPONENT, "package.json"))


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_mojang.py b/meta/run/generate_mojang.py
new file mode 100755
index 0000000000..e0068c24ec
--- /dev/null
+++ b/meta/run/generate_mojang.py
@@ -0,0 +1,576 @@
+import copy
+import hashlib
+import os
+from collections import defaultdict, namedtuple
+from operator import attrgetter
+from pprint import pprint
+from packaging import version as pversion
+from typing import Optional, List
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.mojang import (
+ STATIC_LEGACY_SERVICES_FILE,
+ VERSION_MANIFEST_FILE,
+ MINECRAFT_COMPONENT,
+ LWJGL3_COMPONENT,
+ LWJGL_COMPONENT,
+ STATIC_OVERRIDES_FILE,
+ VERSIONS_DIR,
+ LIBRARY_PATCHES_FILE,
+)
+from meta.model import (
+ MetaVersion,
+ Library,
+ GradleSpecifier,
+ MojangLibraryDownloads,
+ MojangArtifact,
+ Dependency,
+ MetaPackage,
+ MojangRules,
+)
+from meta.model.mojang import (
+ LegacyServices,
+ MojangIndexWrap,
+ MojangIndex,
+ MojangVersion,
+ LegacyOverrideIndex,
+ LibraryPatches,
+ SUPPORTED_FEATURES,
+)
+
# When enabled, per-OS "natives-*" classifiers are merged into the artifact
# name while processing libraries (see main()) to work around a launcher bug.
APPLY_SPLIT_NATIVES_WORKAROUND = True

# Output (launcher) and input (upstream cache) directories, resolved once.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Output folders for the three components this script writes.
ensure_component_dir(MINECRAFT_COMPONENT)
ensure_component_dir(LWJGL_COMPONENT)
ensure_component_dir(LWJGL3_COMPONENT)
+
+
def map_log4j_artifact(version):
    """Map a log4j version to its (replacement version, maven URL pattern).

    Returns (None, None) when the given version needs no replacement.
    """
    parsed = pversion.parse(version)
    # Everything up to 2.0 is replaced by the patched beta9 build Prism hosts.
    if parsed <= pversion.parse("2.0"):
        return "2.0-beta9-fixed", "https://files.prismlauncher.org/maven/%s"
    # 2.17.1 is the only version that's patched (as of 2022/02/19).
    if parsed <= pversion.parse("2.17.1"):
        return "2.17.1", "https://repo1.maven.org/maven2/%s"
    return None, None
+
+
# Pinned sha1/size metadata for the replacement log4j artifacts returned by
# map_log4j_artifact(); used in main() to build MojangArtifact download entries.
LOG4J_HASHES = {
    "2.0-beta9-fixed": {
        "log4j-api": {
            "sha1": "b61eaf2e64d8b0277e188262a8b771bbfa1502b3",
            "size": 107347,
        },
        "log4j-core": {
            "sha1": "677991ea2d7426f76309a73739cecf609679492c",
            "size": 677588,
        },
    },
    "2.17.1": {
        "log4j-api": {
            "sha1": "d771af8e336e372fb5399c99edabe0919aeaf5b2",
            "size": 301872,
        },
        "log4j-core": {
            "sha1": "779f60f3844dadc3ef597976fcb1e5127b1f343d",
            "size": 1790452,
        },
        "log4j-slf4j18-impl": {
            "sha1": "ca499d751f4ddd8afb016ef698c30be0da1d09f7",
            "size": 21268,
        },
    },
}

# We want versions that contain natives for all platforms. If there are multiple, pick the latest one
# LWJGL versions we want
# Entries are the sha1 content hashes produced by hash_lwjgl_version().
PASS_VARIANTS = [
    # TODO: needs arm64 for Linux?
    "8a9b08f11271eb4de3b50e5d069949500b2c7bc1",  # 3.3.3 (2024-04-03 11:49:39+00:00)
    "765b4ab443051d286bdbb1c19cd7dc86b0792dce",  # 3.3.2 (2024-01-17 13:19:20+00:00)
    "54c4fb1d6a96ac3007c947bf310c8bcf94a862be",  # 3.3.1 (2023-04-20 11:55:19+00:00) split natives, with WoA natives
    "ea4973ebc9eadf059f30f0958c89f330898bff51",  # 3.2.2 (2019-07-04 14:41:05+00:00) will be patched, missing tinyfd
    "235fc413bc4c76b269c207f7bca6464f1e1f1d80",  # 3.2.1 (2019-02-13 16:12:08+00:00)
    "deb1a436d806413207350735a00e04b54d113916",  # 3.1.6 (2018-10-18 14:46:12+00:00)
    "3e47f0f742fb759401754769fa59c508fd8fda75",  # 3.1.2 (2018-06-21 12:57:11+00:00)
    "a3f254df5a63a0a1635755733022029e8cfae1b3",  # 2.9.4-nightly-20150209 (2016-12-20 14:05:34+00:00)
    "879be09c0bd0d4bafc2ea4ea3d2ab8607a0d976c",  # 2.9.3 (2015-01-30 11:58:24+00:00)
    "8d4951d00253dfaa36a0faf1c8be541431861c30",  # 2.9.1 (2014-05-22 14:44:33+00:00)
    "cf58c9f92fed06cb041a7244c6b4b667e6d544cc",  # 2.9.1-nightly-20131120 (2013-12-06 13:55:34+00:00)
    "27dcadcba29a1a7127880ca1a77efa9ece866f24",  # 2.9.0 (2013-09-06 12:31:58+00:00)
]

# LWJGL versions we def. don't want!
BAD_VARIANTS = [
    "79bde9e46e9ad9accebda11e8293ed08d80dbdc3",  # 3.3.2 (2023-08-30 11:24:35+00:00) does not have lwjgl-freetype
    "8836c419f90f69a278b97d945a34af165c24ff60",  # 3.3.1 (2022-05-18 13:51:54+00:00) split natives, with workaround, replaced by 23w26a
    "3c624b94c06dbc4abae08fe6156d74abe4a2cca5",  # 3.3.1 (2022-05-04 14:41:35+00:00) we already have a nice 3.3.1
    "e1106ca765798218323b7a6d7528050260ea9d88",  # 3.3.1 (2022-05-04 14:41:35+00:00) doesn't use split natives
    "90b3d9ca01058286c033b6b7ae7f6dc370a04015",  # 3.2.2 (2022-03-31 14:53:25+00:00) only linux, windows
    "d986df9598fa2bcf4a5baab5edf044548e66d011",  # 3.2.2 (2021-12-10 03:36:38+00:00) only linux, windows
    "4b73fccb9e5264c2068bdbc26f9651429abbf21a",  # 3.2.2 (2021-08-25 14:41:57+00:00) only linux, windows
    "090cec3577ecfe438b890b2a9410ea07aa725e16",  # 3.2.2 (2021-04-07 14:04:09+00:00) only linux, windows
    "ab463e9ebc6a36abf22f2aa27b219dd372ff5069",  # 3.2.2 (2019-08-13 07:33:42+00:00) only linux, windows
    "51d8ff5a7efc949b4ad2088930e151d6b88ba616",  # 3.2.2 (2019-07-19 09:25:47+00:00) only linux, windows
    "854649a5bd1455b89117593ae82ff90c8132cacf",  # 3.2.1 (2019-04-18 11:05:19+00:00) only osx, windows
    "89fcb489261b05f622e8052fe0b588b0cfe49c24",  # 3.1.6 (2019-04-18 11:05:19+00:00) only linux
    "f04052162b50fa1433f67e1a90bc79466c4ab776",  # 2.9.0 (2013-10-21 16:34:47+00:00) only linux, windows
    "6442fc475f501fbd0fc4244fd1c38c02d9ebaf7e",  # 2.9.0 (2011-03-30 22:00:00+00:00) fine but newer variant available
]
+
+
def add_or_get_bucket(buckets, rules: Optional[MojangRules]) -> MetaVersion:
    """Return the LWJGL bucket for the given rule set, creating it on demand.

    Buckets are keyed by a hash of the rules' JSON; a missing rule set maps
    to the shared None key.
    """
    key = hash(rules.json()) if rules else None

    if key not in buckets:
        fresh = MetaVersion(name="LWJGL", version="undetermined", uid=LWJGL_COMPONENT)
        fresh.type = "release"
        buckets[key] = fresh
    return buckets[key]
+
+
def hash_lwjgl_version(lwjgl: MetaVersion):
    """Return the sha1 hex digest of the version's JSON, ignoring release_time."""
    scrubbed = copy.deepcopy(lwjgl)
    scrubbed.release_time = None
    serialized = scrubbed.json().encode("utf-8", "strict")
    return hashlib.sha1(serialized).hexdigest()
+
+
def sort_libs_by_name(library):
    """Sort-key function: order libraries by their Gradle specifier name."""
    return library.name
+
+
# One (version, sha1) record per distinct serialized LWJGL variant.
LWJGLEntry = namedtuple("LWJGLEntry", ("version", "sha1"))

# Global accumulator: LWJGL version string -> list of LWJGLEntry variants,
# filled by add_lwjgl_version() while main() walks the Mojang versions.
lwjglVersionVariants = defaultdict(list)
+
+
def add_lwjgl_version(variants, lwjgl):
    """Register an LWJGL variant in *variants*, deduplicated by content hash.

    The library list is sorted by name first so ordering differences do not
    produce spurious new variants.
    """
    lwjgl_copy = copy.deepcopy(lwjgl)
    libraries = list(lwjgl_copy.libraries)
    libraries.sort(key=sort_libs_by_name)
    lwjgl_copy.libraries = libraries

    version = lwjgl_copy.version
    current_hash = hash_lwjgl_version(lwjgl_copy)
    for variant in variants[version]:
        if current_hash == variant.sha1:
            # identical variant already recorded — nothing to do
            return
    print("!!! New variant for LWJGL version %s" % version)
    # Only record genuinely new variants. Appending unconditionally would
    # re-add the same variant for every Minecraft version that references it,
    # breaking the later `passed_variants == 1` accounting in main().
    variants[version].append(LWJGLEntry(version=lwjgl_copy, sha1=current_hash))
+
+
def remove_paths_from_lib(lib):
    """Strip the local `path` hints from a library's downloads, in place.

    Clears the path on the main artifact and on every classifier download;
    only the values carry a path, so the classifier keys are left untouched.
    """
    if lib.downloads.artifact:
        lib.downloads.artifact.path = None
    if lib.downloads.classifiers:
        for classifier_download in lib.downloads.classifiers.values():
            classifier_download.path = None
+
+
def adapt_new_style_arguments(arguments):
    """Flatten 1.13+ style game arguments into a legacy argument string.

    JVM arguments are ignored entirely. Conditional (non-string) arguments
    cannot be expressed in the legacy format, so they are only logged.
    """
    # MSA client/xuid identity arguments are not supported by the launcher.
    skipped = {"--clientId", "${clientid}", "--xuid", "${auth_xuid}"}
    flattened = []
    for arg in arguments.game:
        if isinstance(arg, str):
            if arg not in skipped:
                flattened.append(arg)
        else:
            print("!!! Unrecognized structure in Minecraft game arguments:")
            pprint(arg)
    return " ".join(flattened)
+
+
def adapt_new_style_arguments_to_traits(arguments):
    """Collect supported feature-gated game arguments as "feature:<name>" traits.

    JVM arguments are ignored entirely; plain string arguments carry no rules.
    Only "allow" rules whose feature flag is enabled and listed in
    SUPPORTED_FEATURES contribute a trait.
    """
    traits = []
    for arg in arguments.game:
        if not isinstance(arg, dict):
            continue
        for rule in arg["rules"]:
            if rule["action"] != "allow":
                continue
            for feature, enabled in rule["features"].items():
                if enabled and feature in SUPPORTED_FEATURES:
                    traits.append(f"feature:{feature}")
    return traits
+
+
def is_macos_only(rules: Optional[MojangRules]):
    """Return True when the rules allow only macOS (osx) and no other OS."""
    if not rules:
        return False
    allows_osx = False
    allows_all = False
    for rule in rules:
        if rule.action != "allow":
            continue
        if rule.os and rule.os.name == "osx":
            allows_osx = True
        elif not rule.os:
            allows_all = True
    return allows_osx and not allows_all
+
+
def patch_library(lib: Library, patches: LibraryPatches) -> List[Library]:
    """Apply matching patches to *lib* in place and return any added libraries.

    Overrides are merged directly into the matched library. Additional
    libraries contributed by a patch are returned (deduplicated, order
    preserved) and — when the patch requests it — queued to be patched
    themselves.
    """
    to_patch = [lib]

    new_libraries = []
    while to_patch:
        target = to_patch.pop(0)

        for patch in patches:
            if patch.applies(target):
                if patch.override:
                    target.merge(patch.override)

                if patch.additionalLibraries:
                    additional_copy = copy.deepcopy(patch.additionalLibraries)
                    new_libraries += list(dict.fromkeys(additional_copy))
                    # Must stay inside the additionalLibraries branch:
                    # otherwise a patch with patchAdditionalLibraries set but
                    # no additionalLibraries would re-queue a stale
                    # additional_copy from an earlier patch (or hit an
                    # unbound local), looping forever.
                    if patch.patchAdditionalLibraries:
                        to_patch += additional_copy

    return new_libraries
+
+
def process_single_variant(lwjgl_variant: MetaVersion, patches: LibraryPatches):
    """Patch, validate and write one accepted LWJGL variant to the launcher dir.

    LWJGL 2 and LWJGL 3 are written to different components. A variant missing
    any of the linux/windows/osx native classifiers (or a download for one of
    them) is considered broken and skipped instead of written.
    """
    lwjgl_version = lwjgl_variant.version
    v = copy.deepcopy(lwjgl_variant)

    # Apply library patches and append whatever extra libraries they add.
    new_libraries = []
    for lib in v.libraries:
        new_libraries += patch_library(lib, patches)
    v.libraries += list(dict.fromkeys(new_libraries))

    # Dispatch on the major version digit ("2.x.y" / "3.x.y").
    if lwjgl_version[0] == "2":
        filename = os.path.join(LAUNCHER_DIR, LWJGL_COMPONENT, f"{lwjgl_version}.json")

        v.name = "LWJGL 2"
        v.uid = LWJGL_COMPONENT
        v.conflicts = [Dependency(uid=LWJGL3_COMPONENT)]
    elif lwjgl_version[0] == "3":
        filename = os.path.join(LAUNCHER_DIR, LWJGL3_COMPONENT, f"{lwjgl_version}.json")

        v.name = "LWJGL 3"
        v.uid = LWJGL3_COMPONENT
        v.conflicts = [Dependency(uid=LWJGL_COMPONENT)]
        # remove jutils and jinput from LWJGL 3
        # this is a dependency that Mojang kept in, but doesn't belong there anymore
        filtered_libraries = list(
            filter(lambda l: l.name.artifact not in ["jutils", "jinput"], v.libraries)
        )
        v.libraries = filtered_libraries
    else:
        raise Exception("LWJGL version not recognized: %s" % v.version)

    v.volatile = True
    v.order = -1
    # Validate that every native-bearing library covers all three platforms.
    good = True
    for lib in v.libraries:
        # skip libraries without natives or that we patched
        if not lib.natives or lib in new_libraries:
            continue
        checked_dict = {"linux", "windows", "osx"}
        if not checked_dict.issubset(lib.natives.keys()):
            print("Missing system classifier!", v.version, lib.name, lib.natives.keys())
            good = False
            break
        if lib.downloads:
            for entry in checked_dict:
                baked_entry = lib.natives[entry]
                if baked_entry not in lib.downloads.classifiers:
                    print(
                        "Missing download for classifier!",
                        v.version,
                        lib.name,
                        baked_entry,
                        lib.downloads.classifiers.keys(),
                    )
                    good = False
                    break
    if good:
        v.write(filename)
    else:
        print("Skipped LWJGL", v.version)
+
+
def lib_is_split_native(lib: Library) -> bool:
    """Return True when the library uses a per-OS "natives-*" classifier."""
    classifier = lib.name.classifier
    # Return the condition directly instead of an if/return True/False chain.
    return bool(classifier and classifier.startswith("natives-"))
+
+
def version_has_split_natives(v: MojangVersion) -> bool:
    """Return True when any library of the version uses split natives."""
    # any() short-circuits exactly like the original manual loop did.
    return any(lib_is_split_native(lib) for lib in v.libraries)
+
+
def main():
    """Convert cached Mojang version files into launcher component files.

    For every upstream version JSON this splits the LWJGL libraries out into
    shared LWJGL 2/3 component buckets (deduplicated by content hash and
    vetted against PASS_VARIANTS/BAD_VARIANTS), replaces vulnerable log4j
    artifacts, applies static overrides, then writes the per-version JSON
    files and the package indexes.
    """
    # get the local version list
    override_index = LegacyOverrideIndex.parse_file(STATIC_OVERRIDES_FILE)
    legacy_services = LegacyServices.parse_file(STATIC_LEGACY_SERVICES_FILE)
    library_patches = LibraryPatches.parse_file(LIBRARY_PATCHES_FILE)

    found_any_lwjgl3 = False

    for filename in os.listdir(os.path.join(UPSTREAM_DIR, VERSIONS_DIR)):
        input_file = os.path.join(UPSTREAM_DIR, VERSIONS_DIR, filename)
        if not input_file.endswith(".json"):
            # skip non JSON files
            continue
        print("Processing", filename)
        mojang_version = MojangVersion.parse_file(input_file)
        v = mojang_version.to_meta_version(
            "Minecraft", MINECRAFT_COMPONENT, mojang_version.id
        )

        libs_minecraft = []
        new_libs_minecraft = []
        is_lwjgl_3 = False
        has_split_natives = version_has_split_natives(v)
        buckets = {}

        for lib in v.libraries:
            specifier = lib.name

            # generic fixes
            remove_paths_from_lib(lib)

            if APPLY_SPLIT_NATIVES_WORKAROUND and lib_is_split_native(lib):
                # merge classifier into artifact name to workaround bug in launcher
                specifier.artifact += f"-{specifier.classifier}"
                specifier.classifier = None

            if specifier.is_lwjgl():
                if has_split_natives:  # implies lwjgl3
                    bucket = add_or_get_bucket(buckets, None)
                    is_lwjgl_3 = True
                    found_any_lwjgl3 = True
                    bucket.version = specifier.version
                    if not bucket.libraries:
                        bucket.libraries = []
                    bucket.libraries.append(lib)
                    bucket.release_time = v.release_time
                else:
                    rules = None
                    if lib.rules:
                        rules = lib.rules
                        lib.rules = None
                    if is_macos_only(rules):
                        print(
                            "Candidate library ",
                            specifier,
                            " is only for macOS and is therefore ignored.",
                        )
                        continue
                    bucket = add_or_get_bucket(buckets, rules)
                    if (
                        specifier.group == "org.lwjgl.lwjgl"
                        and specifier.artifact == "lwjgl"
                    ):
                        bucket.version = specifier.version
                    if specifier.group == "org.lwjgl" and specifier.artifact == "lwjgl":
                        is_lwjgl_3 = True
                        found_any_lwjgl3 = True
                        bucket.version = specifier.version
                    if not bucket.libraries:
                        bucket.libraries = []
                    bucket.libraries.append(lib)
                    bucket.release_time = v.release_time
            # FIXME: workaround for insane log4j nonsense from December 2021. Probably needs adjustment.
            elif lib.name.is_log4j():
                version_override, maven_override = map_log4j_artifact(lib.name.version)

                if version_override and maven_override:
                    if version_override not in LOG4J_HASHES:
                        raise Exception(
                            "ERROR: unhandled log4j version (overriden) %s!"
                            % version_override
                        )

                    if lib.name.artifact not in LOG4J_HASHES[version_override]:
                        raise Exception(
                            "ERROR: unhandled log4j artifact %s!" % lib.name.artifact
                        )

                    replacement_name = GradleSpecifier(
                        "org.apache.logging.log4j", lib.name.artifact, version_override
                    )
                    artifact = MojangArtifact(
                        url=maven_override % (replacement_name.path()),
                        sha1=LOG4J_HASHES[version_override][lib.name.artifact]["sha1"],
                        size=LOG4J_HASHES[version_override][lib.name.artifact]["size"],
                    )

                    libs_minecraft.append(
                        Library(
                            name=replacement_name,
                            downloads=MojangLibraryDownloads(artifact=artifact),
                        )
                    )
                else:
                    libs_minecraft.append(lib)
            else:
                new_libs_minecraft += patch_library(lib, library_patches)
                libs_minecraft.append(lib)
        if len(buckets) == 1:
            for key in buckets:
                lwjgl = buckets[key]
                lwjgl.libraries = sorted(lwjgl.libraries, key=attrgetter("name"))
                add_lwjgl_version(lwjglVersionVariants, lwjgl)
                print("Found only candidate LWJGL", lwjgl.version, key)
        else:
            # multiple buckets for LWJGL. [None] is common to all, other keys are for different sets of rules
            for key in buckets:
                if key is None:
                    continue
                lwjgl = buckets[key]
                if None in buckets:
                    lwjgl.libraries = sorted(
                        lwjgl.libraries + buckets[None].libraries,
                        key=attrgetter("name"),
                    )
                else:
                    lwjgl.libraries = sorted(lwjgl.libraries, key=attrgetter("name"))
                add_lwjgl_version(lwjglVersionVariants, lwjgl)
                print("Found candidate LWJGL", lwjgl.version, key)
            # remove the common bucket...
            if None in buckets:
                del buckets[None]
        v.libraries = libs_minecraft + list(dict.fromkeys(new_libs_minecraft))

        if is_lwjgl_3:
            lwjgl_dependency = Dependency(uid=LWJGL3_COMPONENT)
        else:
            lwjgl_dependency = Dependency(uid=LWJGL_COMPONENT)
        if len(buckets) == 1:
            suggested_version = next(iter(buckets.values())).version
            if is_lwjgl_3:
                lwjgl_dependency.suggests = suggested_version
            else:
                lwjgl_dependency.suggests = "2.9.4-nightly-20150209"
        else:
            bad_versions = {"3.1.6", "3.2.1"}
            our_versions = set()

            for lwjgl in iter(buckets.values()):
                our_versions = our_versions.union({lwjgl.version})

            if our_versions == bad_versions:
                print("Found broken 3.1.6/3.2.1 combo, forcing LWJGL to 3.2.1")
                suggested_version = "3.2.1"
                lwjgl_dependency.suggests = suggested_version
            else:
                raise Exception(
                    "ERROR: cannot determine single suggested LWJGL version in %s"
                    % mojang_version.id
                )

        # if it uses LWJGL 3, add the trait that enables starting on first thread on macOS
        if is_lwjgl_3:
            if not v.additional_traits:
                v.additional_traits = []
            v.additional_traits.append("FirstThreadOnMacOS")
        v.requires = [lwjgl_dependency]
        v.order = -2
        # process 1.13 arguments into previous version
        if not mojang_version.minecraft_arguments and mojang_version.arguments:
            v.minecraft_arguments = adapt_new_style_arguments(mojang_version.arguments)
            if not v.additional_traits:
                v.additional_traits = []
            v.additional_traits.extend(
                adapt_new_style_arguments_to_traits(mojang_version.arguments)
            )
        out_filename = os.path.join(
            LAUNCHER_DIR, MINECRAFT_COMPONENT, f"{v.version}.json"
        )
        if v.version in override_index.versions:
            override = override_index.versions[v.version]
            override.apply_onto_meta_version(v)
        if v.version in legacy_services:
            # identity comparison per PEP 8 (E711); was "== None"
            if v.additional_traits is None:
                v.additional_traits = []
            v.additional_traits.append("legacyServices")
        v.write(out_filename)

    # Pick exactly one variant per LWJGL version; anything unknown aborts the
    # run so a human can triage it into PASS_VARIANTS or BAD_VARIANTS.
    for lwjglVersionVariant in lwjglVersionVariants:
        decided_variant = None
        passed_variants = 0
        unknown_variants = 0
        print(
            "%d variant(s) for LWJGL %s:"
            % (len(lwjglVersionVariants[lwjglVersionVariant]), lwjglVersionVariant)
        )

        for variant in lwjglVersionVariants[lwjglVersionVariant]:
            if variant.sha1 in BAD_VARIANTS:
                print("Variant %s ignored because it's marked as bad." % variant.sha1)
                continue
            if variant.sha1 in PASS_VARIANTS:
                print("Variant %s accepted." % variant.sha1)
                decided_variant = variant
                passed_variants += 1
                continue
            # print natives classifiers to decide which variant to use
            n = [
                x.natives.keys()
                for x in variant.version.libraries
                if x.natives is not None
            ]
            print(n)

            print(
                f'    "{variant.sha1}", # {lwjglVersionVariant} ({variant.version.release_time})'
            )
            unknown_variants += 1
        print("")

        if decided_variant and passed_variants == 1 and unknown_variants == 0:
            process_single_variant(decided_variant.version, library_patches)
        else:
            raise Exception(
                "No variant decided for version %s out of %d possible ones and %d unknown ones."
                % (lwjglVersionVariant, passed_variants, unknown_variants)
            )

    lwjgl_package = MetaPackage(uid=LWJGL_COMPONENT, name="LWJGL 2")
    lwjgl_package.write(os.path.join(LAUNCHER_DIR, LWJGL_COMPONENT, "package.json"))

    if found_any_lwjgl3:
        lwjgl_package = MetaPackage(uid=LWJGL3_COMPONENT, name="LWJGL 3")
        lwjgl_package.write(
            os.path.join(LAUNCHER_DIR, LWJGL3_COMPONENT, "package.json")
        )

    mojang_index = MojangIndexWrap(
        MojangIndex.parse_file(os.path.join(UPSTREAM_DIR, VERSION_MANIFEST_FILE))
    )

    minecraft_package = MetaPackage(uid=MINECRAFT_COMPONENT, name="Minecraft")
    minecraft_package.recommended = [mojang_index.latest.release]
    minecraft_package.write(
        os.path.join(LAUNCHER_DIR, MINECRAFT_COMPONENT, "package.json")
    )


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_neoforge.py b/meta/run/generate_neoforge.py
new file mode 100644
index 0000000000..ee1e26908d
--- /dev/null
+++ b/meta/run/generate_neoforge.py
@@ -0,0 +1,181 @@
+from copy import deepcopy
+import os
+import re
+import sys
+from operator import attrgetter
+from typing import Collection
+
+from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common.neoforge import (
+ NEOFORGE_COMPONENT,
+ INSTALLER_MANIFEST_DIR,
+ VERSION_MANIFEST_DIR,
+ DERIVED_INDEX_FILE,
+ INSTALLER_INFO_DIR,
+)
+from meta.common.forge import FORGEWRAPPER_LIBRARY
+from meta.common.mojang import MINECRAFT_COMPONENT
+from meta.model import (
+ MetaVersion,
+ Dependency,
+ Library,
+ GradleSpecifier,
+ MojangLibraryDownloads,
+ MojangArtifact,
+ MetaPackage,
+)
+from meta.model.neoforge import (
+ NeoForgeVersion,
+ NeoForgeInstallerProfileV2,
+ InstallerInfo,
+ DerivedNeoForgeIndex,
+)
+from meta.model.mojang import MojangVersion
+
# Output (launcher) and input (upstream cache) directories, resolved once.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Make sure the NeoForge output folder exists before any version is written.
ensure_component_dir(NEOFORGE_COMPONENT)
+
+
def eprint(*args, **kwargs):
    """Print to stderr; accepts the same arguments as built-in print()."""
    print(*args, file=sys.stderr, **kwargs)
+
+
def version_from_build_system_installer(
    installer: MojangVersion,
    profile: NeoForgeInstallerProfileV2,
    version: NeoForgeVersion,
) -> MetaVersion:
    """Build the NeoForge MetaVersion from new-style build-system installer data.

    Combines the installer's own version JSON, the installer profile's library
    list and the locally cached installer-jar info into a component version
    launched through ForgeWrapper.
    """
    v = MetaVersion(name="NeoForge", version=version.rawVersion, uid=NEOFORGE_COMPONENT)
    v.requires = [Dependency(uid=MINECRAFT_COMPONENT, equals=version.mc_version_sane)]
    v.main_class = "io.github.zekerzhayard.forgewrapper.installer.Main"

    # FIXME: Add the size and hash here
    v.maven_files = []

    # load the locally cached installer file info and use it to add the installer entry in the json
    info = InstallerInfo.parse_file(
        os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{version.long_version}.json")
    )
    installer_lib = Library(
        name=GradleSpecifier(
            "net.neoforged", version.artifact, version.long_version, "installer"
        )
    )
    installer_lib.downloads = MojangLibraryDownloads()
    installer_lib.downloads.artifact = MojangArtifact(
        url="https://maven.neoforged.net/%s" % (installer_lib.name.path()),
        sha1=info.sha1hash,
        size=info.size,
    )
    v.maven_files.append(installer_lib)

    # NOTE(review): log4j libraries are deliberately dropped here — confirm
    # they are supplied/patched elsewhere.
    for forge_lib in profile.libraries:
        if forge_lib.name.is_log4j():
            continue

        v.maven_files.append(forge_lib)

    v.libraries = []

    v.libraries.append(FORGEWRAPPER_LIBRARY)

    for forge_lib in installer.libraries:
        if forge_lib.name.is_log4j():
            continue

        v.libraries.append(forge_lib)

    v.release_time = installer.release_time
    v.order = 5
    # Legacy-style argument string, extended with the installer's own game args.
    mc_args = (
        "--username ${auth_player_name} --version ${version_name} --gameDir ${game_directory} "
        "--assetsDir ${assets_root} --assetIndex ${assets_index_name} --uuid ${auth_uuid} "
        "--accessToken ${auth_access_token} --userType ${user_type} --versionType ${version_type}"
    )
    for arg in installer.arguments.game:
        mc_args += f" {arg}"
    v.minecraft_arguments = mc_args
    return v
+
+
def main():
    """Generate NeoForge component version files and the package index.

    Walks the derived NeoForge index, skips entries without usable files or a
    locally available Minecraft version, and converts the rest via the
    build-system installer manifests.
    """
    # load the locally cached version list
    remote_versions = DerivedNeoForgeIndex.parse_file(
        os.path.join(UPSTREAM_DIR, DERIVED_INDEX_FILE)
    )
    recommended_versions = []

    for key, entry in remote_versions.versions.items():
        if entry.mc_version is None:
            eprint("Skipping %s with invalid MC version" % key)
            continue

        version = NeoForgeVersion(entry)

        if version.url() is None:
            eprint("Skipping %s with no valid files" % key)
            continue
        # fixed log message: this is the NeoForge generator, not Forge
        eprint("Processing NeoForge %s" % version.rawVersion)
        version_elements = version.rawVersion.split(".")
        if len(version_elements) < 1:
            eprint("Skipping version %s with not enough version elements" % key)
            continue

        major_version_str = version_elements[0]
        if not major_version_str.isnumeric():
            eprint(
                "Skipping version %s with non-numeric major version %s"
                % (key, major_version_str)
            )
            continue

        if entry.recommended:
            recommended_versions.append(version.rawVersion)

        # If we do not have the corresponding Minecraft version, we ignore it
        if not os.path.isfile(
            os.path.join(
                LAUNCHER_DIR, MINECRAFT_COMPONENT, f"{version.mc_version_sane}.json"
            )
        ):
            eprint(
                "Skipping %s with no corresponding Minecraft version %s"
                % (key, version.mc_version_sane)
            )
            continue

        # Path for new-style build system based installers
        installer_version_filepath = os.path.join(
            UPSTREAM_DIR, VERSION_MANIFEST_DIR, f"{version.long_version}.json"
        )
        profile_filepath = os.path.join(
            UPSTREAM_DIR, INSTALLER_MANIFEST_DIR, f"{version.long_version}.json"
        )

        eprint(installer_version_filepath)
        assert os.path.isfile(
            installer_version_filepath
        ), f"version {installer_version_filepath} does not have installer version manifest"
        installer = MojangVersion.parse_file(installer_version_filepath)
        profile = NeoForgeInstallerProfileV2.parse_file(profile_filepath)
        v = version_from_build_system_installer(installer, profile, version)

        v.write(os.path.join(LAUNCHER_DIR, NEOFORGE_COMPONENT, f"{v.version}.json"))

    recommended_versions.sort()

    print("Recommended versions:", recommended_versions)

    package = MetaPackage(
        uid=NEOFORGE_COMPONENT,
        name="NeoForge",
        project_url="https://neoforged.net",
    )
    package.recommended = recommended_versions
    package.write(os.path.join(LAUNCHER_DIR, NEOFORGE_COMPONENT, "package.json"))


if __name__ == "__main__":
    main()
diff --git a/meta/run/generate_quilt.py b/meta/run/generate_quilt.py
new file mode 100755
index 0000000000..ccf2797380
--- /dev/null
+++ b/meta/run/generate_quilt.py
@@ -0,0 +1,161 @@
+import json
+import os
+
+from meta.common import (
+ ensure_component_dir,
+ launcher_path,
+ upstream_path,
+ transform_maven_key,
+)
+from meta.common.quilt import (
+ JARS_DIR,
+ INSTALLER_INFO_DIR,
+ META_DIR,
+ INTERMEDIARY_COMPONENT,
+ LOADER_COMPONENT,
+ USE_QUILT_MAPPINGS,
+ DISABLE_BEACON_ARG,
+ DISABLE_BEACON_VERSIONS,
+)
+from meta.model import MetaVersion, Dependency, Library, MetaPackage, GradleSpecifier
+from meta.model.fabric import FabricJarInfo, FabricInstallerDataV1, FabricMainClasses
+
# Output (launcher) and input (upstream cache) directories, resolved once.
LAUNCHER_DIR = launcher_path()
UPSTREAM_DIR = upstream_path()

# Output folders for the two Quilt components must exist before writing.
ensure_component_dir(LOADER_COMPONENT)
ensure_component_dir(INTERMEDIARY_COMPONENT)
+
+
def load_jar_info(artifact_key) -> FabricJarInfo:
    """Load the cached jar metadata for a transformed maven artifact key."""
    path = os.path.join(UPSTREAM_DIR, JARS_DIR, f"{artifact_key}.json")
    return FabricJarInfo.parse_file(path)
+
+
def load_installer_info(version) -> FabricInstallerDataV1:
    """Load the cached installer metadata for a loader version."""
    path = os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{version}.json")
    return FabricInstallerDataV1.parse_file(path)
+
+
def process_loader_version(entry) -> "tuple[MetaVersion, bool]":
    """Build the Quilt Loader MetaVersion for one loader index entry.

    Returns the version plus whether it may be recommended; pre-releases
    (versions containing "-") are never recommended.
    """
    should_recommend = (
        "-" not in entry["version"]
    )  # Don't recommend pre releases as per SemVer

    jar_info = load_jar_info(transform_maven_key(entry["maven"]))
    installer_info = load_installer_info(entry["version"])

    v = MetaVersion(name="Quilt Loader", uid=LOADER_COMPONENT, version=entry["version"])
    v.release_time = jar_info.release_time
    v.requires = [Dependency(uid=INTERMEDIARY_COMPONENT)]
    v.order = 10
    v.type = "release"
    # newer installer data distinguishes client/server main classes
    if isinstance(installer_info.main_class, FabricMainClasses):
        v.main_class = installer_info.main_class.client
    else:
        v.main_class = installer_info.main_class
    v.libraries = []
    v.libraries.extend(installer_info.libraries.common)
    v.libraries.extend(installer_info.libraries.client)
    # the loader jar itself, resolved from the Quilt release maven
    loader_lib = Library(
        name=GradleSpecifier.from_string(entry["maven"]),
        url="https://maven.quiltmc.org/repository/release",
    )
    v.libraries.append(loader_lib)

    # pass the disable flag to versions listed in DISABLE_BEACON_VERSIONS
    if entry["version"] in DISABLE_BEACON_VERSIONS:
        if not v.additional_jvm_args:
            v.additional_jvm_args = []
        v.additional_jvm_args.append(DISABLE_BEACON_ARG)

    return v, should_recommend
+
+
def process_intermediary_version(entry) -> MetaVersion:
    """Build the hashed/intermediary-mappings MetaVersion for one index entry."""
    jar_info = load_jar_info(transform_maven_key(entry["maven"]))

    meta = MetaVersion(
        name="Quilt Intermediary Mappings",
        uid=INTERMEDIARY_COMPONENT,
        version=entry["version"],
    )
    meta.release_time = jar_info.release_time
    # mappings are pinned to exactly one Minecraft version
    meta.requires = [Dependency(uid="net.minecraft", equals=entry["version"])]
    meta.order = 11
    meta.type = "release"
    meta.volatile = True
    mappings_lib = Library(
        name=GradleSpecifier.from_string(entry["maven"]),
        url="https://maven.quiltmc.org/repository/release",
    )
    meta.libraries = [mappings_lib]
    return meta
+
+
def main():
    """Generate Quilt Loader (and optionally intermediary) component files.

    The newest stable loader becomes the single recommended loader version;
    every intermediary version is recommended.
    """
    recommended_loader_versions = []
    recommended_intermediary_versions = []

    with open(
        os.path.join(UPSTREAM_DIR, META_DIR, "loader.json"), "r", encoding="utf-8"
    ) as f:
        loader_version_index = json.load(f)
    for entry in loader_version_index:
        version = entry["version"]
        print(f"Processing loader {version}")

        v, should_recommend = process_loader_version(entry)

        if (
            not recommended_loader_versions and should_recommend
        ):  # newest stable loader is recommended
            recommended_loader_versions.append(version)

        v.write(os.path.join(LAUNCHER_DIR, LOADER_COMPONENT, f"{v.version}.json"))

    if USE_QUILT_MAPPINGS:
        with open(
            os.path.join(UPSTREAM_DIR, META_DIR, "hashed.json"), "r", encoding="utf-8"
        ) as f:
            intermediary_version_index = json.load(f)
        for entry in intermediary_version_index:
            version = entry["version"]
            print(f"Processing intermediary {version}")

            v = process_intermediary_version(entry)

            recommended_intermediary_versions.append(
                version
            )  # all intermediaries are recommended

            v.write(
                os.path.join(
                    LAUNCHER_DIR, INTERMEDIARY_COMPONENT, f"{v.version}.json"
                )
            )

    package = MetaPackage(uid=LOADER_COMPONENT, name="Quilt Loader")
    package.recommended = recommended_loader_versions
    package.description = "The Quilt project is an open, community-driven modding toolchain designed primarily for Minecraft."
    package.project_url = "https://quiltmc.org/"
    package.authors = ["Quilt Project"]
    package.write(os.path.join(LAUNCHER_DIR, LOADER_COMPONENT, "package.json"))

    if USE_QUILT_MAPPINGS:
        package = MetaPackage(
            uid=INTERMEDIARY_COMPONENT, name="Quilt Intermediary Mappings"
        )
        package.recommended = recommended_intermediary_versions
        package.description = "Intermediary mappings allow using Quilt Loader with mods for Minecraft in a more compatible manner."
        package.project_url = "https://quiltmc.org/"
        package.authors = ["Quilt Project"]
        package.write(
            os.path.join(LAUNCHER_DIR, INTERMEDIARY_COMPONENT, "package.json")
        )


if __name__ == "__main__":
    main()
diff --git a/meta/run/index.py b/meta/run/index.py
new file mode 100755
index 0000000000..23dc2336ea
--- /dev/null
+++ b/meta/run/index.py
@@ -0,0 +1,78 @@
+import hashlib
+import os
+from operator import attrgetter
+
+from meta.common import launcher_path
+from meta.model import MetaVersion, MetaPackage
+from meta.model.index import (
+ MetaPackageIndex,
+ MetaVersionIndex,
+ MetaVersionIndexEntry,
+ MetaPackageIndexEntry,
+)
+
+LAUNCHER_DIR = launcher_path()
+
+
+# take the hash type (like hashlib.md5) and filename, return hex string of hash
def hash_file(hash_fn, file_name):
    """Hash file_name with hash_fn (a hashlib constructor, e.g. hashlib.sha256).

    Reads in 4 KiB chunks so large files are never held in memory whole.
    Returns the hexadecimal digest string.
    """
    digest = hash_fn()
    with open(file_name, "rb") as stream:
        while chunk := stream.read(4096):
            digest.update(chunk)
    return digest.hexdigest()
+
+
# entries that are not version manifests and must never be indexed
IGNORED_FILES = {"index.json", "package.json", ".git", ".github"}

# top-level package index, accumulated while walking each component folder
packages = MetaPackageIndex()

for component in sorted(os.listdir(LAUNCHER_DIR)):
    if component in IGNORED_FILES:
        continue

    shared_data = MetaPackage.parse_file(
        os.path.join(LAUNCHER_DIR, component, "package.json")
    )
    recommended_versions = set(shared_data.recommended or [])

    # per-component version index
    version_index = MetaVersionIndex(uid=component, name=shared_data.name)

    # parse + hash every version manifest inside the component folder
    for entry_name in os.listdir(f"{LAUNCHER_DIR}/{component}"):
        if entry_name in IGNORED_FILES:
            continue
        entry_path = f"{LAUNCHER_DIR}/{component}/{entry_name}"
        entry_hash = hash_file(hashlib.sha256, entry_path)
        version_meta = MetaVersion.parse_file(entry_path)
        version_index.versions.append(
            MetaVersionIndexEntry.from_meta_version(
                version_meta,
                version_meta.version in recommended_versions,
                entry_hash,
            )
        )

    # newest release first
    version_index.versions.sort(key=attrgetter("release_time"), reverse=True)

    # write the version index for the component
    index_path = f"{LAUNCHER_DIR}/{component}/index.json"
    version_index.write(index_path)

    # record the component (with the hash of its freshly written index)
    packages.packages.append(
        MetaPackageIndexEntry(
            uid=component,
            name=shared_data.name,
            sha256=hash_file(hashlib.sha256, index_path),
        )
    )

packages.write(os.path.join(LAUNCHER_DIR, "index.json"))
diff --git a/meta/run/update_fabric.py b/meta/run/update_fabric.py
new file mode 100755
index 0000000000..132495b92c
--- /dev/null
+++ b/meta/run/update_fabric.py
@@ -0,0 +1,130 @@
+import json
+import os
+import zipfile
+from datetime import datetime
+
+import requests
+
+from meta.common import (
+ upstream_path,
+ ensure_upstream_dir,
+ transform_maven_key,
+ default_session,
+)
+from meta.common.fabric import (
+ JARS_DIR,
+ INSTALLER_INFO_DIR,
+ META_DIR,
+ DATETIME_FORMAT_HTTP,
+)
+from meta.model.fabric import FabricJarInfo
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(JARS_DIR)
+ensure_upstream_dir(INSTALLER_INFO_DIR)
+ensure_upstream_dir(META_DIR)
+
+sess = default_session()
+
+
def filehash(filename, hashtype, blocksize=65536):
    """Return the hex digest of filename using hashtype (a hashlib constructor).

    The file is consumed in blocksize chunks, so size is not a concern.
    """
    hasher = hashtype()
    with open(filename, "rb") as stream:
        while block := stream.read(blocksize):
            hasher.update(block)
    return hasher.hexdigest()
+
+
def get_maven_url(maven_key, server, ext):
    """Turn a maven key ("group:artifact:version") into a download URL.

    server is the repository base URL (with trailing slash); ext is the file
    suffix including the dot, e.g. ".jar" or ".json".
    """
    parts = maven_key.split(":", 3)
    group_path = parts[0].replace(".", "/")
    version_dir = f"{server}{group_path}/{parts[1]}/{parts[2]}/"
    return f"{version_dir}{parts[1]}-{parts[2]}{ext}"
+
+
def get_json_file(path, url):
    """Fetch JSON from url, cache it at path (sorted keys, indented), return it.

    Fix: the request is performed and validated *before* the destination file
    is opened. The original opened the file first, so a failed request left a
    truncated/empty cache file behind.
    """
    r = sess.get(url)
    r.raise_for_status()
    version_json = r.json()
    with open(path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, sort_keys=True, indent=4)
    return version_json
+
+
def head_file(url):
    """Issue an HTTP HEAD request for url and return the response headers.

    Raises requests.HTTPError on a non-success status.
    """
    response = sess.head(url)
    response.raise_for_status()
    return response.headers
+
+
def get_binary_file(path, url):
    """Download url and store the raw bytes at path.

    Fix: the request is performed and validated *before* the destination file
    is opened, so a failed request can no longer truncate an existing file
    (the original opened the file for writing first).
    """
    r = sess.get(url)
    r.raise_for_status()
    with open(path, "wb") as f:
        for chunk in r.iter_content(chunk_size=128):
            f.write(chunk)
+
+
def compute_jar_file(path, url):
    """Write <path>.json (FabricJarInfo) holding the release time of the jar at url.

    Prefers a cheap HTTP HEAD and the Last-Modified header; if that fails,
    downloads the jar and uses the newest entry timestamp inside the archive.
    The two approaches should agree, except the fallback timestamp may be a
    few minutes off.
    """
    try:
        # Let's not download a jar file if we don't need to.
        response_headers = head_file(url)
        release_time = datetime.strptime(
            response_headers["Last-Modified"], DATETIME_FORMAT_HTTP
        )
    except requests.HTTPError:
        # Just in case something changes in the future
        print(f"Falling back to downloading jar for {url}")

        jar_path = path + ".jar"
        get_binary_file(jar_path, url)
        with zipfile.ZipFile(jar_path) as jar:
            entry_times = (datetime(*entry.date_time) for entry in jar.infolist())
            release_time = max(entry_times, default=datetime.fromtimestamp(0))

    FabricJarInfo(release_time=release_time).write(path + ".json")
+
+
def main():
    """Refresh the local Fabric upstream cache.

    For each component index (intermediary, loader) fetched from
    meta.fabricmc.net, derive per-version jar timestamp info under JARS_DIR;
    then download the installer JSON for every loader into INSTALLER_INFO_DIR.
    """
    # get the version list for each component we are interested in
    for component in ["intermediary", "loader"]:
        index = get_json_file(
            os.path.join(UPSTREAM_DIR, META_DIR, f"{component}.json"),
            "https://meta.fabricmc.net/v2/versions/" + component,
        )
        for it in index:
            print(f"Processing {component} {it['version']} ")
            # jar URL on the Fabric maven, derived from the "maven" key
            jar_maven_url = get_maven_url(
                it["maven"], "https://maven.fabricmc.net/", ".jar"
            )
            # writes <key>.json with the jar's release timestamp
            compute_jar_file(
                os.path.join(UPSTREAM_DIR, JARS_DIR, transform_maven_key(it["maven"])),
                jar_maven_url,
            )

    # for each loader, download installer JSON file from maven
    # NOTE(review): reads the loader.json cached by the loop above, not the
    # in-memory index — presumably equivalent; verify if the loop changes.
    with open(
        os.path.join(UPSTREAM_DIR, META_DIR, "loader.json"), "r", encoding="utf-8"
    ) as loaderVersionIndexFile:
        loader_version_index = json.load(loaderVersionIndexFile)
        for it in loader_version_index:
            print(f"Downloading JAR info for loader {it['version']} ")
            maven_url = get_maven_url(
                it["maven"], "https://maven.fabricmc.net/", ".json"
            )
            get_json_file(
                os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{it['version']}.json"),
                maven_url,
            )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/run/update_forge.py b/meta/run/update_forge.py
new file mode 100755
index 0000000000..8fc8920bad
--- /dev/null
+++ b/meta/run/update_forge.py
@@ -0,0 +1,397 @@
+"""
+ Get the source files necessary for generating Forge versions
+"""
+
+import copy
+import hashlib
+import json
+import os
+import re
+import sys
+import zipfile
+from contextlib import suppress
+from datetime import datetime
+from pathlib import Path
+from pprint import pprint
+
+from pydantic import ValidationError
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.forge import (
+ JARS_DIR,
+ INSTALLER_INFO_DIR,
+ INSTALLER_MANIFEST_DIR,
+ VERSION_MANIFEST_DIR,
+ FILE_MANIFEST_DIR,
+ BAD_VERSIONS,
+ LEGACYINFO_FILE,
+)
+from meta.model.forge import (
+ ForgeFile,
+ ForgeEntry,
+ ForgeMCVersionInfo,
+ ForgeLegacyInfoList,
+ DerivedForgeIndex,
+ ForgeVersion,
+ ForgeInstallerProfile,
+ ForgeInstallerProfileV2,
+ InstallerInfo,
+ ForgeLegacyInfo,
+)
+from meta.model.mojang import MojangVersion
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(JARS_DIR)
+ensure_upstream_dir(INSTALLER_INFO_DIR)
+ensure_upstream_dir(INSTALLER_MANIFEST_DIR)
+ensure_upstream_dir(VERSION_MANIFEST_DIR)
+ensure_upstream_dir(FILE_MANIFEST_DIR)
+
+LEGACYINFO_PATH = os.path.join(UPSTREAM_DIR, LEGACYINFO_FILE)
+
+sess = default_session()
+
+
def eprint(*args, **kwargs):
    """print(), but to stderr — keeps progress chatter out of stdout."""
    print(*args, file=sys.stderr, **kwargs)
+
+
def filehash(filename, hashtype, blocksize=65536):
    """Hash the file at filename with hashtype (a hashlib constructor, e.g.
    hashlib.sha256) and return the hexadecimal digest.

    Reads in blocksize chunks so arbitrarily large installer jars can be
    hashed. Fix: the hash object no longer rebinds (shadows) the `hashtype`
    parameter, which made the code needlessly confusing.
    """
    digest = hashtype()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()
+
+
def get_single_forge_files_manifest(longversion):
    """Return {classifier: ForgeFile} for one Forge long version.

    The per-version meta.json is loaded from the local cache under
    upstream/forge/files_manifests/ when present, otherwise fetched from
    files.minecraftforge.net and cached. Entries with a missing hash, or a
    hash that does not reduce to exactly 32 word characters (md5-shaped),
    are skipped with a message.
    """
    print(f"Getting Forge manifest for {longversion}")
    path_thing = UPSTREAM_DIR + "/forge/files_manifests/%s.json" % longversion
    files_manifest_file = Path(path_thing)
    from_file = False
    if files_manifest_file.is_file():
        with open(path_thing, "r") as f:
            files_json = json.load(f)
            from_file = True
    else:
        file_url = (
            "https://files.minecraftforge.net/net/minecraftforge/forge/%s/meta.json"
            % longversion
        )
        r = sess.get(file_url)
        r.raise_for_status()
        files_json = r.json()

    ret_dict = dict()

    for classifier, extensionObj in files_json.get("classifiers").items():
        assert type(classifier) == str
        assert type(extensionObj) == dict

        # assert len(extensionObj.items()) == 1
        # NOTE(review): each iteration deep-copies extensionObj and popitem()s
        # it, so the *same* (last-inserted) extension/hash pair is examined
        # every pass; a classifier with more than one valid extension lands in
        # the `assert False` branch below. Looks intentional as a tripwire for
        # an upstream format change — confirm before relying on it.
        index = 0
        count = 0
        while index < len(extensionObj.items()):
            mutable_copy = copy.deepcopy(extensionObj)
            extension, hashtype = mutable_copy.popitem()
            if not type(classifier) == str:
                pprint(classifier)
                pprint(extensionObj)
            if not type(hashtype) == str:
                # hash value missing (null) — skip this extension
                pprint(classifier)
                pprint(extensionObj)
                print(
                    "%s: Skipping missing hash for extension %s:"
                    % (longversion, extension)
                )
                index += 1
                continue
            assert type(classifier) == str
            # strip whitespace/punctuation noise the upstream hashes carry
            processed_hash = re.sub(r"\W", "", hashtype)
            if not len(processed_hash) == 32:
                print(
                    "%s: Skipping invalid hash for extension %s:"
                    % (longversion, extension)
                )
                pprint(extensionObj)
                index += 1
                continue

            file_obj = ForgeFile(
                classifier=classifier, hash=processed_hash, extension=extension
            )
            if count == 0:
                ret_dict[classifier] = file_obj
                index += 1
                count += 1
            else:
                print(
                    "%s: Multiple objects detected for classifier %s:"
                    % (longversion, classifier)
                )
                pprint(extensionObj)
                assert False

    # only (re)write the cache when the data actually came from the network
    if not from_file:
        with open(path_thing, "w", encoding="utf-8") as f:
            json.dump(files_json, f, sort_keys=True, indent=4)

    return ret_dict
+
+
def main():
    """Build the derived Forge index and harvest installer artifacts.

    Pipeline:
      1. fetch maven-metadata.json (all versions) and promotions_slim.json
         (recommended markers) from files.minecraftforge.net,
      2. derive a DerivedForgeIndex with per-MC-version recommended/latest,
      3. per version: download the installer jar when needed and extract
         version.json / install_profile.json into the upstream cache,
      4. for pre-installer versions, collect legacy jar info (timestamps,
         hashes, size) — only while the legacy info file is absent.
    All output lands below UPSTREAM_DIR.
    """
    # get the remote version list fragments
    r = sess.get(
        "https://files.minecraftforge.net/net/minecraftforge/forge/maven-metadata.json"
    )
    r.raise_for_status()
    main_json = r.json()
    assert type(main_json) == dict

    r = sess.get(
        "https://files.minecraftforge.net/net/minecraftforge/forge/promotions_slim.json"
    )
    r.raise_for_status()
    promotions_json = r.json()
    assert type(promotions_json) == dict

    promoted_key_expression = re.compile(
        "(?P<mc>[^-]+)-(?P<promotion>(latest)|(recommended))(-(?P<branch>[a-zA-Z0-9\\.]+))?"
    )

    recommended_set = set()

    new_index = DerivedForgeIndex()

    # FIXME: does not fully validate that the file has not changed format
    # NOTE: For some insane reason, the format of the versions here is special. It having a branch at the end means it
    # affects that particular branch.
    # We don't care about Forge having branches.
    # Therefore we only use the short version part for later identification and filter out the branch-specific
    # promotions (among other errors).
    print("Processing promotions:")
    for promoKey, shortversion in promotions_json.get("promos").items():
        match = promoted_key_expression.match(promoKey)
        if not match:
            print("Skipping promotion %s, the key did not parse:" % promoKey)
            pprint(promoKey)
            assert match
        if not match.group("mc"):
            print(
                "Skipping promotion %s, because it has no Minecraft version." % promoKey
            )
            continue
        if match.group("branch"):
            print("Skipping promotion %s, because it on a branch only." % promoKey)
            continue
        elif match.group("promotion") == "recommended":
            recommended_set.add(shortversion)
            print("%s added to recommended set" % shortversion)
        elif match.group("promotion") == "latest":
            # "latest" promotions are recomputed later from version order
            pass
        else:
            assert False

    version_expression = re.compile(
        "^(?P<mc>[0-9a-zA-Z_\\.]+)-(?P<ver>[0-9\\.]+\\.(?P<build>[0-9]+))(-(?P<branch>[a-zA-Z0-9\\.]+))?$"
    )

    print("")
    print("Processing versions:")
    for mc_version, value in main_json.items():
        assert type(mc_version) == str
        assert type(value) == list
        for long_version in value:
            assert type(long_version) == str
            match = version_expression.match(long_version)
            if not match:
                pprint(long_version)
                assert match
            assert match.group("mc") == mc_version

            files = get_single_forge_files_manifest(long_version)

            build = int(match.group("build"))
            version = match.group("ver")
            branch = match.group("branch")

            is_recommended = version in recommended_set

            entry = ForgeEntry(
                long_version=long_version,
                mc_version=mc_version,
                version=version,
                build=build,
                branch=branch,
                # NOTE: we add this later after the fact. The forge promotions file lies about these.
                latest=False,
                recommended=is_recommended,
                files=files,
            )
            new_index.versions[long_version] = entry
            if not new_index.by_mc_version:
                new_index.by_mc_version = dict()
            if mc_version not in new_index.by_mc_version:
                new_index.by_mc_version.setdefault(mc_version, ForgeMCVersionInfo())
            new_index.by_mc_version[mc_version].versions.append(long_version)
            # NOTE: we add this later after the fact. The forge promotions file lies about these.
            # if entry.latest:
            #     new_index.by_mc_version[mc_version].latest = long_version
            if entry.recommended:
                new_index.by_mc_version[mc_version].recommended = long_version

    print("")
    print("Post processing promotions and adding missing 'latest':")
    # assumes maven-metadata lists versions oldest-to-newest — TODO confirm
    for mc_version, info in new_index.by_mc_version.items():
        latest_version = info.versions[-1]
        info.latest = latest_version
        new_index.versions[latest_version].latest = True
        print("Added %s as latest for %s" % (latest_version, mc_version))

    print("")
    print("Dumping index files...")

    with open(UPSTREAM_DIR + "/forge/maven-metadata.json", "w", encoding="utf-8") as f:
        json.dump(main_json, f, sort_keys=True, indent=4)

    with open(UPSTREAM_DIR + "/forge/promotions_slim.json", "w", encoding="utf-8") as f:
        json.dump(promotions_json, f, sort_keys=True, indent=4)

    new_index.write(UPSTREAM_DIR + "/forge/derived_index.json")

    legacy_info_list = ForgeLegacyInfoList()

    print("Grabbing installers and dumping installer profiles...")
    # get the installer jars - if needed - and get the installer profiles out of them
    for key, entry in new_index.versions.items():
        eprint("Updating Forge %s" % key)
        if entry.mc_version is None:
            eprint("Skipping %d with invalid MC version" % entry.build)
            continue

        version = ForgeVersion(entry)
        if version.url() is None:
            eprint("Skipping %d with no valid files" % version.build)
            continue
        if version.long_version in BAD_VERSIONS:
            eprint(f"Skipping bad version {version.long_version}")
            continue

        jar_path = os.path.join(UPSTREAM_DIR, JARS_DIR, version.filename())

        if version.uses_installer():
            installer_info_path = (
                UPSTREAM_DIR + "/forge/installer_info/%s.json" % version.long_version
            )
            profile_path = (
                UPSTREAM_DIR
                + "/forge/installer_manifests/%s.json" % version.long_version
            )
            version_file_path = (
                UPSTREAM_DIR + "/forge/version_manifests/%s.json" % version.long_version
            )

            # refresh when either derived file is missing
            installer_refresh_required = not os.path.isfile(
                profile_path
            ) or not os.path.isfile(installer_info_path)

            if installer_refresh_required:
                # grab the installer if it's not there
                if not os.path.isfile(jar_path):
                    eprint("Downloading %s" % version.url())
                    rfile = sess.get(version.url(), stream=True)
                    rfile.raise_for_status()
                    with open(jar_path, "wb") as f:
                        for chunk in rfile.iter_content(chunk_size=128):
                            f.write(chunk)

            eprint("Processing %s" % version.url())
            # harvestables from the installer
            if not os.path.isfile(profile_path):
                print(jar_path)
                with zipfile.ZipFile(jar_path) as jar:
                    # version.json is optional in old installers (KeyError suppressed)
                    with suppress(KeyError):
                        with jar.open("version.json") as profile_zip_entry:
                            version_data = profile_zip_entry.read()

                            # Process: does it parse?
                            MojangVersion.parse_raw(version_data)

                            with open(version_file_path, "wb") as versionJsonFile:
                                versionJsonFile.write(version_data)
                                versionJsonFile.close()

                    with jar.open("install_profile.json") as profile_zip_entry:
                        install_profile_data = profile_zip_entry.read()

                        # Process: does it parse as either the v1 or v2 schema?
                        is_parsable = False
                        exception = None
                        try:
                            ForgeInstallerProfile.parse_raw(install_profile_data)
                            is_parsable = True
                        except ValidationError as err:
                            exception = err
                        try:
                            ForgeInstallerProfileV2.parse_raw(install_profile_data)
                            is_parsable = True
                        except ValidationError as err:
                            exception = err

                        if not is_parsable:
                            if version.is_supported():
                                raise exception
                            else:
                                eprint(
                                    "Version %s is not supported and won't be generated later."
                                    % version.long_version
                                )

                        # cache the raw profile even when unparsable
                        with open(profile_path, "wb") as profileFile:
                            profileFile.write(install_profile_data)
                            profileFile.close()

            # installer info v1
            if not os.path.isfile(installer_info_path):
                installer_info = InstallerInfo()
                installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
                installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
                installer_info.size = os.path.getsize(jar_path)
                installer_info.write(installer_info_path)
        else:
            # ignore the two versions without install manifests and jar mod class files
            # TODO: fix those versions?
            if version.mc_version_sane == "1.6.1":
                continue

            # only gather legacy info if it's missing
            if not os.path.isfile(LEGACYINFO_PATH):
                # grab the jar/zip if it's not there
                if not os.path.isfile(jar_path):
                    rfile = sess.get(version.url(), stream=True)
                    rfile.raise_for_status()
                    with open(jar_path, "wb") as f:
                        for chunk in rfile.iter_content(chunk_size=128):
                            f.write(chunk)
                # find the latest timestamp in the zip file
                tstamp = datetime.fromtimestamp(0)
                with zipfile.ZipFile(jar_path) as jar:
                    for info in jar.infolist():
                        tstamp_new = datetime(*info.date_time)
                        if tstamp_new > tstamp:
                            tstamp = tstamp_new
                legacy_info = ForgeLegacyInfo()
                legacy_info.release_time = tstamp
                legacy_info.sha1 = filehash(jar_path, hashlib.sha1)
                legacy_info.sha256 = filehash(jar_path, hashlib.sha256)
                legacy_info.size = os.path.getsize(jar_path)
                # NOTE(review): `.number` looks like a keyed map on the model —
                # confirm the field name against ForgeLegacyInfoList
                legacy_info_list.number[key] = legacy_info

    # only write legacy info if it's missing
    if not os.path.isfile(LEGACYINFO_PATH):
        legacy_info_list.write(LEGACYINFO_PATH)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/run/update_java.py b/meta/run/update_java.py
new file mode 100644
index 0000000000..dc9e159be3
--- /dev/null
+++ b/meta/run/update_java.py
@@ -0,0 +1,175 @@
+import os
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.java import (
+ BASE_DIR,
+ ADOPTIUM_DIR,
+ AZUL_DIR,
+ ADOPTIUM_VERSIONS_DIR,
+ AZUL_VERSIONS_DIR,
+)
+from meta.model.java import (
+ ADOPTIUM_API_AVAILABLE_RELEASES,
+ adoptiumAPIFeatureReleasesUrl,
+ AdoptiumImageType,
+ AdoptiumAPIFeatureReleasesQuery,
+ AdoptiumAvailableReleases,
+ AdoptiumRelease,
+ AdoptiumReleases,
+ azulApiPackagesUrl,
+ AzulApiPackagesQuery,
+ ZuluPackage,
+ ZuluPackageList,
+ AzulArchiveType,
+ AzulReleaseStatus,
+ AzulAvailabilityType,
+ azulApiPackageDetailUrl,
+ ZuluPackageDetail,
+ ZuluPackagesDetail,
+)
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+ensure_upstream_dir(ADOPTIUM_DIR)
+ensure_upstream_dir(AZUL_DIR)
+ensure_upstream_dir(ADOPTIUM_VERSIONS_DIR)
+ensure_upstream_dir(AZUL_VERSIONS_DIR)
+
+
+sess = default_session()
+
+
def _fetch_adoptium_releases(feature, image_type, label, page_size=10):
    """Page through the Adoptium feature-releases API for one image type.

    feature: the Java feature release number (e.g. 8, 17).
    image_type: AdoptiumImageType.Jre or .Jdk.
    label: human-readable tag used in progress output ("JRE"/"JDK").
    Returns a list of AdoptiumRelease. Stops on a 404 or a short page.
    """
    releases = []
    page = 0
    while True:
        query = AdoptiumAPIFeatureReleasesQuery(
            image_type=image_type, page_size=page_size, page=page
        )
        api_call = adoptiumAPIFeatureReleasesUrl(feature, query=query)
        print(f"Fetching {label} Page:", page, api_call)
        r_rls = sess.get(api_call)
        if r_rls.status_code == 404:
            # the API 404s past the last page
            break
        r_rls.raise_for_status()

        page_json = r_rls.json()
        releases.extend(AdoptiumRelease(**rls) for rls in page_json)

        if len(page_json) < page_size:
            break
        page += 1
    return releases


def main():
    """Mirror Adoptium and Azul (Zulu) Java release metadata into UPSTREAM_DIR.

    Adoptium: fetch the available-releases index, then all JRE and JDK
    releases per feature version (the two paging loops were previously
    duplicated inline; they now share _fetch_adoptium_releases).
    Azul: page through the Zulu package list, then fetch/cache each package's
    detail manifest and group details by major Java version.
    """
    print("Getting Adoptium Release Manifests ")
    r = sess.get(ADOPTIUM_API_AVAILABLE_RELEASES)
    r.raise_for_status()

    available = AdoptiumAvailableReleases(**r.json())

    available_releases_file = os.path.join(
        UPSTREAM_DIR, ADOPTIUM_DIR, "available_releases.json"
    )
    available.write(available_releases_file)

    for feature in available.available_releases:
        print("Getting Manifests for Adoptium feature release:", feature)

        # JRE and JDK images are published separately; gather both.
        releases_for_feature: list[AdoptiumRelease] = []
        releases_for_feature.extend(
            _fetch_adoptium_releases(feature, AdoptiumImageType.Jre, "JRE")
        )
        releases_for_feature.extend(
            _fetch_adoptium_releases(feature, AdoptiumImageType.Jdk, "JDK")
        )

        print("Total Adoptium releases for feature:", len(releases_for_feature))
        releases = AdoptiumReleases(__root__=releases_for_feature)
        feature_file = os.path.join(
            UPSTREAM_DIR, ADOPTIUM_VERSIONS_DIR, f"java{feature}.json"
        )
        releases.write(feature_file)

    print("Getting Azul Release Manifests")
    zulu_packages: list[ZuluPackage] = []
    page = 1
    page_size = 100
    while True:
        query = AzulApiPackagesQuery(
            archive_type=AzulArchiveType.Zip,
            release_status=AzulReleaseStatus.Ga,
            availability_types=[AzulAvailabilityType.CA],
            javafx_bundled=False,
            page=page,
            page_size=page_size,
        )
        api_call = azulApiPackagesUrl(query=query)

        print("Processing Page:", page, api_call)

        r = sess.get(api_call)
        if r.status_code == 404:
            break
        r.raise_for_status()

        packages = list(ZuluPackage(**pkg) for pkg in r.json())
        zulu_packages.extend(packages)
        if len(packages) < page_size:
            break
        page += 1

    print("Total Azul Packages:", len(zulu_packages))
    packages = ZuluPackageList(__root__=zulu_packages)
    azul_manifest_file = os.path.join(UPSTREAM_DIR, AZUL_DIR, "packages.json")
    packages.write(azul_manifest_file)

    azul_major_versions: dict[int, ZuluPackagesDetail] = {}

    # iterating the pydantic list model yields (index, package) pairs —
    # presumably via a custom __iter__ on ZuluPackageList; verify if upgraded
    for _, pkg in packages:
        major_version = pkg.java_version[0]
        if major_version not in azul_major_versions:
            azul_major_versions[major_version] = ZuluPackagesDetail(__root__=[])

        pkg_file = os.path.join(
            UPSTREAM_DIR, AZUL_VERSIONS_DIR, f"{pkg.package_uuid}.json"
        )
        if os.path.exists(pkg_file) and os.path.isfile(pkg_file):
            # detail already cached — reuse it instead of re-fetching
            pkg_detail = ZuluPackageDetail.parse_file(pkg_file)
            azul_major_versions[major_version].append(pkg_detail)
        else:
            api_call = azulApiPackageDetailUrl(pkg.package_uuid)
            print("Fetching Azul package manifest:", pkg.package_uuid)
            r_pkg = sess.get(api_call)
            r_pkg.raise_for_status()

            pkg_detail = ZuluPackageDetail(**r_pkg.json())
            pkg_detail.write(pkg_file)
            azul_major_versions[major_version].append(pkg_detail)

    for major in azul_major_versions:
        major_file = os.path.join(UPSTREAM_DIR, AZUL_VERSIONS_DIR, f"java{major}.json")
        azul_major_versions[major].write(major_file)
+
+
+if __name__ == "__main__":
+ main()
+#
diff --git a/meta/run/update_liteloader.py b/meta/run/update_liteloader.py
new file mode 100755
index 0000000000..383644754d
--- /dev/null
+++ b/meta/run/update_liteloader.py
@@ -0,0 +1,37 @@
+import json
+import os
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.liteloader import VERSIONS_FILE, BASE_DIR
+from meta.model.liteloader import LiteloaderIndex
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+
+sess = default_session()
+
+
def main():
    """Download the LiteLoader version index, verify it round-trips through
    our model unchanged, and store it in the upstream cache."""
    # get the remote version list
    r = sess.get("http://dl.liteloader.com/versions/versions.json")
    r.raise_for_status()

    # make sure it's JSON
    main_json = r.json()

    # make sure we understand the schema: serialising the parsed model must
    # reproduce the original document exactly
    remote_versions = LiteloaderIndex.parse_obj(main_json)
    assert remote_versions.json() == json.dumps(main_json, sort_keys=True, indent=4)

    print("Successfully parsed index")
    print(f"Last updated {remote_versions.meta.updated}")

    # save the json to file
    remote_versions.write(os.path.join(UPSTREAM_DIR, VERSIONS_FILE))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/run/update_mojang.py b/meta/run/update_mojang.py
new file mode 100755
index 0000000000..33b4e061ad
--- /dev/null
+++ b/meta/run/update_mojang.py
@@ -0,0 +1,180 @@
+import json
+import os
+import zipfile
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.http import download_binary_file
+from meta.common.mojang import (
+ BASE_DIR,
+ VERSION_MANIFEST_FILE,
+ VERSIONS_DIR,
+ ASSETS_DIR,
+ STATIC_EXPERIMENTS_FILE,
+ STATIC_OLD_SNAPSHOTS_FILE,
+ JAVA_MANIFEST_FILE,
+)
+from meta.model.mojang import (
+ MojangIndexWrap,
+ MojangIndex,
+ ExperimentIndex,
+ ExperimentIndexWrap,
+ OldSnapshotIndexWrap,
+ OldSnapshotIndex,
+ JavaIndex,
+)
+
+UPSTREAM_DIR = upstream_path()
+
+ensure_upstream_dir(BASE_DIR)
+ensure_upstream_dir(VERSIONS_DIR)
+ensure_upstream_dir(ASSETS_DIR)
+
+sess = default_session()
+
+
def fetch_zipped_version(path, url):
    """Download a zipped version manifest (experimental snapshot builds),
    extract the first .json member, cache it at path, and return the parsed
    JSON.

    Fixes: raises ValueError when the archive contains no .json member (the
    original hit a NameError on the unbound local, or an AssertionError that
    vanishes under `python -O`); the zip member is closed via a context
    manager.
    """
    zip_path = f"{path}.zip"
    download_binary_file(sess, zip_path, url)

    version_json = None
    with zipfile.ZipFile(zip_path) as z:
        for info in z.infolist():
            if info.filename.endswith(".json"):
                print(f"Found {info.filename} as version json")
                with z.open(info) as member:
                    version_json = json.load(member)
                break

    if version_json is None:
        raise ValueError(f"no version json found in archive from {url}")

    with open(path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, sort_keys=True, indent=4)

    return version_json
+
+
def fetch_modified_version(path, version):
    """Fetch an old-snapshot version json and patch it into modern shape.

    The upstream document only carries a date, so the timestamp is pinned to
    midnight UTC+2; the client download entry (url/sha1/size) comes from the
    static index `version` record. The patched json is cached at path and
    returned.
    """
    response = sess.get(version.url)
    response.raise_for_status()
    version_json = response.json()

    release_time = version_json["releaseTime"] + "T00:00:00+02:00"
    version_json["releaseTime"] = release_time
    version_json["time"] = release_time

    version_json["downloads"] = {
        "client": {
            "url": version.jar,
            "sha1": version.sha1,
            "size": version.size,
        }
    }
    version_json["type"] = "old_snapshot"

    with open(path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, sort_keys=True, indent=4)

    return version_json
+
+
def fetch_version(path, url):
    """Fetch a Mojang version json from url, cache it at path (sorted keys,
    indented), and return the parsed document."""
    response = sess.get(url)
    response.raise_for_status()
    version_json = response.json()

    with open(path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, sort_keys=True, indent=4)

    return version_json
+
+
+MOJANG_JAVA_URL = "https://piston-meta.mojang.com/v1/products/java-runtime/2ec0cc96c44e5a76b9c8b7c39df7210883d12871/all.json"
+
+
def update_javas():
    """Refresh the cached Mojang java-runtime manifest (all platforms)."""
    response = sess.get(MOJANG_JAVA_URL)
    response.raise_for_status()

    java_manifest_path = os.path.join(UPSTREAM_DIR, JAVA_MANIFEST_FILE)
    JavaIndex(__root__=response.json()).write(java_manifest_path)
+
+
def main():
    """Sync the Mojang upstream cache.

    Downloads the piston-meta version manifest, fetches any version that is
    new or newer than the local copy, merges in statically-listed experiment
    builds and old snapshots, writes the manifest back, and finally refreshes
    the Java runtime manifest.

    Fix: corrected the "manfest" typo in the final progress message.
    """
    # get the remote version list
    r = sess.get("https://piston-meta.mojang.com/mc/game/version_manifest_v2.json")
    r.raise_for_status()

    remote_versions = MojangIndexWrap(MojangIndex(**r.json()))
    remote_ids = set(remote_versions.versions.keys())

    version_manifest_path = os.path.join(UPSTREAM_DIR, VERSION_MANIFEST_FILE)

    if os.path.exists(version_manifest_path):
        # get the local version list
        current_versions = MojangIndexWrap(
            MojangIndex.parse_file(version_manifest_path)
        )
        local_ids = set(current_versions.versions.keys())

        # versions not present locally but present remotely are new
        pending_ids = remote_ids.difference(local_ids)

        # versions whose remote timestamp moved forward need re-fetching too
        for x in local_ids:
            remote_version = remote_versions.versions[x]
            local_version = current_versions.versions[x]
            if remote_version.time > local_version.time:
                pending_ids.add(x)
    else:
        # no local manifest yet: everything is pending
        pending_ids = remote_ids

    for x in pending_ids:
        version = remote_versions.versions[x]
        print(
            "Updating "
            + version.id
            + " to timestamp "
            + version.release_time.isoformat()
        )
        fetch_version(
            os.path.join(UPSTREAM_DIR, VERSIONS_DIR, f"{x}.json"), version.url
        )

    # deal with experimental snapshots separately (zipped manifests)
    if os.path.exists(STATIC_EXPERIMENTS_FILE):
        experiments = ExperimentIndexWrap(
            ExperimentIndex.parse_file(STATIC_EXPERIMENTS_FILE)
        )
        experiment_ids = set(experiments.versions.keys())

        for x in experiment_ids:
            version = experiments.versions[x]
            experiment_path = os.path.join(UPSTREAM_DIR, VERSIONS_DIR, f"{x}.json")

            print("Updating experiment " + version.id)
            if not os.path.isfile(experiment_path):
                fetch_zipped_version(experiment_path, version.url)
            else:
                print("Already have experiment " + version.id)

    # deal with old snapshots (manifests need patching before use)
    if os.path.exists(STATIC_OLD_SNAPSHOTS_FILE):
        old_snapshots = OldSnapshotIndexWrap(
            OldSnapshotIndex.parse_file(STATIC_OLD_SNAPSHOTS_FILE)
        )
        old_snapshots_ids = set(old_snapshots.versions.keys())

        for x in old_snapshots_ids:
            version = old_snapshots.versions[x]
            old_snapshots_path = os.path.join(UPSTREAM_DIR, VERSIONS_DIR, f"{x}.json")

            print("Updating old snapshot " + version.id)
            if not os.path.isfile(old_snapshots_path):
                fetch_modified_version(old_snapshots_path, version)
            else:
                print("Already have old snapshot " + version.id)

    remote_versions.index.write(version_manifest_path)

    print("Getting Mojang Java runtime manifest")
    update_javas()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/meta/run/update_neoforge.py b/meta/run/update_neoforge.py
new file mode 100644
index 0000000000..826981aed1
--- /dev/null
+++ b/meta/run/update_neoforge.py
@@ -0,0 +1,319 @@
+"""
+    Get the source files necessary for generating NeoForge versions
+"""
+
+import copy
+import hashlib
+import json
+import os
+import re
+import sys
+import zipfile
+from contextlib import suppress
+from datetime import datetime
+from pathlib import Path
+from pprint import pprint
+import urllib.parse
+
+from pydantic import ValidationError
+
+from meta.common import upstream_path, ensure_upstream_dir, default_session
+from meta.common.neoforge import (
+ JARS_DIR,
+ INSTALLER_INFO_DIR,
+ INSTALLER_MANIFEST_DIR,
+ VERSION_MANIFEST_DIR,
+ FILE_MANIFEST_DIR,
+)
+from meta.model.neoforge import (
+ NeoForgeFile,
+ NeoForgeEntry,
+ NeoForgeMCVersionInfo,
+ DerivedNeoForgeIndex,
+ NeoForgeVersion,
+ NeoForgeInstallerProfileV2,
+ InstallerInfo,
+)
+from meta.model.mojang import MojangVersion
+
# Local cache root for everything fetched from upstream.
UPSTREAM_DIR = upstream_path()

# Create all cache subdirectories up front so the download code can write
# into them without checking first.
ensure_upstream_dir(JARS_DIR)
ensure_upstream_dir(INSTALLER_INFO_DIR)
ensure_upstream_dir(INSTALLER_MANIFEST_DIR)
ensure_upstream_dir(VERSION_MANIFEST_DIR)
ensure_upstream_dir(FILE_MANIFEST_DIR)

# Shared HTTP session — presumably configured with retries/headers by the
# project helper (see meta.common.default_session); verify there.
sess = default_session()
+
+
def eprint(*args, **kwargs):
    """Like print(), but directed at stderr (used for progress/warnings)."""
    print(*args, **kwargs, file=sys.stderr)
+
+
def filehash(filename, hashtype, blocksize=65536):
    """Return the hex digest of a file's contents.

    :param filename: path of the file to hash
    :param hashtype: hash constructor, e.g. ``hashlib.sha1`` or ``hashlib.sha256``
    :param blocksize: bytes read per chunk; keeps memory flat for large jars
    """
    # Fix: the original rebound the `hashtype` parameter to the hash *object*,
    # shadowing the constructor. Use a separate local (matches the quilt
    # updater's copy of this helper).
    digest = hashtype()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()
+
+
def find_nth(haystack, needle, n):
    """Return the index of the n-th non-overlapping occurrence of *needle*.

    Returns -1 when fewer than *n* occurrences exist; for n <= 1 this is
    equivalent to ``haystack.find(needle)``.
    """
    pos = haystack.find(needle)
    for _ in range(n - 1):
        if pos < 0:
            break
        # Skip past the whole previous match so occurrences never overlap.
        pos = haystack.find(needle, pos + len(needle))
    return pos
+
+
def get_single_forge_files_manifest(longversion, artifact: str):
    """Return a ``{classifier: NeoForgeFile}`` map for one NeoForge version.

    The maven "details" listing is cached on disk under ``files_manifests/``;
    the upstream API is only contacted when no cached copy exists yet.

    :param longversion: full version string, e.g. "1.20.1-47.1.0" or "20.2.88"
    :param artifact: maven artifact id — "forge" (legacy) or "neoforge"
    :raises requests.HTTPError: when the maven API request fails
    """
    print(f"Getting NeoForge manifest for {longversion}")
    # Fix: path was built with mixed %-formatting and the Path object was
    # recreated several times; build it once and reuse it.
    manifest_path = Path(UPSTREAM_DIR) / "neoforge" / "files_manifests" / f"{longversion}.json"
    from_file = manifest_path.is_file()
    if from_file:
        with open(manifest_path, "r") as f:
            files_json = json.load(f)
    else:
        file_url = (
            f"https://maven.neoforged.net/api/maven/details/releases/net%2Fneoforged%2F{artifact}%2F"
            + urllib.parse.quote(longversion)
        )
        r = sess.get(file_url)
        r.raise_for_status()
        files_json = r.json()

    ret_dict = dict()

    for entry in files_json.get("files"):
        # Fix: use isinstance() instead of `type(x) == dict`.
        assert isinstance(entry, dict)
        name = entry["name"]
        prefix = f"{artifact}-{longversion}"
        assert name.startswith(
            prefix
        ), f"{longversion} classifier {name} doesn't start with {prefix}"
        file_name = name[len(prefix):]
        if file_name.startswith("-"):
            file_name = file_name[1:]
        if file_name.startswith("."):
            # no classifier at all (e.g. the bare ".jar"/".pom") — skip
            continue

        classifier, ext = os.path.splitext(file_name)

        # hash side-car files are not real artifacts
        if ext in [".md5", ".sha1", ".sha256", ".sha512"]:
            continue

        ret_dict[classifier] = NeoForgeFile(
            artifact=artifact, classifier=classifier, extension=ext[1:]
        )

    if not from_file:
        # Persist the freshly downloaded manifest for the next run.
        manifest_path.parent.mkdir(parents=True, exist_ok=True)
        with open(manifest_path, "w", encoding="utf-8") as f:
            json.dump(files_json, f, sort_keys=True, indent=4)

    return ret_dict
+
+
def main():
    """Fetch the upstream NeoForge version lists, derive a local index, and
    download each version's installer jar plus the manifests embedded in it.

    Side effects: writes maven-metadata.json, derived_index.json, and the
    per-version installer/profile/version files under UPSTREAM_DIR/neoforge.
    """
    # get the remote version list fragments (legacy "forge" artifact first)
    r = sess.get(
        "https://maven.neoforged.net/api/maven/versions/releases/net%2Fneoforged%2Fforge"
    )
    r.raise_for_status()
    main_json = r.json()["versions"]
    # Fix: isinstance() instead of `type(x) == list`.
    assert isinstance(main_json, list)

    # get the new remote version list fragments (the "neoforge" artifact)
    r = sess.get(
        "https://maven.neoforged.net/api/maven/versions/releases/net%2Fneoforged%2Fneoforge"
    )
    r.raise_for_status()
    new_main_json = r.json()["versions"]
    assert isinstance(new_main_json, list)

    main_json += new_main_json

    new_index = DerivedNeoForgeIndex()

    # legacy scheme: "<mc>-<ver>.<build>[-<branch>]"
    version_expression = re.compile(
        r"^(?P<mc>[0-9a-zA-Z_\.]+)-(?P<ver>[0-9\.]+\.(?P<build>[0-9]+))(-(?P<branch>[a-zA-Z0-9\.]+))?$"
    )
    # new scheme: "<mcminor>.<mcpatch>.<number>[-<tag>]" (MC major "1." implied)
    # Fix: the separator dots were unescaped (`.` matches any character).
    neoforge_version_re = re.compile(
        r"^(?P<mcminor>\d+)\.(?P<mcpatch>\d+)\.(?P<number>\d+)(?:-(?P<tag>\w+))?$"
    )

    print("")
    print("Processing versions:")
    for long_version in main_json:
        assert isinstance(long_version, str)

        match = version_expression.match(long_version)
        if match:
            mc_version = match.group("mc")
            build = int(match.group("build"))
            version = match.group("ver")
            branch = match.group("branch")
            artifact = "forge"

        # The new scheme wins when it also matches: it overrides the legacy parse.
        match_nf = neoforge_version_re.match(long_version)
        if match_nf:
            mc_version = f"1.{match_nf.group('mcminor')}.{match_nf.group('mcpatch')}"
            build = int(match_nf.group("number"))
            version = match_nf.group("number")
            branch = match_nf.group("tag")
            match = match_nf
            artifact = "neoforge"

        assert match, f"{long_version} doesn't match version regex"
        try:
            files = get_single_forge_files_manifest(long_version, artifact)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Versions whose file manifest
            # cannot be fetched are still skipped best-effort.
            continue

        # TODO: what *is* recommended?
        is_recommended = False

        entry = NeoForgeEntry(
            artifact=artifact,
            long_version=long_version,
            mc_version=mc_version,
            version=version,
            build=build,
            branch=branch,
            # NOTE: we add this later after the fact. The forge promotions file lies about these.
            latest=False,
            recommended=is_recommended,
            files=files,
        )
        new_index.versions[long_version] = entry
        # by_mc_version may start out falsy (None/empty) on a fresh index.
        if not new_index.by_mc_version:
            new_index.by_mc_version = dict()
        if mc_version not in new_index.by_mc_version:
            new_index.by_mc_version.setdefault(mc_version, NeoForgeMCVersionInfo())
        new_index.by_mc_version[mc_version].versions.append(long_version)
        # NOTE: we add this later after the fact. The forge promotions file lies about these.
        # if entry.latest:
        #     new_index.by_mc_version[mc_version].latest = long_version
        if entry.recommended:
            new_index.by_mc_version[mc_version].recommended = long_version

    print("")
    print("Dumping index files...")

    with open(
        UPSTREAM_DIR + "/neoforge/maven-metadata.json", "w", encoding="utf-8"
    ) as f:
        json.dump(main_json, f, sort_keys=True, indent=4)

    new_index.write(UPSTREAM_DIR + "/neoforge/derived_index.json")

    print("Grabbing installers and dumping installer profiles...")
    # get the installer jars - if needed - and get the installer profiles out of them
    for key, entry in new_index.versions.items():
        eprint("Updating NeoForge %s" % key)
        if entry.mc_version is None:
            eprint("Skipping %d with invalid MC version" % entry.build)
            continue

        version = NeoForgeVersion(entry)
        if version.url() is None:
            eprint("Skipping %d with no valid files" % version.build)
            continue
        if not version.uses_installer():
            eprint(f"version {version.long_version} does not use installer")
            continue

        jar_path = os.path.join(UPSTREAM_DIR, JARS_DIR, version.filename())

        installer_info_path = (
            UPSTREAM_DIR + "/neoforge/installer_info/%s.json" % version.long_version
        )
        profile_path = (
            UPSTREAM_DIR
            + "/neoforge/installer_manifests/%s.json" % version.long_version
        )
        version_file_path = (
            UPSTREAM_DIR + "/neoforge/version_manifests/%s.json" % version.long_version
        )

        # Only touch the installer when one of its derived files is missing.
        installer_refresh_required = not os.path.isfile(
            profile_path
        ) or not os.path.isfile(installer_info_path)

        if installer_refresh_required:
            # grab the installer if it's not there
            if not os.path.isfile(jar_path):
                eprint("Downloading %s" % version.url())
                try:
                    rfile = sess.get(version.url(), stream=True)
                    rfile.raise_for_status()
                    Path(jar_path).parent.mkdir(parents=True, exist_ok=True)
                    with open(jar_path, "wb") as f:
                        for chunk in rfile.iter_content(chunk_size=128):
                            f.write(chunk)
                except Exception as e:
                    # best-effort: a failed download skips this version only
                    eprint("Failed to download %s" % version.url())
                    eprint("Error is %s" % e)
                    continue

        eprint("Processing %s" % version.url())
        # harvestables from the installer
        if not os.path.isfile(profile_path):
            print(jar_path)
            with zipfile.ZipFile(jar_path) as jar:
                # version.json is optional in some installers; skip quietly
                # when the archive member is absent (KeyError from jar.open).
                with suppress(KeyError):
                    with jar.open("version.json") as profile_zip_entry:
                        version_data = profile_zip_entry.read()

                        # Process: does it parse?
                        MojangVersion.parse_raw(version_data)

                        Path(version_file_path).parent.mkdir(
                            parents=True, exist_ok=True
                        )
                        with open(version_file_path, "wb") as versionJsonFile:
                            versionJsonFile.write(version_data)

                with jar.open("install_profile.json") as profile_zip_entry:
                    install_profile_data = profile_zip_entry.read()

                    # Process: does it parse?
                    is_parsable = False
                    exception = None
                    try:
                        NeoForgeInstallerProfileV2.parse_raw(install_profile_data)
                        is_parsable = True
                    except ValidationError as err:
                        exception = err

                    if not is_parsable:
                        # Unsupported versions are tolerated; supported ones
                        # must have a parseable install profile.
                        if version.is_supported():
                            raise exception
                        else:
                            eprint(
                                "Version %s is not supported and won't be generated later."
                                % version.long_version
                            )

                    Path(profile_path).parent.mkdir(parents=True, exist_ok=True)
                    with open(profile_path, "wb") as profileFile:
                        profileFile.write(install_profile_data)

        # installer info v1
        if not os.path.isfile(installer_info_path):
            installer_info = InstallerInfo()
            installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
            installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
            installer_info.size = os.path.getsize(jar_path)
            installer_info.write(installer_info_path)
+
+
if __name__ == "__main__":
    main()  # script entry point: run the full update when executed directly
diff --git a/meta/run/update_quilt.py b/meta/run/update_quilt.py
new file mode 100755
index 0000000000..5eaa8ed0d0
--- /dev/null
+++ b/meta/run/update_quilt.py
@@ -0,0 +1,122 @@
+import json
+import os
+import zipfile
+from datetime import datetime
+
+import requests
+
+from meta.common import (
+ upstream_path,
+ ensure_upstream_dir,
+ transform_maven_key,
+ default_session,
+)
+from meta.common.quilt import JARS_DIR, INSTALLER_INFO_DIR, META_DIR, USE_QUILT_MAPPINGS
+from meta.common.fabric import DATETIME_FORMAT_HTTP
+from meta.model.fabric import FabricJarInfo
+
# Local cache root for everything fetched from upstream.
UPSTREAM_DIR = upstream_path()

# Create the cache subdirectories up front so downloads can write blindly.
ensure_upstream_dir(JARS_DIR)
ensure_upstream_dir(INSTALLER_INFO_DIR)
ensure_upstream_dir(META_DIR)

# Shared HTTP session — presumably configured with retries/headers by the
# project helper (see meta.common.default_session); verify there.
sess = default_session()
+
+
def filehash(filename, hashtype, blocksize=65536):
    """Hash *filename*'s contents with constructor *hashtype*; return hex digest.

    Reads in *blocksize* chunks so large files never sit in memory at once.
    """
    digest = hashtype()
    with open(filename, "rb") as stream:
        while True:
            chunk = stream.read(blocksize)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
+
+
def get_maven_url(maven_key, server, ext):
    """Translate a "group:artifact:version" maven key into a download URL.

    :param maven_key: coordinate string, e.g. "org.quiltmc:quilt-loader:0.19.2"
    :param server: repository base URL (must end with a slash)
    :param ext: file extension including the leading dot, e.g. ".jar"
    """
    pieces = maven_key.split(":", 3)
    group_path = pieces[0].replace(".", "/")
    directory = f"{server}{group_path}/{pieces[1]}/{pieces[2]}/"
    return f"{directory}{pieces[1]}-{pieces[2]}{ext}"
+
+
def get_json_file(path, url):
    """GET *url*, persist the JSON body to *path*, and return the parsed value.

    :raises requests.HTTPError: when the server answers with an error status
    """
    # Fix: the original opened (and truncated) the output file *before* the
    # request, so a failed download left an empty/corrupt JSON file behind.
    # Fetch and validate first, then write.
    r = sess.get(url)
    r.raise_for_status()
    print(f"QUILT DEBUG {r.headers}")  # NOTE(review): debug leftover — consider removing
    version_json = r.json()
    with open(path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, sort_keys=True, indent=4)
    return version_json
+
+
def head_file(url):
    """Issue a HEAD request for *url* and hand back the response headers."""
    response = sess.head(url)
    response.raise_for_status()
    return response.headers
+
+
def get_binary_file(path, url):
    """Download *url* and write the raw bytes to *path*.

    :raises requests.HTTPError: when the server answers with an error status
    """
    # Fix: fetch (and validate) the response before opening the output file,
    # so a failed request cannot leave a truncated file behind.
    r = sess.get(url)
    r.raise_for_status()
    with open(path, "wb") as f:
        for chunk in r.iter_content(chunk_size=128):
            f.write(chunk)
+
+
def compute_jar_file(path, url):
    """Download the jar at *url* and record its newest entry timestamp.

    NOTE: Quilt Meta does not make any guarantees about Last-Modified.
    Always download the JAR file instead.
    """
    jar_path = path + ".jar"
    get_binary_file(jar_path, url)

    # The "release time" is the newest member timestamp inside the archive;
    # start from the epoch so any real entry wins.
    newest = datetime.fromtimestamp(0)
    with zipfile.ZipFile(jar_path) as jar:
        for member in jar.infolist():
            candidate = datetime(*member.date_time)
            if candidate > newest:
                newest = candidate

    FabricJarInfo(release_time=newest).write(path + ".json")
+
+
def main():
    """Mirror Quilt loader (and optionally hashed mappings) metadata locally."""
    # Decide which Quilt meta components we mirror.
    wanted = ["loader"]
    if USE_QUILT_MAPPINGS:
        wanted.append("hashed")

    for component in wanted:
        listing = get_json_file(
            os.path.join(UPSTREAM_DIR, META_DIR, f"{component}.json"),
            "https://meta.quiltmc.org/v3/versions/" + component,
        )
        for release in listing:
            print(f"Processing {component} {release['version']} ")
            compute_jar_file(
                os.path.join(
                    UPSTREAM_DIR, JARS_DIR, transform_maven_key(release["maven"])
                ),
                get_maven_url(
                    release["maven"],
                    "https://maven.quiltmc.org/repository/release/",
                    ".jar",
                ),
            )

    # For every loader release, fetch the matching installer JSON off maven.
    loader_index_path = os.path.join(UPSTREAM_DIR, META_DIR, "loader.json")
    with open(loader_index_path, "r", encoding="utf-8") as handle:
        loader_releases = json.load(handle)
    for release in loader_releases:
        print(f"Downloading JAR info for loader {release['version']} ")
        get_json_file(
            os.path.join(
                UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{release['version']}.json"
            ),
            get_maven_url(
                release["maven"],
                "https://maven.quiltmc.org/repository/release/",
                ".json",
            ),
        )
+
+
if __name__ == "__main__":
    main()  # script entry point: run the full update when executed directly