summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--meta/common/__init__.py31
-rwxr-xr-xmeta/run/generate_forge.py7
-rw-r--r--meta/run/generate_neoforge.py7
-rwxr-xr-xmeta/run/index.py17
-rwxr-xr-xmeta/run/update_fabric.py8
-rwxr-xr-xmeta/run/update_forge.py59
-rw-r--r--meta/run/update_neoforge.py49
-rwxr-xr-xmeta/run/update_quilt.py8
8 files changed, 91 insertions, 95 deletions
diff --git a/meta/common/__init__.py b/meta/common/__init__.py
index 16a4f507bf..1f358cdbd7 100644
--- a/meta/common/__init__.py
+++ b/meta/common/__init__.py
@@ -1,8 +1,10 @@
import os
import os.path
import datetime
+import hashlib
+import sys
from urllib.parse import urlparse
-from typing import Any, Optional
+from typing import Any, Optional, Callable
import requests
from cachecontrol import CacheControl # type: ignore
@@ -88,10 +90,35 @@ def default_session():
return sess
-def remove_files(file_paths):
+def remove_files(file_paths: list[str]) -> None:
for file_path in file_paths:
try:
if os.path.isfile(file_path):
os.remove(file_path)
except Exception as e:
print(e)
+
+
+def eprint(*args, **kwargs):
+    """Print to stderr instead of stdout (for progress/error messages)."""
+    print(*args, file=sys.stderr, **kwargs)
+
+def file_hash(
+ filename: str, hashtype: Callable[[], "hashlib._Hash"], blocksize: int = 65536
+) -> str:
+ hashtype = hashtype()
+ with open(filename, "rb") as f:
+ for block in iter(lambda: f.read(blocksize), b""):
+ hashtype.update(block)
+ return hashtype.hexdigest()
+
+
+def get_file_sha1_from_file(file_name: str, sha1_file: str) -> Optional[str]:
+ if os.path.isfile(sha1_file):
+ with open(sha1_file, "r") as file:
+ return file.read()
+
+ new_sha1 = file_hash(file_name, hashlib.sha1)
+ with open(sha1_file, "w") as file:
+ file.write(new_sha1)
+ return None
diff --git a/meta/run/generate_forge.py b/meta/run/generate_forge.py
index 95b5ac39cb..a01f503ebe 100755
--- a/meta/run/generate_forge.py
+++ b/meta/run/generate_forge.py
@@ -1,11 +1,10 @@
import os
import re
-import sys
from packaging import version as pversion
from operator import attrgetter
from typing import Collection
-from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint
from meta.common.forge import (
FORGE_COMPONENT,
INSTALLER_MANIFEST_DIR,
@@ -44,10 +43,6 @@ UPSTREAM_DIR = upstream_path()
ensure_component_dir(FORGE_COMPONENT)
-def eprint(*args, **kwargs):
- print(*args, file=sys.stderr, **kwargs)
-
-
# Construct a set of libraries out of a Minecraft version file, for filtering.
mc_version_cache = {}
diff --git a/meta/run/generate_neoforge.py b/meta/run/generate_neoforge.py
index eab7fb33e3..5c6f965435 100644
--- a/meta/run/generate_neoforge.py
+++ b/meta/run/generate_neoforge.py
@@ -1,11 +1,10 @@
from copy import deepcopy
import os
import re
-import sys
from operator import attrgetter
from typing import Collection
-from meta.common import ensure_component_dir, launcher_path, upstream_path
+from meta.common import ensure_component_dir, launcher_path, upstream_path, eprint
from meta.common.neoforge import (
NEOFORGE_COMPONENT,
INSTALLER_MANIFEST_DIR,
@@ -38,10 +37,6 @@ UPSTREAM_DIR = upstream_path()
ensure_component_dir(NEOFORGE_COMPONENT)
-def eprint(*args, **kwargs):
- print(*args, file=sys.stderr, **kwargs)
-
-
def version_from_build_system_installer(
installer: MojangVersion,
profile: NeoForgeInstallerProfileV2,
diff --git a/meta/run/index.py b/meta/run/index.py
index 23dc2336ea..22abea8457 100755
--- a/meta/run/index.py
+++ b/meta/run/index.py
@@ -2,7 +2,9 @@ import hashlib
import os
from operator import attrgetter
-from meta.common import launcher_path
+from meta.common import launcher_path, file_hash
+
+
from meta.model import MetaVersion, MetaPackage
from meta.model.index import (
MetaPackageIndex,
@@ -14,15 +16,6 @@ from meta.model.index import (
LAUNCHER_DIR = launcher_path()
-# take the hash type (like hashlib.md5) and filename, return hex string of hash
-def hash_file(hash_fn, file_name):
- hash_instance = hash_fn()
- with open(file_name, "rb") as f:
- for chunk in iter(lambda: f.read(4096), b""):
- hash_instance.update(chunk)
- return hash_instance.hexdigest()
-
-
# ignore these files when indexing versions
ignore = {"index.json", "package.json", ".git", ".github"}
@@ -50,7 +43,7 @@ for package in sorted(os.listdir(LAUNCHER_DIR)):
continue
# parse and hash the version file
filepath = LAUNCHER_DIR + "/%s/%s" % (package, filename)
- filehash = hash_file(hashlib.sha256, filepath)
+ filehash = file_hash(filepath, hashlib.sha256)
versionFile = MetaVersion.parse_file(filepath)
is_recommended = versionFile.version in recommendedVersions
@@ -71,7 +64,7 @@ for package in sorted(os.listdir(LAUNCHER_DIR)):
# insert entry into the package index
packageEntry = MetaPackageIndexEntry(
- uid=package, name=sharedData.name, sha256=hash_file(hashlib.sha256, outFilePath)
+ uid=package, name=sharedData.name, sha256=file_hash(outFilePath, hashlib.sha256)
)
packages.packages.append(packageEntry)
diff --git a/meta/run/update_fabric.py b/meta/run/update_fabric.py
index e0c1d819c9..d19540b5e4 100755
--- a/meta/run/update_fabric.py
+++ b/meta/run/update_fabric.py
@@ -30,14 +30,6 @@ ensure_upstream_dir(META_DIR)
sess = default_session()
-def filehash(filename, hashtype, blocksize=65536):
- h = hashtype()
- with open(filename, "rb") as f:
- for block in iter(lambda: f.read(blocksize), b""):
- h.update(block)
- return h.hexdigest()
-
-
def get_maven_url(maven_key, server, ext):
parts = maven_key.split(":", 3)
maven_ver_url = (
diff --git a/meta/run/update_forge.py b/meta/run/update_forge.py
index 5a86727199..7ec9e45023 100755
--- a/meta/run/update_forge.py
+++ b/meta/run/update_forge.py
@@ -7,7 +7,6 @@ import hashlib
import json
import os
import re
-import sys
import zipfile
from contextlib import suppress
from datetime import datetime
@@ -21,6 +20,9 @@ from meta.common import (
ensure_upstream_dir,
default_session,
remove_files,
+ eprint,
+ file_hash,
+ get_file_sha1_from_file,
)
from meta.common.forge import (
JARS_DIR,
@@ -43,6 +45,7 @@ from meta.model.forge import (
InstallerInfo,
ForgeLegacyInfo,
)
+from meta.common.http import download_binary_file
from meta.model.mojang import MojangVersion
UPSTREAM_DIR = upstream_path()
@@ -58,18 +61,6 @@ LEGACYINFO_PATH = os.path.join(UPSTREAM_DIR, LEGACYINFO_FILE)
sess = default_session()
-def eprint(*args, **kwargs):
- print(*args, file=sys.stderr, **kwargs)
-
-
-def filehash(filename, hashtype, blocksize=65536):
- hashtype = hashtype()
- with open(filename, "rb") as f:
- for block in iter(lambda: f.read(blocksize), b""):
- hashtype.update(block)
- return hashtype.hexdigest()
-
-
def get_single_forge_files_manifest(longversion):
print(f"Getting Forge manifest for {longversion}")
path_thing = UPSTREAM_DIR + "/forge/files_manifests/%s.json" % longversion
@@ -297,15 +288,20 @@ def main():
UPSTREAM_DIR + "/forge/version_manifests/%s.json" % version.long_version
)
+ new_sha1 = None
+ sha1_file = jar_path + ".sha1"
if not os.path.isfile(jar_path):
remove_files([profile_path, installer_info_path])
else:
- fileSha1 = filehash(jar_path, hashlib.sha1)
+ fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
try:
rfile = sess.get(version.url() + ".sha1")
rfile.raise_for_status()
- if fileSha1 != rfile.text.strip():
- remove_files([jar_path, profile_path, installer_info_path])
+ new_sha1 = rfile.text.strip()
+ if fileSha1 != new_sha1:
+ remove_files(
+ [jar_path, profile_path, installer_info_path, sha1_file]
+ )
except Exception as e:
eprint("Failed to check sha1 %s" % version.url())
eprint("Error is %s" % e)
@@ -318,11 +314,18 @@ def main():
# grab the installer if it's not there
if not os.path.isfile(jar_path):
eprint("Downloading %s" % version.url())
- rfile = sess.get(version.url(), stream=True)
- rfile.raise_for_status()
- with open(jar_path, "wb") as f:
- for chunk in rfile.iter_content(chunk_size=128):
- f.write(chunk)
+ download_binary_file(sess, jar_path, version.url())
+ if new_sha1 is None:
+ try:
+ rfile = sess.get(version.url() + ".sha1")
+ rfile.raise_for_status()
+ new_sha1 = rfile.text.strip()
+ except Exception as e:
+ eprint("Failed to download new sha1 %s" % version.url())
+ eprint("Error is %s" % e)
+ if new_sha1 is not None: # this is in case the fetch failed
+ with open(sha1_file, "w") as file:
+ file.write(new_sha1)
eprint("Processing %s" % version.url())
# harvestables from the installer
@@ -373,8 +376,8 @@ def main():
# installer info v1
if not os.path.isfile(installer_info_path):
installer_info = InstallerInfo()
- installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
- installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
+ installer_info.sha1hash = file_hash(jar_path, hashlib.sha1)
+ installer_info.sha256hash = file_hash(jar_path, hashlib.sha256)
installer_info.size = os.path.getsize(jar_path)
installer_info.write(installer_info_path)
else:
@@ -387,11 +390,7 @@ def main():
if not os.path.isfile(LEGACYINFO_PATH):
# grab the jar/zip if it's not there
if not os.path.isfile(jar_path):
- rfile = sess.get(version.url(), stream=True)
- rfile.raise_for_status()
- with open(jar_path, "wb") as f:
- for chunk in rfile.iter_content(chunk_size=128):
- f.write(chunk)
+ download_binary_file(sess, jar_path, version.url())
# find the latest timestamp in the zip file
tstamp = datetime.fromtimestamp(0)
with zipfile.ZipFile(jar_path) as jar:
@@ -401,8 +400,8 @@ def main():
tstamp = tstamp_new
legacy_info = ForgeLegacyInfo()
legacy_info.release_time = tstamp
- legacy_info.sha1 = filehash(jar_path, hashlib.sha1)
- legacy_info.sha256 = filehash(jar_path, hashlib.sha256)
+ legacy_info.sha1 = file_hash(jar_path, hashlib.sha1)
+ legacy_info.sha256 = file_hash(jar_path, hashlib.sha256)
legacy_info.size = os.path.getsize(jar_path)
legacy_info_list.number[key] = legacy_info
diff --git a/meta/run/update_neoforge.py b/meta/run/update_neoforge.py
index ee8d6ff950..186edb253c 100644
--- a/meta/run/update_neoforge.py
+++ b/meta/run/update_neoforge.py
@@ -7,7 +7,6 @@ import hashlib
import json
import os
import re
-import sys
import zipfile
from contextlib import suppress
from datetime import datetime
@@ -22,7 +21,11 @@ from meta.common import (
ensure_upstream_dir,
default_session,
remove_files,
+ eprint,
+ file_hash,
+ get_file_sha1_from_file,
)
+from meta.common.http import download_binary_file
from meta.common.neoforge import (
JARS_DIR,
INSTALLER_INFO_DIR,
@@ -52,18 +55,6 @@ ensure_upstream_dir(FILE_MANIFEST_DIR)
sess = default_session()
-def eprint(*args, **kwargs):
- print(*args, file=sys.stderr, **kwargs)
-
-
-def filehash(filename, hashtype, blocksize=65536):
- hashtype = hashtype()
- with open(filename, "rb") as f:
- for block in iter(lambda: f.read(blocksize), b""):
- hashtype.update(block)
- return hashtype.hexdigest()
-
-
def find_nth(haystack, needle, n):
start = haystack.find(needle)
while start >= 0 and n > 1:
@@ -221,15 +212,20 @@ def main():
UPSTREAM_DIR + "/neoforge/version_manifests/%s.json" % version.long_version
)
+ new_sha1 = None
+ sha1_file = jar_path + ".sha1"
if not os.path.isfile(jar_path):
remove_files([profile_path, installer_info_path])
else:
- fileSha1 = filehash(jar_path, hashlib.sha1)
+ fileSha1 = get_file_sha1_from_file(jar_path, sha1_file)
try:
rfile = sess.get(version.url() + ".sha1")
rfile.raise_for_status()
- if fileSha1 != rfile.text.strip():
- remove_files([jar_path, profile_path, installer_info_path])
+ new_sha1 = rfile.text.strip()
+ if fileSha1 != new_sha1:
+ remove_files(
+ [jar_path, profile_path, installer_info_path, sha1_file]
+ )
except Exception as e:
eprint("Failed to check sha1 %s" % version.url())
eprint("Error is %s" % e)
@@ -243,16 +239,23 @@ def main():
if not os.path.isfile(jar_path):
eprint("Downloading %s" % version.url())
try:
- rfile = sess.get(version.url(), stream=True)
- rfile.raise_for_status()
Path(jar_path).parent.mkdir(parents=True, exist_ok=True)
- with open(jar_path, "wb") as f:
- for chunk in rfile.iter_content(chunk_size=128):
- f.write(chunk)
+ download_binary_file(sess, jar_path, version.url())
except Exception as e:
eprint("Failed to download %s" % version.url())
eprint("Error is %s" % e)
continue
+ if new_sha1 is None:
+ try:
+ rfile = sess.get(version.url() + ".sha1")
+ rfile.raise_for_status()
+ new_sha1 = rfile.text.strip()
+ except Exception as e:
+ eprint("Failed to download new sha1 %s" % version.url())
+ eprint("Error is %s" % e)
+ if new_sha1 is not None: # this is in case the fetch failed
+ with open(sha1_file, "w") as file:
+ file.write(new_sha1)
eprint("Processing %s" % version.url())
# harvestables from the installer
@@ -302,8 +305,8 @@ def main():
# installer info v1
if not os.path.isfile(installer_info_path):
installer_info = InstallerInfo()
- installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
- installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
+ installer_info.sha1hash = file_hash(jar_path, hashlib.sha1)
+ installer_info.sha256hash = file_hash(jar_path, hashlib.sha256)
installer_info.size = os.path.getsize(jar_path)
installer_info.write(installer_info_path)
diff --git a/meta/run/update_quilt.py b/meta/run/update_quilt.py
index 5f3c40caa8..2f3160265d 100755
--- a/meta/run/update_quilt.py
+++ b/meta/run/update_quilt.py
@@ -22,14 +22,6 @@ ensure_upstream_dir(META_DIR)
sess = default_session()
-def filehash(filename, hashtype, blocksize=65536):
- h = hashtype()
- with open(filename, "rb") as f:
- for block in iter(lambda: f.read(blocksize), b""):
- h.update(block)
- return h.hexdigest()
-
-
def get_maven_url(maven_key, server, ext):
parts = maven_key.split(":", 3)
maven_ver_url = (