1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
|
import concurrent.futures
import json
import os
import zipfile
from datetime import datetime
from meta.common import (
upstream_path,
ensure_upstream_dir,
transform_maven_key,
default_session,
)
from meta.common.quilt import JARS_DIR, INSTALLER_INFO_DIR, META_DIR, USE_QUILT_MAPPINGS
from meta.model.fabric import FabricJarInfo
# Root directory of the local upstream mirror; all paths below are joined to it.
UPSTREAM_DIR = upstream_path()
# Create the cache subdirectories up front so downloads can write immediately.
ensure_upstream_dir(JARS_DIR)
ensure_upstream_dir(INSTALLER_INFO_DIR)
ensure_upstream_dir(META_DIR)
# Shared HTTP session from meta.common — presumably preconfigured (headers/
# retries); verify in meta.common.default_session. Used by all fetch helpers.
sess = default_session()
def get_maven_url(maven_key, server, ext):
    """Build the artifact download URL for a ``group:artifact:version`` key.

    *server* is the repository base URL (with trailing slash) and *ext* the
    file extension to append, e.g. ``".jar"`` or ``".json"``.
    """
    group, artifact, version = maven_key.split(":", 3)[:3]
    version_dir = f"{server}{group.replace('.', '/')}/{artifact}/{version}/"
    return f"{version_dir}{artifact}-{version}{ext}"
def get_json_file(path, url):
    """Fetch JSON from *url*, write it pretty-printed to *path*, return it.

    The request is performed and validated BEFORE the output file is opened:
    the original opened the file first, so a failed request left an empty
    (truncated) file behind in the upstream cache.
    """
    r = sess.get(url)
    r.raise_for_status()
    print(f"QUILT DEBUG {r.headers}")
    version_json = r.json()
    with open(path, "w", encoding="utf-8") as f:
        # sort_keys keeps the on-disk file stable across runs for clean diffs.
        json.dump(version_json, f, sort_keys=True, indent=4)
    return version_json
def head_file(url):
    """Issue a HEAD request for *url* and return the response headers."""
    response = sess.head(url)
    response.raise_for_status()
    return response.headers
def get_binary_file(path, url):
    """Download *url* to *path* in binary mode.

    Fixes vs. the original: the request is issued and checked BEFORE the
    output file is opened, so a failed download cannot leave a truncated
    file behind; ``stream=True`` avoids buffering the whole body in memory;
    and the chunk size is raised from 128 bytes to 64 KiB to cut per-chunk
    overhead on multi-megabyte JARs.
    """
    r = sess.get(url, stream=True)
    r.raise_for_status()
    with open(path, "wb") as f:
        for chunk in r.iter_content(chunk_size=65536):
            f.write(chunk)
def compute_jar_file(path, url):
    """Download the JAR at *url* and write a ``<path>.json`` info file whose
    release time is the newest entry timestamp inside the archive.

    NOTE: Quilt Meta does not make any guarantees about Last-Modified, so
    the JAR file is always downloaded.
    """
    jar_path = path + ".jar"
    get_binary_file(jar_path, url)
    epoch = datetime.fromtimestamp(0)
    with zipfile.ZipFile(jar_path) as jar:
        # Newest member timestamp; fall back to the epoch for an empty archive.
        release_time = max(
            (datetime(*entry.date_time) for entry in jar.infolist()),
            default=epoch,
        )
    FabricJarInfo(release_time=release_time).write(path + ".json")
def compute_jar_file_concurrent(component, it):
    """Thread-pool worker: fetch one version's JAR and compute its info file."""
    version = it["version"]
    print(f"Processing {component} {version} ")
    maven_key = it["maven"]
    jar_url = get_maven_url(
        maven_key, "https://maven.quiltmc.org/repository/release/", ".jar"
    )
    local_path = os.path.join(UPSTREAM_DIR, JARS_DIR, transform_maven_key(maven_key))
    compute_jar_file(local_path, jar_url)
    print(f"Processing {component} {version} Done")
def get_json_file_concurrent(it):
    """Thread-pool worker: fetch one loader version's installer JSON."""
    version = it["version"]
    print(f"Downloading JAR info for loader {version} ")
    installer_url = get_maven_url(
        it["maven"], "https://maven.quiltmc.org/repository/release/", ".json"
    )
    target = os.path.join(UPSTREAM_DIR, INSTALLER_INFO_DIR, f"{version}.json")
    get_json_file(target, installer_url)
def main():
    """Mirror Quilt metadata: version indexes, JAR timestamps, installer JSON."""
    # get the version list for each component we are interested in
    components = ["loader"]
    if USE_QUILT_MAPPINGS:
        components.append("hashed")

    for component in components:
        index = get_json_file(
            os.path.join(UPSTREAM_DIR, META_DIR, f"{component}.json"),
            "https://meta.quiltmc.org/v3/versions/" + component,
        )
        with concurrent.futures.ThreadPoolExecutor() as executor:
            pending = [
                executor.submit(compute_jar_file_concurrent, component, entry)
                for entry in index
            ]
            # Collect results so any worker exception propagates here.
            for future in pending:
                future.result()

    # for each loader, download installer JSON file from maven
    loader_index_path = os.path.join(UPSTREAM_DIR, META_DIR, "loader.json")
    with open(loader_index_path, "r", encoding="utf-8") as loader_index_file:
        loader_version_index = json.load(loader_index_file)
    with concurrent.futures.ThreadPoolExecutor() as executor:
        pending = [
            executor.submit(get_json_file_concurrent, entry)
            for entry in loader_version_index
        ]
        for future in pending:
            future.result()
# Script entry point: run the full metadata sync when executed directly.
if __name__ == "__main__":
    main()
|