update_json.py
import json
import re
import requests
from datetime import datetime
from typing import Dict, List, Tuple, Any, Optional
GITHUB_REPO = "Balackburn/Apollo"
JSON_FILE = "apps.json"
JSON_NOEXT = "apps_noext.json"
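 
# JSON_FILE is the standard app source; JSON_NOEXT is the variant built from the
# "NO-EXTENSIONS" release assets (see update_json_file and main below).
 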
def fetch_all_releases() -> List[Dict[str, Any]]:
"""
Fetch all release information from GitHub.
Returns:
List of release objects sorted by publication date (oldest first)
Raises:
requests.RequestException: If the API request fails
"""
api_url = f"https://api.github.com/repos/{GITHUB_REPO}/releases"
headers = {"Accept": "application/vnd.github+json"}
response = requests.get(api_url, headers=headers)
    response.raise_for_status()  # Raise an exception for 4xx/5xx error responses
releases = response.json()
sorted_releases = sorted(releases, key=lambda x: x["published_at"], reverse=False)
return sorted_releases
def fetch_latest_release() -> Dict[str, Any]:
"""
Fetch the latest release information from GitHub.
Returns:
Latest release object
Raises:
requests.RequestException: If the API request fails
ValueError: If no releases are found
"""
api_url = f"https://api.github.com/repos/{GITHUB_REPO}/releases"
headers = {"Accept": "application/vnd.github+json"}
response = requests.get(api_url, headers=headers)
response.raise_for_status()
releases = response.json()
sorted_releases = sorted(releases, key=lambda x: x["published_at"], reverse=True)
if not sorted_releases:
raise ValueError("No release found.")
return sorted_releases[0]
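 
# Illustrative sketch (not part of the original script): the fields this module
# relies on from each GitHub release object. The values below are hypothetical.
#
#   {
#       "tag_name": "v1.15.11_1.0.3",
#       "published_at": "2024-01-01T00:00:00Z",
#       "body": "Apollo for Reddit (with ImprovedCustomApi) Release Information ...",
#       "assets": [
#           {
#               "name": "Apollo-1.15.11_1.0.3.ipa",
#               "browser_download_url": "https://github.com/.../Apollo-1.15.11_1.0.3.ipa",
#               "size": 12345678
#           }
#       ]
#   }
 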
def format_description(input_text: str) -> str:
"""
Format release description by removing HTML tags and markdown formatting.
Args:
input_text: Raw release description
Returns:
Formatted description text
"""
description = input_text
description = re.sub(r"<[^<]+?>", "", description) # HTML tags
description = re.sub(r"#{1,6}\s?", "", description) # Markdown header tags
    description = re.sub(r"\*{2}", "", description)  # Bold markers ("**")
    description = description.replace("-", "•").replace("`", '"')
return description
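 
# Illustrative example (assumed input, not taken from a real release body):
# format_description("## What's New\n- Fixed **crash** on `launch`") would yield
# "What's New\n• Fixed crash on \"launch\"" once headers, bold markers, and
# backticks are stripped or substituted as above.
 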
def get_download_info(
release: Dict[str, Any], prefix: Optional[str]
) -> Tuple[Optional[str], Optional[int]]:
"""
Get download URL and size for a specific release asset.
Args:
release: GitHub release object
prefix: Asset name prefix to search for (None or "NO-EXTENSIONS")
Returns:
Tuple of (download_url, size) - both can be None if asset not found
"""
target_prefix = "NO-EXTENSIONS" if prefix == "NO-EXTENSIONS" else "Apollo"
download_url = None
size = None
for asset in release.get("assets", []):
if asset.get("name", "").startswith(target_prefix) and asset.get(
"browser_download_url"
):
download_url = asset["browser_download_url"]
size = asset.get("size")
break
return download_url, size
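 
# Illustrative usage (hypothetical release object): with prefix=None the standard
# "Apollo…" asset is matched, while prefix="NO-EXTENSIONS" selects the
# extension-free build.
#
#   release = {"assets": [{"name": "Apollo-1.15.11.ipa",
#                          "browser_download_url": "https://example.com/a.ipa",
#                          "size": 100}]}
#   get_download_info(release, None)             # -> ("https://example.com/a.ipa", 100)
#   get_download_info(release, "NO-EXTENSIONS")  # -> (None, None)
 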
def parse_version(version_string: str) -> Tuple[str, str]:
"""
Parse version string to extract main version and secondary version if present.
    Supports both underscore and hyphen separators.
Args:
version_string: Version string with optional underscore or hyphen component
Returns:
        Tuple of (app_version, tweak_version); tweak_version is None when the tag has no tweak component
"""
# Support both underscore and hyphen as separators
# This captures prior releases which used a different formatting approach
version_match = re.search(
r"(\d+\.\d+(?:\.\d+)?)(?:[_-](\d+\.\d+\.\d+[a-z]?))?", version_string
)
if not version_match:
raise ValueError(f"Invalid version format: {version_string}")
app_version = version_match.group(1)
tweak_version = version_match.group(2)
# Catches edge cases where only major and minor version are present in the tag
if tweak_version and tweak_version.count(".") == 1: # If only major and minor exist
tweak_version += ".0"
return app_version, tweak_version
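 
# Illustrative examples (assumed tag formats, based on the regex above):
#   parse_version("1.15.11_1.0.3")  # -> ("1.15.11", "1.0.3")
#   parse_version("1.15.11-1.0.3")  # -> ("1.15.11", "1.0.3")
#   parse_version("1.15.11")        # -> ("1.15.11", None)
 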
def get_patch_number(version_string: str) -> int:
"""
Extract patch number from version string.
Args:
        version_string: Version string (e.g. "1.2.3_1.2.3")
Returns:
Patch number (third component of version)
"""
_, tweak_version = parse_version(version_string)
    # Extract the patch number (third component), tolerating a trailing
    # letter suffix that the version regex allows (e.g. "1.0.3a")
    patch_component = tweak_version.split(".")[2]
    return int(re.sub(r"[a-z]$", "", patch_component))
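 
# Illustrative examples (assumed tag format):
#   get_patch_number("1.15.11_1.0.3")  # -> 3
#   get_patch_number("1.15.11_1.0.0")  # -> 0 (treated as a major update below)
 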
def update_json_file(
json_file: str,
fetched_data_all: List[Dict[str, Any]],
fetched_data_latest: Dict[str, Any],
prefix: Optional[str],
) -> None:
"""
Update app source JSON file with fetched release information.
Args:
json_file: Path to the app source JSON file to update
fetched_data_all: List of all releases
fetched_data_latest: Latest release data
prefix: Asset name prefix to search for (None or "NO-EXTENSIONS")
"""
try:
with open(json_file, "r") as file:
data = json.load(file)
except (json.JSONDecodeError, FileNotFoundError) as e:
raise ValueError(f"Error reading JSON file {json_file}: {str(e)}")
if (
not data.get("apps")
or not isinstance(data["apps"], list)
or len(data["apps"]) == 0
):
raise ValueError("Invalid JSON structure: missing or empty 'apps' array")
app = data["apps"][0]
if "versions" not in app:
app["versions"] = []
# Dictionary to track newest release for each version
version_dates = {} # version -> (published_at, entry_data)
# Process all releases
for release in fetched_data_all:
full_version = release["tag_name"].lstrip("v")
# Parse the version for comparison
app_version, tweak_version = parse_version(full_version)
version = app_version
version_date = release["published_at"]
# Extract and format description
description = release.get("body", "")
keyword = "Apollo for Reddit (with ImprovedCustomApi) Release Information"
if keyword in description:
description = description.split(keyword, 1)[1].strip()
description = format_description(description)
# Get download information
download_url, size = get_download_info(release, prefix)
# Create version entry
version_entry = {
"version": version,
"date": version_date,
"localizedDescription": description,
"downloadURL": download_url,
"size": size,
}
# Only process entries that have a download URL
if download_url:
# Check if we already have this version in our tracking dictionary
if version not in version_dates or version_date > version_dates[version][0]:
# This is either a new version or a newer release of the same version
version_dates[version] = (version_date, version_entry)
# Remove all existing entries for versions that we're updating
app["versions"] = [
v for v in app["versions"] if v.get("version") not in version_dates
]
# Add all the newest version entries
for version_date, entry in version_dates.values():
app["versions"].insert(0, entry)
# Sort versions by date (newest first)
app["versions"] = sorted(
app["versions"], key=lambda x: x.get("date", ""), reverse=True
)
# Process latest release
latest_version = fetched_data_latest["tag_name"].lstrip("v")
tag = fetched_data_latest["tag_name"]
try:
app_version, tweak_version = parse_version(latest_version)
version = tweak_version
patch_number = get_patch_number(latest_version)
except ValueError as e:
raise ValueError(f"Error parsing latest version: {str(e)}")
# Update app metadata
app["version"] = app_version
app["versionDate"] = fetched_data_latest["published_at"]
app["versionDescription"] = format_description(fetched_data_latest.get("body", ""))
# Find IPA download URL and size
app["downloadURL"] = next(
(
asset["browser_download_url"]
for asset in fetched_data_latest.get("assets", [])
if asset.get("name", "").endswith(".ipa")
and asset.get("browser_download_url")
),
None,
)
app["size"] = next(
(
asset["size"]
for asset in fetched_data_latest.get("assets", [])
if asset.get("browser_download_url") == app["downloadURL"]
),
None,
)
# Add news entry if not already present
if "news" not in data:
data["news"] = []
news_identifier = f"release-{latest_version}"
if not any(item.get("identifier") == news_identifier for item in data["news"]):
formatted_date = datetime.strptime(
fetched_data_latest["published_at"], "%Y-%m-%dT%H:%M:%SZ"
).strftime("%d %b")
# Determine caption and image_url based on patch number
if patch_number == 0:
caption = "Major update of Apollo (with ImprovedCustomApi) is here!"
image_url = "https://raw.githubusercontent.com/Balackburn/Apollo/main/images/news/news_1.webp"
else:
caption = "Update of Apollo (with ImprovedCustomApi) now available!"
image_url = "https://raw.githubusercontent.com/Balackburn/Apollo/main/images/news/news_2.webp"
news_entry = {
"appID": "com.christianselig.Apollo",
"title": f"{latest_version} - {formatted_date}",
"identifier": news_identifier,
"caption": caption,
"date": fetched_data_latest["published_at"],
"tintColor": "3F91FE",
"imageURL": image_url,
"notify": True,
"url": f"https://github.com/Balackburn/Apollo/releases/tag/{tag}",
}
data["news"].append(news_entry)
try:
with open(json_file, "w") as file:
json.dump(data, file, indent=2)
except IOError as e:
raise ValueError(f"Error writing to JSON file {json_file}: {str(e)}")
def main() -> None:
"""
Entrypoint for the GitHub workflow action.
    The script runs two passes to populate both sources (standard and no-extensions).
"""
try:
fetched_data_all = fetch_all_releases()
fetched_data_latest = fetch_latest_release()
update_json_file(JSON_FILE, fetched_data_all, fetched_data_latest, None)
update_json_file(
JSON_NOEXT, fetched_data_all, fetched_data_latest, "NO-EXTENSIONS"
)
print(f"Successfully updated {JSON_FILE} and {JSON_NOEXT}")
except Exception as e:
print(f"Error: {str(e)}")
raise
if __name__ == "__main__":
main()