#!/usr/bin/env python3
import os
import json
import urllib.request
import urllib.error  # needed for the HTTPError handler below
import re
import xml.etree.ElementTree as ET
from email.utils import parsedate_to_datetime

# Configuration
DEVICES_JSON_URL = "https://raw.githubusercontent.com/PitchBlackRecoveryProject/vendor_utils/pb/pb_devices.json"
REPO_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
OEM_DIR = os.path.join(REPO_ROOT, "_oem")
JSON_DIR = os.path.join(REPO_ROOT, "assets", "json")

# Regex to match PBRP filenames and capture version, build date, and build type
ZIP_META_PATTERN = re.compile(
    r"(?:PBRP|PitchBlack)-.*?-(\d+(?:\.\d+)+)-(\d{8}).*?-(OFFICIAL|BETA|UNOFFICIAL).*?\.zip",
    re.IGNORECASE
)
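# Example (hypothetical filename, for illustration only):
#   "PBRP-miatoll-4.0-20230101-0000-OFFICIAL.zip" -> ("4.0", "20230101", "OFFICIAL")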

def fetch_json(url):
    try:
        with urllib.request.urlopen(url) as response:
            return json.loads(response.read().decode())
    except Exception as e:
        print(f"Error fetching JSON {url}: {e}")
        return None

def fetch_sourceforge_builds(target_codename):
    """
    Fetches builds from SourceForge using the target_codename.
    For unified devices, this target_codename is the unified name (e.g., 'miatoll').
    """
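    # Each RSS <item> is expected to expose the file path in <title>
    # (e.g. "/miatoll/<filename>.zip"), the download page in <link>,
    # and the upload time in <pubDate>.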
    rss_url = f"https://sourceforge.net/projects/pbrp/rss?path=/{target_codename}"

    builds = []

    try:
        req = urllib.request.Request(
            rss_url,
            data=None,
            headers={'User-Agent': 'PBRP-Manager/1.0'}
        )

        with urllib.request.urlopen(req) as response:
            xml_data = response.read().decode()

        root = ET.fromstring(xml_data)

        for item in root.findall('./channel/item'):
            # findtext() with a default avoids AttributeError on malformed items
            title = item.findtext('title', '')
            link = item.findtext('link', '')
            pub_date_str = item.findtext('pubDate', '')

            # Strict path check
            if not title.startswith(f"/{target_codename}/"):
                continue

            filename = title.split('/')[-1]

            if not filename.endswith(('.zip', '.img')):
                continue

            match = ZIP_META_PATTERN.search(filename)

            version = "Unknown"
            build_type = "UNOFFICIAL"
            date_str = "Unknown"
            timestamp = 0

            try:
                dt = parsedate_to_datetime(pub_date_str)
                date_str = dt.strftime("%Y-%m-%d")
                timestamp = dt.timestamp()
            except (TypeError, ValueError):
                pass

            if match:
                version = match.group(1)
                raw_date = match.group(2)
                build_type = match.group(3).upper()

                if len(raw_date) == 8:
                    date_str = f"{raw_date[0:4]}-{raw_date[4:6]}-{raw_date[6:8]}"
            elif filename.endswith('.img'):
                version = "Image"
                build_type = "IMG"

            download_link = link if link.endswith('/download') else f"{link}/download"

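            # Entry shape consumed by process_device(); "_timestamp" is
            # internal sort metadata and is stripped before returning.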
            build_info = {
                "version": version,
                "build_type": build_type,
                "date": date_str,
                "download_link": download_link,
                "github_release": None,
                "changelog": "- Auto-fetched from SourceForge",
                "_timestamp": timestamp
            }
            builds.append(build_info)

    except urllib.error.HTTPError as e:
        if e.code == 404:
            return []  # Folder doesn't exist
        print(f" -> Warning: SF HTTP Error ({e.code})")
        return []
    except Exception as e:
        print(f" -> Warning: Could not fetch SF data ({e})")
        return []

    # Sort by timestamp descending (newest first)
    builds.sort(key=lambda x: x['_timestamp'], reverse=True)

    for b in builds:
        del b['_timestamp']

    return builds

def process_device(vendor, device_codename, build_source_codename, specific_data):
    """
    vendor: Manufacturer
    device_codename: The actual device (e.g. curtana)
    build_source_codename: Where the files are located (e.g. miatoll)
    specific_data: JSON object containing 'name', 'maintainer', etc.
    """
    vendor_lower = vendor.lower()
    vendor_dir = os.path.join(OEM_DIR, vendor_lower)
    md_path = os.path.join(vendor_dir, f"{device_codename}.md")

    # 1. Skip if the MD page already exists
    if os.path.exists(md_path):
        return False

    print(f"Checking: {vendor}/{device_codename} (Source: {build_source_codename})...")

    # 2. Check SourceForge using the BUILD SOURCE codename
    found_builds = fetch_sourceforge_builds(build_source_codename)

    if not found_builds:
        print(f" -> [SKIP] No builds found on SourceForge for '{build_source_codename}'")
        return False

    # If we are here, files exist on SF. Proceed to create pages.
    os.makedirs(vendor_dir, exist_ok=True)

    print(f" -> [NEW] Found {len(found_builds)} builds! Creating pages...")

    # 3. Create Markdown File
    # Use the specific name if available, otherwise capitalize the codename
    device_name = specific_data.get('name', device_codename.capitalize())

    # Clean maintainer name
    raw_maintainer = specific_data.get('maintainer', 'Unknown')
    maintainer = raw_maintainer.replace('@', '').strip()

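    # Assumes PBRP device trees follow the
    # "android_device_<vendor>_<codename>-pbrp" repo naming convention.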
    tree_url = f"https://github.com/PitchBlackRecoveryProject/android_device_{vendor_lower}_{device_codename}-pbrp"

    md_content = f"""---
layout: device
title: "{device_name}"
codename: {device_codename}
permalink: /{device_codename}/
oem: {vendor}
supportstatus: Current
maintainer: {maintainer}
devicetree: "{tree_url}"
---

{{% include disclaimer.html %}}

{{% include download.html %}}

{{% include pbrpinstall.html %}}
"""
    try:
        with open(md_path, 'w', encoding='utf-8') as f:
            f.write(md_content)
        print(f" -> Created MD: {md_path}")
    except Exception as e:
        print(f" -> Error creating MD: {e}")
        return False

    # 4. Create JSON Build File
    os.makedirs(JSON_DIR, exist_ok=True)

    json_path = os.path.join(JSON_DIR, f"builds-{device_codename}.json")

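    # Illustrative output shape: {"latest": <newest build>, "older_builds": [<rest>]}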
    json_data = {
        "latest": found_builds[0],
        # A slice past the end is simply empty, so no length check is needed
        "older_builds": found_builds[1:]
    }

    try:
        with open(json_path, 'w', encoding='utf-8') as f:
            json.dump(json_data, f, indent=2)
        print(f" -> Created JSON: {json_path}")
    except Exception as e:
        print(f" -> Error creating JSON: {e}")

    return True

def main():
    print("Connecting to master device list...")
    data = fetch_json(DEVICES_JSON_URL)
    if not data:
        return

    # 1. Build a map of Child -> Parent for unified devices
    #    e.g. {'curtana': 'miatoll', 'joyeuse': 'miatoll'}
    child_to_parent_map = {}

    for vendor, devices in data.items():
        for key, device_data in devices.items():
            if "unified" in device_data and isinstance(device_data["unified"], list):
                for child in device_data["unified"]:
                    # Map each child to the parent key (which matches the SF folder)
                    child_to_parent_map[child] = key

    processed_codenames = set()
    new_devices_count = 0

    # 2. First Pass: Process every device EXPLICITLY listed in the JSON.
    #    This ensures 'curtana' uses its own "name" field, even if it's unified.
    for vendor, devices in data.items():
        for key, device_data in devices.items():

            # Determine Build Source:
            # if this key is a child in the map, use the parent; otherwise use itself.
            build_source = child_to_parent_map.get(key, key)

            if process_device(vendor, key, build_source, device_data):
                new_devices_count += 1

            processed_codenames.add(key)

    # 3. Second Pass: Process "implicit" children:
    #    devices listed in a "unified" array but NOT having their own key in the JSON
    for vendor, devices in data.items():
        for key, device_data in devices.items():
            if "unified" in device_data and isinstance(device_data["unified"], list):
                parent_source = key
                parent_maintainer = device_data.get('maintainer', 'Unknown')

                for child in device_data["unified"]:
                    if child not in processed_codenames:
                        # Fallback data for an implicit child
                        fallback_data = {
                            "name": child.capitalize(),
                            "maintainer": parent_maintainer
                        }

                        if process_device(vendor, child, parent_source, fallback_data):
                            new_devices_count += 1

                        processed_codenames.add(child)

    print("-" * 30)
    print("Sync Complete.")
    print(f"New Devices Added: {new_devices_count}")


if __name__ == "__main__":
    main()