@@ -28,12 +28,7 @@ def fetch_json(url):
2828 return None
2929
def fetch_sourceforge_builds(target_codename):
    """
    Fetch the list of PBRP builds for ``target_codename`` from SourceForge.

    For unified devices this is the unified/parent name (e.g. 'miatoll'),
    which matches the SourceForge folder layout.

    Returns a list of build dicts sorted newest-first; each dict carries
    'version', 'build_type', 'date', 'download_link', 'github_release' and
    'changelog' keys.  Returns [] on any fetch or parse failure (best-effort:
    a failure is reported with a warning, never raised to the caller).
    """
    rss_url = f"https://sourceforge.net/projects/pbrp/rss?path=/{target_codename}"
    builds = []

    try:
        # NOTE(review): the Request(...) opening lines fell on a diff-hunk
        # boundary in the extracted source; reconstructed from the visible
        # keyword arguments and the urlopen(req) call — confirm against the
        # original file.
        req = urllib.request.Request(
            rss_url,
            data=None,
            headers={'User-Agent': 'PBRP-Manager/1.0'}
        )
        with urllib.request.urlopen(req) as response:
            xml_data = response.read().decode()

        root = ET.fromstring(xml_data)

        for item in root.findall('./channel/item'):
            title = item.find('title').text
            link = item.find('link').text
            pub_date_str = item.find('pubDate').text

            # Strict path check: only files directly under the device folder.
            if not title.startswith(f"/{target_codename}/"):
                continue

            filename = title.split('/')[-1]
            if not (filename.endswith('.zip') or filename.endswith('.img')):
                continue

            match = ZIP_META_PATTERN.search(filename)

            # Defaults used when the filename carries no parsable metadata.
            version = "Unknown"
            build_type = "UNOFFICIAL"
            date_str = "Unknown"
            timestamp = 0  # sort-key fallback; real value comes from pubDate

            try:
                dt = parsedate_to_datetime(pub_date_str)
                date_str = dt.strftime("%Y-%m-%d")
                timestamp = dt.timestamp()
            except (TypeError, ValueError):
                # Malformed or missing pubDate: keep the defaults.  (Was a
                # bare "except: pass", which also hid programming errors.)
                pass

            if match:
                version = match.group(1)
                raw_date = match.group(2)
                build_type = match.group(3).upper()
                # Prefer the YYYYMMDD date embedded in the filename over the
                # RSS publication date.
                if len(raw_date) == 8:
                    date_str = f"{raw_date[0:4]}-{raw_date[4:6]}-{raw_date[6:8]}"
            elif filename.endswith('.img'):
                # NOTE(review): the original two-line body of this branch was
                # lost on a diff-hunk boundary in the extracted source and
                # could not be reconstructed — restore it from the original
                # file.  Placeholder keeps the RSS-derived defaults.
                pass

            download_link = link if link.endswith('/download') else f"{link}/download"

            builds.append({
                "version": version,
                "build_type": build_type,
                "date": date_str,
                "download_link": download_link,
                "github_release": None,
                "changelog": "- Auto-fetched from SourceForge",
                "_timestamp": timestamp  # internal sort key, stripped below
            })

    except urllib.error.HTTPError as e:
        if e.code == 404:
            return []  # device folder doesn't exist on SourceForge
        print(f" -> Warning: SF HTTP Error ({e.code})")
        return []
    except Exception as e:
        # Best-effort fetch: any other failure (network, XML, missing RSS
        # fields) is reported and treated as "no builds" rather than
        # aborting the whole run.
        print(f" -> Warning: Could not fetch SF data ({e})")
        return []

    # Newest first, then drop the private sort key before returning.
    builds.sort(key=lambda x: x['_timestamp'], reverse=True)
    for b in builds:
        del b['_timestamp']

    return builds
11998
12099def process_device (vendor , device_codename , build_source_codename , specific_data ):
121- """
122- vendor: Manufacturer
123- device_codename: The actual device (e.g. curtana)
124- build_source_codename: Where files are located (e.g. miatoll)
125- specific_data: JSON object containing 'name', 'maintainer' etc.
126- """
127100 vendor_lower = vendor .lower ()
128101 vendor_dir = os .path .join (OEM_DIR , vendor_lower )
129102 md_path = os .path .join (vendor_dir , f"{ device_codename } .md" )
130103
131- # 1. Skip if MD exists
132- if os .path .exists (md_path ):
133- return False
104+ if os .path .exists (md_path ): return False
134105
135106 print (f"Checking: { vendor } /{ device_codename } (Source: { build_source_codename } )..." )
136107
137- # 2. Check SourceForge using the BUILD SOURCE codename
138108 found_builds = fetch_sourceforge_builds (build_source_codename )
139-
140109 if not found_builds :
141110 print (f" -> [SKIP] No builds found on SourceForge for '{ build_source_codename } '" )
142111 return False
143112
144- # If we are here, files exist on SF. Proceed to create pages.
145- if not os .path .exists (vendor_dir ):
146- os .makedirs (vendor_dir )
147-
113+ if not os .path .exists (vendor_dir ): os .makedirs (vendor_dir )
148114 print (f" -> [NEW] Found { len (found_builds )} builds! Creating pages..." )
149115
150- # 3. Create Markdown File
151- # Use specific name if available, otherwise capitalize codename
152116 device_name = specific_data .get ('name' , device_codename .capitalize ())
153-
154- # Clean maintainer name
155117 raw_maintainer = specific_data .get ('maintainer' , 'Unknown' )
156118 maintainer = raw_maintainer .replace ('@' , '' ).strip ()
157119
158- tree_url = f"https://github.com/PitchBlackRecoveryProject/android_device_{ vendor_lower } _{ device_codename } -pbrp"
120+ # Use build_source_codename (Parent) for the repo tree URL
121+ tree_url = f"https://github.com/PitchBlackRecoveryProject/android_device_{ vendor_lower } _{ build_source_codename } -pbrp"
159122
160123 md_content = f"""---
161124layout: device
@@ -175,17 +138,13 @@ def process_device(vendor, device_codename, build_source_codename, specific_data
175138{{% include pbrpinstall.html %}}
176139"""
177140 try :
178- with open (md_path , 'w' ) as f :
179- f .write (md_content )
141+ with open (md_path , 'w' ) as f : f .write (md_content )
180142 print (f" -> Created MD: { md_path } " )
181143 except Exception as e :
182144 print (f" -> Error creating MD: { e } " )
183145 return False
184146
185- # 4. Create JSON Build File
186- if not os .path .exists (JSON_DIR ):
187- os .makedirs (JSON_DIR )
188-
147+ if not os .path .exists (JSON_DIR ): os .makedirs (JSON_DIR )
189148 json_path = os .path .join (JSON_DIR , f"builds-{ device_codename } .json" )
190149
191150 json_data = {
@@ -194,8 +153,7 @@ def process_device(vendor, device_codename, build_source_codename, specific_data
194153 }
195154
196155 try :
197- with open (json_path , 'w' ) as f :
198- json .dump (json_data , f , indent = 2 )
156+ with open (json_path , 'w' ) as f : json .dump (json_data , f , indent = 2 )
199157 print (f" -> Created JSON: { json_path } " )
200158 except Exception as e :
201159 print (f" -> Error creating JSON: { e } " )
@@ -205,56 +163,37 @@ def process_device(vendor, device_codename, build_source_codename, specific_data
205163def main ():
206164 print (f"Connecting to master device list..." )
207165 data = fetch_json (DEVICES_JSON_URL )
208- if not data :
209- return
166+ if not data : return
210167
211- # 1. Build a map of Child -> Parent for unified devices
212- # e.g. {'curtana': 'miatoll', 'joyeuse': 'miatoll'}
213168 child_to_parent_map = {}
214-
215169 for vendor , devices in data .items ():
216170 for key , device_data in devices .items ():
217171 if "unified" in device_data and isinstance (device_data ["unified" ], list ):
218172 for child in device_data ["unified" ]:
219- # We map child to the parent key (which matches SF folder)
220173 child_to_parent_map [child ] = key
221174
222175 processed_codenames = set ()
223176 new_devices_count = 0
224177
225- # 2. First Pass: Process every device EXPLICITLY listed in the JSON
226- # This ensures 'curtana' uses its own "name" field, even if it's unified.
178+ # 1. Process Explicit Entries
227179 for vendor , devices in data .items ():
228180 for key , device_data in devices .items ():
229-
230- # Determine Build Source
231- # If this key is a child in the map, use parent. Otherwise use self.
232181 build_source = child_to_parent_map .get (key , key )
233-
234182 if process_device (vendor , key , build_source , device_data ):
235183 new_devices_count += 1
236-
237184 processed_codenames .add (key )
238185
239- # 3. Second Pass: Process "Implicit" children
240- # Devices listed in a "unified" array but NOT having their own key in the JSON
186+ # 2. Process Implicit Children
241187 for vendor , devices in data .items ():
242188 for key , device_data in devices .items ():
243189 if "unified" in device_data and isinstance (device_data ["unified" ], list ):
244190 parent_source = key
245191 parent_maintainer = device_data .get ('maintainer' , 'Unknown' )
246-
247192 for child in device_data ["unified" ]:
248193 if child not in processed_codenames :
249- # Fallback data for implicit child
250- fallback_data = {
251- "name" : child .capitalize (),
252- "maintainer" : parent_maintainer
253- }
254-
194+ fallback_data = {"name" : child .capitalize (), "maintainer" : parent_maintainer }
255195 if process_device (vendor , child , parent_source , fallback_data ):
256196 new_devices_count += 1
257-
258197 processed_codenames .add (child )
259198
260199 print ("-" * 30 )
0 commit comments