Skip to content

Commit 3345b1f

Browse files
committed
fix: final gemini cleanup (consistent returns + type hints + utcnow)
1 parent 9332c77 commit 3345b1f

2 files changed

Lines changed: 23 additions & 55 deletions

File tree

modules/github_fetcher.py

Lines changed: 19 additions & 35 deletions
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@ async def validate_github_username(username: str) -> bool:
5252
data = response.json()
5353
return data.get('type') == 'User'
5454
except httpx.HTTPError:
55-
return True
55+
return False # API failed → treat as invalid (safer)
5656

5757
@staticmethod
5858
async def fetch_user_profile(username: str) -> dict:
@@ -68,11 +68,7 @@ async def fetch_user_profile(username: str) -> dict:
6868
"query": f"""
6969
query {{
7070
user(login: "{username}") {{
71-
name
72-
bio
73-
location
74-
avatarUrl
75-
url
71+
name bio location avatarUrl url
7672
followers {{ totalCount }}
7773
following {{ totalCount }}
7874
repository(name: "{username}") {{
@@ -97,28 +93,19 @@ async def fetch_user_profile(username: str) -> dict:
9793
}
9894

9995
async with httpx.AsyncClient() as client:
100-
graphql_response = await client.post(
96+
resp = await client.post(
10197
"https://api.github.com/graphql",
102-
headers={
103-
"Authorization": f"Bearer {Settings.get_github_token()}",
104-
"Content-Type": "application/json"
105-
},
98+
headers={"Authorization": f"Bearer {Settings.get_github_token()}", "Content-Type": "application/json"},
10699
json=graphql_query
107100
)
108-
graphql_response.raise_for_status()
101+
resp.raise_for_status()
109102

110-
graphql_data = graphql_response.json().get('data', {}).get('user', {})
103+
graphql_data = resp.json().get('data', {}).get('user', {})
111104
if not graphql_data:
112105
raise ValueError(f"User '{username}' not found")
113106

114-
pr_merged_last_year = sum(
115-
1 for pr in graphql_data.get('pullRequests', {}).get('nodes', [])
116-
if pr and datetime.strptime(pr['createdAt'], '%Y-%m-%dT%H:%M:%SZ') > one_year_ago_dt
117-
)
118-
issues_closed_last_year = sum(
119-
1 for issue in graphql_data.get('issues', {}).get('nodes', [])
120-
if issue and datetime.strptime(issue['createdAt'], '%Y-%m-%dT%H:%M:%SZ') > one_year_ago_dt
121-
)
107+
pr_merged = sum(1 for pr in graphql_data.get('pullRequests', {}).get('nodes', []) if pr and datetime.strptime(pr['createdAt'], '%Y-%m-%dT%H:%M:%SZ') > one_year_ago_dt)
108+
issues_closed = sum(1 for issue in graphql_data.get('issues', {}).get('nodes', []) if issue and datetime.strptime(issue['createdAt'], '%Y-%m-%dT%H:%M:%SZ') > one_year_ago_dt)
122109

123110
return {
124111
'username': username,
@@ -130,18 +117,14 @@ async def fetch_user_profile(username: str) -> dict:
130117
'followers': graphql_data.get('followers', {}).get('totalCount', 0),
131118
'following': graphql_data.get('following', {}).get('totalCount', 0),
132119
'public_repos': graphql_data.get('repositories', {}).get('totalCount', 0),
133-
'pull_requests_merged': pr_merged_last_year if pr_merged_last_year < 100 else f"{100}+",
134-
'issues_closed': issues_closed_last_year if issues_closed_last_year < 100 else f"{100}+",
120+
'pull_requests_merged': pr_merged if pr_merged < 100 else f"{100}+",
121+
'issues_closed': issues_closed if issues_closed < 100 else f"{100}+",
135122
'achievements': {
136-
'total_contributions': graphql_data.get('contributionsCollection', {})
137-
.get('contributionCalendar', {})
138-
.get('totalContributions', 0),
139-
'repositories_contributed_to': graphql_data.get('repositoriesContributedTo', {})
140-
.get('totalCount', 0),
123+
'total_contributions': graphql_data.get('contributionsCollection', {}).get('contributionCalendar', {}).get('totalContributions', 0),
124+
'repositories_contributed_to': graphql_data.get('repositoriesContributedTo', {}).get('totalCount', 0),
141125
},
142126
'social_accounts': await GitHubProfileFetcher.social_accounts(username),
143-
'readme_content': (graphql_data.get('repository', {}).get('object', {}).get('text', '')
144-
if graphql_data.get('repository') and graphql_data.get('repository', {}).get('object') else '')
127+
'readme_content': graphql_data.get('repository', {}).get('object', {}).get('text', '')
145128
}
146129

147130
except httpx.HTTPStatusError as e:
@@ -154,8 +137,8 @@ async def fetch_user_profile(username: str) -> dict:
154137
return {"error": "An unexpected error occurred"}
155138

156139
@staticmethod
157-
async def social_accounts(username):
158-
"""Fetch social accounts. Returns list or {"error": "..."} for consistency."""
140+
async def social_accounts(username) -> list:
141+
"""Fetch social accounts. Always returns list (empty on error)."""
159142
try:
160143
base_url = f"https://api.github.com/users/{username}/social_accounts"
161144
async with httpx.AsyncClient() as client:
@@ -171,13 +154,14 @@ async def social_accounts(username):
171154
except httpx.HTTPStatusError as e:
172155
if e.response.status_code == 404:
173156
return await GitHubProfileFetcher.get_social_from_readme(username)
174-
return {"error": f"HTTP Error: {e.response.status_code}"}
157+
logger.warning("HTTP error fetching social accounts for %s: %s", username, e)
158+
return []
175159
except Exception as e:
176160
logger.exception("Unexpected error in social_accounts for user %s", username)
177-
return {"error": "Failed to fetch social accounts"}
161+
return []
178162

179163
@staticmethod
180-
async def get_social_from_readme(username):
164+
async def get_social_from_readme(username) -> list:
181165
"""Extract LinkedIn link from README (simplified for reliability)"""
182166
try:
183167
url = f"https://api.github.com/repos/{username}/{username}/readme"

utils/user.py

Lines changed: 4 additions & 20 deletions
Original file line number | Diff line number | Diff line change
@@ -26,28 +26,19 @@ async def verify_username(
2626
) -> str:
2727
"""Validate GitHub username format and existence"""
2828
if not await GitHubProfileFetcher.validate_github_username(username):
29-
raise HTTPException(
30-
status_code=400,
31-
detail="Invalid GitHub username. Usernames must be 1-39 characters long and can only contain alphanumeric characters and single hyphens."
32-
)
29+
raise HTTPException(status_code=400, detail="Invalid GitHub username.")
3330
return username
3431

3532

3633
async def verify_linkedin_username(
3734
username: Annotated[
3835
str,
39-
Path(
40-
min_length=1,
41-
pattern=r'^[\w\-]+$'
42-
)
36+
Path(min_length=1, pattern=r'^[\w\-]+$')
4337
]
4438
) -> str:
4539
"""Validate LinkedIn username format"""
4640
if not LinkedInProfileFetcher._validate_linkedin_username(username):
47-
raise HTTPException(
48-
status_code=400,
49-
detail="Invalid LinkedIn username. Username can only contain letters, numbers, and hyphens."
50-
)
41+
raise HTTPException(status_code=400, detail="Invalid LinkedIn username.")
5142
return username
5243

5344

@@ -60,22 +51,15 @@ async def get_user_data(username: str, force: bool = True) -> dict:
6051
if res.status_code == 200:
6152
return res.json()
6253

63-
# Parallel fetch
6454
profile_data, contributions_data = await asyncio.gather(
6555
GitHubProfileFetcher.fetch_user_profile(username),
66-
asyncio.to_thread(
67-
GitHubContributionsFetcher.fetch_recent_contributions,
68-
username,
69-
Settings.CONTRIBUTION_DAYS
70-
)
56+
asyncio.to_thread(GitHubContributionsFetcher.fetch_recent_contributions, username, Settings.CONTRIBUTION_DAYS)
7157
)
7258

7359
if "error" in profile_data:
7460
return profile_data
7561

7662
ai_generator = AIDescriptionGenerator()
77-
78-
# Robust parallel AI summaries
7963
ai_tasks = [asyncio.to_thread(ai_generator.generate_profile_summary, profile_data)]
8064
if contributions_data:
8165
ai_tasks.append(asyncio.to_thread(ai_generator.generate_activity_summary, contributions_data))

0 commit comments

Comments (0)