Skip to content

Commit ed37398

Browse files
Merge pull request #15
v3.11.8
2 parents 5a11eef + 1457f06 commit ed37398

8 files changed

Lines changed: 110 additions & 69 deletions

File tree

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,13 @@
11
# Changelog
22

3+
## v3.11.8
4+
5+
### Changes
6+
7+
* Update parser to support FurAffinity's new display names feature
8+
* Added `UserPartial.display_name` and `User.display_name`
9+
* Full support coming in the next minor update
10+
311
## v3.11.7
412

513
### Changes

faapi/comment.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -131,6 +131,7 @@ def parse(self, comment_tag: Optional[Tag] = None):
131131
self.date = datetime.fromtimestamp(parsed["timestamp"])
132132
self.author = faapi.user.UserPartial()
133133
self.author.name = parsed["user_name"]
134+
self.author.display_name = parsed["user_display_name"]
134135
self.author.title = parsed["user_title"]
135136
self.author.avatar_url = parsed["avatar_url"]
136137
self.text = parsed["text"]

faapi/journal.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,7 @@ def parse(self, journal_tag: Optional[Union[Tag, BeautifulSoup]] = None):
141141
self.id = parsed["id"]
142142
self.title = parsed["title"]
143143
self.author.name = parsed.get("user_name", "")
144+
self.author.display_name = parsed.get("user_display_name", "")
144145
self.author.status = parsed.get("user_status", "")
145146
self.author.title = parsed.get("user_title", "")
146147
self.author.join_date = parsed.get("user_join_date", "")
@@ -220,6 +221,7 @@ def parse(self, journal_page: Optional[Union[Tag, BeautifulSoup]] = None):
220221
self.id = parsed["id"]
221222
self.title = parsed["title"]
222223
self.author.name = parsed["user_info"]["name"]
224+
self.author.display_name = parsed["user_info"]["display_name"]
223225
self.author.status = parsed["user_info"]["status"]
224226
self.author.title = parsed["user_info"]["title"]
225227
self.author.join_date = parsed["user_info"]["join_date"]

faapi/parse.py

Lines changed: 88 additions & 63 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from datetime import datetime
2-
from re import MULTILINE
3-
from re import Match
4-
from re import Pattern
52
from re import compile as re_compile
3+
from re import Match
64
from re import match
5+
from re import MULTILINE
6+
from re import Pattern
77
from re import search
88
from re import sub
99
from typing import Any
@@ -19,14 +19,14 @@
1919
from urllib3.util import parse_url
2020

2121
from .connection import root
22+
from .exceptions import _raise_exception
2223
from .exceptions import DisabledAccount
23-
from .exceptions import NoTitle
2424
from .exceptions import NonePage
2525
from .exceptions import NotFound
2626
from .exceptions import NoticeMessage
27+
from .exceptions import NoTitle
2728
from .exceptions import ParsingError
2829
from .exceptions import ServerError
29-
from .exceptions import _raise_exception
3030

3131
relative_url: Pattern = re_compile(r"^(?:https?://(?:www\.)?furaffinity\.net)?(.*)")
3232
mentions_regexp: Pattern = re_compile(r"^(?:(?:https?://)?(?:www\.)?furaffinity\.net)?/user/([^/#]+).*$")
@@ -130,9 +130,11 @@ def html_to_bbcode(html: str) -> str:
130130

131131
for a in body.select("a"):
132132
href_match: Optional[Match] = relative_url.match(a.attrs.get('href', ''))
133-
a.replaceWith(f"[url={href_match[1] if href_match else a.attrs.get('href', '')}]",
134-
*a.children,
135-
"[/url]")
133+
a.replaceWith(
134+
f"[url={href_match[1] if href_match else a.attrs.get('href', '')}]",
135+
*a.children,
136+
"[/url]"
137+
)
136138

137139
for yt in body.select("iframe[src*='youtube.com/embed']"):
138140
yt.replaceWith(f"[yt]https://youtube.com/embed/{yt.attrs.get('src', '').strip('/').split('/')}[/yt]")
@@ -144,31 +146,33 @@ def html_to_bbcode(html: str) -> str:
144146
quote_name_tag.replaceWith(quote_author)
145147
continue
146148
quote_name_tag.decompose()
147-
quote_tag.replaceWith(f"[quote{('=' + quote_author) if quote_author else ''}]",
148-
*quote_tag.children,
149-
"[/quote]")
149+
quote_tag.replaceWith(
150+
f"[quote{('=' + quote_author) if quote_author else ''}]",
151+
*quote_tag.children,
152+
"[/quote]"
153+
)
150154

151155
for quote_tag in body.select("span.bbcode.bbcode_quote"):
152156
quote_tag.replaceWith("[quote]", *quote_tag.children, "[/quote]")
153157

154158
for [selector, bbcode_tag] in (
155-
("i", "i"),
156-
("b", "b"),
157-
("strong", "b"),
158-
("u", "u"),
159-
("s", "s"),
160-
("code.bbcode_left", "left"),
161-
("code.bbcode_center", "center"),
162-
("code.bbcode_right", "right"),
163-
("span.bbcode_spoiler", "spoiler"),
164-
("sub", "sub"),
165-
("sup", "sup"),
166-
("h1", "h1"),
167-
("h2", "h2"),
168-
("h3", "h3"),
169-
("h4", "h4"),
170-
("h5", "h5"),
171-
("h6", "h6"),
159+
("i", "i"),
160+
("b", "b"),
161+
("strong", "b"),
162+
("u", "u"),
163+
("s", "s"),
164+
("code.bbcode_left", "left"),
165+
("code.bbcode_center", "center"),
166+
("code.bbcode_right", "right"),
167+
("span.bbcode_spoiler", "spoiler"),
168+
("sub", "sub"),
169+
("sup", "sup"),
170+
("h1", "h1"),
171+
("h2", "h2"),
172+
("h3", "h3"),
173+
("h4", "h4"),
174+
("h5", "h5"),
175+
("h6", "h6"),
172176
):
173177
for tag in body.select(selector):
174178
tag.replaceWith(f"[{bbcode_tag}]", *tag.children, f"[/{bbcode_tag}]")
@@ -183,25 +187,27 @@ def html_to_bbcode(html: str) -> str:
183187
if not (div_class := tag.attrs.get("class", None)):
184188
tag.replaceWith(f"[tag={tag.name}]", *tag.children, "[/tag.{tag.name}]")
185189
else:
186-
tag.replaceWith(f"[tag={tag.name}.{' '.join(div_class) if isinstance(div_class, list) else div_class}]",
187-
*tag.children,
188-
"[/tag]")
190+
tag.replaceWith(
191+
f"[tag={tag.name}.{' '.join(div_class) if isinstance(div_class, list) else div_class}]",
192+
*tag.children,
193+
"[/tag]"
194+
)
189195

190196
bbcode: str = body.decode_contents()
191197

192198
bbcode = sub(" *$", "", bbcode, flags=MULTILINE)
193199
bbcode = sub("^ *", "", bbcode, flags=MULTILINE)
194200

195201
for char, substitution in (
196-
("©", "(c)"),
197-
("™", "(tm)"),
198-
("®", "(r)"),
199-
("©", "(c)"),
200-
("®", "(tm)"),
201-
("™", "(r)"),
202-
("&lt;", "<"),
203-
("&gt;", ">"),
204-
("&amp;", "&"),
202+
("©", "(c)"),
203+
("™", "(tm)"),
204+
("®", "(r)"),
205+
("&copy;", "(c)"),
206+
("&reg;", "(tm)"),
207+
("&trade;", "(r)"),
208+
("&lt;", "<"),
209+
("&gt;", ">"),
210+
("&amp;", "&"),
205211
):
206212
bbcode = bbcode.replace(char, substitution)
207213

@@ -251,8 +257,11 @@ def parse_extra(page: BeautifulSoup) -> BeautifulSoup:
251257
child_new = Tag(name="a", attrs={"class": "iconusername", "href": f"/user/{user}"})
252258
child_new_img: Tag = Tag(
253259
name="img",
254-
attrs={"alt": user, "title": user,
255-
"src": f"//a.furaffinity.net/{datetime.now():%Y%m%d}/{username_url(user)}.gif"})
260+
attrs={
261+
"alt": user, "title": user,
262+
"src": f"//a.furaffinity.net/{datetime.now():%Y%m%d}/{username_url(user)}.gif"
263+
}
264+
)
256265
child_new.insert(0, child_new_img)
257266
if m_[2]:
258267
child_new.insert(1, f"\xA0{m_[2]}")
@@ -455,22 +464,26 @@ def parse_submission_author(author_tag: Tag) -> dict[str, Any]:
455464

456465
assert tag_author is not None, _raise_exception(ParsingError("Missing author tag"))
457466

458-
tag_author_name: Optional[Tag] = tag_author.select_one("span.c-usernameBlockSimple > a")
467+
tag_author_name: Optional[Tag] = tag_author.select_one("span.c-usernameBlockSimple__displayName")
459468
tag_author_icon: Optional[Tag] = author_tag.select_one("img.submission-user-icon")
460469

461470
assert tag_author_name is not None, _raise_exception(ParsingError("Missing author name tag"))
462471
assert tag_author_icon is not None, _raise_exception(ParsingError("Missing author icon tag"))
463472

464-
author_name: str = get_attr(tag_author_name, "href").strip().split('/')[-2]
465-
author_title: str = ([*filter(bool, [child.strip()
466-
for child in tag_author.children
467-
if isinstance(child, NavigableString)][3:])] or [""])[-1]
473+
author_name: str = tag_author_name.attrs["title"].strip()
474+
author_display_name: str = tag_author_name.text.strip()
475+
author_title: str = ([*filter(
476+
bool, [child.strip()
477+
for child in tag_author.children
478+
if isinstance(child, NavigableString)][3:]
479+
)] or [""])[-1]
468480
author_title = author_title if tag_author.select_one('a[href$="/#tip"]') is None else sub(r"\|$", "", author_title)
469481
author_title = author_title.strip("\xA0 ") # NBSP
470482
author_icon_url: str = "https:" + get_attr(tag_author_icon, "src")
471483

472484
return {
473485
"author": author_name,
486+
"author_display_name": author_display_name,
474487
"author_title": author_title,
475488
"author_icon_url": author_icon_url,
476489
}
@@ -564,21 +577,25 @@ def parse_submission_page(sub_page: BeautifulSoup) -> dict[str, Any]:
564577
thumbnail_url = f"{thumbnail_url.rsplit('/', 1)[0]}/{quote(thumbnail_url.rsplit('/', 1)[1])}" \
565578
if thumbnail_url else ""
566579
prev_sub: Optional[int] = int(
567-
get_attr(tag_prev, "href").split("/")[-2]) if tag_prev and tag_prev.text.lower() == "prev" else None
580+
get_attr(tag_prev, "href").split("/")[-2]
581+
) if tag_prev and tag_prev.text.lower() == "prev" else None
568582
next_sub: Optional[int] = int(
569-
get_attr(tag_next, "href").split("/")[-2]) if tag_next and tag_next.text.lower() == "next" else None
583+
get_attr(tag_next, "href").split("/")[-2]
584+
) if tag_next and tag_next.text.lower() == "next" else None
570585
fav_link: Optional[str] = f"{root}{href}" if (href := get_attr(tag_fav, "href")).startswith("/fav/") else None
571586
unfav_link: Optional[str] = f"{root}{href}" if (href := get_attr(tag_fav, "href")).startswith("/unfav/") else None
572587
user_folders: list[tuple[str, str, str]] = []
573588
for a in tag_user_folders:
574589
tag_folder_name: Optional[Tag] = a.select_one("span")
575590
tag_folder_group: Optional[Tag] = a.select_one("strong")
576591
assert tag_folder_name is not None, _raise_exception(ParsingError("Missing folder name tag"))
577-
user_folders.append((
578-
tag_folder_name.text.strip(),
579-
(root + href) if (href := a.attrs.get("href", "")) else "",
580-
tag_folder_group.text.strip() if tag_folder_group else ""
581-
))
592+
user_folders.append(
593+
(
594+
tag_folder_name.text.strip(),
595+
(root + href) if (href := a.attrs.get("href", "")) else "",
596+
tag_folder_group.text.strip() if tag_folder_group else ""
597+
)
598+
)
582599

583600
return {
584601
"id": id_,
@@ -609,19 +626,21 @@ def parse_submission_page(sub_page: BeautifulSoup) -> dict[str, Any]:
609626

610627

611628
def parse_user_header(user_header: Tag) -> dict[str, Any]:
612-
tag_status: Optional[Tag] = user_header.select_one("a.c-usernameBlock__userName")
629+
tag_user_name: Optional[Tag] = user_header.select_one("a.c-usernameBlock__userName")
630+
tag_user_display_name: Optional[Tag] = user_header.select_one("a.c-usernameBlock__displayName")
613631
tag_title_join_date: Optional[Tag] = user_header.select_one("userpage-nav-user-details span.user-title")
614632
tag_avatar: Optional[Tag] = user_header.select_one("userpage-nav-avatar img")
615633

616-
assert tag_status is not None, _raise_exception(ParsingError("Missing name tag"))
634+
assert tag_user_name is not None, _raise_exception(ParsingError("Missing user name tag"))
635+
assert tag_user_display_name is not None, _raise_exception(ParsingError("Missing user display name tag"))
617636
assert tag_title_join_date is not None, _raise_exception(ParsingError("Missing join date tag"))
618637
assert tag_avatar is not None, _raise_exception(ParsingError("Missing user icon tag"))
619638

620-
status: str = ""
621-
name: str = tag_status.text.strip()
639+
tag_user_symbol: Optional[Tag] = tag_user_name.select_one("span.c-usernameBlock__symbol")
622640

623-
if not user_header.select_one("img.type-admin"):
624-
status, name = name[0], name[1:]
641+
status: str = tag_user_symbol.text.strip() if tag_user_symbol else ""
642+
name: str = tag_user_name.text.strip().removeprefix(status).strip()
643+
display_name: str = tag_user_display_name.text.strip()
625644

626645
title: str = ttd[0].strip() if len(ttd := tag_title_join_date.text.rsplit("|", 1)) > 1 else ""
627646
join_date: datetime = parse_date(ttd[-1].strip().split(":", 1)[1])
@@ -631,6 +650,7 @@ def parse_user_header(user_header: Tag) -> dict[str, Any]:
631650
return {
632651
"status": status,
633652
"name": name,
653+
"display_name": display_name,
634654
"title": title,
635655
"join_date": join_date,
636656
"avatar_url": avatar_url,
@@ -712,7 +732,9 @@ def parse_user_page(user_page: BeautifulSoup) -> dict[str, Any]:
712732

713733
def parse_comment_tag(tag: Tag) -> dict:
714734
tag_id: Optional[Tag] = tag.select_one("a.comment_anchor")
715-
tag_username: Optional[Tag] = tag.select_one("comment-username .comment_username")
735+
tag_user_name: Optional[Tag] = tag.select_one("comment-username a.c-usernameBlock__userName")
736+
tag_user_symbol: Optional[Tag] = tag_user_name.select_one(".c-usernameBlock__symbol") if tag_user_name else None
737+
tag_user_display_name: Optional[Tag] = tag.select_one("comment-username a.c-usernameBlock__displayName")
716738
tag_avatar: Optional[Tag] = tag.select_one("div.avatar img.comment_useravatar")
717739
tag_user_title: Optional[Tag] = tag.select_one("comment-title")
718740
tag_body: Optional[Tag] = tag.select_one("comment-user-text")
@@ -730,7 +752,7 @@ def parse_comment_tag(tag: Tag) -> dict:
730752
comment_id: int = int(attr_id.removeprefix("cid:"))
731753
comment_text: str = clean_html(inner_html(tag_body))
732754

733-
if tag_username is None:
755+
if tag_user_name is None or tag_user_display_name is None:
734756
return {
735757
"id": comment_id,
736758
"user_name": "",
@@ -764,7 +786,10 @@ def parse_comment_tag(tag: Tag) -> dict:
764786

765787
return {
766788
"id": comment_id,
767-
"user_name": tag_username.text.strip(),
789+
"user_name": tag_user_name.text.strip().removeprefix(
790+
tag_user_symbol.text.strip() if tag_user_symbol else ""
791+
).strip(),
792+
"user_display_name": tag_user_display_name.text.strip(),
768793
"user_title": tag_user_title.text.strip(),
769794
"avatar_url": avatar_url,
770795
"timestamp": int(attr_timestamp),

faapi/submission.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -257,6 +257,7 @@ def parse(self, submission_page: Optional[BeautifulSoup] = None):
257257
self.id = parsed["id"]
258258
self.title = parsed["title"]
259259
self.author.name = parsed["author"]
260+
self.author.display_name = parsed["author_display_name"]
260261
self.author.title = parsed["author_title"]
261262
self.author.avatar_url = parsed["author_icon_url"]
262263
self.date = parsed["date"]

faapi/user.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ class UserBase:
3737

3838
def __init__(self):
3939
self.name: str = ""
40+
self.display_name: str = ""
4041
self.status: str = ""
4142

4243
def __hash__(self) -> int:
@@ -79,6 +80,7 @@ def __le__(self, other) -> bool:
7980

8081
def __iter__(self):
8182
yield "name", self.name
83+
yield "display_name", self.display_name
8284
yield "status", self.status
8385

8486
def __repr__(self):
@@ -195,6 +197,7 @@ def __init__(self, user_page: Optional[BeautifulSoup] = None):
195197

196198
def __iter__(self):
197199
yield "name", self.name
200+
yield "display_name", self.display_name
198201
yield "status", self.status
199202
yield "title", self.title
200203
yield "join_date", self.join_date
@@ -236,6 +239,7 @@ def parse(self, user_page: Optional[BeautifulSoup] = None):
236239
parsed: dict = parse_user_page(self.user_page)
237240

238241
self.name = parsed["name"]
242+
self.display_name = parsed["display_name"]
239243
self.status = parsed["status"]
240244
self.profile = parsed["profile"]
241245
self.title = parsed["title"]

tests/test_faapi.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ def test_user(cookies: RequestsCookieJar, user_test_data: dict):
120120
user = api.user(user_test_data["name"])
121121
user_dict = dict(user)
122122

123-
assert user.name == user_dict["name"] == user_test_data["name"]
123+
assert user.name.lower() == user_dict["name"].lower() == user_test_data["name"].lower()
124124
assert user.status == user_dict["status"] == user_test_data["status"]
125125
assert user.title == user_dict["title"] == user_test_data["title"]
126126
assert user.join_date == user_dict["join_date"] == datetime.fromisoformat(user_test_data["join_date"]) + dst_us()
@@ -155,7 +155,7 @@ def test_submission(cookies: RequestsCookieJar, submission_test_data: dict):
155155

156156
assert submission.id == submission_dict["id"] == submission_test_data["id"]
157157
assert submission.title == submission_dict["title"] == submission_test_data["title"]
158-
assert submission.author.name == submission_dict["author"]["name"] == submission_test_data["author"]["name"]
158+
assert submission.author.name.lower() == submission_dict["author"]["name"].lower() == submission_test_data["author"]["name"].lower()
159159
assert submission.author.avatar_url == submission_dict["author"]["avatar_url"] != ""
160160
assert submission.date == submission_dict["date"] == datetime.fromisoformat(submission_test_data["date"]) + dst_us()
161161
assert submission.tags == submission_dict["tags"] == submission_test_data["tags"]
@@ -215,7 +215,7 @@ def test_journal(cookies: RequestsCookieJar, journal_test_data: dict):
215215

216216
assert journal.id == journal_dict["id"] == journal_test_data["id"]
217217
assert journal.title == journal_dict["title"] == journal_test_data["title"]
218-
assert journal.author.name == journal_dict["author"]["name"] == journal_test_data["author"]["name"]
218+
assert journal.author.name.lower() == journal_dict["author"]["name"].lower() == journal_test_data["author"]["name"].lower()
219219
assert journal.author.join_date == journal_dict["author"]["join_date"] == \
220220
datetime.fromisoformat(journal_test_data["author"]["join_date"]) + dst_us()
221221
assert journal.author.avatar_url == journal_dict["author"]["avatar_url"] != ""

0 commit comments

Comments
 (0)