1+ from datetime import datetime , timezone
2+
13import pytest
24
35from video_rss_aggregator .application .ports import FetchedFeed , FetchedFeedEntry
46from video_rss_aggregator .application .use_cases .ingest_feed import IngestFeed
7+ from video_rss_aggregator .domain .outcomes import Failure
58
69
class FakeFeedSource:
    """Feed source stub that serves two fixed entries, truncated to max_items.

    Defect fixed: the block contained leftover pre-change diff lines — a stale
    ``assert max_items == 1`` and a duplicate ``entries=`` keyword argument —
    which made it a syntax/logic error. Only the current slicing logic remains.
    """

    async def fetch(self, feed_url: str, max_items: int | None = None):
        entries = (
            FetchedFeedEntry(source_url="https://example.com/1", title="One", guid="1"),
            FetchedFeedEntry(source_url="https://example.com/2", title="Two", guid="2"),
        )
        return FetchedFeed(
            title="Example Feed",
            site_url="https://example.com",
            # None means "no limit": return every entry unsliced.
            entries=entries[:max_items] if max_items is not None else entries,
        )
1921
2022
# ... lines elided (diff hunk: async def save_feed_item(self, feed_url: str, entry: FetchedFeedEntry) -> None) ...
3739class FakeProcessSource :
3840 def __init__ (self ) -> None :
3941 self .calls : list [tuple [str , str | None ]] = []
42+ self .results : dict [str , object ] = {}
4043
4144 async def execute (self , source_url : str , title : str | None ):
4245 self .calls .append ((source_url , title ))
46+ return self .results .get (source_url )
4347
4448
4549@pytest .fixture
# ... lines elided (diff hunk: async def test_ingest_feed_skips_entries_without_source_url() -> None) ...
144148 )
145149 ]
146150 assert process_source .calls == [("https://example.com/valid" , None )]
151+
152+
@pytest.mark.anyio
async def test_ingest_feed_counts_only_non_failure_results_as_processed() -> None:
    """An entry whose processing yields a Failure must not count as processed."""
    process_source = FakeProcessSource()
    process_source.results = {
        "https://example.com/2": Failure(
            source_url="https://example.com/2", reason="download failed"
        )
    }
    use_case = IngestFeed(
        feed_source=FakeFeedSource(),
        feeds=FakeFeedRepository(),
        videos=FakeVideoRepository(),
        process_source=process_source,
    )

    report = await use_case.execute(
        "https://example.com/feed.xml", process=True, max_items=2
    )

    # Both entries are ingested, but only the non-failing one is processed.
    assert report.item_count == 2
    assert report.processed_count == 1
176+
177+
class FakeFeedSourceWithPublishedEntries:
    """Feed source stub whose single entry carries a publication timestamp."""

    async def fetch(self, feed_url: str, max_items: int | None = None):
        entry = FetchedFeedEntry(
            source_url="https://example.com/published",
            title="Published item",
            guid="published-guid",
            published_at=datetime(2024, 1, 2, 3, 4, tzinfo=timezone.utc),
        )
        return FetchedFeed(
            title="Published Feed",
            site_url="https://example.com",
            entries=(entry,),
        )
192+
193+
@pytest.mark.anyio
async def test_ingest_feed_preserves_publication_timestamps() -> None:
    """published_at from the fetched entry survives both save paths untouched."""
    feeds = FakeFeedRepository()
    videos = FakeVideoRepository()
    use_case = IngestFeed(
        feed_source=FakeFeedSourceWithPublishedEntries(),
        feeds=feeds,
        videos=videos,
        process_source=FakeProcessSource(),
    )

    await use_case.execute("https://example.com/feed.xml", process=False)

    expected = datetime(2024, 1, 2, 3, 4, tzinfo=timezone.utc)
    _, saved_feed = feeds.saved[0]
    _, saved_entry = videos.saved[0]

    assert saved_feed.entries[0].published_at == expected
    assert saved_entry.published_at == expected