from datetime import datetime
from unittest.mock import AsyncMock

import pytest

from src.crawlers.dto import NewsItemDTO
from src.orchestrator.service import TrendScoutService
from src.processor.dto import EnrichedNewsItemDTO
@pytest.mark.asyncio
async def test_run_iteration():
    """run_iteration crawls once, analyzes and stores every fetched item,
    and sends alerts only for high-relevance or anomalous results."""
    # Arrange: every collaborator of the service is an async mock.
    crawler = AsyncMock()
    processor = AsyncMock()
    storage = AsyncMock()
    notifier = AsyncMock()

    base_item = NewsItemDTO(
        title="Test Title",
        url="http://example.com",
        content_text="Sample text",
        source="Source",
        timestamp=datetime.now(),
    )

    def enriched(score, anomalies):
        # Enriched copy of the base item carrying a given analysis outcome.
        return EnrichedNewsItemDTO(
            **base_item.model_dump(),
            relevance_score=score,
            summary_ru="Summary",
            anomalies_detected=anomalies,
        )

    crawler.fetch_latest.return_value = [base_item] * 3

    # One analysis result per crawled item: high relevance, anomaly, neither.
    processor.analyze.side_effect = [
        enriched(8, []),
        enriched(5, ["Anomaly"]),
        enriched(5, []),
    ]

    service = TrendScoutService(
        crawlers=[crawler],
        processor=processor,
        storage=storage,
        notifier=notifier,
    )

    # Act
    await service.run_iteration()

    # Assert: every item flows through analysis and storage ...
    crawler.fetch_latest.assert_called_once()
    assert processor.analyze.call_count == 3
    assert storage.store.call_count == 3

    # ... but only the high-relevance and the anomalous item trigger an alert.
    assert notifier.send_alert.call_count == 2