Commit 4f65cde

Cleans up tests
1 parent 9e4a251 commit 4f65cde

3 files changed: +24 -14 lines changed

tests/conftest.py

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+import asyncio
+import pytest
+
+@pytest.fixture(scope="session")
+def event_loop():
+    """Create a single event loop for the entire test session to avoid it being closed between tests."""
+    loop = asyncio.new_event_loop()
+    yield loop
+    loop.close()
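
Sharing a single loop for the whole session also allows loop-bound, session-scoped async fixtures to survive across tests. Below is a minimal sketch of the kind of fixture this override enables, assuming pytest-asyncio and aiohttp are available; the shared_http_session name is hypothetical and not part of this commit.

import aiohttp
import pytest_asyncio


@pytest_asyncio.fixture(scope="session")
async def shared_http_session():
    # Hypothetical fixture: because conftest.py keeps one session-scoped event
    # loop alive, this client can be created once and reused by every async
    # test instead of breaking when a per-test loop is closed.
    async with aiohttp.ClientSession() as session:
        yield session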

tests/integration_tests/test_scraper.py

Lines changed: 3 additions & 0 deletions
@@ -45,6 +45,7 @@ def test_scraper_tool_run_text(scraper_tool):
     assert "this domain is for use in illustrative examples" in result.lower()
     assert "<!doctype html>" not in result.lower() # no html tags

+
 def test_scraper_tool_run_markdown(scraper_tool):
     """Test scraping with markdown output format."""
     result = scraper_tool.invoke({"url": "http://example.com", "output_format": "markdown"})
@@ -53,6 +54,7 @@ def test_scraper_tool_run_markdown(scraper_tool):
     assert "illustrative examples" in result
     assert "](https://www.iana.org/domains/example)" in result # check for link markdown

+
 @pytest.mark.asyncio
 async def test_scraper_tool_arun_html(scraper_tool):
     """Test async scraping for basic HTML."""
@@ -76,6 +78,7 @@ def test_google_search_tool_run_json(google_search_tool):
     except json.JSONDecodeError:
         pytest.fail("Google search result is not valid JSON")

+
 def test_google_search_tool_run_csv(google_search_tool):
     """Test Google search with CSV output format."""
     result = google_search_tool.invoke({"query": "python programming language", "output_format": "csv"})

tests/unit_tests/test_scraper.py

Lines changed: 12 additions & 14 deletions
@@ -20,7 +20,8 @@
     ScraperAPIAmazonSearchToolInput,
 )

-# --- Fixtures ---
+
+pytestmark = pytest.mark.allow_hosts("127.0.0.1")

 @pytest.fixture
 def mock_env_api_key(monkeypatch):
@@ -34,7 +35,6 @@ def scraper_api_wrapper(mock_env_api_key):
 def scraper_api_structured_wrapper(mock_env_api_key):
     return ScraperAPIStructuredWrapper()

-
 # --- Test ScraperAPIWrapper ---

 def test_scraper_api_wrapper_init_from_env(mock_env_api_key):
@@ -126,8 +126,6 @@ def test_scraper_api_wrapper_scrape_connection_error(mock_get, scraper_api_wrapp
     )

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1']) # due to incompatibility between pytest-socket and asyncio
-# https://github.com/pytest-dev/pytest-asyncio/issues/160
 async def test_scraper_api_wrapper_scrape_async_success(scraper_api_wrapper):

     mock_response = AsyncMock()
@@ -168,7 +166,7 @@ async def test_scraper_api_wrapper_scrape_async_success(scraper_api_wrapper):
     assert result == "<html>Async Success</html>"

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 async def test_scraper_api_wrapper_scrape_async_with_params(scraper_api_wrapper):

     mock_response = AsyncMock()
@@ -223,7 +221,7 @@ async def test_scraper_api_wrapper_scrape_async_with_params(scraper_api_wrapper)
     assert result == "Async Success Text"

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 async def test_scraper_api_wrapper_scrape_async_http_error(scraper_api_wrapper):

     mock_response = AsyncMock(spec=aiohttp.ClientResponse)
@@ -328,7 +326,7 @@ def test_structured_wrapper_amazon_search(mock_get, scraper_api_structured_wrapp


 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 async def test_structured_wrapper_google_search_async(scraper_api_structured_wrapper):

     mock_response = AsyncMock(spec=aiohttp.ClientResponse)
@@ -377,7 +375,7 @@ async def test_structured_wrapper_google_search_async(scraper_api_structured_wra


 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 async def test_structured_wrapper_amazon_search_async(scraper_api_structured_wrapper):

     mock_response = AsyncMock(spec=aiohttp.ClientResponse)
@@ -461,7 +459,7 @@ def test_scraper_tool_run_error(mock_scrape, scraper_tool):


 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIWrapper, 'scrape_async', new_callable=AsyncMock)
 async def test_scraper_tool_arun(mock_scrape_async, scraper_tool):
     mock_scrape_async.return_value = "Async scraped content"
@@ -482,7 +480,7 @@ async def test_scraper_tool_arun(mock_scrape_async, scraper_tool):
     assert result == "Async scraped content"

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIWrapper, 'scrape_async', new_callable=AsyncMock)
 async def test_scraper_tool_arun_error(mock_scrape_async, scraper_tool):
     mock_scrape_async.side_effect = ConnectionError("Async connection failed")
@@ -541,7 +539,7 @@ def test_google_search_tool_run_error(mock_search, google_search_tool):
     mock_search.assert_called_once()

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIStructuredWrapper, 'google_search_async', new_callable=AsyncMock)
 async def test_google_search_tool_arun(mock_search_async, google_search_tool):
     mock_search_async.return_value = '{"async_google_data": "found"}'
@@ -566,7 +564,7 @@ async def test_google_search_tool_arun(mock_search_async, google_search_tool):
     assert result == '{"async_google_data": "found"}'

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIStructuredWrapper, 'google_search_async', new_callable=AsyncMock)
 async def test_google_search_tool_arun_error(mock_search_async, google_search_tool):
     mock_search_async.side_effect = Exception("Async Search failed")
@@ -623,7 +621,7 @@ def test_amazon_search_tool_run_error(mock_search, amazon_search_tool):
     mock_search.assert_called_once()

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIStructuredWrapper, 'amazon_search_async', new_callable=AsyncMock)
 async def test_amazon_search_tool_arun(mock_search_async, amazon_search_tool):
     mock_search_async.return_value = '{"async_amazon_data": "found"}'
@@ -642,7 +640,7 @@ async def test_amazon_search_tool_arun(mock_search_async, amazon_search_tool):
     assert result == '{"async_amazon_data": "found"}'

 @pytest.mark.asyncio
-@pytest.mark.allow_hosts(['127.0.0.1', '::1'])
+
 @patch.object(ScraperAPIStructuredWrapper, 'amazon_search_async', new_callable=AsyncMock)
 async def test_amazon_search_tool_arun_error(mock_search_async, amazon_search_tool):
     mock_search_async.side_effect = TimeoutError("Amazon Timeout")
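
The per-test @pytest.mark.allow_hosts decorators removed above are superseded by the module-level pytestmark added in the first hunk, and the session-scoped event loop in conftest.py addresses the pytest-socket/pytest-asyncio incompatibility that the deleted comment referenced. Below is a minimal sketch of the module-level marker pattern, assuming pytest-socket otherwise disables socket access; test_example is a hypothetical name, not part of this commit.

import pytest

# pytest applies a module-level `pytestmark` (a single mark or a list of marks)
# to every test collected from the file, which is why the per-test decorators
# could be dropped.
pytestmark = pytest.mark.allow_hosts("127.0.0.1")


def test_example():
    # Hypothetical test: with sockets disabled by pytest-socket, connections
    # to 127.0.0.1 (e.g. a locally mocked server) remain allowed in this
    # module while other hosts stay blocked.
    assert True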
