Commit 534bbf2

Bring unit test coverage to 100% (scrapd#115)
In an attempt to reach the last lines of code that were not covered by the unit tests, this patch does the following:

* adds the missing tests.
* updates the Makefile to re-run failed integration tests only.
* updates the CircleCI workflow to remove the integration tests from the coverage.
* adjusts the Mergify configuration.

Fixes scrapd#100
1 parent ed13860 commit 534bbf2

File tree: 7 files changed (+97 −31)

.circleci/config.yml

+21 -4

@@ -58,7 +58,7 @@ jobs:
           name: run doc tests
           command: make docs
 
-  test:
+  test-units:
     <<: *defaults
     steps:
       - checkout
@@ -67,11 +67,21 @@ jobs:
       - run:
           name: run unit tests
           command: |
-            make test
+            make test-units
            venv/bin/pip install coveralls
            venv/bin/coveralls
       - store_test_results:
           path: test-results
+  test-integrations:
+    <<: *defaults
+    steps:
+      - checkout
+      - attach_workspace:
+          at: *working_directory
+      - run:
+          name: run unit tests
+          command: |
+            make test-integrations
 
   publish:
     <<: *defaults
@@ -139,7 +149,13 @@ workflows:
           filters:
             tags:
               only: /.*/
-      - test:
+      - test-units:
+          requires:
+            - prepare
+          filters:
+            tags:
+              only: /.*/
+      - test-integrations:
           requires:
             - prepare
           filters:
@@ -158,7 +174,8 @@ workflows:
           requires:
             - format
             - lint
-            - test
+            - test-units
+            - test-integrations
           filters:
             branches:
               ignore: /.*/

.mergify.yml

+2 -1

@@ -10,5 +10,6 @@ pull_request_rules:
       - 'status-success=ci/circleci: format'
       - 'status-success=ci/circleci: lint'
       - 'status-success=ci/circleci: prepare'
-      - 'status-success=ci/circleci: test'
+      - 'status-success=ci/circleci: test-units'
+      - 'status-success=ci/circleci: test-integrations'
     name: master

Makefile

+1 -1

@@ -95,7 +95,7 @@ test-units: venv ## Run the unit tests
 
 .PHONY: test-integrations
 test-integrations: venv ## Run the unit tests
-	$(RUN_CMD) tox -- -m "integrations"
+	$(RUN_CMD) tox -- -m "integrations" --reruns 3 --reruns-delay 5
 
 setup: venv ## Setup the full environment (default)
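
The new flags come from pytest-rerunfailures (added to requirements-dev.txt below): only the tests that actually failed are re-run, up to 3 more times, with a 5-second pause between attempts. A minimal sketch of the behavior with a deliberately flaky test; the test itself is hypothetical, not part of the scrapd suite:

# Hypothetical flaky test illustrating `--reruns 3 --reruns-delay 5`.
# pytest-rerunfailures re-runs only the failing tests, up to 3 more
# times, sleeping 5 seconds between attempts, before reporting failure.
import random

import pytest


@pytest.mark.integrations  # selected by `tox -- -m "integrations"`
def test_flaky_integration():
    assert random.random() > 0.1  # fails roughly 10% of runs
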
requirements-dev.txt

+2

@@ -1,6 +1,7 @@
 Faker==1.0.5
 Sphinx==2.0.1
 aiohttp-theme==0.1.5
+aioresponses==0.6.0
 asynctest==0.12.4
 bpython
 flake8==3.7.7
@@ -10,6 +11,7 @@ pytest-asyncio==0.10.0
 pytest-bdd==3.1.0
 pytest-cov==2.6.1
 pytest-mock==1.10.4
+pytest-rerunfailures==7.0
 pytest-socket==0.3.3
 pytest-xdist==1.28.0
 pytest==4.4.1
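
aioresponses, the other new dev dependency, intercepts aiohttp requests so unit tests can serve canned responses without touching the network; the new test_fetch_text_01 below relies on it. A minimal standalone sketch, using a made-up URL:

# Minimal aioresponses sketch (the URL is made up). A dict passed as
# `payload` is served back as its JSON text; no real network I/O occurs.
import asyncio

import aiohttp
from aioresponses import aioresponses


async def main():
    with aioresponses() as mocked:
        mocked.get('http://example.com/api', payload={'foo': 'bar'})
        async with aiohttp.ClientSession() as session:
            async with session.get('http://example.com/api') as resp:
                assert await resp.text() == '{"foo": "bar"}'


asyncio.run(main())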

scrapd/core/apd.py

-1

@@ -592,7 +592,6 @@ async def fetch_and_parse(session, url):
     :rtype: dict
     """
     # Retrieve the page.
-    # page = await fetch_text(session, url)
     page = await fetch_detail_page(session, url)
     if not page:
         raise ValueError(f'The URL {url} returned a 0-length content.')
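
The fetch coroutines in apd.py are wrapped with tenacity retries, which is where the retry.statistics and retry.stop attributes exercised by the tests below come from. A rough sketch of the pattern; the stop and wait arguments here are assumptions, not scrapd's actual policy:

# Sketch of a tenacity-decorated fetch coroutine. The decorated function
# exposes a `.retry` controller: `.retry.statistics['attempt_number']`
# counts attempts, and `.retry.stop` can be overridden in a test.
from tenacity import retry, stop_after_attempt, wait_fixed


@retry(stop=stop_after_attempt(3), wait=wait_fixed(0.5))  # assumed policy
async def fetch_text(session, url, params=None):
    """Fetch a page and return its text, retrying on any exception."""
    async with session.get(url, params=params) as response:
        return await response.text()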

tests/core/test_apd.py

+70 -20

@@ -2,7 +2,9 @@
 from unittest import mock
 
 import aiohttp
+from aioresponses import aioresponses
 import asynctest
+from faker import Faker
 from loguru import logger
 import pytest
 from tenacity import RetryError
@@ -18,6 +20,9 @@
 # Disable logging for the tests.
 logger.remove()
 
+# Set faker object.
+fake = Faker()
+
 
 def load_test_page(page):
     """Load a test page."""
@@ -394,10 +399,17 @@ def test_parse_page_content_01(mocker):
     page_fd = TEST_DATA_DIR / 'traffic-fatality-2-3'
     page = page_fd.read_text()
     mocker.patch('scrapd.core.apd.parse_deceased_field', side_effect=ValueError)
-    apd.parse_page_content(page)
+    result = apd.parse_page_content(page)
+    assert len(result) == 6
+
 
+def test_parse_page_content_02(mocker):
+    """Ensure a log entry is created if there is no deceased field."""
+    result = apd.parse_page_content('Case: 01-2345678')
+    assert result
 
-def test_parse_page_content_02():
+
+def test_parse_page_content_03():
     """Ensure a missing case number raises an exception."""
     with pytest.raises(ValueError):
         apd.parse_page_content('The is no case number here.')
@@ -425,15 +437,8 @@ def test_parse_page_00(filename, expected):
 
 
 @asynctest.patch("scrapd.core.apd.fetch_news_page",
-                 side_effect=[load_test_page(page) for page in [
-                     '296',
-                     '296?page=1',
-                     '296?page=27',
-                 ]])
-@asynctest.patch(
-    "scrapd.core.apd.fetch_detail_page",
-    return_value=load_test_page('traffic-fatality-2-3'),
-)
+                 side_effect=[load_test_page(page) for page in ['296', '296?page=1', '296?page=27']])
+@asynctest.patch("scrapd.core.apd.fetch_detail_page", return_value=load_test_page('traffic-fatality-2-3'))
 @pytest.mark.asyncio
 async def test_date_filtering_00(fake_details, fake_news):
     """Ensure the date filtering do not fetch unnecessary data."""
@@ -444,22 +449,29 @@ async def test_date_filtering_00(fake_details, fake_news):
 
 
 @asynctest.patch("scrapd.core.apd.fetch_news_page",
-                 side_effect=[load_test_page(page) for page in [
-                     '296',
-                     '296?page=1',
-                     '296?page=27',
-                 ]])
-@asynctest.patch(
-    "scrapd.core.apd.fetch_detail_page",
-    return_value=load_test_page('traffic-fatality-2-3'),
-)
+                 side_effect=[load_test_page(page) for page in ['296', '296?page=1', '296?page=27']])
+@asynctest.patch("scrapd.core.apd.fetch_detail_page", return_value=load_test_page('traffic-fatality-2-3'))
 @pytest.mark.asyncio
 async def test_date_filtering_01(fake_details, fake_news):
     """Ensure the date filtering do not fetch unnecessary data."""
     data, _ = await apd.async_retrieve(pages=-5, from_="2019-01-02", to="2019-01-03")
     assert isinstance(data, list)
 
 
+@asynctest.patch("scrapd.core.apd.fetch_news_page",
+                 side_effect=[load_test_page(page) for page in ['296', '296?page=1', '296?page=27']])
+@asynctest.patch(
+    "scrapd.core.apd.fetch_detail_page",
+    side_effect=[load_test_page(page) for page in ['traffic-fatality-2-3'] + ['traffic-fatality-71-2'] * 14])
+@pytest.mark.asyncio
+async def test_date_filtering_02(fake_details, fake_news):
+    """Ensure the date filtering do not fetch unnecessary data."""
+    data, page_count = await apd.async_retrieve(from_="2019-01-16", to="2019-01-16")
+    assert isinstance(data, list)
+    assert len(data) == 1
+    assert page_count == 2
+
+
 @pytest.mark.asyncio
 async def test_fetch_text_00():
     """Ensure `fetch_text` retries several times."""
@@ -474,6 +486,17 @@ async def test_fetch_text_00():
     assert apd.fetch_text.retry.statistics['attempt_number'] > 1
 
 
+@pytest.mark.asyncio
+async def test_fetch_text_01():
+    """Ensure fetch_text retrieves some text."""
+    url = fake.uri()
+    with aioresponses() as m:
+        m.get(url, payload=dict(foo='bar'))
+        async with aiohttp.ClientSession() as session:
+            text = await apd.fetch_text(session, url)
+            assert '{"foo": "bar"}' == text
+
+
 @asynctest.patch("scrapd.core.apd.fetch_news_page", side_effect=ValueError)
 @pytest.mark.asyncio
 async def test_async_retrieve_00(fake_news):
@@ -524,3 +547,30 @@ async def test_fetch_and_parse_01(page, mocker):
     with pytest.raises(RetryError):
         apd.fetch_and_parse.retry.stop = stop_after_attempt(1)
         await apd.fetch_and_parse(None, 'url')
+
+
+@asynctest.patch("scrapd.core.apd.fetch_text", return_value='')
+@pytest.mark.asyncio
+async def test_fetch_news_page_00(fetch_text):
+    """Ensure the fetch function is called with the right parameters."""
+    page = 2
+    params = {'page': page - 1}
+    async with aiohttp.ClientSession() as session:
+        try:
+            await apd.fetch_news_page(session, page)
+        except Exception:
+            pass
+    fetch_text.assert_called_once_with(session, apd.APD_URL, params)
+
+
+@asynctest.patch("scrapd.core.apd.fetch_text", return_value='')
+@pytest.mark.asyncio
+async def test_fetch_detail_page_00(fetch_text):
+    """Ensure the fetch function is called with the right parameters."""
+    url = fake.uri()
+    async with aiohttp.ClientSession() as session:
+        try:
+            await apd.fetch_detail_page(session, url)
+        except Exception:
+            pass
+    fetch_text.assert_called_once_with(session, url)
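
One subtlety in the patched tests above: stacked @asynctest.patch decorators apply bottom-up, so the decorator closest to the function supplies the first mock argument. That is why the signatures read (fake_details, fake_news) even though fetch_news_page appears first in source order. A stripped-down sketch:

# Stacked patch decorators inject mocks bottom-up: the innermost
# (lowest) patch supplies the first positional mock argument.
import asynctest
import pytest


@asynctest.patch("scrapd.core.apd.fetch_news_page")    # outermost -> 2nd arg
@asynctest.patch("scrapd.core.apd.fetch_detail_page")  # innermost -> 1st arg
@pytest.mark.asyncio
async def test_mock_ordering(fake_details, fake_news):
    assert fake_details is not fake_news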

tests/step_defs/test_retrieve.py

+1 -4

@@ -1,14 +1,11 @@
 """Define the scenarios for the feature tests."""
-import asyncio
-
 import pytest
 from pytest_bdd import given
 from pytest_bdd import scenario
 from pytest_bdd import then
 
 from scrapd.core import apd
 from tests.test_common import TEST_ROOT_DIR
-from tests.test_common import TEST_DATA_DIR
 
 
 @scenario(
@@ -45,5 +42,5 @@ def ensure_results(mocker, event_loop, output_format, time_range, entry_count):
     assert result is not None
     assert len(result) == entry_count
     assert isinstance(result, list)
-    if len(result) > 0:
+    if result:
         assert isinstance(result[0], dict)
