From d9344b1b4b271edfc0d5f75db0187146538d6872 Mon Sep 17 00:00:00 2001 From: Daniil Fajnberg Date: Sat, 13 Nov 2021 21:56:42 +0100 Subject: [PATCH] more tests; coverage script and settings --- .coveragerc | 10 ++++++++++ .gitignore | 4 +++- coverage.sh | 3 +++ setup.cfg | 2 +- tests/test_scrape.py | 43 ++++++++++++++++++++++++++++++++++++------- 5 files changed, 53 insertions(+), 9 deletions(-) create mode 100644 .coveragerc create mode 100755 coverage.sh diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..8b86dbe --- /dev/null +++ b/.coveragerc @@ -0,0 +1,10 @@ +[run] +source = . +branch = true +omit = + .venv/* + +[report] +fail_under = 100 +show_missing = True +skip_covered = True diff --git a/.gitignore b/.gitignore index 50cdbf2..b853735 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,6 @@ # Distribution / packaging: *.egg-info/ # Python cache: -__pycache__/ \ No newline at end of file +__pycache__/ +# Tests: +.coverage \ No newline at end of file diff --git a/coverage.sh b/coverage.sh new file mode 100755 index 0000000..786075e --- /dev/null +++ b/coverage.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh + +coverage erase && coverage run -m unittest discover && coverage report diff --git a/setup.cfg b/setup.cfg index d3e8f25..baa248b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,7 +19,7 @@ keywords = webscraping, html, markup, dom, scraper, attributes, tags, stocks, fi package_dir = = src packages = find: -python_requires = >=3.7 +python_requires = >=3.8 install_requires = beautifulsoup4 aiohttp diff --git a/tests/test_scrape.py b/tests/test_scrape.py index 9ad1e38..77aa58d 100644 --- a/tests/test_scrape.py +++ b/tests/test_scrape.py @@ -1,12 +1,13 @@ -from unittest import TestCase -from unittest.mock import patch, MagicMock, call +import logging +from unittest import IsolatedAsyncioTestCase +from unittest.mock import patch, MagicMock, AsyncMock, call from bs4 import BeautifulSoup from stocksymbolscraper import scrape -class 
ScrapeTestCase(TestCase): +class ScrapeTestCase(IsolatedAsyncioTestCase): @patch.object(scrape, 'get_single_tr_data') def test_extract_row_data(self, mock_get_single_tr_data: MagicMock): @@ -53,8 +54,36 @@ class ScrapeTestCase(TestCase): output = scrape.get_str_from_td(test_td) self.assertEqual(expected_output, output) + @patch.object(scrape, 'ClientSession') + async def test_soup_from_url(self, mock_session_cls): + test_html = 'foo' + mock_response = MagicMock() + mock_response.text = AsyncMock(return_value=test_html) + mock_get_return = MagicMock() + mock_get_return.__aenter__ = AsyncMock(return_value=mock_response) + mock_session_obj = MagicMock() + mock_session_obj.get = MagicMock(return_value=mock_get_return) + mock_session_cls.return_value = mock_session_obj + output = await scrape.soup_from_url('foo') + expected_output = BeautifulSoup(test_html, scrape.HTML_PARSER) + self.assertEqual(expected_output, output) + + output = await scrape.soup_from_url('foo', mock_session_obj) + self.assertEqual(expected_output, output) + def test_trs_from_page(self): - # Tested function takes URL as argument (GET request is issued) - # HTML to be parsed could be substituted - # - pass + tr1_text, tr2_text = 'foo', 'bar' + test_html = f'
<table><tr>{tr1_text}</tr><tr>{tr2_text}</tr></table>
' + test_soup = BeautifulSoup(test_html, scrape.HTML_PARSER) + output = scrape.trs_from_page(test_soup) + expected_output = test_soup.find_all('tr') + self.assertSequenceEqual(expected_output, output) + + logging.disable(logging.CRITICAL) + test_html = f'
<div>garbage</div>
' + test_soup = BeautifulSoup(test_html, scrape.HTML_PARSER) + with patch.object(scrape, 'open') as mock_open: + self.assertRaises(scrape.UnexpectedMarkupError, scrape.trs_from_page, test_soup) + mock_open.assert_called_once() + mock_open.return_value.__enter__.return_value.write.assert_called_once_with(test_soup.prettify()) + logging.disable(logging.NOTSET)