added one docstring
parent 2715f78e55
commit 973f3524f4
@@ -80,6 +80,23 @@ async def trs_from_page(url: str, session: ClientSession = None, limit: int = No
 
 async def get_data_from_category(category: str, session: ClientSession = None,
                                  first_page: int = 1, last_page: int = inf) -> list[row_type]:
+    """
+    Returns data rows from a category (i.e. companies starting with that specific letter).
+
+    Args:
+        category:
+            Must be a valid component of the URL path indicating the first character-category (e.g. 'A' or '0-9').
+        session (optional):
+            If passed a ClientSession instance, all HTTP requests will be made using that session;
+            otherwise a new one is created.
+        first_page (optional):
+            The number of the page to begin with when scraping the results; defaults to 1.
+        last_page (optional):
+            The number of the last page to scrape; by default all pages starting with `first_page` are scraped.
+
+    Returns:
+        A list of 5-tuples (of string elements) extracted from the specified pages
+    """
     log.info(f"Getting companies starting with '{category}'")
     if session is None:
         session = ClientSession()
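For context, a minimal usage sketch of the documented coroutine. It assumes ClientSession comes from aiohttp (as the type hint suggests) and that the function is importable from a module named scraper; that module name is hypothetical and not taken from this diff.

# Usage sketch only: `scraper` is a hypothetical module name, and the aiohttp
# import is an assumption based on the ClientSession type hint in the diff.
import asyncio

from aiohttp import ClientSession

from scraper import get_data_from_category


async def main() -> None:
    # Reuse a single session for all page requests instead of letting the
    # function create its own, as the docstring's `session` note suggests.
    async with ClientSession() as session:
        rows = await get_data_from_category("A", session=session,
                                             first_page=1, last_page=3)
    for row in rows:
        print(row)  # each row is a 5-tuple of strings


if __name__ == "__main__":
    asyncio.run(main())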