CLI now supports multiple ticker symbols and concurrency
This commit is contained in:
parent
55a56c2f9c
commit
c18ab65a7d
@ -7,7 +7,7 @@ from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
from .functions import get_all_financials, ResultDict
|
||||
from .constants import END_DATE, MAIN_LOGGER_NAME
|
||||
from .constants import END_DATE, MAIN_LOGGER_NAME, DEFAULT_CONCURRENT_BATCH_SIZE
|
||||
|
||||
|
||||
log = logging.getLogger(MAIN_LOGGER_NAME)
|
||||
@ -16,6 +16,7 @@ JSON_EXT, CSV_EXT = '.json', '.csv'
|
||||
|
||||
TICKER_SYMBOL = 'ticker_symbol'
|
||||
QUARTERLY = 'quarterly'
|
||||
BATCH_SIZE = 'concurrent_batch_size'
|
||||
TO_FILE = 'to_file'
|
||||
JSON_INDENT = 'json_indent'
|
||||
VERBOSE = 'verbose'
|
||||
@ -26,6 +27,7 @@ def parse_cli() -> dict:
|
||||
parser.add_argument(
|
||||
TICKER_SYMBOL,
|
||||
type=str,
|
||||
nargs='+',
|
||||
help="Stock ticker symbol of the company to be scraped the financials of"
|
||||
)
|
||||
parser.add_argument(
|
||||
@ -33,6 +35,14 @@ def parse_cli() -> dict:
|
||||
action='store_true',
|
||||
help="If set, the financial data for the last quarters is returned; otherwise yearly data is returned."
|
||||
)
|
||||
parser.add_argument(
|
||||
'-b', f'--{BATCH_SIZE.replace("_", "-")}',
|
||||
type=int,
|
||||
default=DEFAULT_CONCURRENT_BATCH_SIZE,
|
||||
help="If multiple ticker symbols are passed, the company financials can be scraped concurrently. "
|
||||
"This argument determines how many datasets are scraped concurrently at any moment in time. "
|
||||
"By default, they are scraped sequentially."
|
||||
)
|
||||
parser.add_argument(
|
||||
'-f', f'--{TO_FILE.replace("_", "-")}',
|
||||
type=Path,
|
||||
@ -78,7 +88,8 @@ def write_to_csv(data: Dict[str, ResultDict], file_obj) -> None:
|
||||
async def main() -> None:
|
||||
args = parse_cli()
|
||||
configure_logging(args[VERBOSE])
|
||||
data = await get_all_financials(args[TICKER_SYMBOL], quarterly=args[QUARTERLY])
|
||||
data = await get_all_financials(*args[TICKER_SYMBOL], quarterly=args[QUARTERLY],
|
||||
concurrent_batch_size=args[BATCH_SIZE])
|
||||
path: Path = args[TO_FILE]
|
||||
if path is None:
|
||||
print(json.dumps(data, indent=args[JSON_INDENT]))
|
||||
|
Loading…
Reference in New Issue
Block a user