89 lines
3.0 KiB
Python
89 lines
3.0 KiB
Python
import logging
|
|
import asyncio
|
|
import json
|
|
from argparse import ArgumentParser
|
|
from pathlib import Path
|
|
|
|
from .functions import get_all_financials
|
|
from .constants import MAIN_LOGGER_NAME, DEFAULT_CONCURRENT_BATCH_SIZE
|
|
|
|
|
|
# Package-level logger; the name is shared across modules via constants.
log = logging.getLogger(MAIN_LOGGER_NAME)

# Keys into the dictionary returned by `parse_cli` (see below).
# Each constant is also the underscore form of the corresponding CLI
# option name, so the same string works for both lookup and definition.
TICKER_SYMBOL = 'ticker_symbol'
QUARTERLY = 'quarterly'
BATCH_SIZE = 'concurrent_batch_size'
TO_FILE = 'to_file'
JSON_INDENT = 'json_indent'
VERBOSE = 'verbose'
|
|
|
|
|
|
def parse_cli() -> dict:
    """Declare the command line interface and parse the current arguments.

    Returns:
        Mapping of argument name (the module-level key constants) to the
        parsed value.
    """
    cli = ArgumentParser(description="Scrape company financials")
    # Positional: one or more ticker symbols to scrape.
    cli.add_argument(
        TICKER_SYMBOL,
        type=str,
        nargs='+',
        help="Stock ticker symbol of the company to be scraped the financials of",
    )
    cli.add_argument(
        '-Q',
        f'--{QUARTERLY}',
        action='store_true',
        help="If set, the financial data for the last quarters is returned; otherwise yearly data is returned.",
    )
    # Long option names use dashes on the CLI but underscores in the
    # parsed-arguments dictionary, hence the `replace` calls.
    cli.add_argument(
        '-b',
        f'--{BATCH_SIZE.replace("_", "-")}',
        type=int,
        default=DEFAULT_CONCURRENT_BATCH_SIZE,
        help="If multiple ticker symbols are passed, the company financials can be scraped concurrently. "
             "This argument determines how many companies are scraped concurrently at any moment in time. "
             "By default, they are scraped sequentially (i.e. a batch size of 1).",
    )
    cli.add_argument(
        '-f',
        f'--{TO_FILE.replace("_", "-")}',
        type=Path,
        help="Writes results to the specified destination file. If omitted results are printed to stdout.",
    )
    cli.add_argument(
        f'--{JSON_INDENT.replace("_", "-")}',
        type=int,
        help="If set to a positive integer and the output format is JSON (default), the resulting JSON document is "
             "indented accordingly for more readability; if omitted, output is returned in one line.",
    )
    # Repeatable flag: each occurrence increments the verbosity counter.
    cli.add_argument(
        '-v',
        f'--{VERBOSE}',
        action='count',
        default=0,
        help="Verbose mode. Reduces the log level and thus prints out more status messages while running. "
             "Using this flag multiple times increases verbosity further.",
    )
    return vars(cli.parse_args())
|
|
|
|
|
def configure_logging(verbosity: int) -> None:
    """Attach a stream handler to the root logger and derive its level.

    The more `-v` flags were passed, the lower (more permissive) the log
    level becomes. A verbosity of zero leaves the root logger's level
    untouched.

    Args:
        verbosity: Count of `-v` flags from the command line.
    """
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())
    # Anything above 2 falls through to DEBUG; 0 (or less) changes nothing.
    level_for = {1: logging.WARNING, 2: logging.INFO}
    if verbosity > 0:
        root.setLevel(level_for.get(verbosity, logging.DEBUG))
|
|
|
|
|
async def main() -> None:
    """Program entry point.

    Parses the CLI arguments, configures logging accordingly, scrapes the
    financials for every requested ticker symbol, and emits the result as
    JSON — either to stdout or to the destination file given via `-f`.
    """
    args = parse_cli()
    configure_logging(args[VERBOSE])
    data = await get_all_financials(*args[TICKER_SYMBOL], quarterly=args[QUARTERLY],
                                    concurrent_batch_size=args[BATCH_SIZE])
    path: Path = args[TO_FILE]
    if path is None:
        print(json.dumps(data, indent=args[JSON_INDENT]))
        return
    # Fix: write JSON explicitly as UTF-8; relying on the platform's
    # locale-default encoding can corrupt non-ASCII company names.
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(data, f, indent=args[JSON_INDENT])
|
|
|
|
|
if __name__ == '__main__':
    # Script entry point: drive the async pipeline on a fresh event loop.
    asyncio.run(main())