ditched csv; minor help text change

Maximilian Fajnberg 2021-12-26 21:08:36 +01:00
parent 2b3fb9f7ba
commit 3276dbfae3


@@ -1,19 +1,15 @@
 import logging
 import asyncio
-import csv
 import json
 from argparse import ArgumentParser
 from pathlib import Path
-from typing import Dict
-from .functions import get_all_financials, ResultDict
-from .constants import END_DATE, MAIN_LOGGER_NAME, DEFAULT_CONCURRENT_BATCH_SIZE
+from .functions import get_all_financials
+from .constants import MAIN_LOGGER_NAME, DEFAULT_CONCURRENT_BATCH_SIZE
 log = logging.getLogger(MAIN_LOGGER_NAME)
-JSON_EXT, CSV_EXT = '.json', '.csv'
 TICKER_SYMBOL = 'ticker_symbol'
 QUARTERLY = 'quarterly'
 BATCH_SIZE = 'concurrent_batch_size'
@@ -40,14 +36,13 @@ def parse_cli() -> dict:
         type=int,
         default=DEFAULT_CONCURRENT_BATCH_SIZE,
         help="If multiple ticker symbols are passed, the company financials can be scraped concurrently. "
-             "This argument determines how many datasets are scraped concurrently at any moment in time. "
-             "By default, they are scraped sequentially."
+             "This argument determines how many companies are scraped concurrently at any moment in time. "
+             "By default, they are scraped sequentially (i.e. a batch size of 1)."
     )
     parser.add_argument(
         '-f', f'--{TO_FILE.replace("_", "-")}',
         type=Path,
-        help="Writes results to the specified destination file. If omitted results are printed to stdout. "
-             "Depending on the file name suffix, the output format can be either as CSV or in JSON."
+        help="Writes results to the specified destination file. If omitted results are printed to stdout."
     )
     parser.add_argument(
         f'--{JSON_INDENT.replace("_", "-")}',
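For illustration only: the reworded help text above describes a "concurrent batch size". A minimal sketch of what batched scraping of that kind can look like with asyncio; the helper names below are invented for the example and are not part of the package, which exposes its own get_all_financials coroutine:

import asyncio

async def fetch_one(symbol: str) -> dict:
    # Hypothetical stand-in for scraping one company's financials.
    await asyncio.sleep(0.1)
    return {"symbol": symbol}

async def fetch_in_batches(symbols, batch_size=1):
    # Work through the symbols in chunks of `batch_size`; a batch size of 1
    # means strictly sequential scraping, matching the documented default.
    results = []
    for i in range(0, len(symbols), batch_size):
        batch = symbols[i:i + batch_size]
        results.extend(await asyncio.gather(*(fetch_one(s) for s in batch)))
    return results

print(asyncio.run(fetch_in_batches(["AAPL", "MSFT", "GOOG"], batch_size=2)))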
@@ -76,15 +71,6 @@ def configure_logging(verbosity: int) -> None:
     root_logger.setLevel(logging.WARNING)
-def write_to_csv(data: Dict[str, ResultDict], file_obj) -> None:
-    writer = csv.writer(file_obj)
-    for statement_key, statement_dict in data.items():
-        end_dates = statement_dict.pop(END_DATE)
-        writer.writerow([statement_key] + list(end_dates))
-        for key, values in statement_dict.items():
-            writer.writerow([key] + list(str(val) for val in values))
 async def main() -> None:
     args = parse_cli()
     configure_logging(args[VERBOSE])
@@ -95,11 +81,6 @@ async def main() -> None:
         print(json.dumps(data, indent=args[JSON_INDENT]))
         return
     with open(path, 'w') as f:
-        if path.suffix.lower() == CSV_EXT:
-            write_to_csv(data, f)
-            return
-        if not path.suffix.lower() == JSON_EXT:
-            log.warning(f"Extension '{path.suffix}' unknown; using JSON format")
         json.dump(data, f, indent=args[JSON_INDENT])
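With the CSV branch gone, the file-output path reduces to a single json.dump call: whatever suffix the destination has, the results are written as JSON. A minimal sketch of that behaviour outside the package; the sample data shape and file name are illustrative assumptions, not taken from the diff:

import json
from pathlib import Path

# Illustrative stand-in for the data returned by get_all_financials();
# the real structure is defined elsewhere in the package.
data = {"income_statement": {"end_date": ["2020-12-31", "2021-12-31"], "revenue": [100, 120]}}

# Hypothetical destination, as it might be passed via -f/--to-file.
path = Path("financials.csv")

with open(path, 'w') as f:
    # After this commit the suffix is ignored: no CSV writer, no extension
    # warning; results are always serialized as JSON.
    json.dump(data, f, indent=2)

Callers that still want CSV now have to convert the JSON output themselves.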