renamed modules and package (removed dashes)

Daniil Fajnberg 2021-11-12 16:30:51 +01:00
parent 1e5bb78053
commit 29c82a4ff7
4 changed files with 45 additions and 41 deletions

View File

@@ -1,14 +1,14 @@
 [metadata]
-name = stock-symbol-scraper
+name = stocksymbolscraper
 version = 0.0.1
 author = Daniil & Maximilian F.
 author_email = mail@placeholder123.to
 description = Scraper for stock symbols
 long_description = file: README.md
 long_description_content_type = text/markdown
-url = https://git.fajnberg.de/daniil/stock-symbol-scraper
+url = https://git.fajnberg.de/daniil/stocksymbolscraper
 project_urls =
-    Bug Tracker = https://github.com/daniil-berg/stock-symbol-scraper/issues
+    Bug Tracker = https://github.com/daniil-berg/stocksymbolscraper/issues
 classifiers =
     Programming Language :: Python :: 3
     License :: OSI Approved :: MIT License
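
The dropped dashes are not just cosmetic: "stock-symbol-scraper" is not a valid Python identifier, so the project could never be imported under its own name, whereas "stocksymbolscraper" can be. A minimal sketch of what the rename enables, assuming the package directory is renamed to match (the directory itself is not visible in this diff):

# Hypothetical check; assumes an installed package named "stocksymbolscraper"
# (the package directory rename is not shown in the hunks above).
import stocksymbolscraper

print(stocksymbolscraper.__name__)  # 'stocksymbolscraper', a legal identifier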

View File

@@ -0,0 +1,42 @@
+import logging
+import asyncio
+import sys
+import csv
+from argparse import ArgumentParser
+from pathlib import Path
+
+from .scrape import get_all_data, log
+
+
+def main() -> None:
+    parser = ArgumentParser(description="Scrape all stock symbols")
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help="If set, prints all sorts of stuff."
+    )
+    parser.add_argument(
+        '-S', '--sequential',
+        action='store_true',
+        help="If set, all requests are performed sequentially; otherwise async capabilities are used for concurrency."
+    )
+    parser.add_argument(
+        '-f', '--to-file',
+        type=Path,
+        help="Writes results to the specified destination file. If omitted results are printed to stdout."
+    )
+    args = parser.parse_args()
+    if args.verbose:
+        log.setLevel(logging.DEBUG)
+    data = asyncio.run(get_all_data(args.sequential))
+    if args.to_file is None:
+        csv.writer(sys.stdout).writerows(data)
+    else:
+        with open(args.to_file, 'w') as f:
+            csv.writer(f).writerows(data)
+
+
+if __name__ == '__main__':
+    main()
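
The new module wires the former CLI into the package via a relative import, so it is presumably meant to be run as part of the package; its file name is not shown in this view. A hedged sketch of how it could be invoked programmatically, assuming it is the package's __main__ module and the package is installed:

# Hypothetical invocation; assumes the new module is stocksymbolscraper/__main__.py.
# Equivalent to running:  python -m stocksymbolscraper --verbose
import sys
import runpy

sys.argv = ["stocksymbolscraper", "--verbose"]  # flags read by argparse
runpy.run_module("stocksymbolscraper", run_name="__main__", alter_sys=True)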

View File

@@ -1,10 +1,6 @@
 import logging
 import re
-import csv
-import sys
 import asyncio
-from argparse import ArgumentParser
-from pathlib import Path
 from datetime import datetime
 from string import ascii_uppercase
 from math import inf
@@ -182,37 +178,3 @@ async def get_all_data(sequential: bool = False) -> list[row_type]:
     for result in results:
         data.extend(result)
     return data
-
-
-def main() -> None:
-    parser = ArgumentParser(description="Scrape all stock symbols")
-    parser.add_argument(
-        '-v', '--verbose',
-        action='store_true',
-        help="If set, prints all sorts of stuff."
-    )
-    parser.add_argument(
-        '-S', '--sequential',
-        action='store_true',
-        help="If set, all requests are performed sequentially; otherwise async capabilities are used for concurrency."
-    )
-    parser.add_argument(
-        '-f', '--to-file',
-        type=Path,
-        help="Writes results to the specified destination file. If omitted results are printed to stdout."
-    )
-    args = parser.parse_args()
-    if args.verbose:
-        log.setLevel(logging.DEBUG)
-    data = asyncio.run(get_all_data(args.sequential))
-    if args.to_file is None:
-        csv.writer(sys.stdout).writerows(data)
-    else:
-        with open(args.to_file, 'w') as f:
-            csv.writer(f).writerows(data)
-
-
-if __name__ == '__main__':
-    main()
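
With the CLI moved out, the scraping module no longer depends on argparse, csv, sys, or pathlib, and get_all_data can be called directly as library code. A minimal usage sketch, assuming the module is importable as stocksymbolscraper.scrape after the rename (its full path is not shown in this diff):

# Library-style usage sketch; the import path "stocksymbolscraper.scrape" is an
# assumption based on the relative import in the new CLI module.
import asyncio
import csv
import sys

from stocksymbolscraper.scrape import get_all_data

rows = asyncio.run(get_all_data())      # concurrent requests by default (sequential=False)
csv.writer(sys.stdout).writerows(rows)  # same CSV output the command-line path produces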