forked from openkentuckiana/kubra-scraper
scrape_all.py
import pathlib
import importlib
import os
import sys

from dotenv import load_dotenv

load_dotenv()

from base_scraper import Scraper, DeltaScraper


def discover_scrapers(token):
    scrapers = []
    for filepath in pathlib.Path(".").glob("*.py"):
        mod = importlib.import_module(filepath.stem)
        # If the module provides a load_scrapers() function, call that
        if hasattr(mod, "load_scrapers"):
            scrapers.extend(mod.load_scrapers(token))
        # Otherwise instantiate every DeltaScraper subclass defined in the module
        else:
            for klass in mod.__dict__.values():
                try:
                    if (
                        issubclass(klass, DeltaScraper)
                        and klass.__module__ != "kubra_scraper"
                        and klass.__module__ != "base_scraper"
                    ):
                        scrapers.append(klass(token))
                except TypeError:
                    # Not a class at all; issubclass() raises TypeError
                    pass
    return scrapers


if __name__ == "__main__":
    github_token = os.getenv("GITHUB_TOKEN")
    for scraper in discover_scrapers(github_token):
        # Without a GitHub token, run in test mode rather than committing results
        if github_token is None:
            scraper.test_mode = True
        scraper.scrape_and_store()
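
For context, a module is picked up by discover_scrapers() either by exposing a load_scrapers() function or by defining DeltaScraper subclasses at module level. The sketch below shows the load_scrapers() hook; the module and class names are hypothetical and the scraper bodies are elided, since the real fetch/diff logic lives in base_scraper and the per-utility modules in this repository.

# example_scrapers.py -- hypothetical module illustrating the load_scrapers()
# hook that discover_scrapers() checks for before falling back to class discovery.
from base_scraper import DeltaScraper


class CityAOutages(DeltaScraper):
    ...  # per-utility fetch/diff logic would go here


class CityBOutages(DeltaScraper):
    ...


def load_scrapers(token):
    # scrape_all.py passes the GitHub token; return fully constructed
    # scraper instances instead of relying on automatic class discovery.
    return [CityAOutages(token), CityBOutages(token)]

To run the whole suite, execute scrape_all.py with GITHUB_TOKEN set (via the environment or a .env file); omitting the token switches every scraper into test_mode.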