#!/usr/bin/env python3

import os
import sys
import argparse
import logging as log
from studip import Studip
from crawler import Crawler
from database import Database
parser = argparse.ArgumentParser(description='Download files from Stud.IP.')
parser.add_argument('-o', '--output', type=str, default='./data', help='path to the output directory')
parser.add_argument('-u', '--user', type=str, help='Stud.IP username', required=True)
parser.add_argument('-p', '--passwd', type=str, help='Stud.IP password', required=True)
parser.add_argument('-s', '--url', type=str, help='Stud.IP URL', required=True)
parser.add_argument('--chunk', type=int, default=1024 * 1024, help='chunk size for downloading data')
parser.add_argument('-r', '--reset_dl_date', action='store_true', help='download everything, ignoring the last download date')
parser.add_argument('-d', '--debug_output', action='store_true', help='display debug information about the process')
parser.add_argument('-q', '--quiet', action='store_true', help='only display the most important output')
parser.add_argument('-l', '--log_file', action='store_true', help='save the log to a file named "log.txt"')
args = parser.parse_args()
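
# Map the verbosity flags to a logging level; --quiet takes precedence over --debug_output.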
if args.quiet:
    log_level = log.WARNING
elif args.debug_output:
    log_level = log.DEBUG
else:
    log_level = log.INFO
if args.log_file:
    log.basicConfig(level=log_level, filename='log.txt')
else:
    log.basicConfig(level=log_level)
BASE_DIR = os.path.abspath(args.output)
USERNAME = args.user
PASSWORD = args.passwd
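
# Wire up the components: persistent download state, the Stud.IP client, and the crawler.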
db = Database(args.reset_dl_date)
studip = Studip(args.chunk, args.url, (USERNAME, PASSWORD), db)
crawler = Crawler(studip)
# Start crawling
try:
    crawler.download_curr_courses(BASE_DIR)
except KeyboardInterrupt:
    sys.exit(0)
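
# Example invocation (hypothetical values; the script filename, username, password,
# and server URL are assumptions, substitute your own):
#   python3 main.py -u alice -p secret -s https://studip.example.edu -o ./data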