This program downloads all files of a Stud.IP user's current semester.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

58 lines
1.9 KiB

  1. #!/bin/env python3
  2. import os
  3. import sys
  4. import argparse
  5. import logging as log
  6. from studip import Studip
  7. from crawler import Crawler
  8. from database import Database
  9. parser = argparse.ArgumentParser(description='Download Files from StudIP.')
  10. parser.add_argument('-o', '--output', type=str,
  11. default='./data', help='path to output directory')
  12. parser.add_argument('-u', '--user', type=str,
  13. help='studip username', required=True)
  14. parser.add_argument('-p', '--passwd', type=str,
  15. help='studip password', required=True)
  16. parser.add_argument('-s', '--url', type=str, help='studip url', required=True)
  17. parser.add_argument('--chunk', type=int, default=1024 *
  18. 1024, help='chunksize for downloading data')
  19. parser.add_argument('-r', '--reset_dl_date', action='store_true',
  20. help='downloads everything and ignores last download date')
  21. parser.add_argument('-d', '--debug_output', action='store_true',
  22. help='display debug information about the process')
  23. parser.add_argument('-q', '--quiet', action='store_true',
  24. help='only display most important output')
  25. parser.add_argument('-l', '--log_file', action='store_true',
  26. help='saves log to a log file named "log.txt"')
  27. args = parser.parse_args()
  28. if args.quiet:
  29. log_level = log.WARNING
  30. elif args.debug_output:
  31. log_level = log.DEBUG
  32. else:
  33. log_level = log.INFO
  34. if args.log_file:
  35. log.basicConfig(level=log_level, filename='log.txt')
  36. else:
  37. log.basicConfig(level=log_level)
  38. BASE_DIR = os.path.abspath(args.output)
  39. USERNAME = args.user
  40. PASSWORD = args.passwd
  41. db = Database(args.reset_dl_date)
  42. studip = Studip(args.chunk, args.url, (USERNAME, PASSWORD), db)
  43. crawler = Crawler(studip)
  44. # Start crawling
  45. try:
  46. crawler.download_curr_courses(BASE_DIR)
  47. except KeyboardInterrupt:
  48. sys.exit(0)