This program downloads all files of a Stud.IP user's current semester.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

67 lines
2.4 KiB

  1. #!/bin/env python3
  2. import os
  3. import sys
  4. import argparse
  5. import logging as log
  6. from studip import Studip
  7. from crawler import Crawler
  8. from mysql import Database
  9. parser = argparse.ArgumentParser(description='Download Files from StudIP.')
  10. parser.add_argument('-o', '--output', type=str,
  11. default='./data', help='path to output directory')
  12. parser.add_argument('-u', '--user', type=str,
  13. help='studip username', required=True)
  14. parser.add_argument('-p', '--passwd', type=str,
  15. help='studip password', required=True)
  16. parser.add_argument('-s', '--url', type=str, help='studip url', required=True)
  17. parser.add_argument('--chunk', type=int, default=1024 *
  18. 1024, help='chunksize for downloading data')
  19. parser.add_argument('-r', '--reset_dl_date', action='store_true',
  20. help='downloads everything and ignores last download date')
  21. parser.add_argument('--host', type=str, default='localhost', help='mysql host')
  22. parser.add_argument('--port', type=int, default=3306, help='mysql port')
  23. parser.add_argument('--db_name', type=str, default='studip',
  24. help='mysql database name')
  25. parser.add_argument('--db_user', type=str, default='root',
  26. help='mysql database user')
  27. parser.add_argument('--db_passwd', type=str,
  28. default='secret-pw', help='mysql database password')
  29. parser.add_argument('-d', '--debug_output', action='store_true',
  30. help='display debug information about the process')
  31. parser.add_argument('-q', '--quiet', action='store_true',
  32. help='only display most important output')
  33. parser.add_argument('-l', '--log_file', action='store_true',
  34. help='saves log to a log file named "log.txt"')
  35. args = parser.parse_args()
  36. if args.quiet:
  37. log_level = log.WARNING
  38. elif args.debug_output:
  39. log_level = log.DEBUG
  40. else:
  41. log_level = log.INFO
  42. if args.log_file:
  43. log.basicConfig(level=log_level, filename='log.txt')
  44. else:
  45. log.basicConfig(level=log_level)
  46. BASE_DIR = os.path.abspath(args.output)
  47. USERNAME = args.user
  48. PASSWORD = args.passwd
  49. db = Database(args.host, args.port, args.db_name,
  50. args.db_user, args.db_passwd, args.reset_dl_date)
  51. studip = Studip(args.chunk, args.url, (USERNAME, PASSWORD), db)
  52. crawler = Crawler(studip)
  53. # Start crawling
  54. try:
  55. crawler.download_curr_courses(BASE_DIR)
  56. except KeyboardInterrupt:
  57. sys.exit(0)