# config.py — logging setup, configuration persistence, and block-list bookkeeping.
  1. #!/usr/bin/env python3
  2. from logging import basicConfig, DEBUG, INFO, WARN, ERROR, CRITICAL, getLogger
  3. from logging.handlers import TimedRotatingFileHandler
  4. from os.path import exists, join, dirname, abspath
  5. from json import loads, dumps
  6. from json.decoder import JSONDecodeError
  7. import pendulum
  8. # Get the full path for this file
  9. currentdir = dirname(abspath(__file__))
  10. # Target log file
  11. TARGET = join("data", join("data", "hack.log"))
  12. # Setup logging
  13. # DEBUG, INFO, WARN, ERROR, CRITICAL
  14. basicConfig(
  15. level=DEBUG,
  16. format="%(asctime)s - %(filename)s (%(lineno)d) - %(name)s - %(levelname)s - %(message)s",
  17. handlers=[
  18. TimedRotatingFileHandler(
  19. filename=join(currentdir, "failUser.log"),
  20. when="midnight",
  21. backupCount=1,
  22. ),
  23. #logging.StreamHandler(stream=sys.stdout),
  24. ],
  25. )
  26. log = getLogger("failUser")
  27. # Config JSON
  28. def save_config(con):
  29. with open("failUser.cfg", "w") as f:
  30. f.write(dumps(con, indent=4, sort_keys=False))
  31. def load_config():
  32. if not exists("failUser.cfg"):
  33. now = pendulum.now().to_datetime_string()
  34. defaults = {
  35. # Target hack logs
  36. "target": "data/data/hack.log",
  37. # Just print what whould have been executed or execute it?
  38. "debug_blocks": False, # True is just print, False is execute
  39. # block_time in hours
  40. "block_time": 4,
  41. # Last unblock
  42. "last_unblock": now,
  43. # List of bad users to detect and block
  44. "bad_users": [
  45. "root",
  46. "postgres",
  47. "mysql",
  48. "apache",
  49. "nginx",
  50. "admin",
  51. "test"
  52. ],
  53. "whitelist": []
  54. }
  55. save_config(defaults)
  56. return defaults
  57. else:
  58. with open("failUser.cfg", "r") as f:
  59. config = loads(f.read())
  60. return config
  61. # blocks in json
  62. def add_block(ip, time):
  63. # first load in all blocks
  64. try:
  65. with open("blocks.json", "r") as f:
  66. blocks = loads(f.read())
  67. except FileNotFoundError:
  68. blocks = {}
  69. pass
  70. except JSONDecodeError:
  71. blocks = {}
  72. pass
  73. # add ip and time
  74. #log.debug("Added {0} in blocks.json".format(ip))
  75. blocks[ip] = time
  76. # update blocks
  77. with open("blocks.json", "w") as f:
  78. f.write(dumps(blocks))
  79. def rm_block(ip):
  80. # first load all blocks
  81. try:
  82. with open("blocks.json", "r") as f:
  83. blocks = loads(f.read())
  84. except FileNotFoundError:
  85. return
  86. except JSONDecodeError:
  87. return
  88. try:
  89. if blocks[ip]:
  90. #log.debug("Removed {0} in blocks.json".format(ip))
  91. del blocks[ip]
  92. # update blocks
  93. with open("blocks.json", "w") as f:
  94. f.write(dumps(blocks))
  95. except KeyError:
  96. log.error("Unable to unblock '{0}'".format(ip))
  97. def check_blocks():
  98. # return a list of ips exceeding block_time in config
  99. result = []
  100. conf = load_config()
  101. # load in blocks
  102. try:
  103. with open("blocks.json", "r") as f:
  104. blocks = loads(f.read())
  105. except FileNotFoundError:
  106. return
  107. now = pendulum.now()
  108. for ip in blocks:
  109. dt = pendulum.parse(blocks[ip])
  110. #log.debug("IP={0} TIME_LEFT={1}".format(ip, abs(now.diff(dt, False).in_hours())))
  111. if now.diff(dt).in_hours() > conf["block_time"]:
  112. # Oops, this ip needs to be unblocked
  113. result.append(ip)
  114. if result:
  115. return result