config.py

#!/usr/bin/env python3
from logging import basicConfig, DEBUG, INFO, WARN, ERROR, CRITICAL, getLogger
from logging.handlers import TimedRotatingFileHandler
from os.path import exists, join, dirname, abspath
from json import loads, dumps
from json.decoder import JSONDecodeError
import pendulum

# Get the full path for this file
currentdir = dirname(abspath(__file__))

# Target log file
TARGET = join("data", "data", "hack.log")

# Setup logging
# DEBUG, INFO, WARN, ERROR, CRITICAL
basicConfig(
    level=DEBUG,
    format="%(asctime)s - %(filename)s (%(lineno)d) - %(name)s - %(levelname)s - %(message)s",
    handlers=[
        TimedRotatingFileHandler(
            filename=join(currentdir, "failUser.log"),
            when="midnight",
            backupCount=1,
        ),
        # logging.StreamHandler(stream=sys.stdout),
    ],
)
log = getLogger("failUser")


# Config JSON
def save_config(con):
    with open("failUser.cfg", "w") as f:
        f.write(dumps(con, indent=4, sort_keys=False))


def load_config():
    if not exists("failUser.cfg"):
        now = pendulum.now().to_datetime_string()
        defaults = {
            # Target hack logs
            "target": "data/data/hack.log",
            # Just print what would have been executed, or execute it?
            "debug_blocks": False,  # True is just print, False is execute
            # block_time in hours
            "block_time": 4,
            # Last unblock
            "last_unblock": now,
            # List of bad users to detect and block
            "bad_users": [
                "root",
                "postgres",
                "mysql",
                "apache",
                "nginx",
                "admin"
            ],
            "whitelist": []
        }
        save_config(defaults)
        return defaults
    else:
        with open("failUser.cfg", "r") as f:
            config = loads(f.read())
        return config
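

# Roughly what the generated failUser.cfg looks like after the first run
# (the timestamp is an illustrative value, not real output):
#   {
#       "target": "data/data/hack.log",
#       "debug_blocks": false,
#       "block_time": 4,
#       "last_unblock": "2024-01-01 00:00:00",
#       "bad_users": ["root", "postgres", "mysql", "apache", "nginx", "admin"],
#       "whitelist": []
#   }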


# Blocks live in blocks.json as a flat {"ip": "timestamp"} mapping
def add_block(ip, time):
    # First load in all blocks; start with an empty mapping if the file
    # is missing or contains invalid JSON
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except (FileNotFoundError, JSONDecodeError):
        blocks = {}
    # Add ip and time
    # log.debug("Added {0} in blocks.json".format(ip))
    blocks[ip] = time
    # Update blocks
    with open("blocks.json", "w") as f:
        f.write(dumps(blocks))
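

# check_blocks() below parses the stored value with pendulum.parse(), so the
# time argument should be a parseable timestamp string, for example
# (illustrative values):
#   add_block("203.0.113.7", pendulum.now().to_datetime_string())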


def rm_block(ip):
    # First load all blocks; nothing to remove if the file is missing
    # or contains invalid JSON
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except (FileNotFoundError, JSONDecodeError):
        return
    try:
        if blocks[ip]:
            # log.debug("Removed {0} in blocks.json".format(ip))
            del blocks[ip]
            # Update blocks
            with open("blocks.json", "w") as f:
                f.write(dumps(blocks))
    except KeyError:
        log.error("Unable to unblock '{0}'".format(ip))


def check_blocks():
    # Return a list of ips exceeding block_time in config
    result = []
    conf = load_config()
    # Load in blocks; no blocks file means nothing has been blocked yet
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except FileNotFoundError:
        return result
    now = pendulum.now()
    for ip in blocks:
        dt = pendulum.parse(blocks[ip])
        # log.debug("IP={0} TIME_LEFT={1}".format(ip, abs(now.diff(dt, False).in_hours())))
        if now.diff(dt).in_hours() > conf["block_time"]:
            # Oops, this ip needs to be unblocked
            result.append(ip)
    return result
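

if __name__ == "__main__":
    # Example usage (illustrative sketch, not the module's real entry point):
    # report any blocks older than block_time and clear them from blocks.json.
    for ip in check_blocks():
        log.info("Block on {0} has expired, removing it".format(ip))
        rm_block(ip)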