config.py 3.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121
  1. #!/usr/bin/env python3
  2. from logging import basicConfig, DEBUG, INFO, WARN, ERROR, CRITICAL, getLogger
  3. from logging.handlers import TimedRotatingFileHandler
  4. from os.path import exists, join, dirname, abspath
  5. from json import loads, dumps
  6. from json.decoder import JSONDecodeError
  7. import pendulum
# Get the full path for this file
# (so the rotating log file lands next to this module regardless of cwd)
currentdir = dirname(abspath(__file__))
# Target log file
# NOTE(review): nested join yields "data/data/hack.log", which matches the
# "target" default written by load_config() — confirm the double "data" is
# intentional (looks like an Android /data/data/... style path)
TARGET = join("data", join("data", "hack.log"))
# Setup logging
# DEBUG, INFO, WARN, ERROR, CRITICAL
basicConfig(
    level=DEBUG,
    format="%(asctime)s - %(filename)s (%(lineno)d) - %(name)s - %(levelname)s - %(message)s",
    handlers=[
        # Rotate failUser.log at midnight, keeping one backup file
        TimedRotatingFileHandler(
            filename=join(currentdir, "failUser.log"),
            when="midnight",
            backupCount=1,
        ),
        #logging.StreamHandler(stream=sys.stdout),
    ],
)
# Module-level logger shared by the helpers below
log = getLogger("failUser")
  27. # Config JSON
  28. def save_config(con):
  29. with open("failUser.cfg", "w") as f:
  30. f.write(dumps(con, indent=4, sort_keys=False))
  31. def load_config():
  32. if not exists("failUser.cfg"):
  33. now = pendulum.now().to_datetime_string()
  34. defaults = {
  35. # Target hack logs
  36. "target": "data/data/hack.log",
  37. # block_time in hours
  38. "block_time": 4,
  39. # Last unblock
  40. "last_unblock": now,
  41. # List of bad users to detect and block
  42. "bad_users": [
  43. "root",
  44. "postgres",
  45. "mysql",
  46. "apache",
  47. "nginx",
  48. "admin"
  49. ],
  50. "good_users": []
  51. }
  52. save_config(defaults)
  53. return defaults
  54. else:
  55. with open("failUser.cfg", "r") as f:
  56. config = loads(f.read())
  57. return config
  58. # blocks in json
  59. def add_block(ip, time):
  60. # first load in all blocks
  61. try:
  62. with open("blocks.json", "r") as f:
  63. blocks = loads(f.read())
  64. except FileNotFoundError:
  65. blocks = {}
  66. pass
  67. except JSONDecodeError:
  68. blocks = {}
  69. pass
  70. # add ip and time
  71. #log.debug("Added {0} in blocks.json".format(ip))
  72. blocks[ip] = time
  73. # update blocks
  74. with open("blocks.json", "w") as f:
  75. f.write(dumps(blocks))
  76. def rm_block(ip):
  77. # first load all blocks
  78. try:
  79. with open("blocks.json", "r") as f:
  80. blocks = loads(f.read())
  81. except FileNotFoundError:
  82. return
  83. except JSONDecodeError:
  84. return
  85. try:
  86. if blocks[ip]:
  87. #log.debug("Removed {0} in blocks.json".format(ip))
  88. del blocks[ip]
  89. # update blocks
  90. with open("blocks.json", "w") as f:
  91. f.write(dumps(blocks))
  92. except KeyError:
  93. log.error("Unable to unblock '{0}'".format(ip))
  94. def check_blocks():
  95. # return a list of ips exceeding block_time in config
  96. result = []
  97. conf = load_config()
  98. # load in blocks
  99. try:
  100. with open("blocks.json", "r") as f:
  101. blocks = loads(f.read())
  102. except FileNotFoundError:
  103. return
  104. now = pendulum.now()
  105. for ip in blocks:
  106. dt = pendulum.parse(blocks[ip])
  107. #log.debug("IP={0} TIME_LEFT={1}".format(ip, abs(now.diff(dt, False).in_hours())))
  108. if now.diff(dt).in_hours() > conf["block_time"]:
  109. # Oops, this ip needs to be unblocked
  110. result.append(ip)
  111. if result:
  112. return result