#!/usr/bin/env python3
# Logging and JSON-config helpers for the failUser brute-force blocker.
from logging import basicConfig, DEBUG, INFO, WARN, ERROR, CRITICAL, getLogger
from logging.handlers import TimedRotatingFileHandler
from os.path import exists, join, dirname, abspath
from json import loads, dumps
from json.decoder import JSONDecodeError
import pendulum

# Get the full path for this file
currentdir = dirname(abspath(__file__))

# Target log file to scan for break-in attempts.
# NOTE(review): this is a relative path ("data/data/hack.log"), unlike the
# log handler below which anchors on `currentdir` — confirm it is resolved
# against the intended working directory.
TARGET = join("data", join("data", "hack.log"))

# Setup logging
# Available levels: DEBUG, INFO, WARN, ERROR, CRITICAL
basicConfig(
    level=DEBUG,
    format="%(asctime)s - %(filename)s (%(lineno)d) - %(name)s - %(levelname)s - %(message)s",
    handlers=[
        # Rotate the file at midnight, keeping a single backup.
        TimedRotatingFileHandler(
            filename=join(currentdir, "failUser.log"),
            when="midnight",
            backupCount=1,
        ),
        #logging.StreamHandler(stream=sys.stdout),
    ],
)

# Module-wide logger used by the helpers below.
log = getLogger("failUser")
  27. # Config JSON
  28. def save_config(con):
  29. with open("failUser.cfg", "w") as f:
  30. f.write(dumps(con, indent=4, sort_keys=False))
  31. def load_config():
  32. if not exists("failUser.cfg"):
  33. now = pendulum.now().to_datetime_string()
  34. defaults = {
  35. # Target hack logs
  36. "target": "data/data/hack.log",
  37. # block_time in hours
  38. "block_time": 4,
  39. # Last unblock
  40. "last_unblock": now,
  41. # List of bad users to detect and block
  42. "bad_users": [
  43. "root",
  44. "postgres",
  45. "mysql",
  46. "apache",
  47. "nginx",
  48. "admin"
  49. ],
  50. }
  51. save_config(defaults)
  52. return defaults
  53. else:
  54. with open("failUser.cfg", "r") as f:
  55. config = loads(f.read())
  56. return config
  57. # blocks in json
  58. def add_block(ip, time):
  59. # first load in all blocks
  60. try:
  61. with open("blocks.json", "r") as f:
  62. blocks = loads(f.read())
  63. except FileNotFoundError:
  64. blocks = {}
  65. pass
  66. except JSONDecodeError:
  67. blocks = {}
  68. pass
  69. # add ip and time
  70. #log.debug("Added {0} in blocks.json".format(ip))
  71. blocks[ip] = time
  72. # update blocks
  73. with open("blocks.json", "w") as f:
  74. f.write(dumps(blocks))
  75. def rm_block(ip):
  76. # first load all blocks
  77. try:
  78. with open("blocks.json", "r") as f:
  79. blocks = loads(f.read())
  80. except FileNotFoundError:
  81. return
  82. except JSONDecodeError:
  83. return
  84. try:
  85. if blocks[ip]:
  86. #log.debug("Removed {0} in blocks.json".format(ip))
  87. del blocks[ip]
  88. # update blocks
  89. with open("blocks.json", "w") as f:
  90. f.write(dumps(blocks))
  91. except KeyError:
  92. log.error("Unable to unblock '{0}'".format(ip))
  93. def check_blocks():
  94. # return a list of ips exceeding block_time in config
  95. result = []
  96. conf = load_config()
  97. # load in blocks
  98. try:
  99. with open("blocks.json", "r") as f:
  100. blocks = loads(f.read())
  101. except FileNotFoundError:
  102. return
  103. now = pendulum.now()
  104. for ip in blocks:
  105. dt = pendulum.parse(blocks[ip])
  106. #log.debug("IP={0} TIME_LEFT={1}".format(ip, abs(now.diff(dt, False).in_hours())))
  107. if now.diff(dt).in_hours() > conf["block_time"]:
  108. # Oops, this ip needs to be unblocked
  109. result.append(ip)
  110. if result:
  111. return result