#!/usr/bin/env python3
"""failUser: track and expire IP blocks recorded from an auth/hack log.

Block state is persisted in ``blocks.json`` (mapping ip -> timestamp
string) and runtime settings in ``failUser.cfg`` (JSON).
"""
from logging import basicConfig, DEBUG, INFO, WARN, ERROR, CRITICAL, getLogger
from logging.handlers import TimedRotatingFileHandler
from os.path import exists, join, dirname, abspath
from json import loads, dumps
from json.decoder import JSONDecodeError

import pendulum

# Absolute directory of this file, so the log file lands next to the
# script regardless of the current working directory.
currentdir = dirname(abspath(__file__))

# Target log file to watch ("data/data/hack.log" on POSIX).
TARGET = join("data", "data", "hack.log")

# Setup logging: daily-rotated file with one backup kept.
# Levels: DEBUG, INFO, WARN, ERROR, CRITICAL
basicConfig(
    level=DEBUG,
    format="%(asctime)s - %(filename)s (%(lineno)d) - %(name)s - %(levelname)s - %(message)s",
    handlers=[
        TimedRotatingFileHandler(
            filename=join(currentdir, "failUser.log"),
            when="midnight",
            backupCount=1,
        ),
        #logging.StreamHandler(stream=sys.stdout),
    ],
)
log = getLogger("failUser")


def save_config(con):
    """Write configuration dict *con* to failUser.cfg as indented JSON.

    NOTE(review): the path is relative to the working directory, unlike
    the log file which is anchored to ``currentdir`` — confirm intended.
    """
    with open("failUser.cfg", "w") as f:
        f.write(dumps(con, indent=4, sort_keys=False))


def load_config():
    """Return the configuration dict.

    On first run (no failUser.cfg), write and return the defaults.
    """
    if exists("failUser.cfg"):
        with open("failUser.cfg", "r") as f:
            return loads(f.read())

    defaults = {
        # Target hack logs
        "target": "data/data/hack.log",
        # block_time in hours
        "block_time": 4,
        # Last unblock
        "last_unblock": pendulum.now().to_datetime_string(),
        # List of bad users to detect and block
        "bad_users": [
            "root",
            "postgres",
            "mysql",
            "apache",
            "nginx",
            "admin",
        ],
    }
    save_config(defaults)
    return defaults


def add_block(ip, time):
    """Record *ip* as blocked at *time* (timestamp string) in blocks.json."""
    # Load existing blocks; start fresh if the file is absent or corrupt.
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except (FileNotFoundError, JSONDecodeError):
        blocks = {}
    # Add (or refresh) the entry, then persist.
    #log.debug("Added {0} in blocks.json".format(ip))
    blocks[ip] = time
    with open("blocks.json", "w") as f:
        f.write(dumps(blocks))


def rm_block(ip):
    """Remove *ip* from blocks.json; log an error if it was not blocked."""
    # Load existing blocks; with no (valid) file there is nothing to remove.
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except (FileNotFoundError, JSONDecodeError):
        return
    if ip in blocks:
        # BUGFIX: was `if blocks[ip]:`, which skipped the delete whenever
        # the stored timestamp was falsy; membership is the correct test.
        #log.debug("Removed {0} in blocks.json".format(ip))
        del blocks[ip]
        with open("blocks.json", "w") as f:
            f.write(dumps(blocks))
    else:
        log.error("Unable to unblock '{0}'".format(ip))


def check_blocks():
    """Return a list of IPs blocked longer than the configured block_time
    (hours), or None when there are no blocks or none have expired."""
    result = []
    conf = load_config()
    # Load current blocks; no file (or a corrupt one) means nothing expired.
    try:
        with open("blocks.json", "r") as f:
            blocks = loads(f.read())
    except FileNotFoundError:
        return
    except JSONDecodeError:
        # Consistent with add_block/rm_block: tolerate a corrupt file.
        return
    now = pendulum.now()
    for ip, stamp in blocks.items():
        dt = pendulum.parse(stamp)
        #log.debug("IP={0} TIME_LEFT={1}".format(ip, abs(now.diff(dt, False).in_hours())))
        if now.diff(dt).in_hours() > conf["block_time"]:
            # Oops, this ip needs to be unblocked
            result.append(ip)
    if result:
        return result