#!/usr/bin/env python
#blockhosts.py

"""Automatic updates to hosts.allow to block IP addresses based on failed
login accesses for ssh/ftp.

Script to record how many times "sshd" or "proftpd" is being attacked,
and when a particular IP address exceeds a configured number of
failed login attempts, that IP address is added to /etc/hosts.allow with
the deny option to prohibit access.
The script uses /etc/hosts.allow to store (in comments) the count of
failed attempts and the date of the last attempt for each IP address.
By default, hosts.allow is used, but the program can be configured to use
any other file, including /etc/hosts.deny, as needed.
IP addresses whose last attempt is older than a configurable age
are removed, to keep the /etc/hosts.allow file size manageable.
This script can be run as the optional command in /etc/hosts.allow
itself, so it kicks off only when someone connects to sshd/proftpd;
there is no need to run this script from cron.

TCP_WRAPPERS should be enabled for all services, which allows use of
hosts.allow file.
hosts_options should also have been enabled, which requires compile time
PROCESS_OPTIONS to be turned on. This allows extensions to the
basic hosts.* file line format.  The extensible language supports lines
of this format in /etc/hosts.allow:
    daemon_list : client_list : option : option ...
See the man pages for hosts_options and hosts_access(5) for more
information.

Warnings:
* Be sure to keep a backup of your initial hosts.allow (or hosts.deny)
  file, in case it gets overwritten due to an error in this script.
* Do read up on the web topics related to security, denial-of-service,
  and IP-address spoofing.
  Visit the blockhosts home page for references.
* This script handles IPv4 addresses only.

Usage:
For more info, run this program with --help option.

The blockfile (hosts.allow, or if needed, hosts.deny) layout needs to
follow a certain format:
  Add the following sections, in this order:
  -- permanent whitelist and blacklist of IP addresses
  -- blockhosts marker lines - two lines
  -- execute command to kick off blockhosts.py on connects to services

See "man 5 hosts_access" and "man hosts_options" for more details on
hosts.* files line formats.

The two HOSTS_MARKER_LINEs define a section: this is the region where
blockhosts will read and write IP blocking data in the hosts.allow
file. It also uses comments in that section to store the bookkeeping
data needed by this script.
Lines before and after the two HOSTS_MARKER_LINEs are left unchanged
in the hosts.allow file.

See the "INSTALL" file in the blockhosts.py source package for a
detailed example of the hosts.allow file.
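
For illustration only, a minimal hypothetical layout might look like the
following (the IP addresses, services, and script path shown here are
examples, not recommendations; see INSTALL for the exact recommended lines):

    # permanent whitelist/blacklist
    ALL: 127.0.0.1 : allow
    ALL: 10.0.0.99 : deny

    #---- BlockHosts Additions
    #---- BlockHosts Additions

    # execute blockhosts.py on each connect to these services
    sshd, proftpd : ALL : spawn (/usr/bin/blockhosts.py)& : allow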

====
This script was inspired by DenyHosts, developed by Phil Schwartz:
http://denyhosts.sourceforge.net/

====
Requires Python 2.3 minimum; several 2.3 modules/constructs are used,
such as datetime and optparse.

====
Modified 29/12/06 by Erik Ljungström to allow email alerts to be sent
whenever a host is blocked.

====
BlockHosts Script License
This work is hereby released into the Public Domain.
To view a copy of the public domain dedication, visit
http://creativecommons.org/licenses/publicdomain/ or send a letter to
Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.

Author: Avinash Chopde <avinash@acm.org>
Created: May 2005
http://www.aczoom.com/cms/blockhosts/

"""

# script metadata, also used by setup.py
VERSION="1.0.5"
VERSION_DATE="December 2006"
AUTHOR="Avinash Chopde"
AUTHOR_EMAIL="avinash@acm.org"
URL="http://www.aczoom.com/cms/blockhosts/"
LICENSE="http://creativecommons.org/licenses/publicdomain/"
DESCRIPTION="Block IP Addresses based on information in system logs related to SSH/FTP failures."
LONG_DESCRIPTION="""Block IP Addresses based on information in system logs
related to SSH/FTP or other such login attacks.

Updates the hosts.allow file automatically, to block IP addresses.
Will also expire previously blocked addresses based on the age of the
last failed login attempt; this keeps the hosts.allow file size
manageable.

"""
CONFIG_FILE = "/etc/blockhosts.cfg"

# --------------------------------------
# defaults for parameters follow this order:
# 1 -> use the value provided as an argument in argv[] to this script
# 2 -> if not, then use the value defined in CONFIG_FILE
# 3 -> if not, then use the value hard-coded in this script - HC_OPTIONS
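
# For illustration, a hypothetical CONFIG_FILE might look like the sketch
# below (the keys and values here are examples only). Values are Python
# expressions - they are eval()'d by get_config() - and keys must match
# the HC_OPTIONS names defined below, placed in a "constants" section:
#
#   [constants]
#   HOSTS_BLOCKFILE: "/etc/hosts.allow"
#   COUNT_THRESHOLD: 5
#   LOGFILES: ("/var/log/secure", "/var/log/messages")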

import re

# Defaults - hard-coded options; these values are used last, if neither
# command-line arguments nor the config file provide a value
HC_OPTIONS = {
    "HOSTS_BLOCKFILE": "/etc/hosts.allow",
    "SMTP_SERVER": "localhost",
    "NOTIFY_ADDRESS": '',
    "SMTP_USER":'',
    "SMTP_PASSWD":'',
    "SENDER_ADDRESS": "BlockHosts <blockhosts@localhost>",    
    "LOGFILES": ( "/var/log/secure", ),
        # default list of logs to process, multiple files can be listed

    "COUNT_THRESHOLD": 7,
        # number of invalid attempts after which host is blocked
        # note that actual denial make take one or more attempts - depends
        # on the timing of when LOGFILES are updated by the system,
        # and when this script gets to run

    "BLOCK_SERVICES": "ALL",
        # this string used for "daemon_list" in hosts.allow for each blocked
        # IP address

    "AGE_THRESHOLD": 12,
        # number of hours after which host entry is discarded from hosts.allow
        # 24 -> one day, 168 -> one week, 720 -> 30 days, integer values only
        # most attackers go away after they are blocked, so to keep hosts.allow
        # file size small, no reason to make this any more than, say, half-a-day

    "LOCKFILE": "/tmp/blockhosts.lock",
        # need create/write access to this file, used to make sure only one
        # instance of this script runs at one time

    ##############################################################
    # ALL_REGEXS: all expressions that match a failed login entry.
    # Compulsory: the P<host> group, matching the IP address (n.n.n.n),
    # is used to determine the IP address to block in each pattern below.
    # P<pid> is optional - use it only when multiple regexes may match
    # log lines for the same failed connection, which should be counted
    # as a single failure when the PID in all the matched lines is the
    # same. The SSHD expressions have P<pid> but vsftpd doesn't: vsftpd
    # keeps the same process active for any number of login failures on
    # a connection, so the vsftpd regex does not include the P<pid> group.
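    #
    # To watch another service, add an entry with a P<host> group; for
    # example (an illustrative sketch only - "exampled" and its log
    # format are hypothetical, not a real daemon):
    #   "Exampled-Fail": re.compile(
    #       r"""exampled\[(?P<pid>\d+)\]: login failure from (?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"""),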

    "ALL_REGEXS": {

        # SSHD -----------------------

        "SSHD-Invalid": re.compile(r"""sshd\[(?P<pid>\d+)\]:.*?(Invalid|Illegal) user (?P<user>.*?) from (::ffff:)?(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"""),
        # Jul 19 06:47:27 hostname sshd[1768]: Invalid user xxx from 10.10.58.3
        # Nov 15 04:57:19 hostname sshd[1668]: Illegal user yyy from ::ffff:10.6.184.165

        "SSHD-NotAllowed": re.compile(r"""sshd\[(?P<pid>\d+)\]: User (?P<user>.*?) from (::ffff:)?(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) not allowed"""),
        # Jul 19 06:58:23 hostname sshd[2821]: User root from 10.10.58.3 not allowed because none of user's groups are listed in AllowGroups

        # sshd always puts an "Invalid user" line for failed user
        # (non-root) logins, when PasswordAuthentication is "yes".
        # If PasswordAuthentication is yes, and PermitRootLogin is no,
        # sshd only prints a "Failed password" line for root login
        # attempts, it does not print an "Invalid user" line.
        # For non-root users, both lines are printed. To prevent
        # double counting of the IP address for the same connection,
        # blockhosts.py now looks at the process-id also, and will only
        # count one failure per process id.
        "SSHD-Fail": re.compile(r"""sshd\[(?P<pid>\d+)\]: Failed (?P<method>.*?) for (?P<invalid>invalid user |illegal user )?(?P<user>.*?) from (::ffff:)?(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"""),
        #Apr 20 12:34:30 hostname sshd[9701]: Failed password for invalid user root from 10.21.45.30 port 35993 ssh2

        # ProFTPD -----------------------
        # May 29 22:38:10 hostname proftpd[28865]: hostname (10.0.0.1[10.0.0.1]) - USER validuser (Login failed): Incorrect password.
        # May 29 22:40:20 hostname proftpd[28879]: hostname (10.0.0.1[10.0.0.1]) - USER aaa: no such user found from 10.0.0.1 [10.0.0.1] to 10.0.0.1:21
        # May 30 07:31:55 hostname proftpd[1450]: hostname (10.0.0.1[10.0.0.1]) - SECURITY VIOLATION: root login attempted.
        "ProFTPD-NoPassword": re.compile(r"""proftpd\[(?P<pid>\d+)\]: [^[]+\[(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).+Login failed"""), 

        "ProFTPD-NoUser": re.compile(r"""proftpd\[(?P<pid>\d+)\]: [^[]+\[(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).+no such user"""), 

        "ProFTPD-SecurityViolation": re.compile(r"""proftpd\[(?P<pid>\d+)\]: [^[]+\[(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).+SECURITY VIOLATION"""), 

        # VSFTPD -----------------------
        # Fri Jan 21 15:56:57 2005 [pid 6726] [test] FAIL LOGIN: Client "10.204.30.15"
        "VSFTPD-Fail": re.compile(r"""\[pid \d+\] \[(?P<user>.*?)\] FAIL LOGIN: Client "(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"""),

        # Pure-FTPd -----------------------
        # May 17 16:13:29 hostname pure-ftpd: (?@10.10.199.69) [WARNING] Authentication failed for user [username]
        "PureFTPD-Fail": re.compile(r"""pure-ftpd: \(\?\@(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\) \[WARNING\] Authentication failed"""),

        # ipop3d Aug 18 16:27:38 hostname ipop3d[2540]: Login failed user=username auth=username host=badhostname [10.3.32.17]
        # IPOP3D -----------------------
        "POP-Fail": re.compile(r"""ipop3d\[(?P<pid>\d+)\]: Login failed.* \[(?P<host>\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\]"""),
    },
}


# -------------------------------------------------------------
HOSTS_MARKER_LINE       = "#---- BlockHosts Additions"
HOSTS_MARKER_ABUSIVE    = "#bh: ip:"
HOSTS_MARKER_FIRSTLINE  = "#bh: first line:"
HOSTS_MARKER_OFFSET     = "#bh: offset:"
HOSTS_MARKER_LOGFILE    = "#bh: logfile:"
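
# Between the two HOSTS_MARKER_LINE lines, the script maintains entries
# roughly like the following (all values here are illustrative only):
#
#   #---- BlockHosts Additions
#   ALL:        10.0.0.1 : deny
#
#   #bh: ip:        10.0.0.1 :   8 : 2006-12-29-10-45
#
#   #bh: logfile: /var/log/secure
#   #bh: offset: 10927
#   #bh: first line:Dec 29 04:02:11 hostname syslogd 1.4.1: restart.
#
#   #---- BlockHosts Additions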

#######################################################################
import sys # needed by die(), even if the later module imports fail
import syslog
syslog.openlog("blockhosts.py")

def die(msg, *args):
    """Exit, serious error occurred"""

    string = "ERROR: exiting: " + " ".join([str(msg)] + map(str, args))
    print >> sys.stderr, string

    syslog.syslog(syslog.LOG_ERR, string)
    sys.exit(1)

#-------------------------------------
try:
    import os, os.path, sys, traceback
    import copy
    import string
    import time
    import errno
    import fcntl
    import smtplib
    import datetime
    from optparse import OptionParser, OptionGroup
    import ConfigParser
except ImportError, e:
    die("Script requires modules from python 2.3 - datetime and optparse\nWill not work with earlier versions.\n", e)

##############################################################

# message levels
# 0 -> error
# 1 -> warning
# 2 -> info
# 3 -> debug

MESSAGE_LEVEL = 1

NOW_DATETIME = datetime.datetime.today()

#######################################################################

def print_level(level, msg, *args):
    """Print message to stderr, but only if level is >= MESSAGE_LEVEL"""

    if MESSAGE_LEVEL >= level :
        string = " ".join([str(msg)] + map(str, args))
        print >> sys.stderr, string

def error(msg, *args):
    """Print error message, a level 0 message, using print_level"""
    print_level(0, "ERROR: ", msg, *args)
    string = " ".join([str(msg)] + map(str, args))
    syslog.syslog(syslog.LOG_ERR, string)

def warning(msg, *args):
    """Print warning message, a level 1 message, using print_level"""
    print_level(1, "  Warning: " + msg, *args)

def info(msg, *args):
    """Print info message, a level 2 message, using print_level"""
    print_level(2, msg, *args)

def debug(msg, *args):
    """Print debug message, a level 3 message, using print_level"""
    print_level(3, msg, *args)

def print_always(msg, *args):
    """Print message, used to unconditionally print message"""
    print_level(MESSAGE_LEVEL, msg, *args)

#######################################################################

def sort_by_value(d, reverse = False):
    """ Returns the keys of dictionary d sorted by their values """
    items=d.items()
    backitems=[ [v[1],v[0]] for v in items]
    backitems.sort()
    if reverse:
        backitems.reverse()
    return [ backitems[i][1] for i in range(0,len(backitems))]

#######################################################################
class Error(Exception):
    """Base class for exceptions in this module."""
    pass

class MissingMarkerError(Error):
    "Error: No blockhosts marker found in hosts.* file "
    pass

class SecondMarkerError(Error):
    "Error: parsing blockhosts section in hosts_blockfile - second marker not found"
    pass

#######################################################################

class LockFile:
    """Create exclusive advisory lock on given file, which must be opened
    for write access atleast
    """
    def __init__(self, path):
        self.__path = path
        self.__locked = 0

    def lock(self):
        try:
            self.__fp = open(self.__path, "a+") # a+ prevents trashing the file!
        except IOError, e :
            if e.errno == errno.ENOENT: # no such file
                # "w+" will trash existing file, or create new one
                self.__fp = open(self.__path, "w+")
                debug(" ... first r+ lock file open failed, so opened with w+ mode")
            else:
                raise

        try:
            rv = fcntl.lockf(self.__fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError, e :
            if e.errno == errno.EAGAIN:
                debug("File (%s) already locked, EAGAIN." % self.__path)
            elif e.errno == errno.EACCES:
                debug("File (%s) permission denied, EACCES." % self.__path)
            else:
                debug("File (%s) fcntl.lockf failed." % self.__path, e)
            raise
        else:
            self.__locked = 1


    def unlock(self):
        if not self.__locked:
            debug("  debug warning: LockFile: called unlock when no lock was held, file ", self.__path)
            return

        try:
            rv = fcntl.lockf(self.__fp.fileno(), fcntl.LOCK_UN)
            self.__fp.close()
        except IOError, e:
            debug("  debug warning: LockFile: failed to unlock or close file ", self.__path, e)
        else:
            self.__fp = None
            self.__locked = 0


    def get_path(self):
        return self.__path

#######################################################################
class HostData:
    """
    simple record structure to keep track of count seen and time last seen
    for a particular IP host address
    """
    def __init__(self, count=0, datetime = None):
        self.count = count
        self.datetime = datetime

    def __repr__(self):
        return "HostData(" + repr(self.count) + ", " + repr(self.datetime)  + ")"

    def __cmp__(self, other):
        return cmp(self.datetime, other.datetime)

#######################################################################
class SecureLogOffset:
    """Simple record structure to keep track of location into a syslog like
    message/secure file.

    Uses an offset, along with the entire first line of the file at the
    time, to allow detection of log rotation.
    """
    def __init__(self, offset=0L, first_line=""):
        self.offset = long(offset)
        self.first_line = first_line

#######
    def load_string(self, line):
        if line.startswith(HOSTS_MARKER_OFFSET):
            value = line[ len(HOSTS_MARKER_OFFSET) : ]
            try:
                self.offset = long(value.strip())
            except ValueError, e:
                warning("could not decode offset, using 0:", e)
                self.offset = 0L
                return False
        elif line.startswith(HOSTS_MARKER_FIRSTLINE):
            value = line[ len(HOSTS_MARKER_FIRSTLINE) : ]
            self.first_line = value.rstrip()
        return True

#######
    def dump_string(self):
        return "%s %ld\n%s%s\n\n" % (HOSTS_MARKER_OFFSET, self.offset,
                                HOSTS_MARKER_FIRSTLINE, self.first_line)

    def __repr__(self):
        return 'SecureLogOffset(%ld, %s)' % (self.offset, repr(self.first_line))

#######################################################################

class SecureLog:
    """
    Handles operations on the syslog-like messages/secure log, which
    contains all the sshd/proftpd or other login attempts.
    """
    def __init__(self, logfile):
        self.__offset = SecureLogOffset()
        self.__logfile = logfile
        self.__fp = None

    def open(self, offset):
        try:
            self.__fp = open(self.__logfile, "r")
            self.__offset.first_line = self.__fp.readline()[:-1]
            self.__fp.seek(0, 2)
            self.__offset.offset = self.__fp.tell()
        except IOError:
            traceback.print_exc()
            die("Can't open or read: %s" % self.__logfile)
            sys.exit(1)

        debug("SecureLog open:")
        debug("   first_line:", self.__offset.first_line)
        debug("   file length:", self.__offset.offset)

        if self.__offset.first_line != offset.first_line:
            # log file was rotated, start from beginning
            self.__offset.offset = 0L
            debug("   log file rotated, ignore old offset, start at 0")
        elif self.__offset.offset > offset.offset:
            # new lines exist in log file, start from old offset
            self.__offset.offset = offset.offset
        else:
            # no new entries in log file
            # debug("   log file offset unchanged, nothing new to read")
            pass

        info(" ... securelog, loading file, offset:", self.__logfile, self.__offset.offset)

        self.__fp.seek(self.__offset.offset)

        return self.__fp != None

    def close(self):
        try:
            return self.__fp.close()
        except IOError, e:
            warning("could not close logfile ", self.__logfile, e)

        return None

    def readline(self):
        try:
            line = self.__fp.readline()
            self.__offset.offset = self.__fp.tell()
        except IOError, e:
            line = None
            warning("readline: could not read logfile", self.__logfile, e)

        return line

    def get_offset(self):
        return self.__offset


#######################################################################
class MailAlert:
    def __init__(self, address="root@localhost.localdomain",
                 sender_address="blockhosts@localhost.localdomain",
                 smtp_server="localhost", smtp_user='', smtp_passwd='',
                 host=''):
        self.__address = address.replace('\@', '@')
        self.__sender_address = sender_address.replace('\@', '@')
        self.__smtp_server = smtp_server
        # if smtp_user and smtp_passwd are empty, assume no SMTP
        # authentication is necessary
        self.__smtp_user = smtp_user.replace('\@', '@')
        self.__smtp_passwd = smtp_passwd
        self.__host = host

    def sendMail(self):
        if len(self.__address) == 0:
            return
        session = smtplib.SMTP(self.__smtp_server)
        timestamp = datetime.datetime.now().strftime("%a %d/%m/%Y %H:%M:%S")
        message = "To: " + self.__address
        message += "\nFrom: " + self.__sender_address
        message += "\nSubject: Host blocked!\n\n"
        message += timestamp
        message += "\nAdded abusive host: " + self.__host + " to the naughty list"
        if len(self.__smtp_user) > 0:
            session.login(self.__smtp_user, self.__smtp_passwd)
        smtpresult = session.sendmail(self.__sender_address, self.__address, message)
        if smtpresult:
            errstr = ""
            # collect errors for all failed recipients, then raise
            for recip in smtpresult.keys():
                errstr = """Unable to deliver mail to: %s Server responded: %s %s %s"""\
                         % (recip, smtpresult[recip][0], smtpresult[recip][1], errstr)
            raise smtplib.SMTPException, errstr

class BlockHosts:
    def __init__(self, blockfile, block_services="ALL",
                 address="root@localhost.localdomain",
                 sender_address="blockhosts@localhost.localdomain",
                 smtp_server="localhost", smtp_user='', smtp_passwd=''):
        self.__abusive_hosts = {} # hosts -> HostData [count, last seen]
        self.__offset_first_marker = -1L
        self.__remaining_lines = [] # all lines after the 2nd end marker
        self.__blockfile = blockfile
        self.__block_services = block_services
        self.__found_first_marker = False
        self.__found_second_marker = False
        #Mail alert variables - purely for passing on to the MailAlert class
        self.__address = address
        self.__sender_address = sender_address
        self.__smtp_server = smtp_server
        self.__smtp_user = smtp_user
        self.__smtp_passwd = smtp_passwd

#######
    def load_hosts_blockfile(self, logoffsets):
        self.__remaining_lines = []

        info(" ... load blockfile:", self.__blockfile)

        state = 0
        # state = 0 -> error state
        # state = 1 -> have not seen first marker
        # state = 2 -> have seen first marker, not seen second marker
        # state = 3 -> have seen second marker
        try:
            fp = open(self.__blockfile, "r")
            state = 1
            # skip all lines to first marker
            while fp and state < 2:
                offset = fp.tell()
                line = fp.readline()
                if not line: break

                line = line.strip()
                if not line: continue

                debug("1: got line: ", line)
                if line.startswith(HOSTS_MARKER_LINE):
                    self.__offset_first_marker = offset
                    self.__found_first_marker = True
                    debug(" ... seen all state 1 lines, now inside blockhosts markers at offset ", offset)
                    state = 2

            # read all lines to second marker
            state = self.__process_state_2(fp, logoffsets, line)

            # read all lines from second marker to end of file
            if fp and state == 3:
                info(" ... found both markers, count of hosts being watched:", len(self.__abusive_hosts))
                self.__remaining_lines = fp.readlines()

            fp.close()

        except IOError, e:
            error("could not read block-file, last state: ", state)
            state = 0
            raise

        if state > 0:
            debug("block-file: Got previous abusive hosts data:")
            debug(self.__abusive_hosts )
            debug("-------------------")
            debug("block-file: Got remaining lines:")
            debug(self.__remaining_lines)
            debug("-------------------")

            if not self.__found_first_marker:
                error("no blockhosts marker found in file (%s)" % self.__blockfile)
                raise MissingMarkerError

            if not self.__found_second_marker:
                error("second blockhosts marker missing in file (%s)" % self.__blockfile)
                raise SecondMarkerError

        return self.__found_second_marker

#######
    def __process_state_2(self, fp, logoffsets, line):
        state = 2
        logfile = ""
        while fp and state == 2:
            line = fp.readline()
            if not line: break
            try:
                line = line.strip()
                if line.startswith(HOSTS_MARKER_LINE):
                    self.__found_second_marker = True
                    state = 3
                elif line.startswith(HOSTS_MARKER_LOGFILE):
                    logfile = line[ len(HOSTS_MARKER_LOGFILE) : ]
                    logfile = logfile.strip()
                    debug("2: found logfile name line: ", logfile)
                    logoffsets[ logfile ] = SecureLogOffset()
                elif line.startswith(HOSTS_MARKER_OFFSET) or line.startswith(HOSTS_MARKER_FIRSTLINE):
                    if logfile:
                        logoffsets[logfile].load_string(line)
                    else:
                        warning("... log file name not known, ignoring offset or first_line info: ", line)
                elif line.startswith(HOSTS_MARKER_ABUSIVE):
                    line = line[ len(HOSTS_MARKER_ABUSIVE) : ]

                    name, value = line.split(":", 1)
                    if not name: return state

                    name = name.strip()
                    self.__abusive_hosts[name] = HostData(0, NOW_DATETIME)

                    if ":" in value:
                        value, datestr = value.split(":", 1)
                        date1 = datestr.split("-")
                        date2 = map(lambda i: int(i), date1)
                        try:
                            self.__abusive_hosts[name].datetime = datetime.datetime(*date2)
                            self.__abusive_hosts[name].count = int(value)
                        except ValueError, e:
                            error("reading date or count for ip:", e)
                            traceback.print_exc()
                            state = 0

                        debug("2: got host-count-date ", name, value, self.__abusive_hosts[name].datetime)

                    else:
                        warning("In get_abusive_hosts, invalid line, no date, just count", name, value)
                        self.__abusive_hosts[name].count = int(value)
                else:
                    if line: debug("2: ignore line", line)
                    pass # ignore all other lines, will be regenerated

            except Exception, e:
                state = 0
                error("in __process_state_2", e)
                traceback.print_exc()

        return state

#######
    def increment_host(self, host):
        try:
            stat = self.__abusive_hosts[host]
        except KeyError:
            self.__abusive_hosts[host] = HostData()
            stat = self.__abusive_hosts[host]
            debug(" ... In increment host, created host entry ", host)

        stat.count += 1
        stat.datetime = NOW_DATETIME
        # the date-time is aggressive (approximate) - to be exact we would
        # parse the timestamp from the syslog line, but that much accuracy
        # is not necessary

#######
    def get_deny_hosts(self, count_threshold, prune_datetime):
        deny_hosts = []
        hosts = self.__abusive_hosts.keys()
        for host in hosts:
            # first remove all records that are considered old/expired
            # use <= instead of <, to allow --discard=0 to remove all hosts
            if self.__abusive_hosts[host].datetime <= prune_datetime:
                info("  will remove expired host: ", host, self.__abusive_hosts[host])
                del self.__abusive_hosts[host]
                continue

            # check if number of invalid attempts exceeds threshold
            data = self.__abusive_hosts[host]
            if data.count > count_threshold:
                deny_hosts.append(host)

        return deny_hosts

#######
    def update_hosts_blockfile(self, count_threshold, prune_date, logoffsets, dry_run = False):

        new_hosts = self.get_deny_hosts(count_threshold, prune_date)
        lines = []

        #debug(" here are new hosts from get_deny_hosts:", new_hosts)

        # first collect all the lines that will go in the blockhosts
        # section of blockfile - this is stored in lines[]
        status = False
        lines.append("%s\n" % HOSTS_MARKER_LINE) # first marker line
        for host in new_hosts:
            mailer = MailAlert(self.__address, self.__sender_address, self.__smtp_server,self.__smtp_user, self.__smtp_passwd, host)
            mailer.sendMail()
            lines.append("%s: %15s : deny\n" % (self.__block_services, host))

        if new_hosts: lines.append("\n")

        debug("Collecting abusive_hosts counts info for block-file")
        hosts = sort_by_value(self.__abusive_hosts, reverse = True)
        for host in hosts:
            date = self.__abusive_hosts[host].datetime
            lines.append("%s %15s : %3d : %s\n" % (HOSTS_MARKER_ABUSIVE, host, self.__abusive_hosts[host].count, date.strftime("%Y-%m-%d-%H-%M")))
            # debug("adding line to blockfile: ", host)

        if hosts: lines.append("\n")

        debug("Collecting log file offset info for block-file")
        files = logoffsets.keys()
        for name in files:
            lines.append("%s %s\n" % (HOSTS_MARKER_LOGFILE, name))
            lines.append(logoffsets[name].dump_string())

        lines.append("%s\n" % HOSTS_MARKER_LINE) # second marker line
        lines = lines + self.__remaining_lines

        info(" ... updates: counts: hosts to block: %d; hosts being watched: %d" % (len(new_hosts), len(self.__abusive_hosts)))
        if dry_run:
            sys.stdout.writelines(lines)
            return True

        # open file in read/write mode
        try:
            fp = open(self.__blockfile, "r+")
            try:
                if self.__offset_first_marker > -1:
                    # have seen first marker, go to start of first marker
                    fp.seek(self.__offset_first_marker)
                else:
                    # no marker seen, go to the end of the existing file;
                    # we normally won't get here, since a missing marker is
                    # treated as an error in load_hosts_blockfile, but if
                    # we do, don't overwrite any existing data
                    fp.seek(0, 2)
                    debug(" no hosts marker found, positioning for writing at end of (%s)" % self.__blockfile)

                fp.writelines(lines)
                fp.truncate()
                status = True

                # this could be the final message, no errors to this point, send
                # this info to the system log
                syslog.syslog(syslog.LOG_INFO, "final counts: blocking %d, watching %d" % (len(new_hosts), len(self.__abusive_hosts)))

            finally:
                fp.close()
        except IOError, e:
            traceback.print_exc()
            error("Could not update blockfile ", self.__blockfile)

        return status

#######################################################################

def get_config(options):
    """Reads in a configuration file"""

    config = ConfigParser.SafeConfigParser()
    config.optionxform = str # leaves tags same case - upper/lower

    # this function will be called very early in main, no debug()/info()
    # functions available, so only use print or die() here.

    if not os.path.isfile(CONFIG_FILE):
        # not an error, skip over reading the config file
        return options

    try:
        config.read(CONFIG_FILE)
    except Exception, e:
        traceback.print_exc()
        die("Error reading config file:", e)

    try:
        allitems = dict ( config.items("constants") )
    except ConfigParser.NoSectionError, e:
        traceback.print_exc()
        die("Config file missing 'constants' section", e)
        
    keys = allitems.keys()
    for key in keys:
        if options.has_key(key):
            try:
                options[key] = eval(allitems[key])
            except Exception, e:
                die("Config file Error: invalid line or value found for (%s):\n%s\n" % (key, allitems[key]), e)
        else:
            die("Config file Error: found invalid/unneeded definition:", key)

    return options

#######################################################################


def main():
    """Collects args, open block-file, search log files, update block-file"""
    global MESSAGE_LEVEL

    args = sys.argv[1:]

    oparser = OptionParser(version=VERSION, description=DESCRIPTION)

    config_options = copy.copy(HC_OPTIONS)
    get_config(config_options)
#DEFAULTS
    oparser.set_defaults(verbose=1,
        ignore_offset=False,
        dry_run=False,
        notify_address=config_options["NOTIFY_ADDRESS"],
        sender_address=config_options["SENDER_ADDRESS"],
        smtp_server=config_options["SMTP_SERVER"],
        smtp_user=config_options["SMTP_USER"],
        smtp_passwd=config_options["SMTP_PASSWD"],
        logfiles=",".join(config_options["LOGFILES"]),
        blockfile=config_options["HOSTS_BLOCKFILE"],
        block_services=config_options["BLOCK_SERVICES"],
        blockcount=config_options["COUNT_THRESHOLD"],
        discard=config_options["AGE_THRESHOLD"],
        lockfile=config_options["LOCKFILE"],
        echo="",
        )

    defaults = oparser.get_default_values()

    oparser.add_option("-q", "--quiet",
        action="store_const", const=0, dest="verbose",
        help="Be as quiet as possible - only print out error messages")

    oparser.add_option("-v", "--verbose",
        action="store_const", const=2, dest="verbose",
        help="Be verbose - print errors, warnings, and info messages")

    oparser.add_option("-g", "--debug",
        action="store_const", const=3, dest="verbose",
        help="Be chatty - print out debug level messages also")

    oparser.add_option("--ignore-offset",
        action="store_true",
        help="Ignore last-processed offset, start processing from beginning (%s)" % defaults.ignore_offset)

    oparser.add_option("--dry-run", action="store_true",
        help="Don't write the block file, just print out blockhosts section of output block file file to stdout instead (%s)" % defaults.dry_run)

    oconfig = OptionGroup(oparser, "Configuration Options",
        """Hard-coded defaults can be overridden by the values in the
config file - if it exists at %s, and those values can be overriden by
using the command-line options.  The current values are shown in () below.
This program uses a section in the block file (%s) to store blockhosts data.
This section is demarcated by two identical marker lines, with this text
- without the quotes: "%s".
Also see the "INSTALL" file in the blockhosts.py source package for a
detailed example of the hosts.allow file
""" % (CONFIG_FILE, defaults.blockfile, HOSTS_MARKER_LINE))

    oconfig.add_option("--blockfile", type="string", metavar="FILE",
        help="Name of hosts-block-file to read/write (%s)" % defaults.blockfile)

    oconfig.add_option("--logfiles", type="string", metavar="FILE1,FILE2,...",
        help="The names of log files to parse (\"%s\")" % defaults.logfiles)

    oconfig.add_option("--block", dest="block_services",
        help="""Block these services - daemon_list in block file, see 'man hosts.allow' ("%s")""" % defaults.block_services)

    oconfig.add_option("--blockcount", metavar="COUNT", type="int",
        help="Number of invalid tries allowed, before blocking host (%d).  Integer values only." % defaults.blockcount)

    oconfig.add_option("--discard", type="int", metavar="AGE",
        help="Number of hours after which to discard record - if most recent invalid attempt from IP address is older, discard that host entry (%d).  Integer values only." % defaults.discard)

    oconfig.add_option("--lockfile", metavar="FILE",
        help="Prevent multiple instances from running - open this file for locking and writing (\"%s\")" % defaults.lockfile)

    oconfig.add_option("--notify-address", metavar="ADDRESS",
        help="Address to send notification emails to (Leave blank to disable email alerts altogether) (\"%s\")" % defaults.notify_address)

    oconfig.add_option("--sender-address", metavar="ADDRESS",
        help="Address appearing as sender address in notification emails (\"%s\")" % defaults.sender_address)

    oconfig.add_option("--smtp-server", metavar="HOST",
        help="SMTP server to send notification emails through (\"%s\")" % defaults.smtp_server)

    oconfig.add_option("--smtp-user", metavar="USERNAME",
        help="If SMTP authentication is required, use this username (\"%s\")" % defaults.smtp_user)
    oconfig.add_option("--smtp-passwd", metavar="PASSWORD",
        help="If SMTP authentication is required, use this password (\"%s\")" % defaults.smtp_passwd)

    oconfig.add_option("--echo", type="string", metavar="TAG",
        help="Prints TAG on stderr and syslog, may be used to identify a run of blockhosts (%s)" % defaults.echo)

                                
    oparser.add_option_group(oconfig)

    (options, remaining_args) = oparser.parse_args()

    MESSAGE_LEVEL = options.verbose

    info("blockhosts %s started: %s" % (VERSION, NOW_DATETIME.strftime("%Y-%m-%d %H:%M:%S")))
    if options.echo:
        # force printing echo tag to screen, and to syslog
        syslog.syslog(syslog.LOG_INFO, "echo tag: %s" % options.echo)
        print_always(" ... echo tag: %s" % options.echo)

    debug("Got options:", options)
    debug("Using ALL_REGEXS:", config_options["ALL_REGEXS"])

    if remaining_args:
        warning("ignoring positional arguments - there should be none!", remaining_args)

    debug("Debug mode enabled.")
    # logfiles are extracted specially - since optparse can't do
    # eval(), and I did not want to add a new optparse type, the
    # command-line arg for logfiles only accepts a comma-separated
    # string, unlike the config file, which accepts full Python
    # syntax - list elements, characters escaped as needed, etc.
    if options.logfiles != defaults.logfiles: 
        logfiles = options.logfiles.split(",")
        debug("from cmd line, got logfiles:", logfiles)
    else:
        logfiles = config_options["LOGFILES"]
        debug("from config file, got logfiles:", logfiles)

    if not options.dry_run:
        lock = LockFile(options.lockfile)
        try:
            lock.lock()
        except IOError, e:
            if e.errno == errno.EAGAIN:
                die("Exiting: another instance running? File (%s) already locked" % lock.get_path())
            elif e.errno == errno.EACCES:
                die("Failed to lock: open/write permission denied on (%s)" % lock.get_path())
            else:
                die("Lock error: file (%s), failed to get lock" % lock.get_path(), e)

        debug("File lock obtained (%s) for excluding other instances" % lock.get_path())

    prune_date = NOW_DATETIME - datetime.timedelta(0, options.discard*60*60)

    info(" ... will discard all host entries older than ", prune_date.strftime("%Y-%m-%d %H:%M"))
    # pass the mail-alert settings explicitly - the options object is not
    # accessible from within the BlockHosts class
    dh = BlockHosts(options.blockfile, options.block_services,
        options.notify_address, options.sender_address, options.smtp_server,
        options.smtp_user, options.smtp_passwd)
    prev_logoffsets = {}
    new_logoffsets = {}
    ip_pid = {}

    try:
        dh.load_hosts_blockfile(prev_logoffsets)
    except (MissingMarkerError, SecondMarkerError):
        die("Failed to load blockfile - block-file marker error\n Expected two marker lines in the file,\n somewhere in the middle of the file:\n%s\n%s\n" % (HOSTS_MARKER_LINE, HOSTS_MARKER_LINE))
    except:
        traceback.print_exc()
        die("Failed to load blockfile")

    for logfile in logfiles:
        debug(" ------- looking into log file: ", logfile)
        sl = SecureLog(logfile)

        offset = SecureLogOffset(0,"")
        if not options.ignore_offset:
            if prev_logoffsets.has_key(logfile):
                offset = prev_logoffsets[logfile]
            else:
                warning("no offset found, will read from beginning in logfile:", logfile)

        sl.open(offset)

        while 1:
            line = sl.readline()
            if not line: break

            line = line.strip()

            regexs = config_options["ALL_REGEXS"].keys()
            for regex in regexs:
                m = config_options["ALL_REGEXS"][regex].search(line)
                if m:
                    try:
                        host = m.group("host")
                    except IndexError:
                        die("** Program error: pattern matched line:\n%s\n  but no 'host' group defined in regex: (%s)" % (line, regex))

                    try:
                        pid = m.group("pid")
                        ip_pid_key = host + "-" + pid
                    except IndexError:
                        ip_pid_key = ""

                    # if this hostip and processid already seen before,
                    # then this attempt has already been counted, don't
                    # double count, break out of here.  This may happen
                    # with SSHD-Fail and SSHD-Invalid matches.
                    if ip_pid_key:
                        if ip_pid.has_key(ip_pid_key):
                            ip_pid[ip_pid_key] += 1
                            debug("      ignoring duplicate failure line:", regex, ", IP-pid:", ip_pid_key)
                            break

                    dh.increment_host(host)

                    if ip_pid_key:
                        ip_pid[ip_pid_key] = 1
                        debug("    found failed access for ", regex, ", IP-pid:", ip_pid_key)
                    else:
                        debug("    found failed access for ", regex, ", IP:", host)
                    break

        sl.close()

        new_logoffsets[logfile] = sl.get_offset()

        debug(" ------- finished looking into log file: ", logfile)

    debug(" ------- collecting block file updates --- ")
    dh.update_hosts_blockfile(options.blockcount, prune_date, new_logoffsets, options.dry_run)
    
    if not options.dry_run: lock.unlock()

#######################################################################
if __name__ == '__main__':
    main()
