# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :

# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

__author__ = "Cyril Jaquier and Fail2Ban Contributors"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2011-2013 Yaroslav Halchenko"
__license__ = "GPL"

import codecs
import datetime
import fcntl
import logging
import os
import re
import sys
import time

from .actions import Actions
from .failmanager import FailManagerEmpty, FailManager
from .ipdns import DNSUtils, IPAddr
from .ticket import FailTicket
from .jailthread import JailThread
from .datedetector import DateDetector, validateTimeZone
from .mytime import MyTime
from .failregex import FailRegex, Regex, RegexException
from .action import CommandAction
from .utils import Utils
from ..helpers import getLogger, PREFER_ENC

# Gets the instance of the logger.
logSys = getLogger(__name__)

##
# Log reader class.
#
# This class reads a log file and detects login failures or anything else
# that matches a given regular expression. This class is instantiated by
# a Jail object.
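#
# A minimal usage sketch (illustrative only, not part of the original
# source; `jail` stands for an already configured Jail instance):
#
#   flt = Filter(jail)
#   flt.addFailRegex(r"Authentication failure from <HOST>")
#   flt.setFindTime(600)
#   flt.processLineAndAdd(
#       "Jan 23 21:59:59 host sshd[123]: Authentication failure from 192.0.2.1")
#
# Matching failures accumulate in flt.failManager, from which tickets are
# handed over to the jail for banning.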


class Filter(JailThread):

    ##
    # Constructor.
    #
    # Initialize the filter object with default values.
    # @param jail the jail object

    def __init__(self, jail, useDns='warn'):
        JailThread.__init__(self)
        ## The jail which contains this filter.
        self.jail = jail
        ## The failures manager.
        self.failManager = FailManager()
        ## Regular expression for pre-filtering, applied before matching the failures.
        self.__prefRegex = None
        ## The regular expression list matching the failures.
        self.__failRegex = list()
        ## The regular expression list with expressions to ignore.
        self.__ignoreRegex = list()
        ## Use DNS setting
        self.setUseDns(useDns)
        ## The amount of time to look back.
        self.__findTime = 600
        ## Ignore own IPs flag:
        self.__ignoreSelf = True
        ## The ignore IP list.
        self.__ignoreIpList = []
        ## External command (ignorecommand):
        self.__ignoreCommand = False
        ## Cache for ignoreip:
        self.__ignoreCache = None
        ## Size of line buffer
        self.__lineBufferSize = 1
        ## Line buffer
        self.__lineBuffer = []
        ## Store last time stamp, applicable for multi-line
        self.__lastTimeText = ""
        self.__lastDate = None
        ## if set, treat log lines without an explicit time zone as being in this time zone
        self.__logtimezone = None
        ## Default or preferred encoding (to decode bytes from file or journal):
        self.__encoding = PREFER_ENC
        ## Cache temporarily holds failure info (used by multi-line parsing to map e.g. a conn-id to a host):
        self.__mlfidCache = None
        ## Error counter (protected, so can be used in filter implementations)
        ## if it reaches 100 (at once), the run-cycle will go idle
        self._errors = 0
        ## return the raw host (do not resolve the host via DNS):
        self.returnRawHost = False
        ## check each regex (used for test purposes):
        self.checkAllRegex = False
        ## if true, ignores obsolete failures (failure time < now - findTime):
        self.checkFindTime = True
        ## Ticks counter
        self.ticks = 0

        self.dateDetector = DateDetector()
        logSys.debug("Created %s", self)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.jail)

    @property
    def jailName(self):
        return (self.jail is not None and self.jail.name or "~jailless~")

    def clearAllParams(self):
        """ Clear all list/dict parameters (used by reloading)
        """
        self.delFailRegex()
        self.delIgnoreRegex()
        self.delIgnoreIP()

    def reload(self, begin=True):
        """ Begin or end the reload resp. refresh of all parameters
        """
        if begin:
            self.clearAllParams()
            if hasattr(self, 'getLogPaths'):
                self._reload_logs = dict((k, 1) for k in self.getLogPaths())
        else:
            if hasattr(self, '_reload_logs'):
                # if a log was not re-added during reload - remove the obsolete log path:
                for path in self._reload_logs:
                    self.delLogPath(path)
                delattr(self, '_reload_logs')

    @property
    def mlfidCache(self):
        if self.__mlfidCache:
            return self.__mlfidCache
        self.__mlfidCache = Utils.Cache(maxCount=100, maxTime=5*60)
        return self.__mlfidCache

    @property
    def prefRegex(self):
        return self.__prefRegex
    @prefRegex.setter
    def prefRegex(self, value):
        if value:
            self.__prefRegex = Regex(value, useDns=self.__useDns)
        else:
            self.__prefRegex = None

    ##
    # Add a regular expression which matches the failure.
    #
    # The regular expression can also match any other pattern than failures
    # and thus can be used for many purposes.
    # @param value the regular expression

    def addFailRegex(self, value):
        multiLine = self.getMaxLines() > 1
        try:
            regex = FailRegex(value, prefRegex=self.__prefRegex, multiline=multiLine,
                useDns=self.__useDns)
            self.__failRegex.append(regex)
        except RegexException as e:
            logSys.error(e)
            raise e

    def delFailRegex(self, index=None):
        try:
            # clear all:
            if index is None:
                del self.__failRegex[:]
                return
            # delete by index:
            del self.__failRegex[index]
        except IndexError:
            logSys.error("Cannot remove regular expression. Index %d is not "
                         "valid", index)

    ##
    # Get the regular expressions as list.
    #
    # @return the regular expression list

    def getFailRegex(self):
        return [regex.getRegex() for regex in self.__failRegex]

    ##
    # Add a regular expression with patterns to ignore.
    #
    # Lines matching one of these expressions are never considered
    # failures, whatever else they may match.
    # @param value the regular expression

    def addIgnoreRegex(self, value):
        try:
            regex = Regex(value, useDns=self.__useDns)
            self.__ignoreRegex.append(regex)
        except RegexException as e:
            logSys.error(e)
            raise e

    def delIgnoreRegex(self, index=None):
        try:
            # clear all:
            if index is None:
                del self.__ignoreRegex[:]
                return
            # delete by index:
            del self.__ignoreRegex[index]
        except IndexError:
            logSys.error("Cannot remove regular expression. Index %d is not "
                         "valid", index)

    ##
    # Get the list of ignore regular expressions.
    #
    # @return the list of ignore regular expressions

    def getIgnoreRegex(self):
        ignoreRegex = list()
        for regex in self.__ignoreRegex:
            ignoreRegex.append(regex.getRegex())
        return ignoreRegex

    ##
    # Set the Use DNS mode
    # @param value the usedns mode

    def setUseDns(self, value):
        if isinstance(value, bool):
            value = {True: 'yes', False: 'no'}[value]
        value = value.lower()             # must be a string by now
        if not (value in ('yes', 'warn', 'no', 'raw')):
            logSys.error("Incorrect value %r specified for usedns. "
                         "Using safe 'no'", value)
            value = 'no'
        logSys.debug("Setting usedns = %s for %s", value, self)
        self.__useDns = value

    ##
    # Get the usedns mode
    # @return the usedns mode

    def getUseDns(self):
        return self.__useDns

    ##
    # Set the time window in which a failure must be found.
    #
    # This value tells the filter how far back in time failures are
    # taken into account.
    # @param value the time

    def setFindTime(self, value):
        value = MyTime.str2seconds(value)
        self.__findTime = value
        self.failManager.setMaxTime(value)
        logSys.info("  findtime: %s", value)

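    # Example (illustrative): setFindTime accepts plain seconds as well as
    # abbreviated time strings, which MyTime.str2seconds expands:
    #
    #   flt.setFindTime(600)    # 600 seconds
    #   flt.setFindTime("10m")  # likewise 600 seconds
    #   flt.setFindTime("1h")   # 3600 seconds
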
    ##
    # Get the time window in which a failure must be found.
    #
    # @return the time

    def getFindTime(self):
        return self.__findTime

    ##
    # Set the date detector pattern, removing Defaults
    #
    # @param pattern the date template pattern

    def setDatePattern(self, pattern):
        if pattern is None:
            self.dateDetector = None
            return
        else:
            dd = DateDetector()
            dd.default_tz = self.__logtimezone
            if not isinstance(pattern, (list, tuple)):
                pattern = filter(bool, map(str.strip, re.split('\n+', pattern)))
            for pattern in pattern:
                dd.appendTemplate(pattern)
            self.dateDetector = dd

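    # Example (illustrative): the pattern may be a list/tuple, or a single
    # string holding one template per line (blank entries are dropped):
    #
    #   flt.setDatePattern("^%Y-%m-%d %H:%M:%S\n^%b %d %H:%M:%S")
    #   flt.setDatePattern(["^%Y-%m-%d %H:%M:%S", "^%b %d %H:%M:%S"])  # same effect
    #   flt.setDatePattern(None)  # drop the date detector entirely
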
    ##
    # Get the date detector pattern, or Default Detectors if not changed
    #
    # @return pattern of the date template pattern

    def getDatePattern(self):
        if self.dateDetector is not None:
            templates = self.dateDetector.templates
            # lazy template init, by first match
            if not len(templates) or len(templates) > 2:
                return None, "Default Detectors"
            elif len(templates):
                if hasattr(templates[0], "pattern"):
                    pattern = templates[0].pattern
                else:
                    pattern = None
                return pattern, templates[0].name
        return None

    ##
    # Set the log default time zone
    #
    # @param tz the symbolic timezone (for now fixed offset only: UTC[+-]HHMM)

    def setLogTimeZone(self, tz):
        validateTimeZone(tz) # validate first to avoid setting a wrong value, but keep the original string
        self.__logtimezone = tz
        if self.dateDetector: self.dateDetector.default_tz = self.__logtimezone

    ##
    # Get the log default timezone
    #
    # @return symbolic timezone (a string)

    def getLogTimeZone(self):
        return self.__logtimezone

    ##
    # Set the maximum retry value.
    #
    # @param value the retry value

    def setMaxRetry(self, value):
        self.failManager.setMaxRetry(value)
        logSys.info("  maxRetry: %s", value)

    ##
    # Get the maximum retry value.
    #
    # @return the retry value

    def getMaxRetry(self):
        return self.failManager.getMaxRetry()

    ##
    # Set the maximum line buffer size.
    #
    # @param value the line buffer size

    def setMaxLines(self, value):
        if int(value) <= 0:
            raise ValueError("maxlines must be integer greater than zero")
        self.__lineBufferSize = int(value)
        logSys.info("  maxLines: %i", self.__lineBufferSize)

    ##
    # Get the maximum line buffer size.
    #
    # @return the line buffer size

    def getMaxLines(self):
        return self.__lineBufferSize

    ##
    # Set the log file encoding
    #
    # @param encoding the encoding used with log files

    def setLogEncoding(self, encoding):
        if encoding.lower() == "auto":
            encoding = PREFER_ENC
        codecs.lookup(encoding) # Raise LookupError if invalid codec
        self.__encoding = encoding
        logSys.info("  encoding: %s", encoding)
        return encoding

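    # Example (illustrative): "auto" resolves to the preferred system
    # encoding; anything unknown raises LookupError from codecs.lookup:
    #
    #   flt.setLogEncoding("utf-8")        # used as given
    #   flt.setLogEncoding("auto")         # becomes PREFER_ENC
    #   flt.setLogEncoding("no-such-enc")  # raises LookupError
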
    ##
    # Get the log file encoding
    #
    # @return log encoding value

    def getLogEncoding(self):
        return self.__encoding

    ##
    # Main loop.
    #
    # This function is the main loop of the thread. It checks if the
    # file has been modified and looks for failures.
    # @return True when the thread exits nicely

    def run(self): # pragma: no cover
        raise Exception("run() is abstract")

    ##
    # External command, for ignored IPs
    #

    @property
    def ignoreCommand(self):
        return self.__ignoreCommand

    @ignoreCommand.setter
    def ignoreCommand(self, command):
        self.__ignoreCommand = command

    ##
    # Cache parameters for ignored IPs
    #

    @property
    def ignoreCache(self):
        return [self.__ignoreCache[0], self.__ignoreCache[1].maxCount, self.__ignoreCache[1].maxTime] \
            if self.__ignoreCache else None

    @ignoreCache.setter
    def ignoreCache(self, command):
        if command:
            self.__ignoreCache = command['key'], Utils.Cache(
                maxCount=int(command.get('max-count', 100)), maxTime=MyTime.str2seconds(command.get('max-time', 5*60))
            )
        else:
            self.__ignoreCache = None
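
    # Example (illustrative): the setter expects a dict with the cache-key
    # tag and optional size/lifetime limits, matching the keys read above:
    #
    #   flt.ignoreCache = {'key': '<ip>', 'max-count': 100, 'max-time': '5m'}
    #   flt.ignoreCache = None  # disable caching
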
    ##
    # Ban an IP - http://blogs.buanzo.com.ar/2009/04/fail2ban-patch-ban-ip-address-manually.html
    # Arturo 'Buanzo' Busleiman <buanzo@buanzo.com.ar>
    #
    # used to enable the fail2ban-client 'banip' command

    def addBannedIP(self, ip):
        if not isinstance(ip, IPAddr):
            ip = IPAddr(ip)

        unixTime = MyTime.time()
        ticket = FailTicket(ip, unixTime)
        if self._inIgnoreIPList(ip, ticket, log_ignore=False):
            logSys.warning('Requested to manually ban an ignored IP %s. User knows best. Proceeding to ban it.', ip)
        self.failManager.addFailure(ticket, self.failManager.getMaxRetry())

        # Perform the banning of the IP now.
        try: # pragma: no branch - exception is the only way out
            while True:
                ticket = self.failManager.toBan(ip)
                self.jail.putFailTicket(ticket)
        except FailManagerEmpty:
            self.failManager.cleanup(MyTime.time())

        return ip

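    # Example (illustrative): this method backs the manual ban command of the
    # client, e.g.:
    #
    #   fail2ban-client set <jail> banip 192.0.2.1
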
    ##
    # Ignore own IP/DNS.
    #
    @property
    def ignoreSelf(self):
        return self.__ignoreSelf

    @ignoreSelf.setter
    def ignoreSelf(self, value):
        self.__ignoreSelf = value

    ##
    # Add an IP/DNS to the ignore list.
    #
    # IP addresses in the ignore list are not taken into account
    # when finding failures. CIDR masks and DNS names are also accepted.
    # @param ip IP address to ignore

    def addIgnoreIP(self, ipstr):
        # An empty string is always false, skip it
        if ipstr == "":
            return
        # Create IP address object
        ip = IPAddr(ipstr)
        # Avoid exact duplicates
        if ip in self.__ignoreIpList:
            logSys.warn("  Ignore duplicate %r (%r), already in ignore list", ip, ipstr)
            return
        # log and append to ignore list
        logSys.debug("  Add %r to ignore list (%r)", ip, ipstr)
        self.__ignoreIpList.append(ip)

    def delIgnoreIP(self, ip=None):
        # clear all:
        if ip is None:
            del self.__ignoreIpList[:]
            return
        # delete by ip:
        logSys.debug("  Remove %r from ignore list", ip)
        self.__ignoreIpList.remove(ip)

    def logIgnoreIp(self, ip, log_ignore, ignore_source="unknown source"):
        if log_ignore:
            logSys.info("[%s] Ignore %s by %s", self.jailName, ip, ignore_source)

    def getIgnoreIP(self):
        return self.__ignoreIpList

    ##
    # Check if IP address/DNS is in the ignore list.
    #
    # Check if the given IP address matches an IP address/DNS or a CIDR
    # mask in the ignore list.
    # @param ip IP address object or ticket
    # @return True if IP address is in ignore list

    def inIgnoreIPList(self, ip, log_ignore=True):
        ticket = None
        if isinstance(ip, FailTicket):
            ticket = ip
            ip = ticket.getIP()
        elif not isinstance(ip, IPAddr):
            ip = IPAddr(ip)
        return self._inIgnoreIPList(ip, ticket, log_ignore)

    def _inIgnoreIPList(self, ip, ticket, log_ignore=True):
        aInfo = None
        # cached?
        if self.__ignoreCache:
            key, c = self.__ignoreCache
            if ticket:
                aInfo = Actions.ActionInfo(ticket, self.jail)
                key = CommandAction.replaceDynamicTags(key, aInfo)
            else:
                aInfo = { 'ip': ip }
                key = CommandAction.replaceTag(key, aInfo)
            v = c.get(key)
            if v is not None:
                return v

        # check whether own IPs should be ignored and 'ip' is one of the self IPs:
        if self.__ignoreSelf and ip in DNSUtils.getSelfIPs():
            self.logIgnoreIp(ip, log_ignore, ignore_source="ignoreself rule")
            if self.__ignoreCache: c.set(key, True)
            return True

        for net in self.__ignoreIpList:
            # check if the IP is covered by an ignore entry
            if ip.isInNet(net):
                self.logIgnoreIp(ip, log_ignore, ignore_source=("ip" if net.isValid else "dns"))
                if self.__ignoreCache: c.set(key, True)
                return True

        if self.__ignoreCommand:
            if ticket:
                if not aInfo: aInfo = Actions.ActionInfo(ticket, self.jail)
                command = CommandAction.replaceDynamicTags(self.__ignoreCommand, aInfo)
            else:
                if not aInfo: aInfo = { 'ip': ip }
                command = CommandAction.replaceTag(self.__ignoreCommand, aInfo)
            logSys.debug('ignore command: %s', command)
            ret, ret_ignore = CommandAction.executeCmd(command, success_codes=(0, 1))
            ret_ignore = ret and ret_ignore == 0
            self.logIgnoreIp(ip, log_ignore and ret_ignore, ignore_source="command")
            if self.__ignoreCache: c.set(key, ret_ignore)
            return ret_ignore

        if self.__ignoreCache: c.set(key, False)
        return False

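    # Example (illustrative): ignore entries may be plain IPs, CIDR networks
    # or DNS names; matching goes through IPAddr.isInNet:
    #
    #   flt.addIgnoreIP("192.0.2.1")
    #   flt.addIgnoreIP("10.0.0.0/8")
    #   flt.addIgnoreIP("gateway.example.com")
    #   flt.inIgnoreIPList("10.1.2.3")  # True, covered by 10.0.0.0/8
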
    def processLine(self, line, date=None):
        """Split the time portion from the log message and run findFailure on the result
        """
        if date:
            tupleLine = line
        else:
            l = line.rstrip('\r\n')
            logSys.log(7, "Working on line %r", line)

            (timeMatch, template) = self.dateDetector.matchTime(l)
            if timeMatch:
                tupleLine = (
                    l[:timeMatch.start(1)],
                    l[timeMatch.start(1):timeMatch.end(1)],
                    l[timeMatch.end(1):],
                    (timeMatch, template)
                )
            else:
                tupleLine = (l, "", "", None)

        # save last line (lazily convert the processed line tuple to a string on demand):
        self.processedLine = lambda: "".join(tupleLine[::2])
        return self.findFailure(tupleLine, date)

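    # Example (illustrative): for a line like
    # "Jan 23 21:59:59 host sshd[123]: Failed password" the resulting tuple is
    #
    #   ("", "Jan 23 21:59:59", " host sshd[123]: Failed password", (timeMatch, template))
    #
    # i.e. (text before the timestamp, the timestamp itself, the remainder,
    # raw date match); joining the elements [::2] restores the line without
    # its timestamp.
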
    def processLineAndAdd(self, line, date=None):
        """Processes the line for failures and populates failManager
        """
        try:
            for element in self.processLine(line, date):
                ip = element[1]
                unixTime = element[2]
                fail = element[3]
                logSys.debug("Processing line with time:%s and ip:%s",
                        unixTime, ip)
                tick = FailTicket(ip, unixTime, data=fail)
                if self._inIgnoreIPList(ip, tick):
                    continue
                logSys.info(
                    "[%s] Found %s - %s", self.jailName, ip, datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S")
                )
                self.failManager.addFailure(tick)
            # reset (halve) error counter (successfully processed line):
            if self._errors:
                self._errors //= 2
        except Exception as e:
            logSys.error("Failed to process line: %r, caught exception: %r", line, e,
                exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)
            # increment common error counter:
            self.commonError()

    def commonError(self):
        # increment error counter, stop processing (going idle) after the 100th error:
        self._errors += 1
        # sleep a little bit (to get around time-related errors):
        time.sleep(self.sleeptime)
        if self._errors >= 100:
            logSys.error("Too many errors at once (%s), going idle", self._errors)
            self._errors //= 2
            self.idle = True

    ##
    # Checks whether the line should be ignored.
    #
    # Uses ignoreregex.
    # @param tupleLines the line (as tuple-lines)
    # @return the index of the matched ignoreregex, or None if the line
    #         should not be ignored

    def ignoreLine(self, tupleLines):
        buf = Regex._tupleLinesBuf(tupleLines)
        for ignoreRegexIndex, ignoreRegex in enumerate(self.__ignoreRegex):
            ignoreRegex.search(buf, tupleLines)
            if ignoreRegex.hasMatched():
                return ignoreRegexIndex
        return None

    def _updateUsers(self, fail, user=()):
        users = fail.get('users')
        # only for regexes that contain a user group:
        if user:
            if not users:
                fail['users'] = users = set()
            users.add(user)
            return users
        return None

    # # ATM incremental (non-empty only) merge deactivated ...
    # @staticmethod
    # def _updateFailure(self, mlfidGroups, fail):
    #   # reset old failure-ids when new types of id available in this failure:
    #   fids = set()
    #   for k in ('fid', 'ip4', 'ip6', 'dns'):
    #       if fail.get(k):
    #           fids.add(k)
    #   if fids:
    #       for k in ('fid', 'ip4', 'ip6', 'dns'):
    #           if k not in fids:
    #               try:
    #                   del mlfidGroups[k]
    #               except:
    #                   pass
    #   # update not empty values:
    #   mlfidGroups.update(((k,v) for k,v in fail.iteritems() if v))

    def _mergeFailure(self, mlfid, fail, failRegex):
        mlfidFail = self.mlfidCache.get(mlfid) if self.__mlfidCache else None
        users = None
        nfflgs = 0
        if fail.get('nofail'): nfflgs |= 1
        if fail.get('mlfforget'): nfflgs |= 2
        # if multi-line failure id (connection id) is known:
        if mlfidFail:
            mlfidGroups = mlfidFail[1]
            # update users set (holds all users of the connection):
            users = self._updateUsers(mlfidGroups, fail.get('user'))
            # make sure we have the correct current state ('nofail' only from the last failure)
            try:
                del mlfidGroups['nofail']
            except KeyError:
                pass
            # # ATM incremental (non-empty only) merge deactivated (for future version only),
            # # it can be simulated using alternate value tags, like <F-ALT_VAL>...</F-ALT_VAL>,
            # # so previous value 'val' will be overwritten only if 'alt_val' is not empty...
            # _updateFailure(mlfidGroups, fail)
            #
            # overwrite multi-line failure with all values available in fail:
            mlfidGroups.update(fail)
            # new merged failure data:
            fail = mlfidGroups
            # if forget (disconnect/reset) - remove cached entry:
            if nfflgs & 2:
                self.mlfidCache.unset(mlfid)
        elif not (nfflgs & 2): # not mlfforget
            users = self._updateUsers(fail, fail.get('user'))
            mlfidFail = [self.__lastDate, fail]
            self.mlfidCache.set(mlfid, mlfidFail)
        # check users in order to avoid resetting a failure on multiple logon attempts:
        if users and len(users) > 1:
            # we have a new user; reset 'nofail' because of multiple user attempts:
            try:
                del fail['nofail']
            except KeyError:
                pass
        # merge matches:
        if not fail.get('nofail'): # current state (corresponding users)
            try:
                m = fail.pop("nofail-matches")
                m += fail.get("matches", [])
            except KeyError:
                m = fail.get("matches", [])
            if not (nfflgs & 2): # not mlfforget:
                m += failRegex.getMatchedTupleLines()
            fail["matches"] = m
        elif not (nfflgs & 2) and (nfflgs & 1): # not mlfforget and nofail:
            fail["nofail-matches"] = fail.get("nofail-matches", []) + failRegex.getMatchedTupleLines()
        # return merged:
        return fail

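    # Illustrative note on the nfflgs bit mask used above:
    #
    #   bit 1 (0x1) - 'nofail'    : this match alone is not a failure
    #   bit 2 (0x2) - 'mlfforget' : connection closed/reset, forget the
    #                               cached multi-line entry
    #
    # e.g. nfflgs == 3 marks a non-failure line that also ends the
    # multi-line session.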

    ##
    # Finds the failure in a line given split into time and log parts.
    #
    # Uses the failregex pattern to find it and timeregex in order
    # to find the logging time.
    # @return a list of failures, each as [regex index, IP, date, match groups]

    def findFailure(self, tupleLine, date=None):
        failList = list()

        ll = logSys.getEffectiveLevel()
        returnRawHost = self.returnRawHost
        cidr = IPAddr.CIDR_UNSPEC
        if self.__useDns == "raw":
            returnRawHost = True
            cidr = IPAddr.CIDR_RAW

        # Checks if we must ignore this line.
        if self.ignoreLine([tupleLine[::2]]) is not None:
            # The ignoreregex matched. Return.
            if ll <= 7: logSys.log(7, "Matched ignoreregex and was \"%s\" ignored",
                "".join(tupleLine[::2]))
            return failList

        timeText = tupleLine[1]
        if date:
            self.__lastTimeText = timeText
            self.__lastDate = date
        elif timeText:

            dateTimeMatch = self.dateDetector.getTime(timeText, tupleLine[3])

            if dateTimeMatch is None:
                logSys.error("findFailure failed to parse timeText: %s", timeText)
                date = self.__lastDate

            else:
                # Let's get the time part
                date = dateTimeMatch[0]

                self.__lastTimeText = timeText
                self.__lastDate = date
        else:
            timeText = self.__lastTimeText or "".join(tupleLine[::2])
            date = self.__lastDate

        if self.checkFindTime and date is not None and date < MyTime.time() - self.getFindTime():
            if ll <= 5: logSys.log(5, "Ignore line since time %s < %s - %s",
                date, MyTime.time(), self.getFindTime())
            return failList

        if self.__lineBufferSize > 1:
            orgBuffer = self.__lineBuffer = (
                self.__lineBuffer + [tupleLine[:3]])[-self.__lineBufferSize:]
        else:
            orgBuffer = self.__lineBuffer = [tupleLine[:3]]
        if ll <= 5: logSys.log(5, "Looking for match of %r", self.__lineBuffer)
        buf = Regex._tupleLinesBuf(self.__lineBuffer)

        # Pre-filter fail regex (if available):
        preGroups = {}
        if self.__prefRegex:
            if ll <= 5: logSys.log(5, "  Looking for prefregex %r", self.__prefRegex.getRegex())
            self.__prefRegex.search(buf, self.__lineBuffer)
            if not self.__prefRegex.hasMatched():
                if ll <= 5: logSys.log(5, "  Prefregex not matched")
                return failList
            preGroups = self.__prefRegex.getGroups()
            if ll <= 7: logSys.log(7, "  Pre-filter matched %s", preGroups)
            repl = preGroups.get('content')
            # Content replacement:
            if repl:
                del preGroups['content']
                self.__lineBuffer, buf = [('', '', repl)], None

        # Iterates over all the regular expressions.
        for failRegexIndex, failRegex in enumerate(self.__failRegex):
            try:
                # rebuild buffer from tuples if changed:
                if buf is None:
                    buf = Regex._tupleLinesBuf(self.__lineBuffer)
                if ll <= 5: logSys.log(5, "  Looking for failregex %d - %r", failRegexIndex, failRegex.getRegex())
                failRegex.search(buf, orgBuffer)
                if not failRegex.hasMatched():
                    continue
                # current failure data (matched group dict):
                fail = failRegex.getGroups()
                # The failregex matched.
                if ll <= 7: logSys.log(7, "  Matched failregex %d: %s", failRegexIndex, fail)
                # Checks if we must ignore this match.
                if self.ignoreLine(failRegex.getMatchedTupleLines()) \
                        is not None:
                    # The ignoreregex matched. Remove ignored match.
                    self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
                    if ll <= 7: logSys.log(7, "  Matched ignoreregex and was ignored")
                    if not self.checkAllRegex:
                        break
                    else:
                        continue
                if date is None:
                    logSys.warning(
                        "Found a match for %r but no valid date/time "
                        "found for %r. Please try setting a custom "
                        "date pattern (see man page jail.conf(5)). "
                        "If format is complex, please "
                        "file a detailed issue on"
                        " https://github.com/fail2ban/fail2ban/issues "
                        "in order to get support for this format.",
                        "\n".join(failRegex.getMatchedLines()), timeText)
                    continue
                # we should check all regexes (bypassed for multi-line, otherwise too complex):
                if not self.checkAllRegex or self.getMaxLines() > 1:
                    self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
                # merge data if multi-line failure:
                raw = returnRawHost
                if preGroups:
                    currFail, fail = fail, preGroups.copy()
                    fail.update(currFail)
                # first check whether we have an mlfid case (caching of the connection id for multi-line):
                mlfid = fail.get('mlfid')
                if mlfid is not None:
                    fail = self._mergeFailure(mlfid, fail, failRegex)
                    # bypass if no-failure case:
                    if fail.get('nofail'):
                        if ll <= 7: logSys.log(7, "Nofail by mlfid %r in regex %s: %s",
                            mlfid, failRegexIndex, fail.get('mlfforget', "waiting for failure"))
                        if not self.checkAllRegex: return failList
                else:
                    # matched lines:
                    fail["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines()
                # failure-id:
                fid = fail.get('fid')
                # ip-address or host:
                host = fail.get('ip4')
                if host is not None:
                    cidr = IPAddr.FAM_IPv4
                    raw = True
                else:
                    host = fail.get('ip6')
                    if host is not None:
                        cidr = IPAddr.FAM_IPv6
                        raw = True
                if host is None:
                    host = fail.get('dns')
                    if host is None:
                        # first check whether we have an mlfid case (cached connection id):
                        if fid is None and mlfid is None:
                            # if no failure-id either (obscure case, wrong regex), throw error inside getFailID:
                            fid = failRegex.getFailID()
                        host = fid
                        cidr = IPAddr.CIDR_RAW
                # if mlfid case (not a failure):
                if host is None:
                    if ll <= 7: logSys.log(7, "No failure-id by mlfid %r in regex %s: %s",
                        mlfid, failRegexIndex, fail.get('mlfforget', "waiting for identifier"))
                    if not self.checkAllRegex: return failList
                    ips = [None]
                # if raw - add single ip or failure-id,
                # otherwise expand host to multiple ips using dns (or ignore it if not valid):
                elif raw:
                    ip = IPAddr(host, cidr)
                    # check whether host equals failure-id; if not, it is a failure with a complex id:
                    if fid is not None and fid != host:
                        ip = IPAddr(fid, IPAddr.CIDR_RAW)
                    ips = [ip]
                # otherwise, try to use dns conversion:
                else:
                    ips = DNSUtils.textToIp(host, self.__useDns)
                # append failure with match to the list:
                for ip in ips:
                    failList.append([failRegexIndex, ip, date, fail])
                if not self.checkAllRegex:
                    break
            except RegexException as e: # pragma: no cover - unsure if reachable
                logSys.error(e)
        return failList

    def status(self, flavor="basic"):
        """Status of failures detected by the filter.
        """
        ret = [("Currently failed", self.failManager.size()),
               ("Total failed", self.failManager.getFailTotal())]
        return ret


class FileFilter(Filter):

    def __init__(self, jail, **kwargs):
        Filter.__init__(self, jail, **kwargs)
        ## The log files (path -> FileContainer).
        self.__logs = dict()
        self.__autoSeek = dict()

    ##
    # Add a log file path
    #
    # @param path log file path

    def addLogPath(self, path, tail=False, autoSeek=True):
        if path in self.__logs:
            if hasattr(self, '_reload_logs') and path in self._reload_logs:
                del self._reload_logs[path]
            else:
                logSys.error(path + " already exists")
        else:
            log = FileContainer(path, self.getLogEncoding(), tail)
            db = self.jail.database
            if db is not None:
                lastpos = db.addLog(self.jail, log)
                if lastpos and not tail:
                    log.setPos(lastpos)
            self.__logs[path] = log
            logSys.info("Added logfile: %r (pos = %s, hash = %s)", path, log.getPos(), log.getHash())
            if autoSeek:
                self.__autoSeek[path] = autoSeek
            self._addLogPath(path)          # backend specific

    def _addLogPath(self, path):
        # nothing to do by default
        # to be overridden by backends
        pass

    ##
    # Delete a log path
    #
    # @param path the log file to delete

    def delLogPath(self, path):
        try:
            log = self.__logs.pop(path)
        except KeyError:
            return
        db = self.jail.database
        if db is not None:
            db.updateLog(self.jail, log)
        logSys.info("Removed logfile: %r", path)
        self._delLogPath(path)
        return

    def _delLogPath(self, path): # pragma: no cover - overwritten function
        # nothing to do by default
        # to be overridden by backends
        pass

    ##
    # Get the log file names
    #
    # @return log paths

    def getLogPaths(self):
        return self.__logs.keys()

    ##
    # Get the log containers
    #
    # @return log containers

    def getLogs(self):
        return self.__logs.values()

    ##
    # Get the count of log containers
    #
    # @return count of log containers

    def getLogCount(self):
        return len(self.__logs)

    ##
    # Check whether path is already monitored.
    #
    # @param path The path
    # @return True if the path is already monitored else False

    def containsLogPath(self, path):
        return path in self.__logs


    ##
    # Set the log file encoding
    #
    # @param encoding the encoding used with log files

    def setLogEncoding(self, encoding):
        encoding = super(FileFilter, self).setLogEncoding(encoding)
        for log in self.__logs.itervalues():
            log.setEncoding(encoding)

    def getLog(self, path):
        return self.__logs.get(path, None)

    ##
    # Gets all the failures in the log file.
    #
    # Gets all the failures in the log file which are newer than
    # MyTime.time()-self.findTime. When a failure is detected, a FailTicket
    # is created and is added to the FailManager.

    def getFailures(self, filename):
        log = self.getLog(filename)
        if log is None:
            logSys.error("Unable to get failures in %s", filename)
            return False
        # We should always close the log (file), otherwise it may stay locked (log-rotate, etc.)
        try:
            # Try to open log file.
            try:
                has_content = log.open()
            # see http://python.org/dev/peps/pep-3151/
            except IOError as e:
                logSys.error("Unable to open %s", filename)
                if e.errno != 2: # errno.ENOENT
                    logSys.exception(e)
                return False
            except OSError as e: # pragma: no cover - requires race condition to trigger this
                logSys.error("Error opening %s", filename)
                logSys.exception(e)
                return False
            except Exception as e: # pragma: no cover - Requires implementation error in FileContainer to generate
                logSys.error("Internal error in FileContainer open method - please report as a bug to https://github.com/fail2ban/fail2ban/issues")
                logSys.exception(e)
                return False

            # seek to find time for first usage only (prevents performance decline when polling big files)
            if self.__autoSeek:
                startTime = self.__autoSeek.pop(filename, None)
                if startTime:
                    # if default, seek to "current time" - "find time":
                    if isinstance(startTime, bool):
                        startTime = MyTime.time() - self.getFindTime()
                    # prevent a complete read of big files the first time (after service start);
                    # initially seek to the start time using a half-interval search algorithm:
                    try:
                        self.seekToTime(log, startTime)
                    except Exception as e: # pragma: no cover
                        logSys.error("Error during seek to start time in \"%s\"", filename)
                        logSys.exception(e)
                        raise

            if has_content:
                while not self.idle:
                    line = log.readline()
                    if not line or not self.active:
                        # The jail reached the bottom or has been stopped
                        break
                    self.processLineAndAdd(line)
        finally:
            log.close()
        db = self.jail.database
        if db is not None:
            db.updateLog(self.jail, log)
        return True

    ##
    # Seeks to the line with the given date (search using a half-interval search algorithm), to start polling from it
    #

    def seekToTime(self, container, date, accuracy=3):
        fs = container.getFileSize()
        if logSys.getEffectiveLevel() <= logging.DEBUG:
            logSys.debug("Seek to find time %s (%s), file size %s", date,
                datetime.datetime.fromtimestamp(date).strftime("%Y-%m-%d %H:%M:%S"), fs)
        minp = container.getPos()
        maxp = fs
        tryPos = minp
        lastPos = -1
        foundPos = 0
        foundTime = None
        cntr = 0
        unixTime = None
        movecntr = accuracy
        while maxp > minp:
            if tryPos is None:
                pos = int(minp + (maxp - minp) / 2)
            else:
                pos, tryPos = tryPos, None
            # because container seek will go to start of next line (minus CRLF):
            pos = max(0, pos-2)
            seekpos = pos = container.seek(pos)
            cntr += 1
            # within the next 5 lines try to find any valid datetime:
            lncntr = 5
            dateTimeMatch = None
            nextp = None
            while True:
                line = container.readline()
                if not line:
                    break
                (timeMatch, template) = self.dateDetector.matchTime(line)
                if timeMatch:
                    dateTimeMatch = self.dateDetector.getTime(
                        line[timeMatch.start():timeMatch.end()],
                        (timeMatch, template))
                else:
                    nextp = container.tell()
                    if nextp > maxp:
                        pos = seekpos
                        break
                    pos = nextp
                if not dateTimeMatch and lncntr:
                    lncntr -= 1
                    continue
                break
            # if found - adjust the half-interval, otherwise stop searching at this step
            if dateTimeMatch:
                unixTime = dateTimeMatch[0]
                if unixTime >= date:
                    if foundTime is None or unixTime <= foundTime:
                        foundPos = pos
                        foundTime = unixTime
                    if pos == maxp:
                        pos = seekpos
                    if pos < maxp:
                        maxp = pos
                else:
                    if foundTime is None or unixTime >= foundTime:
                        foundPos = pos
                        foundTime = unixTime
                    if nextp is None:
                        nextp = container.tell()
                    pos = nextp
                    if pos > minp:
                        minp = pos
            # if we can't move (position not changed)
            if pos == lastPos:
                movecntr -= 1
                if movecntr <= 0:
                    break
                # we have found a large area without any date match
                # or the end of the search - try the min position (because it can be the end of the previous line):
                if minp != lastPos:
                    lastPos = tryPos = minp
                    continue
                break
            lastPos = pos
        # always use the smallest pos that could be found:
        foundPos = container.seek(minp, False)
        container.setPos(foundPos)
        if logSys.getEffectiveLevel() <= logging.DEBUG:
            logSys.debug("Position %s from %s, found time %s (%s) within %s seeks", lastPos, fs, foundTime,
                (datetime.datetime.fromtimestamp(foundTime).strftime("%Y-%m-%d %H:%M:%S") if foundTime is not None else ''), cntr)

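    # The loop above is essentially a bisection over file offsets; a minimal
    # sketch of the idea (illustrative only - it ignores the no-date and
    # multi-line handling the real code needs; timestamp_of_line is a
    # hypothetical helper):
    #
    #   lo, hi = container.getPos(), container.getFileSize()
    #   while hi > lo:
    #       mid = container.seek((lo + hi) // 2)  # lands at a line start
    #       t = timestamp_of_line(container.readline())
    #       if t >= date:
    #           hi = mid
    #       else:
    #           lo = container.tell()
    #   container.setPos(lo)
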
    def status(self, flavor="basic"):
        """Status of Filter plus files being monitored.
        """
        ret = super(FileFilter, self).status(flavor=flavor)
        path = self.__logs.keys()
        ret.append(("File list", path))
        return ret

    def stop(self):
        """Stop monitoring of log-file(s)
        """
        # stop files monitoring:
        for path in self.__logs.keys():
            self.delLogPath(path)
        # stop thread:
        super(Filter, self).stop()

##
# FileContainer class.
#
# This class manages a file handler and takes care of log rotation detection.
# In order to detect log rotation, the hash (MD5) of the first line of the file
# is computed and compared to the previous hash of this line.

try:
    import hashlib
    try:
        md5sum = hashlib.md5
        # try to use it (several standards like FIPS forbid it):
        md5sum(' ').hexdigest()
    except: # pragma: no cover
        md5sum = hashlib.sha1
except ImportError: # pragma: no cover
    # hashlib was introduced in Python 2.5.  For compatibility with those
    # elderly Pythons, import from md5
    import md5
    md5sum = md5.new

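# Illustrative note: the digest selected above only fingerprints the first
# line of a log file for rotation detection, e.g.
#
#   md5sum(firstLine).hexdigest()
#
# so falling back from MD5 to SHA-1 (e.g. on FIPS-restricted systems)
# changes nothing but the fingerprint format.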

class FileContainer:

    def __init__(self, filename, encoding, tail=False):
        self.__filename = filename
        self.setEncoding(encoding)
        self.__tail = tail
        self.__handler = None
        # Try to open the file. Raises an exception if an error occurs.
        handler = open(filename, 'rb')
        stats = os.fstat(handler.fileno())
        self.__ino = stats.st_ino
        try:
            firstLine = handler.readline()
            # Computes the MD5 of the first line.
            self.__hash = md5sum(firstLine).hexdigest()
            # Start at the beginning of the file if tail mode is off.
            if tail:
                handler.seek(0, 2)
                self.__pos = handler.tell()
            else:
                self.__pos = 0
        finally:
            handler.close()

    def getFileName(self):
        return self.__filename

    def getFileSize(self):
        return os.path.getsize(self.__filename)

    def setEncoding(self, encoding):
        codecs.lookup(encoding) # Raises LookupError if invalid
        self.__encoding = encoding

    def getEncoding(self):
        return self.__encoding

    def getHash(self):
        return self.__hash

    def getPos(self):
        return self.__pos

    def setPos(self, value):
        self.__pos = value

    def open(self):
        self.__handler = open(self.__filename, 'rb')
        # Set the file descriptor to be FD_CLOEXEC
        fd = self.__handler.fileno()
        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
        # Stat the file before even attempting to read it
        stats = os.fstat(self.__handler.fileno())
        if not stats.st_size:
            # yoh: so it is still an empty file -- nothing should be
            #      read from it yet
            # print "D: no content -- return"
            return False
        firstLine = self.__handler.readline()
        # Computes the MD5 of the first line.
        myHash = md5sum(firstLine).hexdigest()
        ## print "D: fn=%s hashes=%s/%s inos=%s/%s pos=%s rotate=%s" % (
        ##  self.__filename, self.__hash, myHash, stats.st_ino, self.__ino, self.__pos,
        ##  self.__hash != myHash or self.__ino != stats.st_ino)
        ## sys.stdout.flush()
        # Compare hash and inode
        if self.__hash != myHash or self.__ino != stats.st_ino:
            logSys.log(logging.MSG, "Log rotation detected for %s", self.__filename)
            self.__hash = myHash
            self.__ino = stats.st_ino
            self.__pos = 0
        # Sets the file pointer to the last position.
        self.__handler.seek(self.__pos)
        return True

    def seek(self, offs, endLine=True):
        h = self.__handler
        # seek to given position
        h.seek(offs, 0)
        # go to the end of the next line
        if offs and endLine:
            h.readline()
        # get current real position
        return h.tell()

    def tell(self):
        # get current real position
        return self.__handler.tell()

    @staticmethod
    def decode_line(filename, enc, line):
        try:
            return line.decode(enc, 'strict')
        except (UnicodeDecodeError, UnicodeEncodeError) as e:
            global _decode_line_warn
            lev = logging.DEBUG
            if _decode_line_warn.get(filename, 0) <= MyTime.time():
                lev = logging.WARNING
                _decode_line_warn[filename] = MyTime.time() + 24*60*60
            logSys.log(lev,
                "Error decoding line from '%s' with '%s'."
                " Consider setting logencoding=utf-8 (or another appropriate"
                " encoding) for this jail. Continuing"
                " to process line ignoring invalid characters: %r",
                filename, enc, line)
            # decode with replacing error chars:
            line = line.decode(enc, 'replace')
        return line

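    # Example (illustrative): a stray latin-1 byte in a utf-8 jail is not
    # lost, only replaced, and the warning is rate-limited to one per file
    # per day:
    #
    #   FileContainer.decode_line("/var/log/auth.log", "utf-8", b"caf\xe9")
    #   # -> u"caf\ufffd" (logged as WARNING at most once per 24h, else DEBUG)
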
    def readline(self):
        if self.__handler is None:
            return ""
        return FileContainer.decode_line(
            self.getFileName(), self.getEncoding(), self.__handler.readline())

    def close(self):
        if self.__handler is not None:
            # Saves the last position.
            self.__pos = self.__handler.tell()
            # Closes the file.
            self.__handler.close()
            self.__handler = None
        ## print "D: Closed %s with pos %d" % (handler, self.__pos)
        ## sys.stdout.flush()

_decode_line_warn = {}


##
# JournalFilter class.
#
# Base interface class for systemd journal filters

class JournalFilter(Filter): # pragma: systemd no cover

    def clearAllParams(self):
        super(JournalFilter, self).clearAllParams()
        self.delJournalMatch()

    def addJournalMatch(self, match): # pragma: no cover - Base class, not used
        pass

    def delJournalMatch(self, match=None): # pragma: no cover - Base class, not used
        pass

    def getJournalMatch(self, match): # pragma: no cover - Base class, not used
        return []
 1353