"Fossies" - the Fresh Open Source Software Archive

Member "fail2ban-0.11.1/fail2ban/server/filter.py" (11 Jan 2020, 40272 Bytes) of package /linux/misc/fail2ban-0.11.1.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style: standard) with prefixed line numbers. Alternatively you can here view or download the uninterpreted source code file. For more information about "filter.py" see the Fossies "Dox" file reference documentation and the latest Fossies "Diffs" side-by-side code changes report: 0.10.5_vs_0.11.1.

    1 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
    2 # vi: set ft=python sts=4 ts=4 sw=4 noet :
    3 
    4 # This file is part of Fail2Ban.
    5 #
    6 # Fail2Ban is free software; you can redistribute it and/or modify
    7 # it under the terms of the GNU General Public License as published by
    8 # the Free Software Foundation; either version 2 of the License, or
    9 # (at your option) any later version.
   10 #
   11 # Fail2Ban is distributed in the hope that it will be useful,
   12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
   13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   14 # GNU General Public License for more details.
   15 #
   16 # You should have received a copy of the GNU General Public License
   17 # along with Fail2Ban; if not, write to the Free Software
   18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
   19 
   20 __author__ = "Cyril Jaquier and Fail2Ban Contributors"
   21 __copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2011-2013 Yaroslav Halchenko"
   22 __license__ = "GPL"
   23 
   24 import codecs
   25 import datetime
   26 import fcntl
   27 import logging
   28 import os
   29 import re
   30 import sys
   31 import time
   32 
   33 from .actions import Actions
   34 from .failmanager import FailManagerEmpty, FailManager
   35 from .ipdns import DNSUtils, IPAddr
   36 from .observer import Observers
   37 from .ticket import FailTicket
   38 from .jailthread import JailThread
   39 from .datedetector import DateDetector, validateTimeZone
   40 from .mytime import MyTime
   41 from .failregex import FailRegex, Regex, RegexException
   42 from .action import CommandAction
   43 from .utils import Utils
   44 from ..helpers import getLogger, PREFER_ENC
   45 
   46 # Gets the instance of the logger.
   47 logSys = getLogger(__name__)
   48 
   49 ##
   50 # Log reader class.
   51 #
   52 # This class reads a log file and detects login failures or anything else
   53 # that matches a given regular expression. This class is instantiated by
   54 # a Jail object.
   55 
   56 
   57 class Filter(JailThread):
   58 
   59     ##
   60     # Constructor.
   61     #
   62     # Initialize the filter object with default values.
   63     # @param jail the jail object
   64 
    def __init__(self, jail, useDns='warn'):
        """Initialize the filter object with default values.

        @param jail the jail object which contains this filter
        @param useDns usedns mode: 'yes', 'warn', 'no' or 'raw' (booleans
               are accepted and mapped by setUseDns)
        """
        JailThread.__init__(self)
        ## The jail which contains this filter.
        self.jail = jail
        ## The failures manager.
        self.failManager = FailManager()
        ## Regular expression pre-filtering matching the failures.
        self.__prefRegex = None
        ## The regular expression list matching the failures.
        self.__failRegex = list()
        ## The regular expression list with expressions to ignore.
        self.__ignoreRegex = list()
        ## Use DNS setting (validated and logged by the setter).
        self.setUseDns(useDns)
        ## The amount of time (seconds) to look back for failures.
        self.__findTime = 600
        ## Ignore own IPs flag (ignoreself).
        self.__ignoreSelf = True
        ## The ignore IP list (IPs, networks, DNS names).
        self.__ignoreIpList = []
        ## External command deciding whether an IP should be ignored.
        self.__ignoreCommand = False
        ## Cache for ignoreip decisions (None when caching is disabled).
        self.__ignoreCache = None
        ## Size of line buffer (number of lines kept for multi-line matching).
        self.__lineBufferSize = 1
        ## Line buffer of recently seen lines.
        self.__lineBuffer = []
        ## Store last time stamp (text and parsed), applicable for multi-line.
        self.__lastTimeText = ""
        self.__lastDate = None
        ## if set, treat log lines without explicit time zone to be in this time zone
        self.__logtimezone = None
        ## Default or preferred encoding (to decode bytes from file or journal):
        self.__encoding = PREFER_ENC
        ## Cache temporary holds failures info (used by multi-line for wrapping e. g. conn-id to host):
        self.__mlfidCache = None
        ## Error counter (protected, so can be used in filter implementations)
        ## if it reached 100 (at once), run-cycle will go idle
        self._errors = 0
        ## return raw host (host is not dns):
        self.returnRawHost = False
        ## check each regex (used for test purposes):
        self.checkAllRegex = False
        ## if true ignores obsolete failures (failure time < now - findTime):
        self.checkFindTime = True
        ## Ticks counter
        self.ticks = 0
        ## Thread name, derived from the jail name:
        self.name="f2b/f."+self.jailName

        # Date detector with the default template set:
        self.dateDetector = DateDetector()
        logSys.debug("Created %s", self)
  118 
  119     def __repr__(self):
  120         return "%s(%r)" % (self.__class__.__name__, self.jail)
  121 
  122     @property
  123     def jailName(self):
  124         return (self.jail is not None and self.jail.name or "~jailless~")
  125 
  126     def clearAllParams(self):
  127         """ Clear all lists/dicts parameters (used by reloading)
  128         """
  129         self.delFailRegex()
  130         self.delIgnoreRegex()
  131         self.delIgnoreIP()
  132 
  133     def reload(self, begin=True):
  134         """ Begin or end of reloading resp. refreshing of all parameters
  135         """
  136         if begin:
  137             self.clearAllParams()
  138             if hasattr(self, 'getLogPaths'):
  139                 self._reload_logs = dict((k, 1) for k in self.getLogPaths())
  140         else:
  141             if hasattr(self, '_reload_logs'):
  142                 # if it was not reloaded - remove obsolete log file:
  143                 for path in self._reload_logs:
  144                     self.delLogPath(path)
  145                 delattr(self, '_reload_logs')
  146 
  147     @property
  148     def mlfidCache(self):
  149         if self.__mlfidCache:
  150             return self.__mlfidCache
  151         self.__mlfidCache = Utils.Cache(maxCount=100, maxTime=5*60)
  152         return self.__mlfidCache
  153 
  154     @property
  155     def prefRegex(self):
  156         return self.__prefRegex
  157     @prefRegex.setter
  158     def prefRegex(self, value):
  159         if value:
  160             self.__prefRegex = Regex(value, useDns=self.__useDns)
  161         else:
  162             self.__prefRegex = None
  163 
  164     ##
  165     # Add a regular expression which matches the failure.
  166     #
  167     # The regular expression can also match any other pattern than failures
  168 # and thus can be used for many purposes.
  169     # @param value the regular expression
  170 
  171     def addFailRegex(self, value):
  172         multiLine = self.getMaxLines() > 1
  173         try:
  174             regex = FailRegex(value, prefRegex=self.__prefRegex, multiline=multiLine,
  175                 useDns=self.__useDns)
  176             self.__failRegex.append(regex)
  177         except RegexException as e:
  178             logSys.error(e)
  179             raise e
  180 
  181     def delFailRegex(self, index=None):
  182         try:
  183             # clear all:
  184             if index is None:
  185                 del self.__failRegex[:]
  186                 return
  187             # delete by index:
  188             del self.__failRegex[index]
  189         except IndexError:
  190             logSys.error("Cannot remove regular expression. Index %d is not "
  191                          "valid", index)
  192 
  193     ##
  194     # Get the regular expressions as list.
  195     #
  196     # @return the regular expression list
  197 
  198     def getFailRegex(self):
  199         return [regex.getRegex() for regex in self.__failRegex]
  200 
  201     ##
  202     # Add the regular expression which matches the failure.
  203     #
  204     # The regular expression can also match any other pattern than failures
  205     # and thus can be used for many purpose.
  206     # @param value the regular expression
  207 
  208     def addIgnoreRegex(self, value):
  209         try:
  210             regex = Regex(value, useDns=self.__useDns)
  211             self.__ignoreRegex.append(regex)
  212         except RegexException as e:
  213             logSys.error(e)
  214             raise e 
  215 
  216     def delIgnoreRegex(self, index=None):
  217         try:
  218             # clear all:
  219             if index is None:
  220                 del self.__ignoreRegex[:]
  221                 return
  222             # delete by index:
  223             del self.__ignoreRegex[index]
  224         except IndexError:
  225             logSys.error("Cannot remove regular expression. Index %d is not "
  226                          "valid", index)
  227 
  228     ##
  229     # Get the regular expression which matches the failure.
  230     #
  231     # @return the regular expression
  232 
  233     def getIgnoreRegex(self):
  234         ignoreRegex = list()
  235         for regex in self.__ignoreRegex:
  236             ignoreRegex.append(regex.getRegex())
  237         return ignoreRegex
  238 
  239     ##
  240     # Set the Use DNS mode
  241     # @param value the usedns mode
  242 
  243     def setUseDns(self, value):
  244         if isinstance(value, bool):
  245             value = {True: 'yes', False: 'no'}[value]
  246         value = value.lower()             # must be a string by now
  247         if not (value in ('yes', 'warn', 'no', 'raw')):
  248             logSys.error("Incorrect value %r specified for usedns. "
  249                          "Using safe 'no'", value)
  250             value = 'no'
  251         logSys.debug("Setting usedns = %s for %s", value, self)
  252         self.__useDns = value
  253 
  254     ##
  255     # Get the usedns mode
  256     # @return the usedns mode
  257 
    def getUseDns(self):
        """Return the current usedns mode ('yes', 'warn', 'no' or 'raw')."""
        return self.__useDns
  260 
  261     ##
  262     # Set the time needed to find a failure.
  263     #
  264     # This value tells the filter how long it has to take failures into
  265     # account.
  266     # @param value the time
  267 
    def setFindTime(self, value):
        """Set the look-back window used to find failures.

        @param value time in seconds, or an abbreviation string (e.g. '10m')
               accepted by MyTime.str2seconds; also propagated to the fail
               manager as its max-time
        """
        value = MyTime.str2seconds(value)
        self.__findTime = value
        self.failManager.setMaxTime(value)
        logSys.info("  findtime: %s", value)
  273 
  274     ##
  275     # Get the time needed to find a failure.
  276     #
  277     # @return the time
  278 
    def getFindTime(self):
        """Return the look-back window (seconds) used to find failures."""
        return self.__findTime
  281 
  282     ##
  283     # Set the date detector pattern, removing Defaults
  284     #
  285     # @param pattern the date template pattern
  286 
  287     def setDatePattern(self, pattern):
  288         if pattern is None:
  289             self.dateDetector = None
  290             return
  291         else:
  292             dd = DateDetector()
  293             dd.default_tz = self.__logtimezone
  294             if not isinstance(pattern, (list, tuple)):
  295                 pattern = filter(bool, map(str.strip, re.split('\n+', pattern)))
  296             for pattern in pattern:
  297                 dd.appendTemplate(pattern)
  298             self.dateDetector = dd
  299 
  300     ##
  301     # Get the date detector pattern, or Default Detectors if not changed
  302     #
  303     # @return pattern of the date template pattern
  304 
  305     def getDatePattern(self):
  306         if self.dateDetector is not None:
  307             templates = self.dateDetector.templates
  308             # lazy template init, by first match
  309             if not len(templates) or len(templates) > 2:
  310                 return None, "Default Detectors"
  311             elif len(templates):
  312                 if hasattr(templates[0], "pattern"):
  313                     pattern =  templates[0].pattern
  314                 else:
  315                     pattern = None
  316                 return pattern, templates[0].name
  317         return None
  318 
  319     ##
  320     # Set the log default time zone
  321     #
  322     # @param tz the symbolic timezone (for now fixed offset only: UTC[+-]HHMM)
  323 
  324     def setLogTimeZone(self, tz):
  325         validateTimeZone(tz); # avoid setting of wrong value, but hold original
  326         self.__logtimezone = tz
  327         if self.dateDetector: self.dateDetector.default_tz = self.__logtimezone
  328 
  329     ##
  330     # Get the log default timezone
  331     #
  332     # @return symbolic timezone (a string)
  333 
    def getLogTimeZone(self):
        """Return the log default timezone (a symbolic string, or None)."""
        return self.__logtimezone
  336 
  337     ##
  338     # Set the maximum retry value.
  339     #
  340     # @param value the retry value
  341 
    def setMaxRetry(self, value):
        """Set the maximum retry value (delegated to the fail manager)."""
        self.failManager.setMaxRetry(value)
        logSys.info("  maxRetry: %s", value)
  345 
  346     ##
  347     # Get the maximum retry value.
  348     #
  349     # @return the retry value
  350 
    def getMaxRetry(self):
        """Return the maximum retry value (from the fail manager)."""
        return self.failManager.getMaxRetry()
  353 
  354     ##
  355     # Set the maximum line buffer size.
  356     #
  357     # @param value the line buffer size
  358 
  359     def setMaxLines(self, value):
  360         if int(value) <= 0:
  361             raise ValueError("maxlines must be integer greater than zero")
  362         self.__lineBufferSize = int(value)
  363         logSys.info("  maxLines: %i", self.__lineBufferSize)
  364 
  365     ##
  366     # Get the maximum line buffer size.
  367     #
  368     # @return the line buffer size
  369 
    def getMaxLines(self):
        """Return the maximum line buffer size."""
        return self.__lineBufferSize
  372 
  373     ##
  374     # Set the log file encoding
  375     #
  376     # @param encoding the encoding used with log files
  377 
    def setLogEncoding(self, encoding):
        """Set the encoding used to decode log data.

        @param encoding a codec name; 'auto' selects the system preferred
               encoding
        @return the encoding actually set
        @raise LookupError if the codec name is unknown
        """
        if encoding.lower() == "auto":
            encoding = PREFER_ENC
        codecs.lookup(encoding) # Raise LookupError if invalid codec
        self.__encoding = encoding
        logSys.info("  encoding: %s", encoding)
        return encoding
  385 
  386     ##
  387     # Get the log file encoding
  388     #
  389     # @return log encoding value
  390 
    def getLogEncoding(self):
        """Return the encoding used to decode log data."""
        return self.__encoding
  393 
  394     ##
  395     # Main loop.
  396     #
  397     # This function is the main loop of the thread. It checks if the
  398     # file has been modified and looks for failures.
  399     # @return True when the thread exits nicely
  400 
    def run(self): # pragma: no cover
        """Abstract main loop of the thread; concrete filters must override.

        Implementations check whether the log source changed and look for
        failures; they return True when the thread exits nicely.
        """
        raise Exception("run() is abstract")
  403 
  404     ##
  405     # External command, for ignoredips
  406     #
  407 
    @property
    def ignoreCommand(self):
        """External command used to decide whether an IP should be ignored."""
        return self.__ignoreCommand

    @ignoreCommand.setter
    def ignoreCommand(self, command):
        self.__ignoreCommand = command
  415 
  416     ##
  417     # Cache parameters for ignoredips
  418     #
  419 
  420     @property
  421     def ignoreCache(self):
  422         return [self.__ignoreCache[0], self.__ignoreCache[1].maxCount, self.__ignoreCache[1].maxTime] \
  423             if self.__ignoreCache else None
  424 
  425     @ignoreCache.setter
  426     def ignoreCache(self, command):
  427         if command:
  428             self.__ignoreCache = command['key'], Utils.Cache(
  429                 maxCount=int(command.get('max-count', 100)), maxTime=MyTime.str2seconds(command.get('max-time', 5*60))
  430             )
  431         else:
  432             self.__ignoreCache = None
  433 
    def performBan(self, ip=None):
        """Performs a ban for IPs (or given ip) that are reached maxretry of the jail.

        Drains the fail manager's ban queue into the jail; the loop ends via
        FailManagerEmpty, after which obsolete failures are cleaned up.
        """
        try: # pragma: no branch - exception is the only way out
            while True:
                ticket = self.failManager.toBan(ip)
                self.jail.putFailTicket(ticket)
        except FailManagerEmpty:
            self.failManager.cleanup(MyTime.time())
  442 
    def addAttempt(self, ip, *matches):
        """Generate a failed attempt for ip.

        @param ip address of the attempt (converted to IPAddr if needed)
        @param matches optional matched log lines attached to the ticket
        @return 1 (number of registered attempts)
        """
        if not isinstance(ip, IPAddr):
            ip = IPAddr(ip)
        matches = list(matches) # tuple to list

        # Generate the failure attempt for the IP:
        unixTime = MyTime.time()
        ticket = FailTicket(ip, unixTime, matches=matches)
        logSys.info(
            "[%s] Attempt %s - %s", self.jailName, ip, datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S")
        )
        # count at least one failure even without attached matches:
        self.failManager.addFailure(ticket, len(matches) or 1)

        # Perform the ban if this attempt is resulted to:
        self.performBan(ip)

        return 1
  461 
  462     ##
  463     # Ignore own IP/DNS.
  464     #
    @property
    def ignoreSelf(self):
        """Whether the host's own IP/DNS addresses are ignored."""
        return self.__ignoreSelf

    @ignoreSelf.setter
    def ignoreSelf(self, value):
        self.__ignoreSelf = value
  472 
  473     ##
  474     # Add an IP/DNS to the ignore list.
  475     #
  476     # IP addresses in the ignore list are not taken into account
  477     # when finding failures. CIDR mask and DNS are also accepted.
  478     # @param ip IP address to ignore
  479 
  480     def addIgnoreIP(self, ipstr):
  481         # An empty string is always false
  482         if ipstr == "":
  483             return
  484         # Create IP address object
  485         ip = IPAddr(ipstr)
  486         # Avoid exact duplicates
  487         if ip in self.__ignoreIpList:
  488             logSys.warn("  Ignore duplicate %r (%r), already in ignore list", ip, ipstr)
  489             return
  490         # log and append to ignore list
  491         logSys.debug("  Add %r to ignore list (%r)", ip, ipstr)
  492         self.__ignoreIpList.append(ip)
  493 
    def delIgnoreIP(self, ip=None):
        """Remove *ip* from the ignore list, or clear the whole list if None."""
        # clear all:
        if ip is None:
            del self.__ignoreIpList[:]
            return
        # delete by ip:
        logSys.debug("  Remove %r from ignore list", ip)
        self.__ignoreIpList.remove(ip)
  502 
    def logIgnoreIp(self, ip, log_ignore, ignore_source="unknown source"):
        """Log that *ip* was ignored (only when *log_ignore* is true)."""
        if log_ignore:
            logSys.info("[%s] Ignore %s by %s", self.jailName, ip, ignore_source)
  506 
    def getIgnoreIP(self):
        """Return the list of ignored IPs/networks/DNS entries."""
        return self.__ignoreIpList
  509 
  510     ##
  511     # Check if IP address/DNS is in the ignore list.
  512     #
  513     # Check if the given IP address matches an IP address/DNS or a CIDR
  514     # mask in the ignore list.
  515     # @param ip IP address object or ticket
  516     # @return True if IP address is in ignore list
  517 
    def inIgnoreIPList(self, ip, log_ignore=True):
        """Check if IP address/DNS is in the ignore list.

        @param ip IP address object, string, or FailTicket (its IP is used)
        @param log_ignore whether an ignore decision should be logged
        @return True if the IP address is in the ignore list
        """
        ticket = None
        if isinstance(ip, FailTicket):
            ticket = ip
            ip = ticket.getIP()
        elif not isinstance(ip, IPAddr):
            ip = IPAddr(ip)
        return self._inIgnoreIPList(ip, ticket, log_ignore)
  526 
    def _inIgnoreIPList(self, ip, ticket, log_ignore=True):
        """Decide whether *ip* is ignored (ignoreself, ignore list, command).

        The decision is cached under a tag-substituted key when an ignore
        cache is configured; `key`/`c` below are only defined (and used)
        in that case.
        """
        aInfo = None
        # cached ?
        if self.__ignoreCache:
            key, c = self.__ignoreCache
            if ticket:
                aInfo = Actions.ActionInfo(ticket, self.jail)
                key = CommandAction.replaceDynamicTags(key, aInfo)
            else:
                aInfo = { 'ip': ip }
                key = CommandAction.replaceTag(key, aInfo)
            v = c.get(key)
            if v is not None:
                return v

        # check own IPs should be ignored and 'ip' is self IP:
        if self.__ignoreSelf and ip in DNSUtils.getSelfIPs():
            self.logIgnoreIp(ip, log_ignore, ignore_source="ignoreself rule")
            if self.__ignoreCache: c.set(key, True)
            return True

        # explicit ignore list (IPs, networks, DNS names):
        for net in self.__ignoreIpList:
            # check if the IP is covered by ignore IP
            if ip.isInNet(net):
                self.logIgnoreIp(ip, log_ignore, ignore_source=("ip" if net.isValid else "dns"))
                if self.__ignoreCache: c.set(key, True)
                return True

        # external ignore command decides last:
        if self.__ignoreCommand:
            if ticket:
                if not aInfo: aInfo = Actions.ActionInfo(ticket, self.jail)
                command = CommandAction.replaceDynamicTags(self.__ignoreCommand, aInfo)
            else:
                if not aInfo: aInfo = { 'ip': ip }
                command = CommandAction.replaceTag(self.__ignoreCommand, aInfo)
            logSys.debug('ignore command: %s', command)
            # exit code 0 means "ignore"; 1 is also a successful execution:
            ret, ret_ignore = CommandAction.executeCmd(command, success_codes=(0, 1))
            ret_ignore = ret and ret_ignore == 0
            self.logIgnoreIp(ip, log_ignore and ret_ignore, ignore_source="command")
            if self.__ignoreCache: c.set(key, ret_ignore)
            return ret_ignore

        if self.__ignoreCache: c.set(key, False)
        return False
  571 
  572     def processLine(self, line, date=None):
  573         """Split the time portion from log msg and return findFailures on them
  574         """
  575         if date:
  576             tupleLine = line
  577         else:
  578             l = line.rstrip('\r\n')
  579             logSys.log(7, "Working on line %r", line)
  580 
  581             (timeMatch, template) = self.dateDetector.matchTime(l)
  582             if timeMatch:
  583                 tupleLine  = (
  584                     l[:timeMatch.start(1)],
  585                     l[timeMatch.start(1):timeMatch.end(1)],
  586                     l[timeMatch.end(1):],
  587                     (timeMatch, template)
  588                 )
  589             else:
  590                 tupleLine = (l, "", "", None)
  591 
  592         # save last line (lazy convert of process line tuple to string on demand):
  593         self.processedLine = lambda: "".join(tupleLine[::2])
  594         return self.findFailure(tupleLine, date)
  595 
    def processLineAndAdd(self, line, date=None):
        """Processes the line for failures and populates failManager.

        Each failure found becomes a FailTicket (unless its IP is ignored);
        tickets are added to the fail manager and reported to the observer.
        Exceptions are logged and counted via commonError().
        """
        try:
            for element in self.processLine(line, date):
                ip = element[1]
                unixTime = element[2]
                fail = element[3]
                logSys.debug("Processing line with time:%s and ip:%s", 
                        unixTime, ip)
                tick = FailTicket(ip, unixTime, data=fail)
                if self._inIgnoreIPList(ip, tick):
                    continue
                logSys.info(
                    "[%s] Found %s - %s", self.jailName, ip, MyTime.time2str(unixTime)
                )
                self.failManager.addFailure(tick)
                # report to observer - failure was found, for possibly increasing of it retry counter (asynchronous)
                if Observers.Main is not None:
                    Observers.Main.add('failureFound', self.failManager, self.jail, tick)
            # reset (halve) error counter (successfully processed line):
            if self._errors:
                self._errors //= 2
        except Exception as e:
            logSys.error("Failed to process line: %r, caught exception: %r", line, e,
                exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)
            # incr common error counter:
            self.commonError()
  624 
    def commonError(self):
        """Count a processing error; go idle after the 100th at once.

        The counter is halved on going idle (and on every successfully
        processed line, see processLineAndAdd).
        """
        # incr error counter, stop processing (going idle) after 100th error :
        self._errors += 1
        # sleep a little bit (to get around time-related errors):
        time.sleep(self.sleeptime)
        if self._errors >= 100:
            logSys.error("Too many errors at once (%s), going idle", self._errors)
            self._errors //= 2
            self.idle = True
  634 
  635     ##
  636     # Returns true if the line should be ignored.
  637     #
  638     # Uses ignoreregex.
  639     # @param line: the line
  640     # @return: a boolean
  641 
  642     def ignoreLine(self, tupleLines):
  643         buf = Regex._tupleLinesBuf(tupleLines)
  644         for ignoreRegexIndex, ignoreRegex in enumerate(self.__ignoreRegex):
  645             ignoreRegex.search(buf, tupleLines)
  646             if ignoreRegex.hasMatched():
  647                 return ignoreRegexIndex
  648         return None
  649 
  650     def _updateUsers(self, fail, user=()):
  651         users = fail.get('users')
  652         # only for regex contains user:
  653         if user:
  654             if not users:
  655                 fail['users'] = users = set()
  656             users.add(user)
  657             return users
  658         return None
  659 
  660     # # ATM incremental (non-empty only) merge deactivated ...
  661     # @staticmethod
  662     # def _updateFailure(self, mlfidGroups, fail):
  663     #   # reset old failure-ids when new types of id available in this failure:
  664     #   fids = set()
  665     #   for k in ('fid', 'ip4', 'ip6', 'dns'):
  666     #       if fail.get(k):
  667     #           fids.add(k)
  668     #   if fids:
  669     #       for k in ('fid', 'ip4', 'ip6', 'dns'):
  670     #           if k not in fids:
  671     #               try:
  672     #                   del mlfidGroups[k]
  673     #               except:
  674     #                   pass
  675     #   # update not empty values:
  676     #   mlfidGroups.update(((k,v) for k,v in fail.iteritems() if v))
  677 
    def _mergeFailure(self, mlfid, fail, failRegex):
        """Merge *fail* with the cached state of multi-line failure *mlfid*.

        Tracks the flags 'nofail'/'mlfgained'/'mlfforget' in the bitmask
        `nfflgs` (1 = nofail, 2 = forget, 8 = gained), combines the users
        seen for the connection and merges matched lines.

        @return the merged failure dict
        """
        mlfidFail = self.mlfidCache.get(mlfid) if self.__mlfidCache else None
        users = None
        nfflgs = 0
        if fail.get("mlfgained"):
            nfflgs |= 9
            if not fail.get('nofail'):
                fail['nofail'] = fail["mlfgained"]
        elif fail.get('nofail'): nfflgs |= 1
        if fail.get('mlfforget'): nfflgs |= 2
        # if multi-line failure id (connection id) known:
        if mlfidFail:
            mlfidGroups = mlfidFail[1]
            # update users set (hold all users of connect):
            users = self._updateUsers(mlfidGroups, fail.get('user'))
            # be sure we've correct current state ('nofail' and 'mlfgained' only from last failure)
            try:
                del mlfidGroups['nofail']
                del mlfidGroups['mlfgained']
            except KeyError:
                pass
            # # ATM incremental (non-empty only) merge deactivated (for future version only),
            # # it can be simulated using alternate value tags, like <F-ALT_VAL>...</F-ALT_VAL>,
            # # so previous value 'val' will be overwritten only if 'alt_val' is not empty...
            # _updateFailure(mlfidGroups, fail)
            #
            # overwrite multi-line failure with all values, available in fail:
            mlfidGroups.update(fail)
            # new merged failure data:
            fail = mlfidGroups
            # if forget (disconnect/reset) - remove cached entry:
            if nfflgs & 2:
                self.mlfidCache.unset(mlfid)
        elif not (nfflgs & 2): # not mlfforget
            users = self._updateUsers(fail, fail.get('user'))
            mlfidFail = [self.__lastDate, fail]
            self.mlfidCache.set(mlfid, mlfidFail)
        # check users in order to avoid reset failure by multiple logon-attempts:
        if users and len(users) > 1:
            # we've new user, reset 'nofail' because of multiple users attempts:
            try:
                del fail['nofail']
                nfflgs &= ~1 # reset nofail
            except KeyError:
                pass
        # merge matches:
        if not (nfflgs & 1): # current nofail state (corresponding users)
            try:
                m = fail.pop("nofail-matches")
                m += fail.get("matches", [])
            except KeyError:
                m = fail.get("matches", [])
            if not (nfflgs & 8): # no gain signaled
                m += failRegex.getMatchedTupleLines()
            fail["matches"] = m
        elif not (nfflgs & 2) and (nfflgs & 1): # not mlfforget and nofail:
            fail["nofail-matches"] = fail.get("nofail-matches", []) + failRegex.getMatchedTupleLines()
        # return merged:
        return fail
  737 
  738 
  739     ##
  740     # Finds the failure in a line given split into time and log parts.
  741     #
  742     # Uses the failregex pattern to find it and timeregex in order
  743     # to find the logging time.
  744     # @return a dict with IP and timestamp.
  745 
    def findFailure(self, tupleLine, date=None):
        """Search all failregex against one (already split) log line.

        Parameters:
            tupleLine: tuple of line parts; joining the even elements
                (``tupleLine[::2]``) restores the raw line, ``tupleLine[1]``
                is the timestamp text and ``tupleLine[3]`` is passed to the
                date detector as match hint.
            date: already-resolved epoch time of the line; if given, the
                date detection step is skipped.

        Returns:
            list of entries ``[failRegexIndex, ip, date, failGroupsDict]``;
            empty if the line was ignored, matched nothing, or a multi-line
            failure is still waiting for its failure-id/failure part.
        """
        failList = list()

        ll = logSys.getEffectiveLevel()
        returnRawHost = self.returnRawHost
        cidr = IPAddr.CIDR_UNSPEC
        if self.__useDns == "raw":
            # "raw" DNS mode: never resolve, keep matched host text as-is.
            returnRawHost = True
            cidr = IPAddr.CIDR_RAW

        # Checks if we must ignore this line (ignoreregex wins over failregex).
        if self.ignoreLine([tupleLine[::2]]) is not None:
            # The ignoreregex matched. Return.
            if ll <= 7: logSys.log(7, "Matched ignoreregex and was \"%s\" ignored",
                "".join(tupleLine[::2]))
            return failList

        timeText = tupleLine[1]
        if date:
            self.__lastTimeText = timeText
            self.__lastDate = date
        elif timeText:

            dateTimeMatch = self.dateDetector.getTime(timeText, tupleLine[3])

            if dateTimeMatch is None:
                # unparsable time - fall back to time of the previous line:
                logSys.error("findFailure failed to parse timeText: %s", timeText)
                date = self.__lastDate

            else:
                # Let's get the time part
                date = dateTimeMatch[0]

                self.__lastTimeText = timeText
                self.__lastDate = date
        else:
            # no time text on this line - reuse the last seen one:
            timeText = self.__lastTimeText or "".join(tupleLine[::2])
            date = self.__lastDate

        # lines older than the findtime window are ignored:
        if self.checkFindTime and date is not None and date < MyTime.time() - self.getFindTime():
            if ll <= 5: logSys.log(5, "Ignore line since time %s < %s - %s", 
                date, MyTime.time(), self.getFindTime())
            return failList

        # maintain the sliding multi-line buffer (single entry if size <= 1):
        if self.__lineBufferSize > 1:
            orgBuffer = self.__lineBuffer = (
                self.__lineBuffer + [tupleLine[:3]])[-self.__lineBufferSize:]
        else:
            orgBuffer = self.__lineBuffer = [tupleLine[:3]]
        if ll <= 5: logSys.log(5, "Looking for match of %r", self.__lineBuffer)
        buf = Regex._tupleLinesBuf(self.__lineBuffer)

        # Pre-filter fail regex (if available):
        preGroups = {}
        if self.__prefRegex:
            if ll <= 5: logSys.log(5, "  Looking for prefregex %r", self.__prefRegex.getRegex())
            self.__prefRegex.search(buf, self.__lineBuffer)
            if not self.__prefRegex.hasMatched():
                if ll <= 5: logSys.log(5, "  Prefregex not matched")
                return failList
            preGroups = self.__prefRegex.getGroups()
            if ll <= 7: logSys.log(7, "  Pre-filter matched %s", preGroups)
            repl = preGroups.get('content')
            # Content replacement: failregex is then applied to <content> only:
            if repl:
                del preGroups['content']
                self.__lineBuffer, buf = [('', '', repl)], None

        # Iterates over all the regular expressions.
        for failRegexIndex, failRegex in enumerate(self.__failRegex):
            try:
                # buffer from tuples if changed: 
                if buf is None:
                    buf = Regex._tupleLinesBuf(self.__lineBuffer)
                if ll <= 5: logSys.log(5, "  Looking for failregex %d - %r", failRegexIndex, failRegex.getRegex())
                failRegex.search(buf, orgBuffer)
                if not failRegex.hasMatched():
                    continue
                # current failure data (matched group dict):
                fail = failRegex.getGroups()
                # The failregex matched.
                if ll <= 7: logSys.log(7, "  Matched failregex %d: %s", failRegexIndex, fail)
                # Checks if we must ignore this match.
                if self.ignoreLine(failRegex.getMatchedTupleLines()) \
                        is not None:
                    # The ignoreregex matched. Remove ignored match.
                    self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
                    if ll <= 7: logSys.log(7, "  Matched ignoreregex and was ignored")
                    if not self.checkAllRegex:
                        break
                    else:
                        continue
                if date is None:
                    logSys.warning(
                        "Found a match for %r but no valid date/time "
                        "found for %r. Please try setting a custom "
                        "date pattern (see man page jail.conf(5)). "
                        "If format is complex, please "
                        "file a detailed issue on"
                        " https://github.com/fail2ban/fail2ban/issues "
                        "in order to get support for this format.",
                         "\n".join(failRegex.getMatchedLines()), timeText)
                    continue
                # we should check all regex (bypass on multi-line, otherwise too complex):
                if not self.checkAllRegex or self.getMaxLines() > 1:
                    self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
                # merge data if multi-line failure:
                raw = returnRawHost
                if preGroups:
                    # prefregex groups serve as defaults, failregex overrides:
                    currFail, fail = fail, preGroups.copy()
                    fail.update(currFail)
                # first try to check we have mlfid case (caching of connection id by multi-line):
                mlfid = fail.get('mlfid')
                if mlfid is not None:
                    fail = self._mergeFailure(mlfid, fail, failRegex)
                    # bypass if no-failure case:
                    if fail.get('nofail'):
                        if ll <= 7: logSys.log(7, "Nofail by mlfid %r in regex %s: %s",
                            mlfid, failRegexIndex, fail.get('mlfforget', "waiting for failure"))
                        if not self.checkAllRegex: return failList
                else:
                    # matched lines:
                    fail["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines()
                # failure-id:
                fid = fail.get('fid')
                # ip-address or host:
                host = fail.get('ip4')
                if host is not None:
                    cidr = int(fail.get('cidr') or IPAddr.FAM_IPv4)
                    raw = True
                else:
                    host = fail.get('ip6')
                    if host is not None:
                        cidr = int(fail.get('cidr') or IPAddr.FAM_IPv6)
                        raw = True
                if host is None:
                    host = fail.get('dns')
                    if host is None:
                        # first try to check we have mlfid case (cache connection id):
                        if fid is None and mlfid is None:
                                # if no failure-id also (obscure case, wrong regex), throw error inside getFailID:
                                fid = failRegex.getFailID()
                        host = fid
                        cidr = IPAddr.CIDR_RAW
                        raw = True
                # if mlfid case (not failure):
                if host is None:
                    if ll <= 7: logSys.log(7, "No failure-id by mlfid %r in regex %s: %s",
                        mlfid, failRegexIndex, fail.get('mlfforget', "waiting for identifier"))
                    if not self.checkAllRegex: return failList
                    ips = [None]
                # if raw - add single ip or failure-id,
                # otherwise expand host to multiple ips using dns (or ignore it if not valid):
                elif raw:
                    ip = IPAddr(host, cidr)
                    # check host equal failure-id, if not - failure with complex id:
                    if fid is not None and fid != host:
                        ip = IPAddr(fid, IPAddr.CIDR_RAW)
                    ips = [ip]
                # otherwise, try to use dns conversion:
                else:
                    ips = DNSUtils.textToIp(host, self.__useDns)
                # append failure with match to the list:
                for ip in ips:
                    failList.append([failRegexIndex, ip, date, fail])
                if not self.checkAllRegex:
                    break
            except RegexException as e: # pragma: no cover - unsure if reachable
                logSys.error(e)
        return failList
  916 
  917     def status(self, flavor="basic"):
  918         """Status of failures detected by filter.
  919         """
  920         ret = [("Currently failed", self.failManager.size()),
  921                ("Total failed", self.failManager.getFailTotal())]
  922         return ret
  923 
  924 
  925 class FileFilter(Filter):
  926 
  927     def __init__(self, jail, **kwargs):
  928         Filter.__init__(self, jail, **kwargs)
  929         ## The log file path.
  930         self.__logs = dict()
  931         self.__autoSeek = dict()
  932 
  933     ##
  934     # Add a log file path
  935     #
  936     # @param path log file path
  937 
  938     def addLogPath(self, path, tail=False, autoSeek=True):
  939         if path in self.__logs:
  940             if hasattr(self, '_reload_logs') and path in self._reload_logs:
  941                 del self._reload_logs[path]
  942             else:
  943                 logSys.error(path + " already exists")
  944         else:
  945             log = FileContainer(path, self.getLogEncoding(), tail)
  946             db = self.jail.database
  947             if db is not None:
  948                 lastpos = db.addLog(self.jail, log)
  949                 if lastpos and not tail:
  950                     log.setPos(lastpos)
  951             self.__logs[path] = log
  952             logSys.info("Added logfile: %r (pos = %s, hash = %s)" , path, log.getPos(), log.getHash())
  953             if autoSeek:
  954                 self.__autoSeek[path] = autoSeek
  955             self._addLogPath(path)          # backend specific
  956 
  957     def _addLogPath(self, path):
  958         # nothing to do by default
  959         # to be overridden by backends
  960         pass
  961 
  962     ##
  963     # Delete a log path
  964     #
  965     # @param path the log file to delete
  966 
  967     def delLogPath(self, path):
  968         try:
  969             log = self.__logs.pop(path)
  970         except KeyError:
  971             return
  972         db = self.jail.database
  973         if db is not None:
  974             db.updateLog(self.jail, log)
  975         logSys.info("Removed logfile: %r", path)
  976         self._delLogPath(path)
  977         return
  978 
  979     def _delLogPath(self, path): # pragma: no cover - overwritten function
  980         # nothing to do by default
  981         # to be overridden by backends
  982         pass
  983 
  984     ##
  985     # Get the log file names
  986     #
  987     # @return log paths
  988 
  989     def getLogPaths(self):
  990         return self.__logs.keys()
  991 
  992     ##
  993     # Get the log containers
  994     #
  995     # @return log containers
  996 
  997     def getLogs(self):
  998         return self.__logs.values()
  999 
 1000     ##
 1001     # Get the count of log containers
 1002     #
 1003     # @return count of log containers
 1004 
 1005     def getLogCount(self):
 1006         return len(self.__logs)
 1007 
 1008     ##
 1009     # Check whether path is already monitored.
 1010     #
 1011     # @param path The path
 1012     # @return True if the path is already monitored else False
 1013 
 1014     def containsLogPath(self, path):
 1015         return path in self.__logs
 1016 
 1017     ##
 1018     # Set the log file encoding
 1019     #
 1020     # @param encoding the encoding used with log files
 1021 
 1022     def setLogEncoding(self, encoding):
 1023         encoding = super(FileFilter, self).setLogEncoding(encoding)
 1024         for log in self.__logs.itervalues():
 1025             log.setEncoding(encoding)
 1026 
 1027     def getLog(self, path):
 1028         return self.__logs.get(path, None)
 1029 
 1030     ##
 1031     # Gets all the failure in the log file.
 1032     #
 1033     # Gets all the failure in the log file which are newer than
 1034     # MyTime.time()-self.findTime. When a failure is detected, a FailTicket
 1035     # is created and is added to the FailManager.
 1036 
 1037     def getFailures(self, filename):
 1038         log = self.getLog(filename)
 1039         if log is None:
 1040             logSys.error("Unable to get failures in %s", filename)
 1041             return False
 1042         # We should always close log (file), otherwise may be locked (log-rotate, etc.)
 1043         try:
 1044             # Try to open log file.
 1045             try:
 1046                 has_content = log.open()
 1047             # see http://python.org/dev/peps/pep-3151/
 1048             except IOError as e:
 1049                 logSys.error("Unable to open %s", filename)
 1050                 if e.errno != 2: # errno.ENOENT
 1051                     logSys.exception(e)
 1052                 return False
 1053             except OSError as e: # pragma: no cover - requires race condition to trigger this
 1054                 logSys.error("Error opening %s", filename)
 1055                 logSys.exception(e)
 1056                 return False
 1057             except Exception as e: # pragma: no cover - Requires implementation error in FileContainer to generate
 1058                 logSys.error("Internal error in FileContainer open method - please report as a bug to https://github.com/fail2ban/fail2ban/issues")
 1059                 logSys.exception(e)
 1060                 return False
 1061 
 1062             # seek to find time for first usage only (prevent performance decline with polling of big files)
 1063             if self.__autoSeek:
 1064                 startTime = self.__autoSeek.pop(filename, None)
 1065                 if startTime:
 1066                     # if default, seek to "current time" - "find time":
 1067                     if isinstance(startTime, bool):
 1068                         startTime = MyTime.time() - self.getFindTime()
 1069                     # prevent completely read of big files first time (after start of service), 
 1070                     # initial seek to start time using half-interval search algorithm:
 1071                     try:
 1072                         self.seekToTime(log, startTime)
 1073                     except Exception as e: # pragma: no cover
 1074                         logSys.error("Error during seek to start time in \"%s\"", filename)
 1075                         raise
 1076                         logSys.exception(e)
 1077                         return False
 1078 
 1079             if has_content:
 1080                 while not self.idle:
 1081                     line = log.readline()
 1082                     if not line or not self.active:
 1083                         # The jail reached the bottom or has been stopped
 1084                         break
 1085                     self.processLineAndAdd(line)
 1086         finally:
 1087             log.close()
 1088         db = self.jail.database
 1089         if db is not None:
 1090             db.updateLog(self.jail, log)
 1091         return True
 1092 
 1093     ##
 1094     # Seeks to line with date (search using half-interval search algorithm), to start polling from it
 1095     #
 1096 
 1097     def seekToTime(self, container, date, accuracy=3):
 1098         fs = container.getFileSize()
 1099         if logSys.getEffectiveLevel() <= logging.DEBUG:
 1100             logSys.debug("Seek to find time %s (%s), file size %s", date, 
 1101                 MyTime.time2str(date), fs)
 1102         minp = container.getPos()
 1103         maxp = fs
 1104         tryPos = minp
 1105         lastPos = -1
 1106         foundPos = 0
 1107         foundTime = None
 1108         cntr = 0
 1109         unixTime = None
 1110         movecntr = accuracy
 1111         while maxp > minp:
 1112             if tryPos is None:
 1113                 pos = int(minp + (maxp - minp) / 2)
 1114             else:
 1115                 pos, tryPos = tryPos, None
 1116             # because container seek will go to start of next line (minus CRLF):
 1117             pos = max(0, pos-2)
 1118             seekpos = pos = container.seek(pos)
 1119             cntr += 1
 1120             # within next 5 lines try to find any legal datetime:
 1121             lncntr = 5;
 1122             dateTimeMatch = None
 1123             nextp = None
 1124             while True:
 1125                 line = container.readline()
 1126                 if not line:
 1127                     break
 1128                 (timeMatch, template) = self.dateDetector.matchTime(line)
 1129                 if timeMatch:
 1130                     dateTimeMatch = self.dateDetector.getTime(
 1131                         line[timeMatch.start():timeMatch.end()],
 1132                         (timeMatch, template))
 1133                 else:
 1134                     nextp = container.tell()
 1135                     if nextp > maxp:
 1136                         pos = seekpos
 1137                         break
 1138                     pos = nextp
 1139                 if not dateTimeMatch and lncntr:
 1140                     lncntr -= 1
 1141                     continue
 1142                 break
 1143             # not found at this step - stop searching
 1144             if dateTimeMatch:
 1145                 unixTime = dateTimeMatch[0]
 1146                 if unixTime >= date:
 1147                     if foundTime is None or unixTime <= foundTime:
 1148                         foundPos = pos
 1149                         foundTime = unixTime
 1150                     if pos == maxp:
 1151                         pos = seekpos
 1152                     if pos < maxp:
 1153                         maxp = pos
 1154                 else:
 1155                     if foundTime is None or unixTime >= foundTime:
 1156                         foundPos = pos
 1157                         foundTime = unixTime
 1158                     if nextp is None:
 1159                         nextp = container.tell()
 1160                     pos = nextp
 1161                     if pos > minp:
 1162                         minp = pos
 1163             # if we can't move (position not changed)
 1164             if pos == lastPos:
 1165                 movecntr -= 1
 1166                 if movecntr <= 0:
 1167                     break
 1168                 # we have found large area without any date matched 
 1169                 # or end of search - try min position (because can be end of previous line):
 1170                 if minp != lastPos:
 1171                     lastPos = tryPos = minp
 1172                     continue
 1173                 break
 1174             lastPos = pos
 1175         # always use smallest pos, that could be found:
 1176         foundPos = container.seek(minp, False)
 1177         container.setPos(foundPos)
 1178         if logSys.getEffectiveLevel() <= logging.DEBUG:
 1179             logSys.debug("Position %s from %s, found time %s (%s) within %s seeks", lastPos, fs, foundTime, 
 1180                 (MyTime.time2str(foundTime) if foundTime is not None else ''), cntr)
 1181         
 1182     def status(self, flavor="basic"):
 1183         """Status of Filter plus files being monitored.
 1184         """
 1185         ret = super(FileFilter, self).status(flavor=flavor)
 1186         path = self.__logs.keys()
 1187         ret.append(("File list", path))
 1188         return ret
 1189 
 1190     def stop(self):
 1191         """Stop monitoring of log-file(s)
 1192         """
 1193         # stop files monitoring:
 1194         for path in self.__logs.keys():
 1195             self.delLogPath(path)
 1196         # stop thread:
 1197         super(Filter, self).stop()
 1198 
 1199 ##
 1200 # FileContainer class.
 1201 #
 1202 # This class manages a file handler and takes care of log rotation detection.
 1203 # In order to detect log rotation, the hash (MD5) of the first line of the file
 1204 # is computed and compared to the previous hash of this line.
 1205 
 1206 try:
 1207     import hashlib
 1208     try:
 1209         md5sum = hashlib.md5
 1210         # try to use it (several standards like FIPS forbid it):
 1211         md5sum(' ').hexdigest()
 1212     except: # pragma: no cover
 1213         md5sum = hashlib.sha1
 1214 except ImportError: # pragma: no cover
 1215     # hashlib was introduced in Python 2.5.  For compatibility with those
 1216     # elderly Pythons, import from md5
 1217     import md5
 1218     md5sum = md5.new
 1219 
 1220 
 1221 class FileContainer:
 1222 
 1223     def __init__(self, filename, encoding, tail = False):
 1224         self.__filename = filename
 1225         self.setEncoding(encoding)
 1226         self.__tail = tail
 1227         self.__handler = None
 1228         # Try to open the file. Raises an exception if an error occurred.
 1229         handler = open(filename, 'rb')
 1230         stats = os.fstat(handler.fileno())
 1231         self.__ino = stats.st_ino
 1232         try:
 1233             firstLine = handler.readline()
 1234             # Computes the MD5 of the first line.
 1235             self.__hash = md5sum(firstLine).hexdigest()
 1236             # Start at the beginning of file if tail mode is off.
 1237             if tail:
 1238                 handler.seek(0, 2)
 1239                 self.__pos = handler.tell()
 1240             else:
 1241                 self.__pos = 0
 1242         finally:
 1243             handler.close()
 1244 
 1245     def getFileName(self):
 1246         return self.__filename
 1247 
 1248     def getFileSize(self):
 1249         return os.path.getsize(self.__filename);
 1250 
 1251     def setEncoding(self, encoding):
 1252         codecs.lookup(encoding) # Raises LookupError if invalid
 1253         self.__encoding = encoding
 1254 
 1255     def getEncoding(self):
 1256         return self.__encoding
 1257 
 1258     def getHash(self):
 1259         return self.__hash
 1260 
 1261     def getPos(self):
 1262         return self.__pos
 1263 
 1264     def setPos(self, value):
 1265         self.__pos = value
 1266 
 1267     def open(self):
 1268         self.__handler = open(self.__filename, 'rb')
 1269         # Set the file descriptor to be FD_CLOEXEC
 1270         fd = self.__handler.fileno()
 1271         flags = fcntl.fcntl(fd, fcntl.F_GETFD)
 1272         fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
 1273         # Stat the file before even attempting to read it
 1274         stats = os.fstat(self.__handler.fileno())
 1275         if not stats.st_size:
 1276             # yoh: so it is still an empty file -- nothing should be
 1277             #      read from it yet
 1278             # print "D: no content -- return"
 1279             return False
 1280         firstLine = self.__handler.readline()
 1281         # Computes the MD5 of the first line.
 1282         myHash = md5sum(firstLine).hexdigest()
 1283         ## print "D: fn=%s hashes=%s/%s inos=%s/%s pos=%s rotate=%s" % (
 1284         ##  self.__filename, self.__hash, myHash, stats.st_ino, self.__ino, self.__pos,
 1285         ##  self.__hash != myHash or self.__ino != stats.st_ino)
 1286         ## sys.stdout.flush()
 1287         # Compare hash and inode
 1288         if self.__hash != myHash or self.__ino != stats.st_ino:
 1289             logSys.log(logging.MSG, "Log rotation detected for %s", self.__filename)
 1290             self.__hash = myHash
 1291             self.__ino = stats.st_ino
 1292             self.__pos = 0
 1293         # Sets the file pointer to the last position.
 1294         self.__handler.seek(self.__pos)
 1295         return True
 1296 
 1297     def seek(self, offs, endLine=True):
 1298         h = self.__handler
 1299         # seek to given position
 1300         h.seek(offs, 0)
 1301         # goto end of next line
 1302         if offs and endLine:
 1303             h.readline()
 1304         # get current real position
 1305         return h.tell()
 1306 
 1307     def tell(self):
 1308         # get current real position
 1309         return self.__handler.tell()
 1310 
 1311     @staticmethod
 1312     def decode_line(filename, enc, line):
 1313         try:
 1314             return line.decode(enc, 'strict')
 1315         except (UnicodeDecodeError, UnicodeEncodeError) as e:
 1316             global _decode_line_warn
 1317             lev = logging.DEBUG
 1318             if _decode_line_warn.get(filename, 0) <= MyTime.time():
 1319                 lev = logging.WARNING
 1320                 _decode_line_warn[filename] = MyTime.time() + 24*60*60
 1321             logSys.log(lev,
 1322                 "Error decoding line from '%s' with '%s'."
 1323                 " Consider setting logencoding=utf-8 (or another appropriate"
 1324                 " encoding) for this jail. Continuing"
 1325                 " to process line ignoring invalid characters: %r",
 1326                 filename, enc, line)
 1327             # decode with replacing error chars:
 1328             line = line.decode(enc, 'replace')
 1329         return line
 1330 
 1331     def readline(self):
 1332         if self.__handler is None:
 1333             return ""
 1334         return FileContainer.decode_line(
 1335             self.getFileName(), self.getEncoding(), self.__handler.readline())
 1336 
 1337     def close(self):
 1338         if not self.__handler is None:
 1339             # Saves the last position.
 1340             self.__pos = self.__handler.tell()
 1341             # Closes the file.
 1342             self.__handler.close()
 1343             self.__handler = None
 1344         ## print "D: Closed %s with pos %d" % (handler, self.__pos)
 1345         ## sys.stdout.flush()
 1346 
 1347 _decode_line_warn = {}
 1348 
 1349 
 1350 ##
 1351 # JournalFilter class.
 1352 #
 1353 # Base interface class for systemd journal filters
 1354 
class JournalFilter(Filter): # pragma: systemd no cover
    """Base interface class for systemd journal filters.

    Concrete journal backends override the journal-match methods below;
    the base implementations are no-ops so this class only defines the
    common interface.
    """

    def clearAllParams(self):
        """Reset all filter parameters, including any journal matches."""
        super(JournalFilter, self).clearAllParams()
        self.delJournalMatch()

    def addJournalMatch(self, match): # pragma: no cover - Base class, not used
        """Add a journal match filter (no-op here; overridden by backends)."""
        pass

    def delJournalMatch(self, match=None): # pragma: no cover - Base class, not used
        """Delete a journal match, or all matches if None (no-op here)."""
        pass

    def getJournalMatch(self, match): # pragma: no cover - Base class, not used
        """Return the list of journal matches (always empty in the base)."""
        return []
 1369