"Fossies" - the Fresh Open Source Software Archive

Member "Tardis-1.2.1/src/Tardis/Util.py" (9 Jun 2021, 33232 Bytes) of package /linux/privat/Tardis-1.2.1.tar.gz:



    1 # vim: set et sw=4 sts=4 fileencoding=utf-8:
    2 #
    3 # Tardis: A Backup System
    4 # Copyright 2013-2020, Eric Koldinger, All Rights Reserved.
    5 # kolding@washington.edu
    6 #
    7 # Redistribution and use in source and binary forms, with or without
    8 # modification, are permitted provided that the following conditions are met:
    9 #
   10 #     * Redistributions of source code must retain the above copyright
   11 #       notice, this list of conditions and the following disclaimer.
   12 #     * Redistributions in binary form must reproduce the above copyright
   13 #       notice, this list of conditions and the following disclaimer in the
   14 #       documentation and/or other materials provided with the distribution.
   15 #     * Neither the name of the copyright holder nor the
   16 #       names of its contributors may be used to endorse or promote products
   17 #       derived from this software without specific prior written permission.
   18 #
   19 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
   20 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   21 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   22 # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
   23 # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
   24 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
   25 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
   26 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
   27 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
   28 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
   29 # POSSIBILITY OF SUCH DAMAGE.
   30 
   31 import os
   32 import logging
   33 import argparse
   34 import configparser
   35 import sys
   36 import subprocess
   37 import hashlib
   38 import shlex
   39 import getpass
   40 import stat
   41 import fnmatch
   42 import json
   43 import types
   44 import base64
   45 import functools
   46 import pwd
   47 import grp
   48 import time
   49 import struct
   50 import io
   51 import signal
   52 
   53 import urllib.request, urllib.parse, urllib.error
   54 
   55 import zlib
   56 import bz2
   57 import lzma
   58 import srp
   59 import passwordmeter
   60 import colorlog
   61 import parsedatetime
   62 
   63 import Tardis.Connection as Connection
   64 import Tardis.CompressedBuffer as CompressedBuffer
   65 import Tardis.Defaults as Defaults
   66 
   67 import Tardis.TardisDB as TardisDB
   68 import Tardis.TardisCrypto as TardisCrypto
   69 import Tardis.CacheDir as CacheDir
   70 import Tardis.RemoteDB as RemoteDB
   71 
   72 try:
   73     import genzshcomp
   74 except ImportError:
   75     genzshcomp = None
   76 
   77 
   78 logger = logging.getLogger('UTIL')
   79 
   80 def fmtSize(num, base=1024, formats = ['bytes','KB','MB','GB', 'TB', 'PB']):
   81     fmt = "%d %s"
   82     if num is None:
   83         return 'None'
   84     num = float(num)
   85     for x in formats:
   86         #if num < base and num > -base:
   87         if -base < num < base:
   88             return (fmt % (num, x)).strip()
   89         num /= float(base)
   90         fmt = "%3.1f %s"
   91     return (fmt % (num, 'EB')).strip()
   92 
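       # Example (illustrative):
       #   >>> fmtSize(500)
       #   '500 bytes'
       #   >>> fmtSize(12345)
       #   '12.1 KB'
       #   >>> fmtSize(123456789)
       #   '117.7 MB'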
   93 def getIntOrNone(config, section, name):
   94     try:
   95         x = config.get(section, name)
   96         return int(x, 0)
   97     except Exception:
   98         return None
   99 
  100 # Get group and user names.  Very unixy
  101 _groups = {}
  102 _users = {}
  103 
  104 def getGroupName(gid):
  105     if gid in _groups:
  106         return _groups[gid]
  107     else:
  108         group = grp.getgrgid(gid)
  109         if group:
  110             name = group.gr_name
  111             _groups[gid] = name
  112             return name
  113         else:
  114             return None
  115 
  116 def getUserId(uid):
  117     if uid in _users:
  118         return _users[uid]
  119     else:
  120         user = pwd.getpwuid(uid)
  121         if user:
  122             name = user.pw_name
  123             _users[uid] = name
  124             return name
  125         else:
  126             return None
  127 
   128 # Format time.  If the time is less than a year old, print it as "Jan 12 02:17"; if earlier,
   129 # print it as "Jan 12, 2014".  Same as ls.
  130 _now = time.time()
  131 _yearago = _now - (365 * 24 * 3600)
  132 def formatTime(then):
  133     if then > _yearago:
  134         fmt = '%b %d %H:%M'
  135     else:
  136         fmt = '%b %d, %Y'
  137     return time.strftime(fmt, time.localtime(then))
  138 
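       # Example (illustrative; actual output depends on the current date):
       #   formatTime(time.time() - 3600)                 ->  'Jun 09 13:15'   (less than a year old)
       #   formatTime(time.time() - 2 * 365 * 24 * 3600)  ->  'Jun 10, 2019'   (older than a year)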
  139 # Strip comments from input lines.
  140 def stripComments(line):
  141     return line.partition('#')[0].strip()
  142 
  143 # Convert a string to an integer
  144 def parseInt(x):
  145     if x.startswith('0x'):
  146         return int(x[2:], 16)
  147     elif x.startswith('0o'):
  148         return int(x[2:], 8)
  149     elif x.startswith('0'):
  150         return int(x[1:], 8)
  151     else:
  152         return int(x)
  153 
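       # Example (illustrative): hex, octal (0o or leading-zero), and decimal forms are accepted.
       #   >>> parseInt('0x1f'), parseInt('0o17'), parseInt('017'), parseInt('42')
       #   (31, 15, 15, 42)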
  154 # Make a path look short.
  155 def shortPath(path, width=80):
  156     """
  157     Compress a path to only show the last elements if it's wider than specified.
  158     Replaces early elements with ".../"
  159     """
  160 
  161     # If we're already short enough, just return what we have
  162     if not path or len(path) < width:
  163         return path
  164 
  165     # Compensate for a coming .../ plus slosh
  166     width -= 5
  167 
  168     # split into path prefix, + the current file
  169     path, retPath = os.path.split(path)
  170 
  171     # Check to see if we're already wider than width.....
  172     # If so, put a "..." in the middle of the filename
  173     # retPath is the current file at this point
  174     if len(retPath) > width:
  175         namecomps = retPath.rsplit('.', 1)
  176         #print("Name Comps:: ",  namecomps)
  177         if len(namecomps) == 2:
  178             main, suffix = namecomps
  179         else:
  180             main = namecomps[0]
  181             suffix = ''
  182         #print("Split:: ", main, suffix)
  183         length = min(len(retPath), width) - 5
  184         retPath   = main[0:(length // 2) - 1] + "..." + main[-(length // 2) + 1:]
  185         if suffix:
  186             retPath = '.'.join([retPath, suffix])
  187 
  188     #print("First chunk:: ", len(retPath), retPath)
  189 
  190     # Build it up backwards from the end
  191     while len(retPath) < width:
  192         path, tail = os.path.split(path)
  193         #print(retPath, len(retPath), path, tail)
  194         if not path or not tail:
  195             break
  196         elif len(tail) + len(os.sep) + len(retPath) > width:
  197             break
  198         else:
  199             retPath = os.path.join(tail, retPath)
  200 
  201     return "..." + os.sep + retPath
  202 
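       # Example (illustrative, assuming a POSIX path separator):
       #   >>> shortPath('/a/very/long/path/to/some/file.txt', width=20)
       #   '.../some/file.txt'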
  203 def accumulateStat(stats, name, amount=1):
  204     if stats:
  205         stats[name] = stats.setdefault(name, 0) + amount
  206 
  207 def setupLogging(verbosity=1, levels=None, format=None, stream=sys.stdout):
  208     if levels is None:
  209         levels = [logging.WARNING, logging.INFO, logging.DEBUG]
  210 
  211     loglevel = levels[verbosity] if verbosity < len(levels) else logging.DEBUG
  212 
  213     if format is None:
  214         if loglevel <= logging.DEBUG:
  215             format = "%(log_color)s%(levelname)s%(reset)s : %(filename)s:%(lineno)d: %(message)s"
  216         else:
  217             format = "%(log_color)s%(levelname)s%(reset)s : %(message)s"
  218 
  219     colors = colorlog.default_log_colors.copy()
  220     colors.update({ 'DEBUG': 'green' })
  221 
  222     formatter = colorlog.TTYColoredFormatter(format, log_colors=colors, stream=stream)
  223     handler = logging.StreamHandler()
  224     handler.setFormatter(formatter)
  225     logging.root.addHandler(handler)
  226 
  227     logging.raiseExceptions = False
  228 
  229     logger = logging.getLogger("")
  230     logger.setLevel(loglevel)
  231     return logger
  232 
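       # Typical usage (illustrative): call once at program start, mapping -v flags to the verbosity index.
       #   logger = setupLogging(verbosity=2)    # DEBUG level; output is colorized when attached to a TTY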
  233 # Functions for reducing a path.
  234 
  235 def findDirInRoot(tardis, bset, path, crypt=None):
  236     #logger = logging.getLogger('UTIL')
  237     """
  238     Find a directory which exists in the root directory
  239     Return the number of components which must be removed to have a directory in
  240     the root of the tree.
  241     """
  242     comps = path.split(os.sep)
  243     comps.pop(0)
  244     for i in range(0, len(comps)):
  245         name = comps[i]
  246         #logger.debug("Looking for root directory %s (%d)", name, i)
  247         if crypt:
  248             name = crypt.encryptFilename(name)
  249         info = tardis.getFileInfoByName(name, (0, 0), bset)
  250         if info and info['dir'] == 1:
  251             return i
  252     return None
  253 
  254 def reducePath(tardis, bset, path, reduceBy, crypt=None):
  255     #logger = logging.getLogger('UTIL')
  256     """
  257     Reduce a path by a specified number of directory levels.
  258     If the number is sys.maxint, perform a "smart" reduction, by looking for a directory
  259     element which occurs in the root directory.
  260     """
  261     #logger.debug("Computing path for %s in %d (%d)", path, bset, reduce)
  262     if reduceBy == sys.maxsize:
  263         reduceBy = findDirInRoot(tardis, bset, path, crypt)
  264     if reduceBy:
  265         #logger.debug("Reducing path by %d entries: %s", reduceBy, path)
  266         comps = path.split(os.sep)
  267         if reduceBy > len(comps):
  268             #logger.error("Path reduction value (%d) greater than path length (%d) for %s.  Skipping.", reduceBy, len(comps), path)
  269             return None
  270         tmp = os.path.join(os.sep, *comps[reduceBy + 1:])
  271         #logger.info("reduced path %s to %s", path, tmp)
  272         path = tmp
  273     return path
  274 
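       # Example (illustrative): with an explicit reduction count the database is not consulted.
       #   >>> reducePath(None, None, '/home/user/data/file', 2)
       #   '/data/file'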
  275 def isMagic(path):
  276     if ('*' in path) or ('?' in path) or ('[' in path):
  277         return True
  278     return False
  279 
  280 def matchPath(pattern, path):
  281     if pattern == path:
  282         return True
  283     pats = pattern.split(os.sep)
  284     dirs = path.split(os.sep)
  285     inWild = False
  286     while len(pats) != 0 and len(dirs) != 0:
  287         if not inWild:
  288             p = pats.pop(0)
  289             d = dirs.pop(0)
  290             if p == '**':
  291                 inWild = True
  292             else:
  293                 if not fnmatch.fnmatch(d, p):
  294                     return False
  295         else:
  296             d = dirs.pop(0)
  297             p = pats[0]
  298             if p != '**':
  299                 if fnmatch.fnmatch(d, p):
  300                     inWild = False
  301                     pats.pop(0)
  302             else:
  303                 pats.pop(0)
  304 
  305     if len(pats) or len(dirs):
  306         return False
  307     else:
  308         return True
  309 
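       # Example (illustrative): '**' matches any number of intermediate directories.
       #   >>> matchPath('/home/*/docs/**/*.txt', '/home/alice/docs/a/b/notes.txt')
       #   True
       #   >>> matchPath('/home/*/docs', '/home/bob/music')
       #   False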
  310 def fullPath(name):
  311     return os.path.realpath(os.path.expanduser(os.path.expandvars(name)))
  312 
  313 """
  314 Filemode printer.  Translated from Perl's File::Strmode function (from cpan.org)
  315 Not necessary in Python 3, but stat.filemode() doesn't exist in Python 2
  316 """
  317 _fmtypes = { stat.S_IFDIR: 'd', stat.S_IFCHR: 'c', stat.S_IFBLK: 'b', stat.S_IFREG: '-', stat.S_IFLNK: 'l', stat.S_IFSOCK: 's', stat.S_IFIFO: 'p' }
  318 
  319 def filemode(mode):
   320     str = _fmtypes.get(stat.S_IFMT(mode), '?')      # .get() avoids adding unknown types to the shared table
  321     str += 'r' if mode & stat.S_IRUSR else '-'
  322     str += 'w' if mode & stat.S_IWUSR else '-'
  323     if mode & stat.S_IXUSR:
  324         str += 's' if mode & stat.S_ISUID else 'x'
  325     else:
   326         str += 'S' if mode & stat.S_ISUID else '-'      # setuid without execute shows 'S'; otherwise '-'
  327 
  328     str += 'r' if mode & stat.S_IRGRP else '-'
  329     str += 'w' if mode & stat.S_IWGRP else '-'
  330     if mode & stat.S_IXGRP:
  331         str += 's' if mode & stat.S_ISGID else 'x'
  332     else:
   333         str += 'S' if mode & stat.S_ISGID else '-'      # setgid without execute shows 'S'; otherwise '-'
  334 
  335     str += 'r' if mode & stat.S_IROTH else '-'
  336     str += 'w' if mode & stat.S_IWOTH else '-'
  337     if mode & stat.S_IXOTH:
  338         str += 't' if mode & stat.S_ISVTX else 'x'
  339     else:
   340         str += 'T' if mode & stat.S_ISVTX else '-'      # sticky without execute shows 'T'; otherwise '-'
  341     return str
  342 
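       # Example (illustrative; matches stat.filemode() in Python 3):
       #   >>> filemode(0o100644)
       #   '-rw-r--r--'
       #   >>> filemode(0o040755)
       #   'drwxr-xr-x'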
  343 def getTerminalSize():
  344     rows, columns = os.popen('stty size', 'r').read().split()
  345     return (int(rows), int(columns))
  346 
  347 """
  348 Retrieve a password.
   349 Takes either a URL, a program name, or a plain password string.
   350 Only one may be specified.
  351 Retrieves from the URL, program, or file if so specified.
  352 If a string is passed in, returns it.
  353 If the string is True or empty (''), it will use the getpass function to prompt on the
  354 terminal.
  355 """
  356 def _readWithTimeout(prompt, timeout):
   357     def _interruptPassword(signum, frame):
   358         print("\nTimeout")
   359         raise Exception("Password read timed out")
   360 
   361     previous = signal.signal(signal.SIGALRM, _interruptPassword)
  362     try:
  363         if timeout:
  364             signal.alarm(timeout)
  365         password = getpass.getpass(prompt=prompt)
  366     finally:
  367         signal.alarm(0)
  368         signal.signal(signal.SIGALRM, previous)
  369     return password.rstrip()
  370 
  371 def getPassword(password, pwurl, pwprog, prompt='Password: ', allowNone=True, confirm=False, strength=False, timeout=Defaults.getDefault('TARDIS_PWTIMEOUT')):
  372     methods = 0
  373     if password: methods += 1
  374     if pwurl:    methods += 1
  375     if pwprog:   methods += 1
  376 
  377     if methods > 1:
  378         raise Exception("Cannot specify more than one password retrieval mechanism")
  379 
  380     if methods == 0 and not allowNone:
  381         # Nothing specified, and it wants a value.  Set password to True to fetch
  382         password = True
  383 
  384     if password == True or password == '':
  385         password = _readWithTimeout(prompt, int(timeout))
  386         password = password.rstrip()       # Delete trailing characters
  387         if confirm:
  388             pw2 = _readWithTimeout("Confirm password:", int(timeout))
  389             if password != pw2:
  390                 raise Exception("Passwords don't match")
  391 
  392     if pwurl:
  393         loc = urllib.parse.urlunparse(urllib.parse.urlparse(pwurl, scheme='file'))
  394         pwf = urllib.request.urlopen(loc)
   395         password = pwf.readline().decode().rstrip()     # urlopen returns bytes; decode to match the prompt path
  396         pwf.close()
  397 
  398     if pwprog:
  399         a = shlex.split(pwprog)
  400         output = subprocess.check_output(a)
   401         password = output.decode().split('\n')[0].rstrip()      # check_output returns bytes in Python 3
  402 
  403     if not allowNone and not password:
  404         raise Exception("Password required")
  405 
  406     if strength and password:
  407         if not checkPasswordStrength(password):
  408             raise Exception("Password not strong enough")
  409 
  410     return password
  411 
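       # Example usage (illustrative; the URL below is hypothetical):
       #   getPassword('secret', None, None)                   # returns 'secret' unchanged
       #   getPassword(None, 'file:///etc/tardis/pw', None)    # first line of the referenced file
       #   getPassword(True, None, None, prompt='Password: ')  # prompts on the terminal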
  412 
  413 def checkPasswordStrength(password):
  414     pwStrMin     = float(Defaults.getDefault('TARDIS_PW_STRENGTH'))
  415     strength, improvements = passwordmeter.test(password)
  416     if strength < pwStrMin:
  417         logger.error("Password too weak: %f (%f required)", strength, pwStrMin)
  418         for i in improvements:
  419             logger.error("    %s", improvements[i])
  420         return False
  421     else:
  422         return True
  423 
  424 # Get the database, cachedir, and crypto object.
  425 
  426 def setupDataConnection(dataLoc, client, password, keyFile, dbName, dbLoc=None, allow_upgrade=False, retpassword=False):
   427     """ Set up a data connection to a client.  Determines the correct way to connect, either via the direct filesystem
   428     or via TardisRemote (http).
   429     Returns a 3-tuple: the TardisDB object, the CacheDir object, and the appropriate crypto object (plus the password, if retpassword is set)
  430     """
  431     logger.debug("Connection requested for %s under %s", client, dataLoc)
  432     crypt = None
  433 
  434     loc = urllib.parse.urlparse(dataLoc)
  435     if (loc.scheme == 'http') or (loc.scheme == 'https'):
  436         logger.debug("Creating remote connection to %s", dataLoc)
  437         # If no port specified, insert the port
  438         if loc.port is None:
  439             netloc = loc.netloc + ":" + Defaults.getDefault('TARDIS_REMOTE_PORT')
  440             dbLoc = urllib.parse.urlunparse((loc.scheme, netloc, loc.path, loc.params, loc.query, loc.fragment))
  441         else:
  442             dbLoc = dataLoc
  443         # get the RemoteURL object
  444         logger.debug("==> %s %s", dbLoc, client)
  445         tardis = RemoteDB.RemoteDB(dbLoc, client)
  446         cache = tardis
  447     else:
  448         logger.debug("Creating direct connection to %s", dataLoc)
  449         cacheDir = os.path.join(loc.path, client)
  450         cache = CacheDir.CacheDir(cacheDir, create=False)
  451         if not dbLoc:
  452             dbDir = cacheDir
  453         else:
  454             dbDir = os.path.join(dbLoc, client)
  455         dbPath = os.path.join(dbDir, dbName)
  456         tardis = TardisDB.TardisDB(dbPath, allow_upgrade=allow_upgrade)
  457 
  458     needsAuth = tardis.needsAuthentication()
  459     if needsAuth and password is None:
  460         password = getPassword(True, None, None, "Password for %s: " % client, allowNone=False)
  461 
  462     if needsAuth:
  463         authenticate(tardis, client, password)
  464     elif password:
  465         raise TardisDB.AuthenticationFailed()
  466 
  467     # Password specified, so create the crypto unit
  468     #cryptoScheme = tardis.getConfigValue('CryptoScheme', '1')
  469     cryptoScheme = tardis.getCryptoScheme()
  470 
  471     crypt = TardisCrypto.getCrypto(cryptoScheme, password, client)
  472     if keyFile:
  473         (f, c) = loadKeys(keyFile, tardis.getConfigValue('ClientID'))
  474     else:
  475         (f, c) = tardis.getKeys()
  476     crypt.setKeys(f, c)
  477 
  478     if retpassword:
  479         return (tardis, cache, crypt, password)
  480     else:
  481         return (tardis, cache, crypt)
  482 
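       # Example usage (illustrative; data locations and client name are hypothetical):
       #   (tardis, cache, crypt) = setupDataConnection('/srv/tardis', 'laptop', password, None, 'tardis.db')
       #   (tardis, cache, crypt) = setupDataConnection('http://backup.example.com', 'laptop', password, None, 'tardis.db')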
  483 # Perform SRP authentication locally against the DB
  484 def authenticate(db, client, password):
  485     usr      = srp.User(client, password)
  486     uname, A = usr.start_authentication()
  487 
  488     s, B = db.authenticate1(uname, A)
  489 
  490     M = usr.process_challenge(s, B)
  491 
  492     if M is None:
  493         raise TardisDB.AuthenticationFailed()
  494 
  495     HAMK = db.authenticate2(M)
  496 
  497     usr.verify_session(HAMK)
  498 
  499     if not usr.authenticated():
  500         raise TardisDB.AuthenticationFailed()
  501 
  502 
  503 def getBackupSet(db, bset):
  504     bsetInfo = None
  505     # First, try as an integer
  506     try:
  507         bset = int(bset)
  508         bsetInfo = db.getBackupSetInfoById(bset)
  509     except ValueError:
  510         # Else, let's look it up based on name
   511         if bset == Defaults.getDefault('TARDIS_RECENT_SET') or bset == '' or bset is None:
  512             bsetInfo = db.lastBackupSet()
  513         else:
  514             bsetInfo = db.getBackupSetInfo(bset)
  515         if not bsetInfo:
  516             # still nothing, hm, let's try a date format
  517             cal = parsedatetime.Calendar()
  518             (then, success) = cal.parse(bset)
  519             if success:
  520                 timestamp = time.mktime(then)
  521                 logger.debug("Using time: %s", time.asctime(then))
  522                 bsetInfo = db.getBackupSetInfoForTime(timestamp)
  523                 if bsetInfo and bsetInfo['backupset'] != 1:
  524                     bset = bsetInfo['backupset']
  525                     logger.debug("Using backupset: %s %d for %s", bsetInfo['name'], bsetInfo['backupset'], bset)
  526                 else:
  527                     # Weed out the ".Initial" set
  528                     logger.critical("No backupset at date: %s (%s)", bset, time.asctime(then))
  529                     bsetInfo = None
  530             else:
  531                 logger.critical("Could not parse string: %s", bset)
  532     return bsetInfo
  533 
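       # Example usage (illustrative; the set name is hypothetical): bset may be a backup set id,
       # a backup set name, or a date/time string understood by parsedatetime.
       #   getBackupSet(db, '12')                  # by id
       #   getBackupSet(db, 'Weekly-2021-05-30')   # by name
       #   getBackupSet(db, 'yesterday')           # by date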
  534 # Data manipulation functions
  535 
  536 _suffixes = [".basis", ".sig", ".meta", ""]
  537 def _removeOrphans(db, cache):
  538     #logger = logging.getLogger('UTIL')
  539 
  540     size = 0
  541     count = 0
  542     # Get a list of orphan'd files
  543     orphans = db.listOrphanChecksums(isFile=True)
  544     for cksum in orphans:
  545         logger.debug("Removing %s", cksum)
  546         # And remove them each....
  547         try:
  548             s = cache.size(cksum)
  549             if s:
  550                 size += s
  551                 count += 1
  552 
  553             sig = cksum + ".sig"
  554             size += cache.size(sig)
  555 
  556             cache.removeSuffixes(cksum, _suffixes)
  557 
  558             db.deleteChecksum(cksum)
  559         except OSError:
  560             logger.warning("No checksum file for checksum %s", cksum)
  561     return count, size
  562 
  563 def removeOrphans(db, cache):
  564     count = 0
  565     size = 0
  566     rounds = 0
  567     # Repeatedly prune the file trees until there are no more checksums
   568     # we have to do this, as there can be multiple levels of basis files, each dependent on the one above (below?)
   569     # Theoretically we should be able to do this in one go, but SQLite's implementation of recursive queries doesn't
  570     # seem to work quite right.
  571     while True:
  572         (lCount, lSize) = _removeOrphans(db, cache)
  573         if lCount == 0:
  574             break
  575         rounds += 1
  576         count  += lCount
  577         size   += lSize
  578 
  579     db.deleteOrphanChecksums(False)
  580     return count, size, rounds
  581 
  582 # Data transmission functions
  583 
  584 def _chunks(stream, chunksize):
  585     last = b''
  586     for chunk in iter(functools.partial(stream.read, chunksize), b''):
  587         if last:
  588             yield (last, False)
  589         last = chunk
  590     yield (last, True)
  591 
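       # Example (illustrative): the final chunk is flagged so the caller can finalize encryption.
       #   >>> list(_chunks(io.BytesIO(b'abcdef'), 4))
       #   [(b'abcd', False), (b'ef', True)]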
  592 _transmissionTime = 0
  593 
  594 def sendDataPlain(sender, data, chunksize=(16 * 1024), compress=None, stats=None):
  595     """
   596     Send data with no encryption and no hash or signature calculation
  597     """
  598     encrypt = TardisCrypto.NullEncryptor()
  599     sendData(sender, data, encrypt, chunksize=chunksize, compress=compress, stats=stats)
  600 
  601 def sendData(sender, data, encrypt, chunksize=(16 * 1024), hasher=None, compress=None, stats=None, signature=False, progress=None, progressPeriod=8*1024*1024):
  602     """
  603     Send a block of data, optionally encrypt and/or compress it before sending
  604     Compress should be either None, for no compression, or one of the known compression types (zlib, bzip, lzma)
  605     """
  606     #logger = logging.getLogger('Data')
  607     if isinstance(sender, Connection.Connection):
  608         sender = sender.sender
  609     size = 0
  610     status = "OK"
  611     ck = None
  612     sig = None
  613 
  614     start = time.time()
  615     if progress:
  616         # Set the chunksize
  617         if progressPeriod % chunksize != 0:
  618             progressPeriod -= progressPeriod % chunksize
  619 
  620     if compress:
  621         stream = CompressedBuffer.CompressedBufferedReader(data, hasher=hasher, signature=signature, compressor=compress)
  622     else:
  623         stream = CompressedBuffer.BufferedReader(data, hasher=hasher, signature=signature)
  624 
  625     try:
  626         if encrypt.iv:
  627             sender.sendMessage(encrypt.iv, raw=True)
  628             accumulateStat(stats, 'dataSent', len(encrypt.iv))
  629         for chunk, eof in _chunks(stream, chunksize):
  630             #print len(chunk), eof
  631             if chunk:
  632                 data = encrypt.encrypt(chunk)
  633             else:
  634                 data = b''
  635             if eof:
  636                 data += encrypt.finish()
  637             #chunkMessage = { "chunk" : num, "data": data }
  638             if data:
  639                 sender.sendMessage(data, raw=True)
  640                 accumulateStat(stats, 'dataSent', len(data))
  641                 size += len(data)
  642                 if progress:
  643                     if (size % progressPeriod) == 0:
  644                         progress()
  645 
  646             #num += 1
  647         digest = encrypt.digest()
  648         if digest:
  649             sender.sendMessage(digest, raw=True)
  650             accumulateStat(stats, 'dataSent', len(digest))
  651 
  652     except Exception as e:
  653         status = "Fail"
  654         #logger = logging.getLogger('Data')
  655         #logger.exception(e)
  656         raise e
  657     finally:
  658         sender.sendMessage(b'', raw=True)
  659         compressed = compress if stream.isCompressed() else "None"
  660         size = stream.size()
  661 
  662         accumulateStat(stats, 'dataBacked', size)
  663 
  664         message = { "chunk": "done", "size": size, "status": status, "compressed": compressed }
  665         if hasher:
  666             ck = stream.checksum()
  667             message["checksum"] = ck
  668         if signature:
  669             sig = stream.signatureFile()
  670         #print message
  671         sender.sendMessage(message)
  672         stream = None
  673         end = time.time()
  674         global _transmissionTime
  675         _transmissionTime += end - start
  676     return size, ck, sig
  677 
  678 def receiveData(receiver, output):
  679     """ Receive a block of data from the sender, and store it in the specified file.
  680     Collect some info sent, and return it.
  681     """
  682     # logger = logging.getLogger('Data')
  683     if isinstance(receiver, Connection.Connection):
  684         receiver = receiver.sender
  685     bytesReceived = 0
  686     checksum = None
  687     compressed = False
  688     while True:
  689         chunk = receiver.recvMessage(raw=True)
  690         #print chunk
  691         # logger.debug("Chunk: %s", str(chunk))
  692         if len(chunk) == 0:
  693             break
  694         data = receiver.decode(chunk)
  695         if output:
  696             output.write(data)
  697             output.flush()
  698         bytesReceived += len(data)
  699 
  700     chunk = receiver.recvMessage()
  701     status = chunk['status']
  702     size   = chunk['size']
  703     if 'checksum' in chunk:
  704         checksum = chunk['checksum']
  705     if 'compressed' in chunk:
  706         compressed = chunk['compressed']
  707     return (bytesReceived, status, size, checksum, compressed)
  708 
  709 
  710 # Function to determine whether we can execute a function
  711 _uidForPerm = os.getuid()
  712 _groupForPerm = os.getgroups()
  713 
  714 def checkPermission(pUid, pGid, mode, uid=_uidForPerm, groups=_groupForPerm):
  715     # Check for super user.   Hack, this isn't really right, but still.
  716     # Assumes *nix permission system.   May not work on Windows or Mac.
  717     if uid == 0:
  718         return True
  719     if stat.S_ISDIR(mode):
  720         if (uid == pUid) and (stat.S_IRUSR & mode) and (stat.S_IXUSR & mode):
  721             return True
  722         elif (pGid in groups) and (stat.S_IRGRP & mode) and (stat.S_IXGRP & mode):
  723             return True
  724         elif (stat.S_IROTH & mode) and (stat.S_IXOTH & mode):
  725             return True
  726     else:
  727         if (uid == pUid) and (stat.S_IRUSR & mode):
  728             return True
  729         elif (pGid in groups) and (stat.S_IRGRP & mode):
  730             return True
  731         elif stat.S_IROTH & mode:
  732             return True
  733     return False
  734 
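       # Example (illustrative):
       #   >>> checkPermission(1000, 1000, 0o100644, uid=1000, groups=[1000])
       #   True     # owner with read permission
       #   >>> checkPermission(0, 0, 0o100600, uid=1000, groups=[1000])
       #   False    # not the owner, not in the group, and not world-readable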
  735 """
  736 Load a key file.
   737 Key files are config databases, where each section is keyed by the clientID from the server.  Each section needs to contain two entries, a ContentKey
   738 and a FilenameKey, both of which are base64 encoded strings containing the encrypted keys.
  739 """
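       # A key file is plain INI text, for example (illustrative; the section name and values are hypothetical):
       #   [2f6c1a7e-client-id]
       #   contentkey = bG9uZy1iYXNlNjQtZW5jb2RlZC1rZXk=
       #   filenamekey = YW5vdGhlci1iYXNlNjQtZW5jb2RlZC1rZXk=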
  740 def _updateLen(value, length):
  741     if not value:
  742         return None
  743 
  744     res = base64.b64decode(value)
  745     if len(res) != length:
  746         if len(res) > length:
  747             res = base64.b64encode(res[0:length])
  748         else:
   749             res = base64.b64encode(res + b'\0' * (length - len(res)))       # res is bytes; pad with null bytes
  750     else:
  751         res = value
  752     return res
  753 
  754 def loadKeys(name, client):
  755     config = configparser.ConfigParser({'ContentKey': None, 'FilenameKey': None}, allow_no_value=True)
  756     client = str(client)
  757     config.add_section(client)
  758     config.read(fullPath(name))
  759     try:
  760         contentKey =  _updateLen(config.get(client, 'ContentKey'), 32)
  761         nameKey    =  _updateLen(config.get(client, 'FilenameKey'), 32)
  762         return (nameKey, contentKey)
  763     except configparser.NoOptionError as e:
  764         raise Exception("No keys available for client " + client)
  765 
  766 def saveKeys(name, client, nameKey, contentKey, srpSalt=None, srpVKey=None):
  767     def _addOrDelete(config, client, key, value):
  768         if value:
  769             config.set(client, key, value)
  770         else:
  771             config.remove_option(client, key)
  772 
  773     config = configparser.ConfigParser()
  774     config.add_section(client)
  775     config.read(name)
  776 
  777     _addOrDelete(config, client, 'ContentKey', contentKey)
  778     _addOrDelete(config, client, 'FilenameKey', nameKey)
  779     _addOrDelete(config, client, 'SRPSalt', srpSalt)
   780     _addOrDelete(config, client, 'SRPVkey', srpVKey)
  781 
  782     with open(name, 'w') as configfile:
  783         config.write(configfile)
  784 
  785 def mkKeyString(client, nameKey, contentKey):
  786     config = configparser.ConfigParser()
  787     config.add_section(client)
  788     config.set(client, 'ContentKey', contentKey)
  789     config.set(client, 'FilenameKey', nameKey)
  790     x = io.StringIO()
  791     config.write(x)
  792     return x.getvalue()
  793 
  794 ###
  795 ### Create a metadata file for file.
  796 ###
  797 def recordMetaData(cache, checksum, size, compressed, encrypted, disksize, basis=None, logger=None):
  798     f = None
  799     metaName = checksum + '.meta'
  800     metaData = {'checksum': checksum, 'compressed': bool(compressed), 'encrypted': bool(encrypted), 'size': size, 'disksize': disksize }
  801     if basis:
  802         metaData['basis'] = basis
  803     metaStr = json.dumps(metaData)
  804     logger.debug("Storing metadata for %s: %s", checksum, metaStr)
  805 
  806     try:
  807         f = cache.open(metaName, 'w')
  808         f.write(metaStr)
  809         f.write('\n')
  810         f.close()
  811     except Exception as e:
  812         logger.warning("Could not write metadata file for %s: %s: %s", checksum, metaName, str(e))
  813 
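       # The resulting <checksum>.meta file holds a single JSON line, e.g. (illustrative values):
       #   {"checksum": "d41d8cd9...", "compressed": true, "encrypted": true, "size": 1048576, "disksize": 524288}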
  814 
  815 class StoreBoolean(argparse.Action):
  816     """
   817     Class to handle options of the form "--[no]argument", where you can specify --noargument to store False,
   818     or --argument to store True.
  819     """
  820     def __init__(self, option_strings, dest, negate="no", nargs=0, **kwargs):
  821         if nargs != 0:
  822             raise ValueError("nargs not allowed")
  823         #if len(option_strings) > 1:
  824         #    raise ValueError("Multiple option strings not allowed")
  825         self.negative_option = "--" + negate + option_strings[0][2:]
  826         self.help_option = "--[" + negate + "]" + option_strings[0][2:]
  827         option_strings.append(self.negative_option)
  828         super(StoreBoolean, self).__init__(option_strings, dest, nargs=0, **kwargs)
  829 
  830     def __call__(self, parser, arguments, values, option_string=None):
  831         #print "Here: ", option_string, " :: ", self.option_strings
  832         if option_string == self.negative_option:
  833             value = False
  834         else:
  835             value = True
  836         setattr(arguments, self.dest, value)
  837 
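       # Example usage (illustrative):
       #   parser.add_argument('--compress', action=StoreBoolean, default=True, help='Compress files')
       #   # accepts both --compress (stores True) and --nocompress (stores False)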
  838 
  839 class Toggle(argparse.Action):
  840     """
  841     Class to handle toggling options.  -x = true -xx = false -xxx = true, etc
  842     """
  843     def __init__(self,
  844                  option_strings,
  845                  dest,
  846                  default=None,
  847                  required=False,
  848                  help=None):
  849         super(Toggle, self).__init__(
  850             option_strings=option_strings,
  851             dest=dest,
  852             nargs=0,
  853             default=default,
  854             required=required,
  855             help=help)
  856 
  857     def __call__(self, parser, namespace, values, option_string=None):
   858         # argparse._ensure_value is a private API that was removed in Python 3.8
   859         setattr(namespace, self.dest, not getattr(namespace, self.dest, False))
  860 
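       # Example usage (illustrative):
       #   parser.add_argument('-x', action=Toggle, help='Toggle the x flag')
       #   # '-x' yields True, '-x -x' yields False, '-x -x -x' yields True, ...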
  861 class GenShellCompletions(argparse.Action):
  862     """
   863     Class to generate shell completion definitions and exit
  864     """
  865     def __call__(self, parser, namespace, values, option_string=None):
  866         path = os.path.split(sys.argv[0])[1]
  867         c = genzshcomp.CompletionGenerator(path, parser, parser_type='argparse', output_format=values)
  868         print(c.get())
  869         sys.exit(0)
  870 
  871 def addGenCompletions(parser):
  872     if genzshcomp:
  873         parser.add_argument('--gencompletions',  dest='gencomps',    default=None, const='zsh', nargs='?', choices=['bash', 'zsh', 'list'], help=argparse.SUPPRESS, action=GenShellCompletions)
  874 
  875 # Help formatter to handle the StoreBoolean options.
  876 # Only handles overriding the basic HelpFormatter class.
  877 
  878 class HelpFormatter(argparse.RawTextHelpFormatter):
  879     def _format_action_invocation(self, action):
  880         #print "_format_action_invocation", str(action)
  881         if hasattr(action, 'help_option'):
  882             ret = action.help_option
  883         else:
  884             ret = super(argparse.RawTextHelpFormatter, self)._format_action_invocation(action)
  885         #print "Got ", ret
  886         return ret
  887 
   888 # Argument formatter.  Useful for converting our command line arguments into strings
  889 
  890 class ArgJsonEncoder(json.JSONEncoder):
  891     def default(self, obj):
  892         if isinstance(obj, io.IOBase):
  893             if obj == sys.stderr:
  894                 return "<stderr>"
  895             elif obj == sys.stdout:
  896                 return "<stdout>"
  897             else:
  898                 return "<file>"
  899         else:
   900             return super(ArgJsonEncoder, self).default(obj)
  901 
  902 # Stream Handler which will always clear the line before printing
  903 class ClearingStreamHandler(logging.StreamHandler):
  904     clearLines = False
  905 
  906     def __init__(self, stream = None):
   907         if stream is None: stream = sys.stderr
   908         super(ClearingStreamHandler, self).__init__(stream)
   909         self.clearLines = os.isatty(stream.fileno())
  910 
  911     def emit(self, record):
  912         _ansiClearEol = '\x1b[K'
  913 
  914         if self.clearLines:
  915             self.stream.write(_ansiClearEol)
  916 
  917         super(ClearingStreamHandler, self).emit(record)
  918 
   919 # An exception logging mechanism
  920 class ExceptionLogger:
  921     def __init__(self, logger, logExceptions):
  922         self.logger = logger
  923         self.logExceptions = logExceptions
  924 
  925     def log(self, exception):
  926         if self.logExceptions:
  927             self.logger.exception(exception)
  928 
  929 
  930 
   931 # A bidirectional dictionary: 'inverse' maps each value back to the list of keys holding it.
  932 
  933 class bidict(dict):
  934     def __init__(self, *args, **kwargs):
  935         super(bidict, self).__init__(*args, **kwargs)
  936         self.inverse = {}
  937         for key, value in self.items():
  938             self.inverse.setdefault(value,[]).append(key)
  939 
  940     def __setitem__(self, key, value):
  941         super(bidict, self).__setitem__(key, value)
  942         self.inverse.setdefault(value,[]).append(key)
  943 
  944     def __delitem__(self, key):
  945         self.inverse.setdefault(self[key],[]).remove(key)
  946         if self[key] in self.inverse and not self.inverse[self[key]]:
  947             del self.inverse[self[key]]
  948         super(bidict, self).__delitem__(key)
  949 
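       # Example (illustrative):
       #   >>> d = bidict({'a': 1, 'b': 2})
       #   >>> d['c'] = 1
       #   >>> d.inverse
       #   {1: ['a', 'c'], 2: ['b']}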
  950 # Get a hash function.  Configurable.
  951 
  952 _hashMagic = struct.pack("!I", 0xffeeddcc)
  953 
  954 def hashDir(crypt, files, decrypt=False):
  955     """ Generate the hash of the filenames, and the number of files, so we can confirm that the contents are the same """
  956     if decrypt:
  957         f = list(files)
  958         #print map(crypt.decryptFilename, [x['name'] for x in f])
  959         filenames = sorted([crypt.decryptFilename(n) for n in [x['name'] for x in f]])
  960     else:
  961         filenames = sorted([x["name"] for x in files])
  962 
  963     m = crypt.getHash()
  964     # Insert "magic" number to help prevent collisions
  965     m.update(_hashMagic)
  966     # Insert a magic number
  967     # Generate a length, and convert it to a byte string
  968     z = struct.pack("!I", len(filenames))
  969     # Hash that
  970     m.update(z)
  971     for f in filenames:
  972         # For each entry, hash the name, and a null character
  973         m.update(bytes(f, 'utf8', 'xmlcharrefreplace'))
  974         m.update(b'\0')
  975     m.update(z)
  976     # Again, Insert "magic" number to help prevent collisions
  977     m.update(_hashMagic)
  978     return (m.hexdigest(), len(filenames))
  979 
  980 
  981 def asString(a, policy='ignore'):
  982     if isinstance(a, str):
  983         return a
  984     elif isinstance(a, bytes):
  985         return a.decode('utf-8', policy)
  986     else:
  987         return str(a)
  988 
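       # Example (illustrative): asString(b'hello') -> 'hello', asString(42) -> '42'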
  989 
  990 # 'Test' code
  991 
  992 if __name__ == "__main__":
  993     p = argparse.ArgumentParser(formatter_class=HelpFormatter)
  994 
  995     p.add_argument("--doit", action=StoreBoolean, help="Yo mama")
  996     p.add_argument("-x", action=Toggle, help="Whatever")
  997 
  998     args = p.parse_args()
  999     print(args)
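       # For example (illustrative), running "python -m Tardis.Util --doit -x -x" prints:
       #   Namespace(doit=True, x=False)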