"Fossies" - the Fresh Open Source Software Archive

Member "Atom/resources/app/apm/node_modules/node-gyp/gyp/pylib/gyp/input.py" (11 Apr 2017, 115880 Bytes) of package /windows/misc/atom-windows.zip:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style: standard) with prefixed line numbers. Alternatively you can here view or download the uninterpreted source code file.

    1 # Copyright (c) 2012 Google Inc. All rights reserved.
    2 # Use of this source code is governed by a BSD-style license that can be
    3 # found in the LICENSE file.
    4 
    5 from compiler.ast import Const
    6 from compiler.ast import Dict
    7 from compiler.ast import Discard
    8 from compiler.ast import List
    9 from compiler.ast import Module
   10 from compiler.ast import Node
   11 from compiler.ast import Stmt
   12 import compiler
   13 import gyp.common
   14 import gyp.simple_copy
   15 import multiprocessing
   16 import optparse
   17 import os.path
   18 import re
   19 import shlex
   20 import signal
   21 import subprocess
   22 import sys
   23 import threading
   24 import time
   25 import traceback
   26 from gyp.common import GypError
   27 from gyp.common import OrderedSet
   28 
   29 
   30 # A list of types that are treated as linkable.
   31 linkable_types = [
   32   'executable',
   33   'shared_library',
   34   'loadable_module',
   35   'mac_kernel_extension',
   36 ]
   37 
   38 # A list of sections that contain links to other targets.
   39 dependency_sections = ['dependencies', 'export_dependent_settings']
   40 
   41 # base_path_sections is a list of sections defined by GYP that contain
   42 # pathnames.  The generators can provide more keys; the two lists are merged
   43 # into path_sections, but you should call IsPathSection instead of using
   44 # either list directly.
   45 base_path_sections = [
   46   'destination',
   47   'files',
   48   'include_dirs',
   49   'inputs',
   50   'libraries',
   51   'outputs',
   52   'sources',
   53 ]
   54 path_sections = set()
   55 
   56 # These per-process dictionaries are used to cache build file data when loading
   57 # in parallel mode.
   58 per_process_data = {}
   59 per_process_aux_data = {}
   60 
   61 def IsPathSection(section):
   62   # If section ends in one of the '=+?!' characters, it's applied to a section
   63   # without the trailing characters.  '/' is notably absent from this list,
   64   # because there's no way for a regular expression to be treated as a path.
   65   while section and section[-1:] in '=+?!':
   66     section = section[:-1]
   67 
   68   if section in path_sections:
   69     return True
   70 
   71   # Sections matching the regexp '_(dir|file|path)s?$' are also
   72   # considered PathSections. Using manual string matching since that
   73   # is much faster than the regexp and this can be called hundreds of
   74   # thousands of times so micro performance matters.
   75   if "_" in section:
   76     tail = section[-6:]
   77     if tail[-1] == 's':
   78       tail = tail[:-1]
   79     if tail[-5:] in ('_file', '_path'):
   80       return True
   81     return tail[-4:] == '_dir'
   82 
   83   return False
   84 
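      # Illustrative examples of IsPathSection (hypothetical section names; at
      # module-load time path_sections is still empty, so these exercise only
      # the suffix rule above):
      #   IsPathSection('output_dir')   # True: matches the '_dir' suffix rule.
      #   IsPathSection('some_files!')  # True: '!' stripped, '_file' matches.
      #   IsPathSection('defines')      # False: no '_' and not a path section.
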
   85 # base_non_configuration_keys is a list of key names that belong in the target
   86 # itself and should not be propagated into its configurations.  It is merged
   87 # with a list that can come from the generator to
   88 # create non_configuration_keys.
   89 base_non_configuration_keys = [
   90   # Sections that must exist inside targets and not configurations.
   91   'actions',
   92   'configurations',
   93   'copies',
   94   'default_configuration',
   95   'dependencies',
   96   'dependencies_original',
   97   'libraries',
   98   'postbuilds',
   99   'product_dir',
  100   'product_extension',
  101   'product_name',
  102   'product_prefix',
  103   'rules',
  104   'run_as',
  105   'sources',
  106   'standalone_static_library',
  107   'suppress_wildcard',
  108   'target_name',
  109   'toolset',
  110   'toolsets',
  111   'type',
  112 
  113   # Sections that can be found inside targets or configurations, but that
  114   # should not be propagated from targets into their configurations.
  115   'variables',
  116 ]
  117 non_configuration_keys = []
  118 
  119 # Keys that do not belong inside a configuration dictionary.
  120 invalid_configuration_keys = [
  121   'actions',
  122   'all_dependent_settings',
  123   'configurations',
  124   'dependencies',
  125   'direct_dependent_settings',
  126   'libraries',
  127   'link_settings',
  128   'sources',
  129   'standalone_static_library',
  130   'target_name',
  131   'type',
  132 ]
  133 
  134 # Controls whether or not the generator supports multiple toolsets.
  135 multiple_toolsets = False
  136 
  137 # Paths for converting filelist paths to output paths: {
  138 #   toplevel,
  139 #   qualified_out_dir,
  140 # }
  141 generator_filelist_paths = None
  142 
  143 def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
  144   """Return a list of all build files included into build_file_path.
  145 
  146   The returned list will contain build_file_path as well as all other files
  147   that it included, either directly or indirectly.  Note that the list may
  148   contain files that were included into a conditional section that evaluated
  149   to false and was not merged into build_file_path's dict.
  150 
  151   aux_data is a dict containing a key for each build file or included build
  152   file.  Those keys provide access to dicts whose "included" keys contain
  153   lists of all other files included by the build file.
  154 
  155   included should be left at its default None value by external callers.  It
  156   is used for recursion.
  157 
  158   The returned list will not contain any duplicate entries.  Each build file
  159   in the list will be relative to the current directory.
  160   """
  161 
  162   if included == None:
  163     included = []
  164 
  165   if build_file_path in included:
  166     return included
  167 
  168   included.append(build_file_path)
  169 
  170   for included_build_file in aux_data[build_file_path].get('included', []):
  171     GetIncludedBuildFiles(included_build_file, aux_data, included)
  172 
  173   return included
  174 
  175 
  176 def CheckedEval(file_contents):
  177   """Return the eval of a gyp file.
  178 
  179   The gyp file is restricted to dictionaries and lists only, and
  180   repeated keys are not allowed.
  181 
  182   Note that this is slower than eval() is.
  183   """
  184 
  185   ast = compiler.parse(file_contents)
  186   assert isinstance(ast, Module)
  187   c1 = ast.getChildren()
  188   assert c1[0] is None
  189   assert isinstance(c1[1], Stmt)
  190   c2 = c1[1].getChildren()
  191   assert isinstance(c2[0], Discard)
  192   c3 = c2[0].getChildren()
  193   assert len(c3) == 1
  194   return CheckNode(c3[0], [])
  195 
  196 
  197 def CheckNode(node, keypath):
  198   if isinstance(node, Dict):
  199     c = node.getChildren()
  200     dict = {}
  201     for n in range(0, len(c), 2):
  202       assert isinstance(c[n], Const)
  203       key = c[n].getChildren()[0]
  204       if key in dict:
  205         raise GypError("Key '" + key + "' repeated at level " +
  206               repr(len(keypath) + 1) + " with key path '" +
  207               '.'.join(keypath) + "'")
  208       kp = list(keypath)  # Make a copy of the list for descending this node.
  209       kp.append(key)
  210       dict[key] = CheckNode(c[n + 1], kp)
  211     return dict
  212   elif isinstance(node, List):
  213     c = node.getChildren()
  214     children = []
  215     for index, child in enumerate(c):
  216       kp = list(keypath)  # Copy list.
  217       kp.append(repr(index))
  218       children.append(CheckNode(child, kp))
  219     return children
  220   elif isinstance(node, Const):
  221     return node.getChildren()[0]
  222   else:
  223     raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
  224          "': " + repr(node))
  225 
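      # Illustrative behavior of the restricted eval above (hypothetical
      # inputs): CheckedEval("{'targets': [{'target_name': 'a'}]}") returns the
      # corresponding dict, a body like "{'a': 1, 'a': 2}" raises GypError for
      # the repeated key, and "1 + 1" raises TypeError because only dict, list,
      # and constant nodes are accepted.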
  226 
  227 def LoadOneBuildFile(build_file_path, data, aux_data, includes,
  228                      is_target, check):
  229   if build_file_path in data:
  230     return data[build_file_path]
  231 
  232   if os.path.exists(build_file_path):
  233     build_file_contents = open(build_file_path).read()
  234   else:
  235     raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
  236 
  237   build_file_data = None
  238   try:
  239     if check:
  240       build_file_data = CheckedEval(build_file_contents)
  241     else:
  242       build_file_data = eval(build_file_contents, {'__builtins__': None},
  243                              None)
  244   except SyntaxError, e:
  245     e.filename = build_file_path
  246     raise
  247   except Exception, e:
  248     gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
  249     raise
  250 
  251   if type(build_file_data) is not dict:
  252     raise GypError("%s does not evaluate to a dictionary." % build_file_path)
  253 
  254   data[build_file_path] = build_file_data
  255   aux_data[build_file_path] = {}
  256 
  257   # Scan for includes and merge them in.
  258   if ('skip_includes' not in build_file_data or
  259       not build_file_data['skip_includes']):
  260     try:
  261       if is_target:
  262         LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
  263                                       aux_data, includes, check)
  264       else:
  265         LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
  266                                       aux_data, None, check)
  267     except Exception, e:
  268       gyp.common.ExceptionAppend(e,
  269                                  'while reading includes of ' + build_file_path)
  270       raise
  271 
  272   return build_file_data
  273 
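      # Note (illustrative): with check=False the file contents are eval()ed
      # with {'__builtins__': None}, so a build file is expected to hold a
      # single Python literal such as
      #   {'targets': [{'target_name': 'foo', 'type': 'none'}]}
      # while check=True routes the contents through the stricter CheckedEval
      # above.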
  274 
  275 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
  276                                   includes, check):
  277   includes_list = []
  278   if includes != None:
  279     includes_list.extend(includes)
  280   if 'includes' in subdict:
  281     for include in subdict['includes']:
  282       # "include" is specified relative to subdict_path, so compute the real
  283       # path to include by appending the provided "include" to the directory
  284       # in which subdict_path resides.
  285       relative_include = \
  286           os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
  287       includes_list.append(relative_include)
  288     # Unhook the includes list, it's no longer needed.
  289     del subdict['includes']
  290 
  291   # Merge in the included files.
  292   for include in includes_list:
  293     if not 'included' in aux_data[subdict_path]:
  294       aux_data[subdict_path]['included'] = []
  295     aux_data[subdict_path]['included'].append(include)
  296 
  297     gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
  298 
  299     MergeDicts(subdict,
  300                LoadOneBuildFile(include, data, aux_data, None, False, check),
  301                subdict_path, include)
  302 
  303   # Recurse into subdictionaries.
  304   for k, v in subdict.iteritems():
  305     if type(v) is dict:
  306       LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
  307                                     None, check)
  308     elif type(v) is list:
  309       LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
  310                                     check)
  311 
  312 
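      # Illustration of include resolution (hypothetical paths): if
      # src/foo/foo.gyp contains {'includes': ['../common.gypi']}, the include
      # is resolved relative to the including file, i.e.
      # os.path.normpath(os.path.join('src/foo', '../common.gypi')), which is
      # 'src/common.gypi', and that file's dict is merged in.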
  313 # This recurses into lists so that it can look for dicts.
  314 def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
  315   for item in sublist:
  316     if type(item) is dict:
  317       LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
  318                                     None, check)
  319     elif type(item) is list:
  320       LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
  321 
  322 # Processes toolsets in all the targets. This recurses into condition entries
  323 # since they can contain toolsets as well.
  324 def ProcessToolsetsInDict(data):
  325   if 'targets' in data:
  326     target_list = data['targets']
  327     new_target_list = []
  328     for target in target_list:
  329       # If this target already has an explicit 'toolset', and no 'toolsets'
  330       # list, don't modify it further.
  331       if 'toolset' in target and 'toolsets' not in target:
  332         new_target_list.append(target)
  333         continue
  334       if multiple_toolsets:
  335         toolsets = target.get('toolsets', ['target'])
  336       else:
  337         toolsets = ['target']
  338       # Make sure this 'toolsets' definition is only processed once.
  339       if 'toolsets' in target:
  340         del target['toolsets']
  341       if len(toolsets) > 0:
  342         # Optimization: only do copies if more than one toolset is specified.
  343         for build in toolsets[1:]:
  344           new_target = gyp.simple_copy.deepcopy(target)
  345           new_target['toolset'] = build
  346           new_target_list.append(new_target)
  347         target['toolset'] = toolsets[0]
  348         new_target_list.append(target)
  349     data['targets'] = new_target_list
  350   if 'conditions' in data:
  351     for condition in data['conditions']:
  352       if type(condition) is list:
  353         for condition_dict in condition[1:]:
  354           if type(condition_dict) is dict:
  355             ProcessToolsetsInDict(condition_dict)
  356 
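      # Example (illustrative, assuming multiple_toolsets is True): a target
      #   {'target_name': 'lib', 'toolsets': ['host', 'target']}
      # is expanded into two target dicts, one with 'toolset': 'host' and one
      # with 'toolset': 'target', and the 'toolsets' key itself is deleted.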
  357 
  358 # TODO(mark): I don't love this name.  It just means that it's going to load
  359 # a build file that contains targets and is expected to provide a targets dict
  360 # that contains the targets...
  361 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
  362                         depth, check, load_dependencies):
  363   # If depth is set, predefine the DEPTH variable to be a relative path from
  364   # this build file's directory to the directory identified by depth.
  365   if depth:
  366     # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
  367     # temporary measure. This should really be addressed by keeping all paths
  368     # in POSIX until actual project generation.
  369     d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
  370     if d == '':
  371       variables['DEPTH'] = '.'
  372     else:
  373       variables['DEPTH'] = d.replace('\\', '/')
  374 
  375   # The 'target_build_files' key is only set when loading target build files in
  376   # the non-parallel code path, where LoadTargetBuildFile is called
  377   # recursively.  In the parallel code path, we don't need to check whether the
  378   # |build_file_path| has already been loaded, because the 'scheduled' set in
  379   # ParallelState guarantees that we never load the same |build_file_path|
  380   # twice.
  381   if 'target_build_files' in data:
  382     if build_file_path in data['target_build_files']:
  383       # Already loaded.
  384       return False
  385     data['target_build_files'].add(build_file_path)
  386 
  387   gyp.DebugOutput(gyp.DEBUG_INCLUDES,
  388                   "Loading Target Build File '%s'", build_file_path)
  389 
  390   build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
  391                                      includes, True, check)
  392 
  393   # Store DEPTH for later use in generators.
  394   build_file_data['_DEPTH'] = depth
  395 
  396   # Set up the included_files key indicating which .gyp files contributed to
  397   # this target dict.
  398   if 'included_files' in build_file_data:
  399     raise GypError(build_file_path + ' must not contain included_files key')
  400 
  401   included = GetIncludedBuildFiles(build_file_path, aux_data)
  402   build_file_data['included_files'] = []
  403   for included_file in included:
  404     # included_file is relative to the current directory, but it needs to
  405     # be made relative to build_file_path's directory.
  406     included_relative = \
  407         gyp.common.RelativePath(included_file,
  408                                 os.path.dirname(build_file_path))
  409     build_file_data['included_files'].append(included_relative)
  410 
  411   # Do a first round of toolsets expansion so that conditions can be defined
  412   # per toolset.
  413   ProcessToolsetsInDict(build_file_data)
  414 
  415   # Apply "pre"/"early" variable expansions and condition evaluations.
  416   ProcessVariablesAndConditionsInDict(
  417       build_file_data, PHASE_EARLY, variables, build_file_path)
  418 
  419   # Since some toolsets might have been defined conditionally, perform
  420   # a second round of toolsets expansion now.
  421   ProcessToolsetsInDict(build_file_data)
  422 
  423   # Look at each project's target_defaults dict, and merge settings into
  424   # targets.
  425   if 'target_defaults' in build_file_data:
  426     if 'targets' not in build_file_data:
  427       raise GypError("Unable to find targets in build file %s" %
  428                      build_file_path)
  429 
  430     index = 0
  431     while index < len(build_file_data['targets']):
  432       # This procedure needs to give the impression that target_defaults is
  433       # used as defaults, and the individual targets inherit from that.
  434       # The individual targets need to be merged into the defaults.  Make
  435       # a deep copy of the defaults for each target, merge the target dict
  436       # as found in the input file into that copy, and then hook up the
  437       # copy with the target-specific data merged into it as the replacement
  438       # target dict.
  439       old_target_dict = build_file_data['targets'][index]
  440       new_target_dict = gyp.simple_copy.deepcopy(
  441         build_file_data['target_defaults'])
  442       MergeDicts(new_target_dict, old_target_dict,
  443                  build_file_path, build_file_path)
  444       build_file_data['targets'][index] = new_target_dict
  445       index += 1
  446 
  447     # No longer needed.
  448     del build_file_data['target_defaults']
  449 
  450   # Look for dependencies.  This means that dependency resolution occurs
  451   # after "pre" conditionals and variable expansion, but before "post" -
  452   # in other words, you can't put a "dependencies" section inside a "post"
  453   # conditional within a target.
  454 
  455   dependencies = []
  456   if 'targets' in build_file_data:
  457     for target_dict in build_file_data['targets']:
  458       if 'dependencies' not in target_dict:
  459         continue
  460       for dependency in target_dict['dependencies']:
  461         dependencies.append(
  462             gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
  463 
  464   if load_dependencies:
  465     for dependency in dependencies:
  466       try:
  467         LoadTargetBuildFile(dependency, data, aux_data, variables,
  468                             includes, depth, check, load_dependencies)
  469       except Exception, e:
  470         gyp.common.ExceptionAppend(
  471           e, 'while loading dependencies of %s' % build_file_path)
  472         raise
  473   else:
  474     return (build_file_path, dependencies)
  475 
  476 def CallLoadTargetBuildFile(global_flags,
  477                             build_file_path, variables,
  478                             includes, depth, check,
  479                             generator_input_info):
  480   """Wrapper around LoadTargetBuildFile for parallel processing.
  481 
  482      This wrapper is used when LoadTargetBuildFile is executed in
  483      a worker process.
  484   """
  485 
  486   try:
  487     signal.signal(signal.SIGINT, signal.SIG_IGN)
  488 
  489     # Apply globals so that the worker process behaves the same.
  490     for key, value in global_flags.iteritems():
  491       globals()[key] = value
  492 
  493     SetGeneratorGlobals(generator_input_info)
  494     result = LoadTargetBuildFile(build_file_path, per_process_data,
  495                                  per_process_aux_data, variables,
  496                                  includes, depth, check, False)
  497     if not result:
  498       return result
  499 
  500     (build_file_path, dependencies) = result
  501 
  502     # We can safely pop the build_file_data from per_process_data because it
  503     # will never be referenced by this process again, so we don't need to keep
  504     # it in the cache.
  505     build_file_data = per_process_data.pop(build_file_path)
  506 
  507     # This gets serialized and sent back to the main process via a pipe.
  508     # It's handled in LoadTargetBuildFileCallback.
  509     return (build_file_path,
  510             build_file_data,
  511             dependencies)
  512   except GypError, e:
  513     sys.stderr.write("gyp: %s\n" % e)
  514     return None
  515   except Exception, e:
  516     print >>sys.stderr, 'Exception:', e
  517     print >>sys.stderr, traceback.format_exc()
  518     return None
  519 
  520 
  521 class ParallelProcessingError(Exception):
  522   pass
  523 
  524 
  525 class ParallelState(object):
  526   """Class to keep track of state when processing input files in parallel.
  527 
  528   If build files are loaded in parallel, use this to keep track of
  529   state during farming out and processing parallel jobs. It's stored
  530   in a global so that the callback function can have access to it.
  531   """
  532 
  533   def __init__(self):
  534     # The multiprocessing pool.
  535     self.pool = None
  536     # The condition variable used to protect this object and notify
  537     # the main loop when there might be more data to process.
  538     self.condition = None
  539     # The "data" dict that was passed to LoadTargetBuildFileParallel
  540     self.data = None
  541     # The number of parallel calls outstanding; decremented when a response
  542     # is received.
  543     self.pending = 0
  544     # The set of all build files that have been scheduled, so we don't
  545     # schedule the same one twice.
  546     self.scheduled = set()
  547     # A list of dependency build file paths that haven't been scheduled yet.
  548     self.dependencies = []
  549     # Flag to indicate if there was an error in a child process.
  550     self.error = False
  551 
  552   def LoadTargetBuildFileCallback(self, result):
  553     """Handle the results of running LoadTargetBuildFile in another process.
  554     """
  555     self.condition.acquire()
  556     if not result:
  557       self.error = True
  558       self.condition.notify()
  559       self.condition.release()
  560       return
  561     (build_file_path0, build_file_data0, dependencies0) = result
  562     self.data[build_file_path0] = build_file_data0
  563     self.data['target_build_files'].add(build_file_path0)
  564     for new_dependency in dependencies0:
  565       if new_dependency not in self.scheduled:
  566         self.scheduled.add(new_dependency)
  567         self.dependencies.append(new_dependency)
  568     self.pending -= 1
  569     self.condition.notify()
  570     self.condition.release()
  571 
  572 
  573 def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
  574                                  check, generator_input_info):
  575   parallel_state = ParallelState()
  576   parallel_state.condition = threading.Condition()
  577   # Make copies of the build_files argument that we can modify while working.
  578   parallel_state.dependencies = list(build_files)
  579   parallel_state.scheduled = set(build_files)
  580   parallel_state.pending = 0
  581   parallel_state.data = data
  582 
  583   try:
  584     parallel_state.condition.acquire()
  585     while parallel_state.dependencies or parallel_state.pending:
  586       if parallel_state.error:
  587         break
  588       if not parallel_state.dependencies:
  589         parallel_state.condition.wait()
  590         continue
  591 
  592       dependency = parallel_state.dependencies.pop()
  593 
  594       parallel_state.pending += 1
  595       global_flags = {
  596         'path_sections': globals()['path_sections'],
  597         'non_configuration_keys': globals()['non_configuration_keys'],
  598         'multiple_toolsets': globals()['multiple_toolsets']}
  599 
  600       if not parallel_state.pool:
  601         parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
  602       parallel_state.pool.apply_async(
  603           CallLoadTargetBuildFile,
  604           args = (global_flags, dependency,
  605                   variables, includes, depth, check, generator_input_info),
  606           callback = parallel_state.LoadTargetBuildFileCallback)
  607   except KeyboardInterrupt, e:
  608     parallel_state.pool.terminate()
  609     raise e
  610 
  611   parallel_state.condition.release()
  612 
  613   parallel_state.pool.close()
  614   parallel_state.pool.join()
  615   parallel_state.pool = None
  616 
  617   if parallel_state.error:
  618     sys.exit(1)
  619 
  620 # Look for the bracket that matches the first bracket seen in a
  621 # string, and return the start and end as a tuple.  For example, if
  622 # the input is something like "<(foo <(bar)) blah", then it would
  623 # return (1, 13), indicating the entire string except for the leading
  624 # "<" and trailing " blah".
  625 LBRACKETS = set('{[(')
  626 BRACKETS = {'}': '{', ']': '[', ')': '('}
  627 def FindEnclosingBracketGroup(input_str):
  628   stack = []
  629   start = -1
  630   for index, char in enumerate(input_str):
  631     if char in LBRACKETS:
  632       stack.append(char)
  633       if start == -1:
  634         start = index
  635     elif char in BRACKETS:
  636       if not stack:
  637         return (-1, -1)
  638       if stack.pop() != BRACKETS[char]:
  639         return (-1, -1)
  640       if not stack:
  641         return (start, index + 1)
  642   return (-1, -1)
  643 
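      # Examples (illustrative): FindEnclosingBracketGroup('<(foo <(bar)) blah')
      # returns (1, 13) as described above, FindEnclosingBracketGroup('(a(b)c)')
      # returns (0, 7), and unbalanced input such as '(a]' returns (-1, -1).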
  644 
  645 def IsStrCanonicalInt(string):
  646   """Returns True if |string| is in its canonical integer form.
  647 
  648   The canonical form is such that str(int(string)) == string.
  649   """
  650   if type(string) is str:
  651     # This function is called a lot, so for maximum performance avoid
  652     # regexps, which would make the code much shorter but would also
  653     # take roughly twice as long to run.
  654     if string:
  655       if string == "0":
  656         return True
  657       if string[0] == "-":
  658         string = string[1:]
  659         if not string:
  660           return False
  661       if '1' <= string[0] <= '9':
  662         return string.isdigit()
  663 
  664   return False
  665 
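      # Examples (illustrative): IsStrCanonicalInt('10') and
      # IsStrCanonicalInt('-5') return True, while IsStrCanonicalInt('05'),
      # IsStrCanonicalInt('-0'), and IsStrCanonicalInt(10) (an int, not a str)
      # return False, matching the str(int(string)) == string rule above.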
  666 
  667 # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
  668 # "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
  669 # In the last case, the inner "<()" is captured in match['content'].
  670 early_variable_re = re.compile(
  671     r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
  672     r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
  673     r'\((?P<is_array>\s*\[?)'
  674     r'(?P<content>.*?)(\]?)\))')
  675 
  676 # This matches the same as early_variable_re, but with '>' instead of '<'.
  677 late_variable_re = re.compile(
  678     r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
  679     r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
  680     r'\((?P<is_array>\s*\[?)'
  681     r'(?P<content>.*?)(\]?)\))')
  682 
  683 # This matches the same as early_variable_re, but with '^' instead of '<'.
  684 latelate_variable_re = re.compile(
  685     r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
  686     r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
  687     r'\((?P<is_array>\s*\[?)'
  688     r'(?P<content>.*?)(\]?)\))')
  689 
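      # Example (illustrative): for the string '<!@(python -c "print 42")',
      # early_variable_re captures type '<!@', command_string None, is_array '',
      # and content 'python -c "print 42"'; the '!' selects command execution
      # and the '@' selects list expansion in ExpandVariables below.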
  690 # Global cache of results from running commands so they don't have to be run
  691 # more than once.
  692 cached_command_results = {}
  693 
  694 
  695 def FixupPlatformCommand(cmd):
  696   if sys.platform == 'win32':
  697     if type(cmd) is list:
  698       cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
  699     else:
  700       cmd = re.sub('^cat ', 'type ', cmd)
  701   return cmd
  702 
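      # Example (illustrative): on win32, FixupPlatformCommand('cat files.txt')
      # returns 'type files.txt'; on every other platform the command is
      # returned unchanged.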
  703 
  704 PHASE_EARLY = 0
  705 PHASE_LATE = 1
  706 PHASE_LATELATE = 2
  707 
  708 
  709 def ExpandVariables(input, phase, variables, build_file):
  710   # Look for the pattern that gets expanded into variables
  711   if phase == PHASE_EARLY:
  712     variable_re = early_variable_re
  713     expansion_symbol = '<'
  714   elif phase == PHASE_LATE:
  715     variable_re = late_variable_re
  716     expansion_symbol = '>'
  717   elif phase == PHASE_LATELATE:
  718     variable_re = latelate_variable_re
  719     expansion_symbol = '^'
  720   else:
  721     assert False
  722 
  723   input_str = str(input)
  724   if IsStrCanonicalInt(input_str):
  725     return int(input_str)
  726 
  727   # Do a quick scan to determine if an expensive regex search is warranted.
  728   if expansion_symbol not in input_str:
  729     return input_str
  730 
  731   # Get the entire list of matches as a list of MatchObject instances.
  732   # (using findall here would return strings instead of MatchObjects).
  733   matches = list(variable_re.finditer(input_str))
  734   if not matches:
  735     return input_str
  736 
  737   output = input_str
  738   # Reverse the list of matches so that replacements are done right-to-left.
  739   # That ensures that earlier replacements won't mess up the string in a
  740   # way that causes later calls to find the earlier substituted text instead
  741   # of what's intended for replacement.
  742   matches.reverse()
  743   for match_group in matches:
  744     match = match_group.groupdict()
  745     gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
  746     # match['replace'] is the substring to look for, match['type']
  747     # is the character code for the replacement type (< > <! >! <| >| <@
  748     # >@ <!@ >!@), match['is_array'] contains a '[' for command
  749     # arrays, and match['content'] is the name of the variable (< >)
  750     # or command to run (<! >!). match['command_string'] is an optional
  751     # command string. Currently, only 'pymod_do_main' is supported.
  752 
  753     # run_command is true if a ! variant is used.
  754     run_command = '!' in match['type']
  755     command_string = match['command_string']
  756 
  757     # file_list is true if a | variant is used.
  758     file_list = '|' in match['type']
  759 
  760     # Capture these now so we can adjust them later.
  761     replace_start = match_group.start('replace')
  762     replace_end = match_group.end('replace')
  763 
  764     # Find the ending paren, and re-evaluate the contained string.
  765     (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
  766 
  767     # Adjust the replacement range to match the entire command
  768     # found by FindEnclosingBracketGroup (since the variable_re
  769     # probably doesn't match the entire command if it contained
  770     # nested variables).
  771     replace_end = replace_start + c_end
  772 
  773     # Find the "real" replacement, matching the appropriate closing
  774     # paren, and adjust the replacement start and end.
  775     replacement = input_str[replace_start:replace_end]
  776 
  777     # Figure out what the contents of the variable parens are.
  778     contents_start = replace_start + c_start + 1
  779     contents_end = replace_end - 1
  780     contents = input_str[contents_start:contents_end]
  781 
  782     # Do filter substitution now for <|().
  783     # Admittedly, this is different than the evaluation order in other
  784     # contexts. However, since filtration has no chance to run on <|(),
  785     # this seems like the only obvious way to give them access to filters.
  786     if file_list:
  787       processed_variables = gyp.simple_copy.deepcopy(variables)
  788       ProcessListFiltersInDict(contents, processed_variables)
  789       # Recurse to expand variables in the contents
  790       contents = ExpandVariables(contents, phase,
  791                                  processed_variables, build_file)
  792     else:
  793       # Recurse to expand variables in the contents
  794       contents = ExpandVariables(contents, phase, variables, build_file)
  795 
  796     # Strip off leading/trailing whitespace so that variable matches are
  797     # simpler below (and because they are rarely needed).
  798     contents = contents.strip()
  799 
  800     # expand_to_list is true if an @ variant is used.  In that case,
  801     # the expansion should result in a list.  Note that the caller
  802     # must be prepared for a list in return, and not all callers are,
  803     # because not all are working in list context.  Also, for list
  804     # expansions, there can be no other text besides the variable
  805     # expansion in the input string.
  806     expand_to_list = '@' in match['type'] and input_str == replacement
  807 
  808     if run_command or file_list:
  809       # Find the build file's directory, so commands can be run or file lists
  810       # generated relative to it.
  811       build_file_dir = os.path.dirname(build_file)
  812       if build_file_dir == '' and not file_list:
  813         # If build_file is just a leaf filename indicating a file in the
  814         # current directory, build_file_dir might be an empty string.  Set
  815         # it to None to signal to subprocess.Popen that it should run the
  816         # command in the current directory.
  817         build_file_dir = None
  818 
  819     # Support <|(listfile.txt ...) which generates a file
  820     # containing items from a gyp list, generated at gyp time.
  821     # This works around actions/rules which have more inputs than will
  822     # fit on the command line.
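          # Example (illustrative): '<|(objs.txt <@(_sources))' writes each
          # expanded source to objs.txt, one item per line, and the whole
          # expression is replaced by the path of objs.txt relative to the
          # build file's directory.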
  823     if file_list:
  824       if type(contents) is list:
  825         contents_list = contents
  826       else:
  827         contents_list = contents.split(' ')
  828       replacement = contents_list[0]
  829       if os.path.isabs(replacement):
  830         raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
  831 
  832       if not generator_filelist_paths:
  833         path = os.path.join(build_file_dir, replacement)
  834       else:
  835         if os.path.isabs(build_file_dir):
  836           toplevel = generator_filelist_paths['toplevel']
  837           rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
  838         else:
  839           rel_build_file_dir = build_file_dir
  840         qualified_out_dir = generator_filelist_paths['qualified_out_dir']
  841         path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
  842         gyp.common.EnsureDirExists(path)
  843 
  844       replacement = gyp.common.RelativePath(path, build_file_dir)
  845       f = gyp.common.WriteOnDiff(path)
  846       for i in contents_list[1:]:
  847         f.write('%s\n' % i)
  848       f.close()
  849 
  850     elif run_command:
  851       use_shell = True
  852       if match['is_array']:
  853         contents = eval(contents)
  854         use_shell = False
  855 
  856       # Check for a cached value to avoid executing commands, or generating
  857       # file lists more than once. The cache key contains the command to be
  858       # run as well as the directory to run it from, to account for commands
  859       # that depend on their current directory.
  860       # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
  861       # someone could author a set of GYP files where each time the command
  862       # is invoked it produces different output by design. When the need
  863       # arises, the syntax should be extended to support turning off caching
  864       # of a command's output so it is run every time.
  865       cache_key = (str(contents), build_file_dir)
  866       cached_value = cached_command_results.get(cache_key, None)
  867       if cached_value is None:
  868         gyp.DebugOutput(gyp.DEBUG_VARIABLES,
  869                         "Executing command '%s' in directory '%s'",
  870                         contents, build_file_dir)
  871 
  872         replacement = ''
  873 
  874         if command_string == 'pymod_do_main':
  875           # <!pymod_do_main(modulename param eters) loads |modulename| as a
  876           # python module and then calls that module's DoMain() function,
  877           # passing ["param", "eters"] as a single list argument. For modules
  878           # that don't load quickly, this can be faster than
  879           # <!(python modulename param eters). Do this in |build_file_dir|.
  880           oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
  881           if build_file_dir:  # build_file_dir may be None (see above).
  882             os.chdir(build_file_dir)
  883           try:
  884 
  885             parsed_contents = shlex.split(contents)
  886             try:
  887               py_module = __import__(parsed_contents[0])
  888             except ImportError as e:
  889               raise GypError("Error importing pymod_do_main "
  890                              "module (%s): %s" % (parsed_contents[0], e))
  891             replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
  892           finally:
  893             os.chdir(oldwd)
  894           assert replacement != None
  895         elif command_string:
  896           raise GypError("Unknown command string '%s' in '%s'." %
  897                          (command_string, contents))
  898         else:
  899           # Fix up command with platform specific workarounds.
  900           contents = FixupPlatformCommand(contents)
  901           try:
  902             p = subprocess.Popen(contents, shell=use_shell,
  903                                  stdout=subprocess.PIPE,
  904                                  stderr=subprocess.PIPE,
  905                                  stdin=subprocess.PIPE,
  906                                  cwd=build_file_dir)
  907           except Exception, e:
  908             raise GypError("%s while executing command '%s' in %s" %
  909                            (e, contents, build_file))
  910 
  911           p_stdout, p_stderr = p.communicate('')
  912 
  913           if p.wait() != 0 or p_stderr:
  914             sys.stderr.write(p_stderr)
  915             # Simulate check_call behavior, since check_call only exists
  916             # in python 2.5 and later.
  917             raise GypError("Call to '%s' returned exit status %d while in %s." %
  918                            (contents, p.returncode, build_file))
  919           replacement = p_stdout.rstrip()
  920 
  921         cached_command_results[cache_key] = replacement
  922       else:
  923         gyp.DebugOutput(gyp.DEBUG_VARIABLES,
  924                         "Had cache value for command '%s' in directory '%s'",
  925                         contents, build_file_dir)
  926         replacement = cached_value
  927 
  928     else:
  929       if contents not in variables:
  930         if contents[-1] in ['!', '/']:
  931           # In order to allow cross-compiles (nacl) to happen more naturally,
  932           # we will allow references to >(sources/) etc. to resolve to
  933           # an empty list if undefined. This allows actions to:
  934           # 'action!': [
  935           #   '>@(_sources!)',
  936           # ],
  937           # 'action/': [
  938           #   '>@(_sources/)',
  939           # ],
  940           replacement = []
  941         else:
  942           raise GypError('Undefined variable ' + contents +
  943                          ' in ' + build_file)
  944       else:
  945         replacement = variables[contents]
  946 
  947     if type(replacement) is list:
  948       for item in replacement:
  949         if not contents[-1] == '/' and type(item) not in (str, int):
  950           raise GypError('Variable ' + contents +
  951                          ' must expand to a string or list of strings; ' +
  952                          'list contains a ' +
  953                          item.__class__.__name__)
  954       # Run through the list and handle variable expansions in it.  Since
  955       # the list is guaranteed not to contain dicts, this won't do anything
  956       # with conditions sections.
  957       ProcessVariablesAndConditionsInList(replacement, phase, variables,
  958                                           build_file)
  959     elif type(replacement) not in (str, int):
  960       raise GypError('Variable ' + contents +
  961                      ' must expand to a string or list of strings; ' +
  962                      'found a ' + replacement.__class__.__name__)
  963 
  964     if expand_to_list:
  965       # Expanding in list context.  It's guaranteed that there's only one
  966       # replacement to do in |input_str| and that it's this replacement.  See
  967       # above.
  968       if type(replacement) is list:
  969         # If it's already a list, make a copy.
  970         output = replacement[:]
  971       else:
  972         # Split it the same way sh would split arguments.
  973         output = shlex.split(str(replacement))
  974     else:
  975       # Expanding in string context.
  976       encoded_replacement = ''
  977       if type(replacement) is list:
  978         # When expanding a list into string context, turn the list items
  979         # into a string in a way that will work with a subprocess call.
  980         #
  981         # TODO(mark): This isn't completely correct.  This should
  982         # call a generator-provided function that observes the
  983         # proper list-to-argument quoting rules on a specific
  984         # platform instead of just calling the POSIX encoding
  985         # routine.
  986         encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
  987       else:
  988         encoded_replacement = replacement
  989 
  990       output = output[:replace_start] + str(encoded_replacement) + \
  991                output[replace_end:]
  992     # Prepare for the next match iteration.
  993     input_str = output
  994 
  995   if output == input:
  996     gyp.DebugOutput(gyp.DEBUG_VARIABLES,
  997                     "Found only identity matches on %r, avoiding infinite "
  998                     "recursion.",
  999                     output)
 1000   else:
 1001     # Look for more matches now that we've replaced some, to deal with
 1002     # expanding local variables (variables defined in the same
 1003     # variables block as this one).
 1004     gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
 1005     if type(output) is list:
 1006       if output and type(output[0]) is list:
 1007         # Leave output alone if it's a list of lists.
 1008         # We don't want such lists to be stringified.
 1009         pass
 1010       else:
 1011         new_output = []
 1012         for item in output:
 1013           new_output.append(
 1014               ExpandVariables(item, phase, variables, build_file))
 1015         output = new_output
 1016     else:
 1017       output = ExpandVariables(output, phase, variables, build_file)
 1018 
 1019   # Convert all strings that are canonically-represented integers into integers.
 1020   if type(output) is list:
 1021     for index in xrange(0, len(output)):
 1022       if IsStrCanonicalInt(output[index]):
 1023         output[index] = int(output[index])
 1024   elif IsStrCanonicalInt(output):
 1025     output = int(output)
 1026 
 1027   return output
 1028 
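      # Examples (illustrative, hypothetical variables): with
      #   variables = {'depth': '..', 'defines': ['A', 'B']}
      # ExpandVariables('<(depth)/out', PHASE_EARLY, variables, 'foo.gyp')
      # returns '../out', and ExpandVariables('<@(defines)', ...) returns the
      # list ['A', 'B'] because the '@' variant expands in list context.
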
 1029 # The same condition is often evaluated over and over again so it
 1030 # makes sense to cache as much as possible between evaluations.
 1031 cached_conditions_asts = {}
 1032 
 1033 def EvalCondition(condition, conditions_key, phase, variables, build_file):
 1034   """Returns the dict that should be used or None if the result was
 1035   that nothing should be used."""
 1036   if type(condition) is not list:
 1037     raise GypError(conditions_key + ' must be a list')
 1038   if len(condition) < 2:
 1039     # It's possible that condition[0] won't work in which case this
 1040     # attempt will raise its own IndexError.  That's probably fine.
 1041     raise GypError(conditions_key + ' ' + condition[0] +
 1042                    ' must be at least length 2, not ' + str(len(condition)))
 1043 
 1044   i = 0
 1045   result = None
 1046   while i < len(condition):
 1047     cond_expr = condition[i]
 1048     true_dict = condition[i + 1]
 1049     if type(true_dict) is not dict:
 1050       raise GypError('{} {} must be followed by a dictionary, not {}'.format(
 1051         conditions_key, cond_expr, type(true_dict)))
 1052     if len(condition) > i + 2 and type(condition[i + 2]) is dict:
 1053       false_dict = condition[i + 2]
 1054       i = i + 3
 1055       if i != len(condition):
 1056         raise GypError('{} {} has {} unexpected trailing items'.format(
 1057           conditions_key, cond_expr, len(condition) - i))
 1058     else:
 1059       false_dict = None
 1060       i = i + 2
 1061     if result == None:
 1062       result = EvalSingleCondition(
 1063           cond_expr, true_dict, false_dict, phase, variables, build_file)
 1064 
 1065   return result
 1066 
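      # Example (illustrative): with variables = {'OS': 'mac'},
      #   EvalCondition(['OS=="mac"', {'defines': ['MAC']},
      #                  {'defines': ['OTHER']}],
      #                 'conditions', PHASE_EARLY, variables, 'foo.gyp')
      # returns {'defines': ['MAC']}; with any other OS value it returns the
      # trailing false dict.  Additional cond_expr/dict pairs behave like elif.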
 1067 
 1068 def EvalSingleCondition(
 1069     cond_expr, true_dict, false_dict, phase, variables, build_file):
 1070   """Returns true_dict if cond_expr evaluates to true, and false_dict
 1071   otherwise."""
 1072   # Do expansions on the condition itself.  Since the condition can naturally
 1073   # contain variable references without needing to resort to GYP expansion
 1074   # syntax, this is of dubious value for variables, but someone might want to
 1075   # use a command expansion directly inside a condition.
 1076   cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
 1077                                        build_file)
 1078   if type(cond_expr_expanded) not in (str, int):
 1079     raise ValueError(
 1080           'Variable expansion in this context permits str and int ' + \
 1081             'only, found ' + cond_expr_expanded.__class__.__name__)
 1082 
 1083   try:
 1084     if cond_expr_expanded in cached_conditions_asts:
 1085       ast_code = cached_conditions_asts[cond_expr_expanded]
 1086     else:
 1087       ast_code = compile(cond_expr_expanded, '<string>', 'eval')
 1088       cached_conditions_asts[cond_expr_expanded] = ast_code
 1089     if eval(ast_code, {'__builtins__': None}, variables):
 1090       return true_dict
 1091     return false_dict
 1092   except SyntaxError, e:
 1093     syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
 1094                                'at character %d.' %
 1095                                (str(e.args[0]), e.text, build_file, e.offset),
 1096                                e.filename, e.lineno, e.offset, e.text)
 1097     raise syntax_error
 1098   except NameError, e:
 1099     gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
 1100                                (cond_expr_expanded, build_file))
 1101     raise GypError(e)
 1102 
 1103 
 1104 def ProcessConditionsInDict(the_dict, phase, variables, build_file):
 1105   # Process a 'conditions' or 'target_conditions' section in the_dict,
 1106   # depending on phase.
 1107   # early -> conditions
 1108   # late -> target_conditions
 1109   # latelate -> no conditions
 1110   #
 1111   # Each item in a conditions list consists of cond_expr, a string expression
 1112   # evaluated as the condition, and true_dict, a dict that will be merged into
 1113   # the_dict if cond_expr evaluates to true.  Optionally, a third item,
 1114   # false_dict, may be present.  false_dict is merged into the_dict if
 1115   # cond_expr evaluates to false.
 1116   #
 1117   # Any dict merged into the_dict will be recursively processed for nested
 1118   # conditionals and other expansions, also according to phase, immediately
 1119   # prior to being merged.
 1120 
 1121   if phase == PHASE_EARLY:
 1122     conditions_key = 'conditions'
 1123   elif phase == PHASE_LATE:
 1124     conditions_key = 'target_conditions'
 1125   elif phase == PHASE_LATELATE:
 1126     return
 1127   else:
 1128     assert False
 1129 
 1130   if not conditions_key in the_dict:
 1131     return
 1132 
 1133   conditions_list = the_dict[conditions_key]
 1134   # Unhook the conditions list, it's no longer needed.
 1135   del the_dict[conditions_key]
 1136 
 1137   for condition in conditions_list:
 1138     merge_dict = EvalCondition(condition, conditions_key, phase, variables,
 1139                                build_file)
 1140 
 1141     if merge_dict != None:
 1142       # Expand variables and nested conditionals in the merge_dict before
 1143       # merging it.
 1144       ProcessVariablesAndConditionsInDict(merge_dict, phase,
 1145                                           variables, build_file)
 1146 
 1147       MergeDicts(the_dict, merge_dict, build_file, build_file)
 1148 
 1149 
 1150 def LoadAutomaticVariablesFromDict(variables, the_dict):
 1151   # Any keys with plain string values in the_dict become automatic variables.
 1152   # The variable name is the key name with a "_" character prepended.
 1153   for key, value in the_dict.iteritems():
 1154     if type(value) in (str, int, list):
 1155       variables['_' + key] = value
 1156 
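      # Example (illustrative): for the_dict = {'target_name': 'foo',
      # 'type': 'executable'}, this adds '_target_name': 'foo' and
      # '_type': 'executable' to variables, which is what lets condition
      # strings such as '_type=="executable"' refer to the enclosing target.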
 1157 
 1158 def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
 1159   # Any keys in the_dict's "variables" dict, if it has one, becomes a
 1160   # variable.  The variable name is the key name in the "variables" dict.
 1161   # Variables that end with the % character are set only if they are unset in
 1162   # the variables dict.  the_dict_key is the name of the key that accesses
 1163   # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
 1164   # (it could be a list or it could be parentless because it is a root dict),
 1165   # the_dict_key will be None.
 1166   for key, value in the_dict.get('variables', {}).iteritems():
 1167     if type(value) not in (str, int, list):
 1168       continue
 1169 
 1170     if key.endswith('%'):
 1171       variable_name = key[:-1]
 1172       if variable_name in variables:
 1173         # If the variable is already set, don't set it.
 1174         continue
 1175       if the_dict_key == 'variables' and variable_name in the_dict:
 1176         # If the variable is set without a % in the_dict, and the_dict is a
 1177         # variables dict (making |variables| a variables sub-dict of a
 1178         # variables dict), use the_dict's definition.
 1179         value = the_dict[variable_name]
 1180     else:
 1181       variable_name = key
 1182 
 1183     variables[variable_name] = value
 1184 
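      # Example (illustrative): with the_dict = {'variables': {'foo%': 'bar'}},
      # the '%' suffix marks a default: if variables already contains 'foo'
      # (e.g. from the command line), the existing value is kept; otherwise
      # variables['foo'] is set to 'bar'.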
 1185 
 1186 def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
 1187                                         build_file, the_dict_key=None):
 1188   """Handle all variable and command expansion and conditional evaluation.
 1189 
 1190   This function is the public entry point for all variable expansions and
 1191   conditional evaluations.  The variables_in dictionary will not be modified
 1192   by this function.
 1193   """
 1194 
 1195   # Make a copy of the variables_in dict that can be modified during the
 1196   # loading of automatics and the loading of the variables dict.
 1197   variables = variables_in.copy()
 1198   LoadAutomaticVariablesFromDict(variables, the_dict)
 1199 
 1200   if 'variables' in the_dict:
 1201     # Make sure all the local variables are added to the variables
 1202     # list before we process them so that you can reference one
 1203     # variable from another.  They will be fully expanded by recursion
 1204     # in ExpandVariables.
 1205     for key, value in the_dict['variables'].iteritems():
 1206       variables[key] = value
 1207 
 1208     # Handle the associated variables dict first, so that any variable
 1209     # references within can be resolved prior to using them as variables.
 1210     # Pass a copy of the variables dict to avoid having it be tainted.
 1211     # Otherwise, it would have extra automatics added for everything that
 1212     # should just be an ordinary variable in this scope.
 1213     ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
 1214                                         variables, build_file, 'variables')
 1215 
 1216   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
 1217 
 1218   for key, value in the_dict.iteritems():
 1219     # Skip "variables", which was already processed if present.
 1220     if key != 'variables' and type(value) is str:
 1221       expanded = ExpandVariables(value, phase, variables, build_file)
 1222       if type(expanded) not in (str, int):
 1223         raise ValueError(
 1224               'Variable expansion in this context permits str and int ' + \
 1225               'only, found ' + expanded.__class__.__name__ + ' for ' + key)
 1226       the_dict[key] = expanded
 1227 
 1228   # Variable expansion may have resulted in changes to automatics.  Reload.
 1229   # TODO(mark): Optimization: only reload if no changes were made.
 1230   variables = variables_in.copy()
 1231   LoadAutomaticVariablesFromDict(variables, the_dict)
 1232   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
 1233 
 1234   # Process conditions in this dict.  This is done after variable expansion
 1235   # so that conditions may take advantage of expanded variables.  For example,
 1236   # if the_dict contains:
 1237   #   {'type':       '<(library_type)',
 1238   #    'conditions': [['_type=="static_library"', { ... }]]},
 1239   # _type, as used in the condition, will only be set to the value of
 1240   # library_type if variable expansion is performed before condition
 1241   # processing.  However, condition processing should occur prior to recursion
 1242   # so that variables (both automatic and "variables" dict type) may be
 1243   # adjusted by conditions sections, merged into the_dict, and have the
 1244   # intended impact on contained dicts.
 1245   #
 1246   # This arrangement means that a "conditions" section containing a "variables"
 1247   # section will only have those variables effective in subdicts, not in
 1248   # the_dict.  The workaround is to put a "conditions" section within a
 1249   # "variables" section.  For example:
 1250   #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
 1251   #    'defines':    ['<(define)'],
 1252   #    'my_subdict': {'defines': ['<(define)']}},
 1253   # will not result in "IS_MAC" being appended to the "defines" list in the
 1254   # current scope but would result in it being appended to the "defines" list
 1255   # within "my_subdict".  By comparison:
 1256   #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
 1257   #    'defines':    ['<(define)'],
 1258   #    'my_subdict': {'defines': ['<(define)']}},
 1259   # will append "IS_MAC" to both "defines" lists.
 1260 
 1261   # Evaluate conditions sections, allowing variable expansions within them
 1262   # as well as nested conditionals.  This will process a 'conditions' or
 1263   # 'target_conditions' section, perform appropriate merging and recursive
 1264   # conditional and variable processing, and then remove the conditions section
 1265   # from the_dict if it is present.
 1266   ProcessConditionsInDict(the_dict, phase, variables, build_file)
 1267 
 1268   # Conditional processing may have resulted in changes to automatics or the
 1269   # variables dict.  Reload.
 1270   variables = variables_in.copy()
 1271   LoadAutomaticVariablesFromDict(variables, the_dict)
 1272   LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
 1273 
 1274   # Recurse into child dicts, or process child lists which may result in
 1275   # further recursion into descendant dicts.
 1276   for key, value in the_dict.iteritems():
 1277     # Skip "variables" and string values, which were already processed if
 1278     # present.
 1279     if key == 'variables' or type(value) is str:
 1280       continue
 1281     if type(value) is dict:
 1282       # Pass a copy of the variables dict so that subdicts can't influence
 1283       # parents.
 1284       ProcessVariablesAndConditionsInDict(value, phase, variables,
 1285                                           build_file, key)
 1286     elif type(value) is list:
 1287       # The list itself can't influence the variables dict, and
 1288       # ProcessVariablesAndConditionsInList will make copies of the variables
 1289       # dict if it needs to pass it to something that can influence it.  No
 1290       # copy is necessary here.
 1291       ProcessVariablesAndConditionsInList(value, phase, variables,
 1292                                           build_file)
 1293     elif type(value) is not int:
 1294       raise TypeError('Unknown type ' + value.__class__.__name__ + \
 1295                       ' for ' + key)
 1296 
 1297 
 1298 def ProcessVariablesAndConditionsInList(the_list, phase, variables,
 1299                                         build_file):
 1300   # Iterate using an index so that new values can be assigned into the_list.
 1301   index = 0
 1302   while index < len(the_list):
 1303     item = the_list[index]
 1304     if type(item) is dict:
 1305       # Make a copy of the variables dict so that it won't influence anything
 1306       # outside of its own scope.
 1307       ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
 1308     elif type(item) is list:
 1309       ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
 1310     elif type(item) is str:
 1311       expanded = ExpandVariables(item, phase, variables, build_file)
 1312       if type(expanded) in (str, int):
 1313         the_list[index] = expanded
 1314       elif type(expanded) is list:
 1315         the_list[index:index+1] = expanded
 1316         index += len(expanded)
 1317 
 1318         # index now identifies the next item to examine.  Continue right now
 1319         # without falling into the index increment below.
 1320         continue
 1321       else:
 1322         raise ValueError(
 1323               'Variable expansion in this context permits strings, ints, ' + \
 1324               'and lists only, found ' + expanded.__class__.__name__ + \
 1325               ' at index ' + str(index))
 1326     elif type(item) is not int:
 1327       raise TypeError('Unknown type ' + item.__class__.__name__ + \
 1328                       ' at index ' + str(index))
 1329     index = index + 1
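        # Illustrative example (not part of the original source): when a
        # string item expands to a list, the result is spliced in place.
        # Assuming |variables| maps 'extra' to ['b.cc', 'c.cc'],
        #   ['a.cc', '<@(extra)', 'd.cc']
        # becomes
        #   ['a.cc', 'b.cc', 'c.cc', 'd.cc'].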
 1330 
 1331 
 1332 def BuildTargetsDict(data):
 1333   """Builds a dict mapping fully-qualified target names to their target dicts.
 1334 
 1335   |data| is a dict keyed by the pathnames of loaded build files, relative to
 1336   the current directory.  Values in |data| are build file contents.  For each
 1337   |data| value with a "targets" key, the value of the "targets" key is taken
 1338   as a list containing target dicts.  Each target's fully-qualified name is
 1339   constructed from the pathname of the build file (|data| key) and its
 1340   "target_name" property.  These fully-qualified names are used as the keys
 1341   in the returned dict.  These keys provide access to the target dicts,
 1342   the dicts in the "targets" lists.
 1343   """
 1344 
 1345   targets = {}
 1346   for build_file in data['target_build_files']:
 1347     for target in data[build_file].get('targets', []):
 1348       target_name = gyp.common.QualifiedTarget(build_file,
 1349                                                target['target_name'],
 1350                                                target['toolset'])
 1351       if target_name in targets:
 1352         raise GypError('Duplicate target definitions for ' + target_name)
 1353       targets[target_name] = target
 1354 
 1355   return targets
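        # Illustrative example (not part of the original source; assumes the
        # 'path/to/file.gyp:target_name#toolset' form produced by
        # gyp.common.QualifiedTarget): a target 'foo' with toolset 'target'
        # defined in base/base.gyp would be keyed as
        #   'base/base.gyp:foo#target'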
 1356 
 1357 
 1358 def QualifyDependencies(targets):
 1359   """Make dependency links fully-qualified relative to the current directory.
 1360 
 1361   |targets| is a dict mapping fully-qualified target names to their target
 1362   dicts.  For each target in this dict, keys known to contain dependency
 1363   links are examined, and any dependencies referenced will be rewritten
 1364   so that they are fully-qualified and relative to the current directory.
 1365   All rewritten dependencies are suitable for use as keys to |targets| or a
 1366   similar dict.
 1367   """
 1368 
 1369   all_dependency_sections = [dep + op
 1370                              for dep in dependency_sections
 1371                              for op in ('', '!', '/')]
 1372 
 1373   for target, target_dict in targets.iteritems():
 1374     target_build_file = gyp.common.BuildFile(target)
 1375     toolset = target_dict['toolset']
 1376     for dependency_key in all_dependency_sections:
 1377       dependencies = target_dict.get(dependency_key, [])
 1378       for index in xrange(0, len(dependencies)):
 1379         dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
 1380             target_build_file, dependencies[index], toolset)
 1381         if not multiple_toolsets:
 1382           # Ignore toolset specification in the dependency if it is specified.
 1383           dep_toolset = toolset
 1384         dependency = gyp.common.QualifiedTarget(dep_file,
 1385                                                 dep_target,
 1386                                                 dep_toolset)
 1387         dependencies[index] = dependency
 1388 
 1389         # Make sure anything appearing in a list other than "dependencies" also
 1390         # appears in the "dependencies" list.
 1391         if dependency_key != 'dependencies' and \
 1392            dependency not in target_dict['dependencies']:
 1393           raise GypError('Found ' + dependency + ' in ' + dependency_key +
 1394                          ' of ' + target + ', but not in dependencies')
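        # Illustrative example (not part of the original source): inside
        # chrome/chrome.gyp, a dependency written as '../base/base.gyp:base'
        # would be rewritten to the fully-qualified 'base/base.gyp:base#target'
        # (assuming a single toolset named 'target').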
 1395 
 1396 
 1397 def ExpandWildcardDependencies(targets, data):
 1398   """Expands dependencies specified as build_file:*.
 1399 
 1400   For each target in |targets|, examines sections containing links to other
 1401   targets.  If any such section contains a link of the form build_file:*, it
 1402   is taken as a wildcard link, and is expanded to list each target in
 1403   build_file.  The |data| dict provides access to build file dicts.
 1404 
 1405   Any target that does not wish to be included by wildcard can provide an
 1406   optional "suppress_wildcard" key in its target dict.  When present and
 1407   true, a wildcard dependency link will not include such targets.
 1408 
 1409   All dependency names, including the keys to |targets| and the values in each
 1410   dependency list, must be qualified when this function is called.
 1411   """
 1412 
 1413   for target, target_dict in targets.iteritems():
 1414     toolset = target_dict['toolset']
 1415     target_build_file = gyp.common.BuildFile(target)
 1416     for dependency_key in dependency_sections:
 1417       dependencies = target_dict.get(dependency_key, [])
 1418 
 1419       # Loop this way instead of "for dependency in" or "for index in xrange"
 1420       # because the dependencies list will be modified within the loop body.
 1421       index = 0
 1422       while index < len(dependencies):
 1423         (dependency_build_file, dependency_target, dependency_toolset) = \
 1424             gyp.common.ParseQualifiedTarget(dependencies[index])
 1425         if dependency_target != '*' and dependency_toolset != '*':
 1426           # Not a wildcard.  Keep it moving.
 1427           index = index + 1
 1428           continue
 1429 
 1430         if dependency_build_file == target_build_file:
 1431           # It's an error for a target to depend on all other targets in
 1432           # the same file, because a target cannot depend on itself.
 1433           raise GypError('Found wildcard in ' + dependency_key + ' of ' +
 1434                          target + ' referring to same build file')
 1435 
 1436         # Take the wildcard out and adjust the index so that the next
 1437         # dependency in the list will be processed the next time through the
 1438         # loop.
 1439         del dependencies[index]
 1440         index = index - 1
 1441 
 1442         # Loop through the targets in the other build file, adding them to
 1443         # this target's list of dependencies in place of the removed
 1444         # wildcard.
 1445         dependency_target_dicts = data[dependency_build_file]['targets']
 1446         for dependency_target_dict in dependency_target_dicts:
 1447           if int(dependency_target_dict.get('suppress_wildcard', False)):
 1448             continue
 1449           dependency_target_name = dependency_target_dict['target_name']
 1450           if (dependency_target != '*' and
 1451               dependency_target != dependency_target_name):
 1452             continue
 1453           dependency_target_toolset = dependency_target_dict['toolset']
 1454           if (dependency_toolset != '*' and
 1455               dependency_toolset != dependency_target_toolset):
 1456             continue
 1457           dependency = gyp.common.QualifiedTarget(dependency_build_file,
 1458                                                   dependency_target_name,
 1459                                                   dependency_target_toolset)
 1460           index = index + 1
 1461           dependencies.insert(index, dependency)
 1462 
 1463         index = index + 1
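        # Illustrative example (not part of the original source): a dependency
        # 'base/base.gyp:*#target' is replaced in place by one qualified entry
        # per target defined in base/base.gyp (skipping any that set
        # 'suppress_wildcard'), e.g.
        #   ['base/base.gyp:base#target', 'base/base.gyp:base_i18n#target']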
 1464 
 1465 
 1466 def Unify(l):
 1467   """Removes duplicate elements from l, keeping the first element."""
 1468   seen = {}
 1469   return [seen.setdefault(e, e) for e in l if e not in seen]
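        # Illustrative example (not part of the original source):
        #   Unify(['a', 'b', 'a', 'c'])  =>  ['a', 'b', 'c']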
 1470 
 1471 
 1472 def RemoveDuplicateDependencies(targets):
 1473   """Makes sure every dependency appears only once in all targets' dependency
 1474   lists."""
 1475   for target_name, target_dict in targets.iteritems():
 1476     for dependency_key in dependency_sections:
 1477       dependencies = target_dict.get(dependency_key, [])
 1478       if dependencies:
 1479         target_dict[dependency_key] = Unify(dependencies)
 1480 
 1481 
 1482 def Filter(l, item):
 1483   """Removes item from l."""
 1484   res = {}
 1485   return [res.setdefault(e, e) for e in l if e != item]
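        # Illustrative example (not part of the original source):
        #   Filter(['a', 'b', 'a', 'c'], 'a')  =>  ['b', 'c']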
 1486 
 1487 
 1488 def RemoveSelfDependencies(targets):
 1489   """Remove self dependencies from targets that have the prune_self_dependency
 1490   variable set."""
 1491   for target_name, target_dict in targets.iteritems():
 1492     for dependency_key in dependency_sections:
 1493       dependencies = target_dict.get(dependency_key, [])
 1494       if dependencies:
 1495         for t in dependencies:
 1496           if t == target_name:
 1497             if targets[t].get('variables', {}).get('prune_self_dependency', 0):
 1498               target_dict[dependency_key] = Filter(dependencies, target_name)
 1499 
 1500 
 1501 def RemoveLinkDependenciesFromNoneTargets(targets):
 1502   """Remove dependencies having the 'link_dependency' attribute from the 'none'
 1503   targets."""
 1504   for target_name, target_dict in targets.iteritems():
 1505     for dependency_key in dependency_sections:
 1506       dependencies = target_dict.get(dependency_key, [])
 1507       if dependencies:
 1508         for t in dependencies:
 1509           if target_dict.get('type', None) == 'none':
 1510             if targets[t].get('variables', {}).get('link_dependency', 0):
 1511               target_dict[dependency_key] = \
 1512                   Filter(target_dict[dependency_key], t)
 1513 
 1514 
 1515 class DependencyGraphNode(object):
 1516   """A node in a dependency graph, wrapping the object that it represents.
 1517 
 1518   Attributes:
 1519     ref: A reference to an object that this DependencyGraphNode represents.
 1520     dependencies: List of DependencyGraphNodes on which this one depends.
 1521     dependents: List of DependencyGraphNodes that depend on this one.
 1522   """
 1523 
 1524   class CircularException(GypError):
 1525     pass
 1526 
 1527   def __init__(self, ref):
 1528     self.ref = ref
 1529     self.dependencies = []
 1530     self.dependents = []
 1531 
 1532   def __repr__(self):
 1533     return '<DependencyGraphNode: %r>' % self.ref
 1534 
 1535   def FlattenToList(self):
 1536     # flat_list is the sorted list of dependencies - actually, the list items
 1537     # are the "ref" attributes of DependencyGraphNodes.  Every target will
 1538     # appear in flat_list after all of its dependencies, and before all of its
 1539     # dependents.
 1540     flat_list = OrderedSet()
 1541 
 1542     # in_degree_zeros is the set of DependencyGraphNodes that have no
 1543     # dependencies not in flat_list.  Initially, it is a copy of the children
 1544     # of this node, because when the graph was built, nodes with no
 1545     # dependencies were made implicit dependents of the root node.
 1546     in_degree_zeros = set(self.dependents[:])
 1547 
 1548     while in_degree_zeros:
 1549       # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
 1550       # can be appended to flat_list.  Take these nodes out of in_degree_zeros
 1551       # as work progresses, so that the next node to process can simply be
 1552       # popped from the set.
 1553       node = in_degree_zeros.pop()
 1554       flat_list.add(node.ref)
 1555 
 1556       # Look at dependents of the node just added to flat_list.  Some of them
 1557       # may now belong in in_degree_zeros.
 1558       for node_dependent in node.dependents:
 1559         is_in_degree_zero = True
 1560         # TODO: We want to check through the
 1561         # node_dependent.dependencies list but if it's long and we
 1562         # always start at the beginning, then we get O(n^2) behaviour.
 1563         for node_dependent_dependency in node_dependent.dependencies:
 1564           if node_dependent_dependency.ref not in flat_list:
 1565             # The dependent has one or more dependencies not in flat_list.  There
 1566             # will be more chances to add it to flat_list when examining
 1567             # it again as a dependent of those other dependencies, provided
 1568             # that there are no cycles.
 1569             is_in_degree_zero = False
 1570             break
 1571 
 1572         if is_in_degree_zero:
 1573           # All of the dependent's dependencies are already in flat_list.  Add
 1574           # it to in_degree_zeros where it will be processed in a future
 1575           # iteration of the outer loop.
 1576           in_degree_zeros.add(node_dependent)
 1577 
 1578     return list(flat_list)
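          # Illustrative example (not part of the original source): for a
          # graph in which 'app' depends on 'lib' and 'lib' depends on 'base',
          # calling FlattenToList on the root node yields
          #   ['base', 'lib', 'app']
          # i.e. every ref appears after all of its dependencies and before
          # all of its dependents.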
 1579 
 1580   def FindCycles(self):
 1581     """
 1582     Returns a list of cycles in the graph, where each cycle is its own list.
 1583     """
 1584     results = []
 1585     visited = set()
 1586 
 1587     def Visit(node, path):
 1588       for child in node.dependents:
 1589         if child in path:
 1590           results.append([child] + path[:path.index(child) + 1])
 1591         elif child not in visited:
 1592           visited.add(child)
 1593           Visit(child, [child] + path)
 1594 
 1595     visited.add(self)
 1596     Visit(self, [self])
 1597 
 1598     return results
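          # Illustrative example (not part of the original source): if node
          # A's dependents include B and B's dependents include A, calling
          # A.FindCycles() returns [[A, B, A]]; each cycle is reported with
          # its starting node at both ends.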
 1599 
 1600   def DirectDependencies(self, dependencies=None):
 1601     """Returns a list of just direct dependencies."""
 1602     if dependencies is None:
 1603       dependencies = []
 1604 
 1605     for dependency in self.dependencies:
 1606       # Check for None, corresponding to the root node.
 1607       if dependency.ref is not None and dependency.ref not in dependencies:
 1608         dependencies.append(dependency.ref)
 1609 
 1610     return dependencies
 1611 
 1612   def _AddImportedDependencies(self, targets, dependencies=None):
 1613     """Given a list of direct dependencies, adds indirect dependencies that
 1614     other dependencies have declared to export their settings.
 1615 
 1616     This method does not operate on self.  Rather, it operates on the list
 1617     of dependencies in the |dependencies| argument.  For each dependency in
 1618     that list, if any declares that it exports the settings of one of its
 1619     own dependencies, those dependencies whose settings are "passed through"
 1620     are added to the list.  As new items are added to the list, they too will
 1621     be processed, so it is possible to import settings through multiple levels
 1622     of dependencies.
 1623 
 1624     This method is not terribly useful on its own; it depends on being
 1625     "primed" with a list of direct dependencies such as one provided by
 1626     DirectDependencies.  DirectAndImportedDependencies is intended to be the
 1627     public entry point.
 1628     """
 1629 
 1630     if dependencies is None:
 1631       dependencies = []
 1632 
 1633     index = 0
 1634     while index < len(dependencies):
 1635       dependency = dependencies[index]
 1636       dependency_dict = targets[dependency]
 1637       # Add any dependencies whose settings should be imported to the list
 1638       # if not already present.  Newly-added items will be checked for
 1639       # their own imports when the list iteration reaches them.
 1640       # Rather than simply appending new items, insert them after the
 1641       # dependency that exported them.  This is done to more closely match
 1642       # the depth-first method used by DeepDependencies.
 1643       add_index = 1
 1644       for imported_dependency in \
 1645           dependency_dict.get('export_dependent_settings', []):
 1646         if imported_dependency not in dependencies:
 1647           dependencies.insert(index + add_index, imported_dependency)
 1648           add_index = add_index + 1
 1649       index = index + 1
 1650 
 1651     return dependencies
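          # Illustrative example (not part of the original source): if the
          # primed list is ['a'] and target 'a' declares
          # 'export_dependent_settings': ['b'], the returned list is
          # ['a', 'b']; 'b' is inserted directly after 'a' and is itself
          # scanned for further exports.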
 1652 
 1653   def DirectAndImportedDependencies(self, targets, dependencies=None):
 1654     """Returns a list of a target's direct dependencies plus all indirect
 1655     dependencies whose settings a direct dependency has advertised (via
 1656     "export_dependent_settings") should be exported through it.
 1657     """
 1658 
 1659     dependencies = self.DirectDependencies(dependencies)
 1660     return self._AddImportedDependencies(targets, dependencies)
 1661 
 1662   def DeepDependencies(self, dependencies=None):
 1663     """Returns an OrderedSet of all of a target's dependencies, recursively."""
 1664     if dependencies is None:
 1665       # Using a list to get ordered output and a set to do fast "is it
 1666       # already added" checks.
 1667       dependencies = OrderedSet()
 1668 
 1669     for dependency in self.dependencies:
 1670       # Check for None, corresponding to the root node.
 1671       if dependency.ref is None:
 1672         continue
 1673       if dependency.ref not in dependencies:
 1674         dependency.DeepDependencies(dependencies)
 1675         dependencies.add(dependency.ref)
 1676 
 1677     return dependencies
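          # Illustrative example (not part of the original source): if 'app'
          # depends on 'lib' and 'lib' depends on 'base', DeepDependencies on
          # the 'app' node returns an OrderedSet containing 'base' then 'lib',
          # dependencies before dependents.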
 1678 
 1679   def _LinkDependenciesInternal(self, targets, include_shared_libraries,
 1680                                 dependencies=None, initial=True):
 1681     """Returns an OrderedSet of dependency targets that are linked
 1682     into this target.
 1683 
 1684     This function has a split personality, depending on the setting of
 1685     |initial|.  Outside callers should always leave |initial| at its default
 1686     setting.
 1687 
 1688     When adding a target to the list of dependencies, this function will
 1689     recurse into itself with |initial| set to False, to collect dependencies
 1690     that are linked into the linkable target for which the list is being built.
 1691 
 1692     If |include_shared_libraries| is False, the resulting dependencies will not
 1693     include shared_library targets that are linked into this target.
 1694     """
 1695     if dependencies is None:
 1696       # Using a list to get ordered output and a set to do fast "is it
 1697       # already added" checks.
 1698       dependencies = OrderedSet()
 1699 
 1700     # Check for None, corresponding to the root node.
 1701     if self.ref is None:
 1702       return dependencies
 1703 
 1704     # It's kind of sucky that |targets| has to be passed into this function,
 1705     # but that's presently the easiest way to access the target dicts so that
 1706     # this function can find target types.
 1707 
 1708     if 'target_name' not in targets[self.ref]:
 1709       raise GypError("Missing 'target_name' field in target.")
 1710 
 1711     if 'type' not in targets[self.ref]:
 1712       raise GypError("Missing 'type' field in target %s" %
 1713                      targets[self.ref]['target_name'])
 1714 
 1715     target_type = targets[self.ref]['type']
 1716 
 1717     is_linkable = target_type in linkable_types
 1718 
 1719     if initial and not is_linkable:
 1720       # If this is the first target being examined and it's not linkable,
 1721       # return an empty list of link dependencies, because the link
 1722       # dependencies are intended to apply to the target itself (initial is
 1723       # True) and this target won't be linked.
 1724       return dependencies
 1725 
 1726     # Don't traverse 'none' targets if explicitly excluded.
 1727     if (target_type == 'none' and
 1728         not targets[self.ref].get('dependencies_traverse', True)):
 1729       dependencies.add(self.ref)
 1730       return dependencies
 1731 
 1732     # Executables, mac kernel extensions and loadable modules are already fully
 1733     # and finally linked.  Nothing else can be a link dependency of them; there
 1734     # can only be dependencies in the sense that a dependent target might run
 1735     # an executable or load the loadable_module.
 1736     if not initial and target_type in ('executable', 'loadable_module',
 1737                                        'mac_kernel_extension'):
 1738       return dependencies
 1739 
 1740     # Shared libraries are already fully linked.  They should only be included
 1741     # in |dependencies| when adjusting static library dependencies (in order to
 1742     # link against the shared_library's import lib), but should not be included
 1743     # in |dependencies| when propagating link_settings.
 1744     # The |include_shared_libraries| flag controls which of these two cases we
 1745     # are handling.
 1746     if (not initial and target_type == 'shared_library' and
 1747         not include_shared_libraries):
 1748       return dependencies
 1749 
 1750     # Add this target to the list of link dependencies.
 1751     if self.ref not in dependencies:
 1752       dependencies.add(self.ref)
 1753       if initial or not is_linkable:
 1754         # If this is a subsequent target and it's linkable, don't look any
 1755         # further for linkable dependencies, as they'll already be linked into
 1756         # this linkable target.  Always look at dependencies of the initial
 1757         # target, and always look at dependencies of non-linkables.
 1758         for dependency in self.dependencies:
 1759           dependency._LinkDependenciesInternal(targets,
 1760                                                include_shared_libraries,
 1761                                                dependencies, False)
 1762 
 1763     return dependencies
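          # Illustrative example (not part of the original source): for an
          # executable 'app' that depends on static_library 'lib', which in
          # turn depends on static_library 'base', DependenciesToLinkAgainst
          # returns an OrderedSet containing 'app', 'lib', and 'base': the
          # initial target itself plus everything linked into it.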
 1764 
 1765   def DependenciesForLinkSettings(self, targets):
 1766     """
 1767     Returns a list of dependency targets whose link_settings should be merged
 1768     into this target.
 1769     """
 1770 
 1771     # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
 1772     # link_settings are propagated.  So for now, we will allow it, unless the
 1773     # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
 1774     # False.  Once chrome is fixed, we can remove this flag.
 1775     include_shared_libraries = \
 1776         targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
 1777     return self._LinkDependenciesInternal(targets, include_shared_libraries)
 1778 
 1779   def DependenciesToLinkAgainst(self, targets):
 1780     """
 1781     Returns a list of dependency targets that are linked into this target.
 1782     """
 1783     return self._LinkDependenciesInternal(targets, True)
 1784 
 1785 
 1786 def BuildDependencyList(targets):
 1787   # Create a DependencyGraphNode for each target.  Put it into a dict for easy
 1788   # access.
 1789   dependency_nodes = {}
 1790   for target, spec in targets.iteritems():
 1791     if target not in dependency_nodes:
 1792       dependency_nodes[target] = DependencyGraphNode(target)
 1793 
 1794   # Set up the dependency links.  Targets that have no dependencies are treated
 1795   # as dependent on root_node.
 1796   root_node = DependencyGraphNode(None)
 1797   for target, spec in targets.iteritems():
 1798     target_node = dependency_nodes[target]
 1799     target_build_file = gyp.common.BuildFile(target)
 1800     dependencies = spec.get('dependencies')
 1801     if not dependencies:
 1802       target_node.dependencies = [root_node]
 1803       root_node.dependents.append(target_node)
 1804     else:
 1805       for dependency in dependencies:
 1806         dependency_node = dependency_nodes.get(dependency)
 1807         if not dependency_node:
 1808           raise GypError("Dependency '%s' not found while "
 1809                          "trying to load target %s" % (dependency, target))
 1810         target_node.dependencies.append(dependency_node)
 1811         dependency_node.dependents.append(target_node)
 1812 
 1813   flat_list = root_node.FlattenToList()
 1814 
 1815   # If there's anything left unvisited, there must be a circular dependency
 1816   # (cycle).
 1817   if len(flat_list) != len(targets):
 1818     if not root_node.dependents:
 1819       # If all targets have dependencies, add the first target as a dependent
 1820       # of root_node so that the cycle can be discovered from root_node.
 1821       target = targets.keys()[0]
 1822       target_node = dependency_nodes[target]
 1823       target_node.dependencies.append(root_node)
 1824       root_node.dependents.append(target_node)
 1825 
 1826     cycles = []
 1827     for cycle in root_node.FindCycles():
 1828       paths = [node.ref for node in cycle]
 1829       cycles.append('Cycle: %s' % ' -> '.join(paths))
 1830     raise DependencyGraphNode.CircularException(
 1831         'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
 1832 
 1833   return [dependency_nodes, flat_list]
 1834 
 1835 
 1836 def VerifyNoGYPFileCircularDependencies(targets):
 1837   # Create a DependencyGraphNode for each gyp file containing a target.  Put
 1838   # it into a dict for easy access.
 1839   dependency_nodes = {}
 1840   for target in targets.iterkeys():
 1841     build_file = gyp.common.BuildFile(target)
 1842     if build_file not in dependency_nodes:
 1843       dependency_nodes[build_file] = DependencyGraphNode(build_file)
 1844 
 1845   # Set up the dependency links.
 1846   for target, spec in targets.iteritems():
 1847     build_file = gyp.common.BuildFile(target)
 1848     build_file_node = dependency_nodes[build_file]
 1849     target_dependencies = spec.get('dependencies', [])
 1850     for dependency in target_dependencies:
 1851       try:
 1852         dependency_build_file = gyp.common.BuildFile(dependency)
 1853       except GypError, e:
 1854         gyp.common.ExceptionAppend(
 1855             e, 'while computing dependencies of .gyp file %s' % build_file)
 1856         raise
 1857 
 1858       if dependency_build_file == build_file:
 1859         # A .gyp file is allowed to refer back to itself.
 1860         continue
 1861       dependency_node = dependency_nodes.get(dependency_build_file)
 1862       if not dependency_node:
 1863         raise GypError("Dependency '%s' not found" % dependency_build_file)
 1864       if dependency_node not in build_file_node.dependencies:
 1865         build_file_node.dependencies.append(dependency_node)
 1866         dependency_node.dependents.append(build_file_node)
 1867 
 1869   # Files that have no dependencies are treated as dependent on root_node.
 1870   root_node = DependencyGraphNode(None)
 1871   for build_file_node in dependency_nodes.itervalues():
 1872     if len(build_file_node.dependencies) == 0:
 1873       build_file_node.dependencies.append(root_node)
 1874       root_node.dependents.append(build_file_node)
 1875 
 1876   flat_list = root_node.FlattenToList()
 1877 
 1878   # If there's anything left unvisited, there must be a circular dependency
 1879   # (cycle).
 1880   if len(flat_list) != len(dependency_nodes):
 1881     if not root_node.dependents:
 1882       # If all files have dependencies, add the first file as a dependent
 1883       # of root_node so that the cycle can be discovered from root_node.
 1884       file_node = dependency_nodes.values()[0]
 1885       file_node.dependencies.append(root_node)
 1886       root_node.dependents.append(file_node)
 1887     cycles = []
 1888     for cycle in root_node.FindCycles():
 1889       paths = [node.ref for node in cycle]
 1890       cycles.append('Cycle: %s' % ' -> '.join(paths))
 1891     raise DependencyGraphNode.CircularException(
 1892         'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
 1893 
 1894 
 1895 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
 1896   # key should be one of all_dependent_settings, direct_dependent_settings,
 1897   # or link_settings.
 1898 
 1899   for target in flat_list:
 1900     target_dict = targets[target]
 1901     build_file = gyp.common.BuildFile(target)
 1902 
 1903     if key == 'all_dependent_settings':
 1904       dependencies = dependency_nodes[target].DeepDependencies()
 1905     elif key == 'direct_dependent_settings':
 1906       dependencies = \
 1907           dependency_nodes[target].DirectAndImportedDependencies(targets)
 1908     elif key == 'link_settings':
 1909       dependencies = \
 1910           dependency_nodes[target].DependenciesForLinkSettings(targets)
 1911     else:
 1912       raise GypError("DoDependentSettings doesn't know how to determine "
 1913                      "dependencies for " + key)
 1914 
 1915     for dependency in dependencies:
 1916       dependency_dict = targets[dependency]
 1917       if key not in dependency_dict:
 1918         continue
 1919       dependency_build_file = gyp.common.BuildFile(dependency)
 1920       MergeDicts(target_dict, dependency_dict[key],
 1921                  build_file, dependency_build_file)
 1922 
 1923 
 1924 def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
 1925                                     sort_dependencies):
 1926   # Recompute target "dependencies" properties.  For each static library
 1927   # target, remove "dependencies" entries referring to other static libraries,
 1928   # unless the dependency has the "hard_dependency" attribute set.  For each
 1929   # linkable target, add a "dependencies" entry referring to all of the
 1930   # target's computed list of link dependencies (including static libraries)
 1931   # if no such entry is already present.
 1932   for target in flat_list:
 1933     target_dict = targets[target]
 1934     target_type = target_dict['type']
 1935 
 1936     if target_type == 'static_library':
 1937       if 'dependencies' not in target_dict:
 1938         continue
 1939 
 1940       target_dict['dependencies_original'] = target_dict.get(
 1941           'dependencies', [])[:]
 1942 
 1943       # A static library should not depend on another static library unless
 1944       # the dependency relationship is "hard," which should only be done when
 1945       # a dependent relies on some side effect other than just the build
 1946       # product, like a rule or action output. Further, if a target has a
 1947       # non-hard dependency, but that dependency exports a hard dependency,
 1948       # the non-hard dependency can safely be removed, but the exported hard
 1949       # dependency must be added to the target to keep the same dependency
 1950       # ordering.
 1951       dependencies = \
 1952           dependency_nodes[target].DirectAndImportedDependencies(targets)
 1953       index = 0
 1954       while index < len(dependencies):
 1955         dependency = dependencies[index]
 1956         dependency_dict = targets[dependency]
 1957 
 1958         # Remove every non-hard static library dependency and remove every
 1959         # non-static library dependency that isn't a direct dependency.
 1960         if (dependency_dict['type'] == 'static_library' and \
 1961             not dependency_dict.get('hard_dependency', False)) or \
 1962            (dependency_dict['type'] != 'static_library' and \
 1963             not dependency in target_dict['dependencies']):
 1964           # Take the dependency out of the list, and don't increment index
 1965           # because the next dependency to analyze will shift into the index
 1966           # formerly occupied by the one being removed.
 1967           del dependencies[index]
 1968         else:
 1969           index = index + 1
 1970 
 1971       # Update the dependencies. If the dependencies list is empty, it's not
 1972       # needed, so unhook it.
 1973       if len(dependencies) > 0:
 1974         target_dict['dependencies'] = dependencies
 1975       else:
 1976         del target_dict['dependencies']
 1977 
 1978     elif target_type in linkable_types:
 1979       # Get a list of dependency targets that should be linked into this
 1980       # target.  Add them to the dependencies list if they're not already
 1981       # present.
 1982 
 1983       link_dependencies = \
 1984           dependency_nodes[target].DependenciesToLinkAgainst(targets)
 1985       for dependency in link_dependencies:
 1986         if dependency == target:
 1987           continue
 1988         if 'dependencies' not in target_dict:
 1989           target_dict['dependencies'] = []
 1990         if dependency not in target_dict['dependencies']:
 1991           target_dict['dependencies'].append(dependency)
 1992       # Sort the dependencies list in the order from dependents to dependencies.
 1993       # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
 1994       # Note: flat_list is already sorted in the order from dependencies to
 1995       # dependents.
 1996       if sort_dependencies and 'dependencies' in target_dict:
 1997         target_dict['dependencies'] = [dep for dep in reversed(flat_list)
 1998                                        if dep in target_dict['dependencies']]
 1999 
 2000 
 2001 # Initialize this here to speed up MakePathRelative.
 2002 exception_re = re.compile(r'''["']?[-/$<>^]''')
 2003 
 2004 
 2005 def MakePathRelative(to_file, fro_file, item):
 2006   # If item is a relative path, it's relative to the build file dict that it's
 2007   # coming from.  Fix it up to make it relative to the build file dict that
 2008   # it's going into.
 2009   # Exception: any |item| that begins with these special characters is
 2010   # returned without modification.
 2011   #   /   Used when a path is already absolute (shortcut optimization;
 2012   #       such paths would be returned as absolute anyway)
 2013   #   $   Used for build environment variables
 2014   #   -   Used for some build environment flags (such as -lapr-1 in a
 2015   #       "libraries" section)
 2016   #   <   Used for our own variable and command expansions (see ExpandVariables)
 2017   #   >   Used for our own variable and command expansions (see ExpandVariables)
 2018   #   ^   Used for our own variable and command expansions (see ExpandVariables)
 2019   #
 2020   #   "/' Used when a value is quoted.  If these are present, then we
 2021   #       check the second character instead.
 2022   #
 2023   if to_file == fro_file or exception_re.match(item):
 2024     return item
 2025   else:
 2026     # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
 2027     # temporary measure. This should really be addressed by keeping all paths
 2028     # in POSIX until actual project generation.
 2029     ret = os.path.normpath(os.path.join(
 2030         gyp.common.RelativePath(os.path.dirname(fro_file),
 2031                                 os.path.dirname(to_file)),
 2032                                 item)).replace('\\', '/')
 2033     if item[-1] == '/':
 2034       ret += '/'
 2035     return ret
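        # Illustrative example (not part of the original source): an item
        # moving from a dict loaded from base/base.gyp into one from
        # app/app.gyp is rebased between the two directories:
        #   MakePathRelative('app/app.gyp', 'base/base.gyp', 'src/foo.cc')
        #     => '../base/src/foo.cc'
        # while items such as '-lpthread' or '<(var)' are returned untouched.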
 2036 
 2037 def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
 2038   # The Python documentation recommends that objects which do not support
 2039   # hashing set __hash__ to None.  Python library objects follow this rule.
 2040   is_hashable = lambda val: val.__hash__
 2041 
 2042   # If x is hashable, returns whether x is in s. Else returns whether x is in l.
 2043   def is_in_set_or_list(x, s, l):
 2044     if is_hashable(x):
 2045       return x in s
 2046     return x in l
 2047 
 2048   prepend_index = 0
 2049 
 2050   # Make membership testing of hashables in |to| (in particular, strings)
 2051   # faster.
 2052   hashable_to_set = set(x for x in to if is_hashable(x))
 2053   for item in fro:
 2054     singleton = False
 2055     if type(item) in (str, int):
 2056       # The cheap and easy case.
 2057       if is_paths:
 2058         to_item = MakePathRelative(to_file, fro_file, item)
 2059       else:
 2060         to_item = item
 2061 
 2062       if not (type(item) is str and item.startswith('-')):
 2063         # Any string that doesn't begin with a "-" is a singleton - it can
 2064         # only appear once in a list, to be enforced by the list merge append
 2065         # or prepend.
 2066         singleton = True
 2067     elif type(item) is dict:
 2068       # Make a copy of the dictionary, continuing to look for paths to fix.
 2069       # The other intelligent aspects of merge processing won't apply because
 2070       # item is being merged into an empty dict.
 2071       to_item = {}
 2072       MergeDicts(to_item, item, to_file, fro_file)
 2073     elif type(item) is list:
 2074       # Recurse, making a copy of the list.  If the list contains any
 2075       # descendant dicts, path fixing will occur.  Note that here, custom
 2076       # values for is_paths and append are dropped; those are only to be
 2077       # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
 2078       # matter anyway because the new |to_item| list is empty.
 2079       to_item = []
 2080       MergeLists(to_item, item, to_file, fro_file)
 2081     else:
 2082       raise TypeError(
 2083           'Attempt to merge list item of unsupported type ' + \
 2084           item.__class__.__name__)
 2085 
 2086     if append:
 2087       # If appending a singleton that's already in the list, don't append.
 2088       # This ensures that the earliest occurrence of the item will stay put.
 2089       if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
 2090         to.append(to_item)
 2091         if is_hashable(to_item):
 2092           hashable_to_set.add(to_item)
 2093     else:
 2094       # If prepending a singleton that's already in the list, remove the
 2095       # existing instance and proceed with the prepend.  This ensures that the
 2096       # item appears at the earliest possible position in the list.
 2097       while singleton and to_item in to:
 2098         to.remove(to_item)
 2099 
 2100       # Don't just insert everything at index 0.  That would prepend the new
 2101       # items to the list in reverse order, which would be an unwelcome
 2102       # surprise.
 2103       to.insert(prepend_index, to_item)
 2104       if is_hashable(to_item):
 2105         hashable_to_set.add(to_item)
 2106       prepend_index = prepend_index + 1
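        # Illustrative example (not part of the original source), with
        # to_file == fro_file so no path rebasing occurs:
        #   to = ['foo.cc']
        #   MergeLists(to, ['foo.cc', 'bar.cc'], 'a.gyp', 'a.gyp')
        #     => to == ['foo.cc', 'bar.cc']  (the singleton 'foo.cc' keeps its
        #        earliest position and is not duplicated)
        #   to = ['bar.cc', 'foo.cc']
        #   MergeLists(to, ['foo.cc'], 'a.gyp', 'a.gyp', append=False)
        #     => to == ['foo.cc', 'bar.cc']  (the prepended singleton replaces
        #        the existing instance)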
 2107 
 2108 
 2109 def MergeDicts(to, fro, to_file, fro_file):
 2110   # I wanted to name the parameter "from" but it's a Python keyword...
 2111   for k, v in fro.iteritems():
 2112     # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
 2113     # copy semantics.  Something else may want to merge from the |fro| dict
 2114     # later, and having the same dict ref pointed to twice in the tree isn't
 2115     # what anyone wants considering that the dicts may subsequently be
 2116     # modified.
 2117     if k in to:
 2118       bad_merge = False
 2119       if type(v) in (str, int):
 2120         if type(to[k]) not in (str, int):
 2121           bad_merge = True
 2122       elif type(v) is not type(to[k]):
 2123         bad_merge = True
 2124 
 2125       if bad_merge:
 2126         raise TypeError(
 2127             'Attempt to merge dict value of type ' + v.__class__.__name__ + \
 2128             ' into incompatible type ' + to[k].__class__.__name__ + \
 2129             ' for key ' + k)
 2130     if type(v) in (str, int):
 2131       # Overwrite the existing value, if any.  Cheap and easy.
 2132       is_path = IsPathSection(k)
 2133       if is_path:
 2134         to[k] = MakePathRelative(to_file, fro_file, v)
 2135       else:
 2136         to[k] = v
 2137     elif type(v) is dict:
 2138       # Recurse, guaranteeing copies will be made of objects that require it.
 2139       if k not in to:
 2140         to[k] = {}
 2141       MergeDicts(to[k], v, to_file, fro_file)
 2142     elif type(v) is list:
 2143       # Lists in dicts can be merged with different policies, depending on
 2144       # how the key in the "from" dict (k, the from-key) is written.
 2145       #
 2146       # If the from-key has          ...the to-list will have this action
 2147       # this character appended:...     applied when receiving the from-list:
 2148       #                           =  replace
 2149       #                           +  prepend
 2150       #                           ?  set, only if to-list does not yet exist
 2151       #                      (none)  append
 2152       #
 2153       # This logic is list-specific, but since it relies on the associated
 2154       # dict key, it's checked in this dict-oriented function.
 2155       ext = k[-1]
 2156       append = True
 2157       if ext == '=':
 2158         list_base = k[:-1]
 2159         lists_incompatible = [list_base, list_base + '?']
 2160         to[list_base] = []
 2161       elif ext == '+':
 2162         list_base = k[:-1]
 2163         lists_incompatible = [list_base + '=', list_base + '?']
 2164         append = False
 2165       elif ext == '?':
 2166         list_base = k[:-1]
 2167         lists_incompatible = [list_base, list_base + '=', list_base + '+']
 2168       else:
 2169         list_base = k
 2170         lists_incompatible = [list_base + '=', list_base + '?']
 2171 
 2172       # Some combinations of merge policies appearing together are meaningless.
 2173       # It makes no sense to replace and append simultaneously, for example.  Append
 2174       # and prepend are the only policies that can coexist.
 2175       for list_incompatible in lists_incompatible:
 2176         if list_incompatible in fro:
 2177           raise GypError('Incompatible list policies ' + k + ' and ' +
 2178                          list_incompatible)
 2179 
 2180       if list_base in to:
 2181         if ext == '?':
 2182           # If the key ends in "?", the list will only be merged if it doesn't
 2183           # already exist.
 2184           continue
 2185         elif type(to[list_base]) is not list:
 2186           # This may not have been checked above if merging in a list with an
 2187           # extension character.
 2188           raise TypeError(
 2189               'Attempt to merge dict value of type ' + v.__class__.__name__ + \
 2190               ' into incompatible type ' + to[list_base].__class__.__name__ + \
 2191               ' for key ' + list_base + ' (' + k + ')')
 2192       else:
 2193         to[list_base] = []
 2194 
 2195       # Call MergeLists, which will make copies of objects that require it.
 2196       # MergeLists can recurse back into MergeDicts, although this will be
 2197       # to make copies of dicts (with paths fixed), there will be no
 2198       # subsequent dict "merging" once entering a list because lists are
 2199       # always replaced, appended to, or prepended to.
 2200       is_paths = IsPathSection(list_base)
 2201       MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
 2202     else:
 2203       raise TypeError(
 2204           'Attempt to merge dict value of unsupported type ' + \
 2205           v.__class__.__name__ + ' for key ' + k)
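        # Illustrative example (not part of the original source): the from-key
        # suffix selects the list merge policy.  Starting from
        # to = {'defines': ['A']}:
        #   MergeDicts(to, {'defines':  ['B']}, 'x.gyp', 'x.gyp')  # append
        #     => to['defines'] == ['A', 'B']
        #   MergeDicts(to, {'defines=': ['C']}, 'x.gyp', 'x.gyp')  # replace
        #     => to['defines'] == ['C']
        #   MergeDicts(to, {'defines+': ['D']}, 'x.gyp', 'x.gyp')  # prepend
        #     => to['defines'] == ['D', 'C']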
 2206 
 2207 
 2208 def MergeConfigWithInheritance(new_configuration_dict, build_file,
 2209                                target_dict, configuration, visited):
 2210   # Skip if previously visited.
 2211   if configuration in visited:
 2212     return
 2213 
 2214   # Look at this configuration.
 2215   configuration_dict = target_dict['configurations'][configuration]
 2216 
 2217   # Merge in parents.
 2218   for parent in configuration_dict.get('inherit_from', []):
 2219     MergeConfigWithInheritance(new_configuration_dict, build_file,
 2220                                target_dict, parent, visited + [configuration])
 2221 
 2222   # Merge it into the new config.
 2223   MergeDicts(new_configuration_dict, configuration_dict,
 2224              build_file, build_file)
 2225 
 2226   # Drop abstract.
 2227   if 'abstract' in new_configuration_dict:
 2228     del new_configuration_dict['abstract']
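        # Illustrative example (not part of the original source): with
        #   'configurations': {'Common': {'abstract': 1, 'defines': ['C']},
        #                      'Debug': {'inherit_from': ['Common'],
        #                                'defines': ['D']}}
        # merging 'Debug' first folds in 'Common', yielding defines ['C', 'D'],
        # and the 'abstract' key is dropped from the result.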
 2229 
 2230 
 2231 def SetUpConfigurations(target, target_dict):
 2232   # key_suffixes is a list of key suffixes that might appear on key names.
 2233   # These suffixes are handled in conditional evaluations (for =, +, and ?)
 2234   # and rules/exclude processing (for ! and /).  Keys with these suffixes
 2235   # should be treated the same as keys without.
 2236   key_suffixes = ['=', '+', '?', '!', '/']
 2237 
 2238   build_file = gyp.common.BuildFile(target)
 2239 
 2240   # Provide a single configuration by default if none exists.
 2241   # TODO(mark): Signal an error if default_configurations exists but
 2242   # configurations does not.
 2243   if 'configurations' not in target_dict:
 2244     target_dict['configurations'] = {'Default': {}}
 2245   if 'default_configuration' not in target_dict:
 2246     concrete = [i for (i, config) in target_dict['configurations'].iteritems()
 2247                 if not config.get('abstract')]
 2248     target_dict['default_configuration'] = sorted(concrete)[0]
 2249 
 2250   merged_configurations = {}
 2251   configs = target_dict['configurations']
 2252   for (configuration, old_configuration_dict) in configs.iteritems():
 2253     # Skip abstract configurations (saves work only).
 2254     if old_configuration_dict.get('abstract'):
 2255       continue
 2256     # Configurations inherit (most) settings from the enclosing target scope.
 2257     # Get the inheritance relationship right by making a copy of the target
 2258     # dict.
 2259     new_configuration_dict = {}
 2260     for (key, target_val) in target_dict.iteritems():
 2261       key_ext = key[-1:]
 2262       if key_ext in key_suffixes:
 2263         key_base = key[:-1]
 2264       else:
 2265         key_base = key
 2266       if key_base not in non_configuration_keys:
 2267         new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
 2268 
 2269     # Merge in configuration (with all its parents first).
 2270     MergeConfigWithInheritance(new_configuration_dict, build_file,
 2271                                target_dict, configuration, [])
 2272 
 2273     merged_configurations[configuration] = new_configuration_dict
 2274 
 2275   # Put the new configurations back into the target dict as a configuration.
 2276   for configuration in merged_configurations.keys():
 2277     target_dict['configurations'][configuration] = (
 2278         merged_configurations[configuration])
 2279 
 2280   # Now drop all the abstract ones.
 2281   for configuration in target_dict['configurations'].keys():
 2282     old_configuration_dict = target_dict['configurations'][configuration]
 2283     if old_configuration_dict.get('abstract'):
 2284       del target_dict['configurations'][configuration]
 2285 
 2286   # Now that all of the target's configurations have been built, go through
 2287   # the target dict's keys and remove everything that's been moved into a
 2288   # "configurations" section.
 2289   delete_keys = []
 2290   for key in target_dict:
 2291     key_ext = key[-1:]
 2292     if key_ext in key_suffixes:
 2293       key_base = key[:-1]
 2294     else:
 2295       key_base = key
 2296     if key_base not in non_configuration_keys:
 2297       delete_keys.append(key)
 2298   for key in delete_keys:
 2299     del target_dict[key]
 2300 
 2301   # Check the configurations to see if they contain invalid keys.
 2302   for configuration in target_dict['configurations'].keys():
 2303     configuration_dict = target_dict['configurations'][configuration]
 2304     for key in configuration_dict.keys():
 2305       if key in invalid_configuration_keys:
 2306         raise GypError('%s not allowed in the %s configuration, found in '
 2307                        'target %s' % (key, configuration, target))
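        # Illustrative example (not part of the original source; assumes
        # 'target_name' and 'type' are among non_configuration_keys): a target
        #   {'target_name': 'foo', 'type': 'none', 'defines': ['COMMON'],
        #    'configurations': {'Debug': {'defines': ['DBG']}}}
        # comes out with the configuration-level key folded into each
        # configuration and removed from the target scope:
        #   'configurations': {'Debug': {'defines': ['COMMON', 'DBG']}}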
 2308 
 2309 
 2311 def ProcessListFiltersInDict(name, the_dict):
 2312   """Process regular expression and exclusion-based filters on lists.
 2313 
 2314   An exclusion list is in a dict key named with a trailing "!", like
 2315   "sources!".  Every item in such a list is removed from the associated
 2316   main list, which in this example would be "sources".  Removed items are
 2317   placed into a "sources_excluded" list in the dict.
 2318 
 2319   Regular expression (regex) filters are contained in dict keys named with a
 2320   trailing "/", such as "sources/" to operate on the "sources" list.  Regex
 2321   filters in a dict take the form:
 2322     'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
 2323                   ['include', '_mac\\.cc$'] ],
 2324   The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
 2325   _win.cc.  The second filter then includes all files ending in _mac.cc that
 2326   are now or were once in the "sources" list.  Items matching an "exclude"
 2327   filter are subject to the same processing as would occur if they were listed
 2328   by name in an exclusion list (ending in "!").  Items matching an "include"
 2329   filter are brought back into the main list if previously excluded by an
 2330   exclusion list or exclusion regex filter.  Subsequent matching "exclude"
 2331   patterns can still cause items to be excluded after matching an "include".
 2332   """
 2333 
 2334   # Look through the dictionary for any lists whose keys end in "!" or "/".
 2335   # These are lists that will be treated as exclude lists and regular
 2336   # expression-based exclude/include lists.  Collect the lists that are
 2337   # needed first, looking for the lists that they operate on, and assemble
 2338   # them into |lists|.  This is done in a separate loop up front, because
 2339   # the _included and _excluded keys need to be added to the_dict, and that
 2340   # can't be done while iterating through it.
 2341 
 2342   lists = []
 2343   del_lists = []
 2344   for key, value in the_dict.iteritems():
 2345     operation = key[-1]
 2346     if operation != '!' and operation != '/':
 2347       continue
 2348 
 2349     if type(value) is not list:
 2350       raise ValueError(name + ' key ' + key + ' must be list, not ' + \
 2351                        value.__class__.__name__)
 2352 
 2353     list_key = key[:-1]
 2354     if list_key not in the_dict:
 2355       # This happens when there's a list like "sources!" but no corresponding
 2356       # "sources" list.  Since there's nothing for it to operate on, queue up
 2357       # the "sources!" list for deletion now.
 2358       del_lists.append(key)
 2359       continue
 2360 
 2361     if type(the_dict[list_key]) is not list:
 2362       value = the_dict[list_key]
 2363       raise ValueError(name + ' key ' + list_key + \
 2364                        ' must be list, not ' + \
 2365                        value.__class__.__name__ + ' when applying ' + \
 2366                        {'!': 'exclusion', '/': 'regex'}[operation])
 2367 
 2368     if list_key not in lists:
 2369       lists.append(list_key)
 2370 
 2371   # Delete the lists that are known to be unneeded at this point.
 2372   for del_list in del_lists:
 2373     del the_dict[del_list]
 2374 
 2375   for list_key in lists:
 2376     the_list = the_dict[list_key]
 2377 
 2378     # Initialize the list_actions list, which is parallel to the_list.  Each
 2379     # item in list_actions identifies whether the corresponding item in
 2380     # the_list should be excluded, unconditionally preserved (included), or
 2381     # whether no exclusion or inclusion has been applied.  Items for which
 2382     # no exclusion or inclusion has been applied (yet) have value -1, items
 2383     # excluded have value 0, and items included have value 1.  Includes and
 2384     # excludes override previous actions.  All items in list_actions are
 2385     # initialized to -1 because no excludes or includes have been processed
 2386     # yet.
 2387     list_actions = list((-1,) * len(the_list))
 2388 
 2389     exclude_key = list_key + '!'
 2390     if exclude_key in the_dict:
 2391       for exclude_item in the_dict[exclude_key]:
 2392         for index in xrange(0, len(the_list)):
 2393           if exclude_item == the_list[index]:
 2394             # This item matches the exclude_item, so set its action to 0
 2395             # (exclude).
 2396             list_actions[index] = 0
 2397 
 2398       # The "whatever!" list is no longer needed, dump it.
 2399       del the_dict[exclude_key]
 2400 
 2401     regex_key = list_key + '/'
 2402     if regex_key in the_dict:
 2403       for regex_item in the_dict[regex_key]:
 2404         [action, pattern] = regex_item
 2405         pattern_re = re.compile(pattern)
 2406 
 2407         if action == 'exclude':
 2408           # Matching items will have their action set to 0 (exclude).
 2409           action_value = 0
 2410         elif action == 'include':
 2411           # Matching items will have their action set to 1 (include).
 2412           action_value = 1
 2413         else:
 2414           # This is an action that doesn't make any sense.
 2415           raise ValueError('Unrecognized action ' + action + ' in ' + name + \
 2416                            ' key ' + regex_key)
 2417 
 2418         for index in xrange(0, len(the_list)):
 2419           list_item = the_list[index]
 2420           if list_actions[index] == action_value:
 2421             # Even if the regex matches, nothing will change so continue (regex
 2422             # searches are expensive).
 2423             continue
 2424           if pattern_re.search(list_item):
 2425             # Regular expression match.
 2426             list_actions[index] = action_value
 2427 
 2428       # The "whatever/" list is no longer needed, dump it.
 2429       del the_dict[regex_key]
 2430 
 2431     # Add excluded items to the excluded list.
 2432     #
 2433     # Note that exclude_key ("sources!") is different from excluded_key
 2434     # ("sources_excluded").  The exclude_key list is input and it was already
 2435     # processed and deleted; the excluded_key list is output and it's about
 2436     # to be created.
 2437     excluded_key = list_key + '_excluded'
 2438     if excluded_key in the_dict:
 2439       raise GypError(name + ' key ' + excluded_key +
 2440                      ' must not be present prior '
 2441                      'to applying exclusion/regex filters for ' + list_key)

    excluded_list = []

    # Go backwards through the list_actions list so that as items are deleted,
    # the indices of items that haven't been seen yet don't shift.  That means
    # that things need to be prepended to excluded_list to maintain them in the
    # same order that they existed in the_list.
    for index in xrange(len(list_actions) - 1, -1, -1):
      if list_actions[index] == 0:
        # Dump anything with action 0 (exclude).  Keep anything with action 1
        # (include) or -1 (no include or exclude seen for the item).
        excluded_list.insert(0, the_list[index])
        del the_list[index]

    # If anything was excluded, put the excluded list into the_dict at
    # excluded_key.
    if len(excluded_list) > 0:
      the_dict[excluded_key] = excluded_list
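
    # Illustrative example (hypothetical values): with
    #   the_dict = {'sources': ['a.cc', 'b.cc', 'c.h'],
    #               'sources!': ['b.cc'],
    #               'sources/': [['exclude', r'\.h$']]},
    # this pass leaves
    #   the_dict = {'sources': ['a.cc'], 'sources_excluded': ['b.cc', 'c.h']}.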

  # Now recurse into subdicts and lists that may contain dicts.
  for key, value in the_dict.iteritems():
    if type(value) is dict:
      ProcessListFiltersInDict(key, value)
    elif type(value) is list:
      ProcessListFiltersInList(key, value)


def ProcessListFiltersInList(name, the_list):
  for item in the_list:
    if type(item) is dict:
      ProcessListFiltersInDict(name, item)
    elif type(item) is list:
      ProcessListFiltersInList(name, item)


def ValidateTargetType(target, target_dict):
  """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
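  # Illustrative example (hypothetical type): a target whose 'type' is
  # 'dynamic_library' is rejected, since only the VALID_TARGET_TYPES below
  # are known; 'standalone_static_library' is likewise accepted only on
  # 'static_library' targets.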
  VALID_TARGET_TYPES = ('executable', 'loadable_module',
                        'static_library', 'shared_library',
                        'mac_kernel_extension', 'none')
  target_type = target_dict.get('type', None)
  if target_type not in VALID_TARGET_TYPES:
    raise GypError("Target %s has an invalid target type '%s'.  "
                   "Must be one of %s." %
                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
  if (target_dict.get('standalone_static_library', 0) and
      target_type != 'static_library'):
    raise GypError('Target %s has type %s but standalone_static_library flag is'
                   ' only valid for static_library type.' % (target,
                                                             target_type))


def ValidateSourcesInTarget(target, target_dict, build_file,
                            duplicate_basename_check):
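  # Illustrative example (hypothetical paths): a static_library whose
  # sources include 'foo/util.cc' and 'bar/util.cc' fails this check,
  # because both compiled files share the basename 'util'.  A header such
  # as 'util.h' would not count, since only compiled-file extensions are
  # examined.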
  if not duplicate_basename_check:
    return
  if target_dict.get('type', None) != 'static_library':
    return
  sources = target_dict.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.iteritems():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          target + error + 'libtool on Mac cannot handle that. Use '
          '--no-duplicate-basename-check to disable this validation.')
    raise GypError('Duplicate basenames in sources section, see list above')


def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
  """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

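  # Illustrative example (hypothetical rule): a rule with
  #   {'rule_name': 'protoc', 'extension': 'proto'}
  # in a target whose sources are ['a.proto', 'b.proto', 'c.cc'] receives
  #   rule['rule_sources'] = ['a.proto', 'b.proto'].
  # A leading dot in 'extension' ('.proto') behaves identically.
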
  # Dicts to map between values found in rules' 'rule_name' and 'extension'
  # keys and the rule dicts themselves.
  rule_names = {}
  rule_extensions = {}

  rules = target_dict.get('rules', [])
  for rule in rules:
    # Make sure that there's no conflict among rule names and extensions.
    rule_name = rule['rule_name']
    if rule_name in rule_names:
      raise GypError('rule %s exists in duplicate, target %s' %
                     (rule_name, target))
    rule_names[rule_name] = rule

    rule_extension = rule['extension']
    if rule_extension.startswith('.'):
      rule_extension = rule_extension[1:]
    if rule_extension in rule_extensions:
      raise GypError(('extension %s associated with multiple rules, ' +
                      'target %s rules %s and %s') %
                     (rule_extension, target,
                      rule_extensions[rule_extension]['rule_name'],
                      rule_name))
    rule_extensions[rule_extension] = rule

    # Make sure rule_sources isn't already there.  It's going to be
    # created below if needed.
    if 'rule_sources' in rule:
      raise GypError(
            'rule_sources must not exist in input, target %s rule %s' %
            (target, rule_name))

    rule_sources = []
    source_keys = ['sources']
    source_keys.extend(extra_sources_for_rules)
    for source_key in source_keys:
      for source in target_dict.get(source_key, []):
        (source_root, source_extension) = os.path.splitext(source)
        if source_extension.startswith('.'):
          source_extension = source_extension[1:]
        if source_extension == rule_extension:
          rule_sources.append(source)

    if len(rule_sources) > 0:
      rule['rule_sources'] = rule_sources


def ValidateRunAsInTarget(target, target_dict, build_file):
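  # Illustrative example (hypothetical values) of a 'run_as' dict that
  # passes every check below:
  #   'run_as': {
  #     'action': ['python', 'run_tests.py'],
  #     'working_directory': 'test',
  #     'environment': {'LANG': 'C'},
  #   }
  # Only 'action' is required, and it must be a list; 'working_directory'
  # must be a string and 'environment' a dictionary when present.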
  target_name = target_dict.get('target_name')
  run_as = target_dict.get('run_as')
  if not run_as:
    return
  if type(run_as) is not dict:
    raise GypError("The 'run_as' in target %s from file %s should be a "
                   "dictionary." %
                   (target_name, build_file))
  action = run_as.get('action')
  if not action:
    raise GypError("The 'run_as' in target %s from file %s must have an "
                   "'action' section." %
                   (target_name, build_file))
  if type(action) is not list:
    raise GypError("The 'action' for 'run_as' in target %s from file %s "
                   "must be a list." %
                   (target_name, build_file))
  working_directory = run_as.get('working_directory')
  if working_directory and type(working_directory) is not str:
    raise GypError("The 'working_directory' for 'run_as' in target %s "
                   "in file %s should be a string." %
                   (target_name, build_file))
  environment = run_as.get('environment')
  if environment and type(environment) is not dict:
    raise GypError("The 'environment' for 'run_as' in target %s "
                   "in file %s should be a dictionary." %
                   (target_name, build_file))


def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
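  # Illustrative example (hypothetical values) of an action that passes
  # validation:
  #   {'action_name': 'gen_version',
  #    'inputs': ['version.in'],
  #    'action': ['python', 'gen_version.py']}
  # 'inputs' may be an empty list, but the key must be present.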
  target_name = target_dict.get('target_name')
  actions = target_dict.get('actions', [])
  for action in actions:
    action_name = action.get('action_name')
    if not action_name:
      raise GypError("Anonymous action in target %s.  "
                     "An action must have an 'action_name' field." %
                     target_name)
    inputs = action.get('inputs', None)
    if inputs is None:
      raise GypError('Action in target %s has no inputs.' % target_name)
    action_command = action.get('action')
    if action_command and not action_command[0]:
      raise GypError("Empty action as command in target %s." % target_name)


def TurnIntIntoStrInDict(the_dict):
  """Given dict the_dict, recursively converts all integers into strings.
  """
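  # Illustrative example: {'version': 3, 4: ['a', 5], 'n': {'x': 0}}
  # becomes {'version': '3', '4': ['a', '5'], 'n': {'x': '0'}}.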
  # Use items instead of iteritems: the dict is modified while iterating,
  # and items() returns a snapshot, so reinserted keys and their associated
  # values won't be revisited.
  for k, v in the_dict.items():
    if type(v) is int:
      v = str(v)
      the_dict[k] = v
    elif type(v) is dict:
      TurnIntIntoStrInDict(v)
    elif type(v) is list:
      TurnIntIntoStrInList(v)

    if type(k) is int:
      del the_dict[k]
      the_dict[str(k)] = v


def TurnIntIntoStrInList(the_list):
  """Given list the_list, recursively converts all integers into strings.
  """
  for index in xrange(0, len(the_list)):
    item = the_list[index]
    if type(item) is int:
      the_list[index] = str(item)
    elif type(item) is dict:
      TurnIntIntoStrInDict(item)
    elif type(item) is list:
      TurnIntIntoStrInList(item)


def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
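  # Illustrative example (hypothetical names): with root_targets=['app'],
  # only 'app' and its deep (transitive) dependencies survive; an
  # unrelated 'tests' target is dropped from |targets|, |flat_list|, and
  # each build file's 'targets' list in |data|.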
  qualified_root_targets = []
  for target in root_targets:
    target = target.strip()
    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
    if not qualified_targets:
      raise GypError("Could not find target %s" % target)
    qualified_root_targets.extend(qualified_targets)

  wanted_targets = {}
  for target in qualified_root_targets:
    wanted_targets[target] = targets[target]
    for dependency in dependency_nodes[target].DeepDependencies():
      wanted_targets[dependency] = targets[dependency]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Prune unwanted targets from each build_file's data dict.
  for build_file in data['target_build_files']:
    if 'targets' not in data[build_file]:
      continue
    new_targets = []
    for target in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in wanted_targets:
        new_targets.append(target)
    data[build_file]['targets'] = new_targets

  return wanted_targets, wanted_flat_list


def VerifyNoCollidingTargets(targets):
  """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
  """
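  # Illustrative example (hypothetical names): 'foo/a.gyp:base' and
  # 'foo/b.gyp:base' collide, because both define a target named 'base'
  # in directory 'foo'; 'foo/a.gyp:base' and 'bar/a.gyp:base' do not.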
  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
  used = {}
  for target in targets:
    # Separate out 'path/to/file.gyp', 'target_name' from
    # 'path/to/file.gyp:target_name'.
    path, name = target.rsplit(':', 1)
    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
    subdir, gyp = os.path.split(path)
    # Use '.' for the current directory '', so that the error messages make
    # more sense.
    if not subdir:
      subdir = '.'
    # Prepare a key like 'path/to:target_name'.
    key = subdir + ':' + name
    if key in used:
      # Complain if this target is already used.
      raise GypError('Duplicate target name "%s" in directory "%s" used both '
                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
    used[key] = gyp


def SetGeneratorGlobals(generator_input_info):
  # Set up path_sections and non_configuration_keys with the default data plus
  # the generator-specific data.
  global path_sections
  path_sections = set(base_path_sections)
  path_sections.update(generator_input_info['path_sections'])

  global non_configuration_keys
  non_configuration_keys = base_non_configuration_keys[:]
  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])

  global multiple_toolsets
  multiple_toolsets = generator_input_info[
      'generator_supports_multiple_toolsets']

  global generator_filelist_paths
  generator_filelist_paths = generator_input_info['generator_filelist_paths']


def Load(build_files, variables, includes, depth, generator_input_info, check,
         circular_check, duplicate_basename_check, parallel, root_targets):
  SetGeneratorGlobals(generator_input_info)
  # A generator can have other lists (in addition to sources) be processed
  # for rules.
  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']

  # Load build files.  This loads every target-containing build file into
  # the |data| dictionary such that the keys to |data| are build file names,
  # and the values are the entire build file contents after "early" or "pre"
  # processing has been done and includes have been resolved.
  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
  # track of the keys corresponding to "target" files.
  data = {'target_build_files': set()}
  # Normalize paths everywhere.  This is important because paths will be
  # used as keys to the data dict and for references between input files.
  build_files = set(map(os.path.normpath, build_files))
  if parallel:
    LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
                                 check, generator_input_info)
  else:
    aux_data = {}
    for build_file in build_files:
      try:
        LoadTargetBuildFile(build_file, data, aux_data,
                            variables, includes, depth, check, True)
      except Exception, e:
        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
        raise

  # Build a dict to access each target's subdict by qualified name.
  targets = BuildTargetsDict(data)

  # Fully qualify all dependency links.
  QualifyDependencies(targets)

  # Remove self-dependencies from targets that have 'prune_self_dependencies'
  # set to 1.
  RemoveSelfDependencies(targets)

  # Expand dependencies specified as build_file:*.
  ExpandWildcardDependencies(targets, data)

  # Remove all dependencies marked as 'link_dependency' from the targets of
  # type 'none'.
  RemoveLinkDependenciesFromNoneTargets(targets)

  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
  for target_name, target_dict in targets.iteritems():
    tmp_dict = {}
    for key_base in dependency_sections:
      for op in ('', '!', '/'):
        key = key_base + op
        if key in target_dict:
          tmp_dict[key] = target_dict[key]
          del target_dict[key]
    ProcessListFiltersInDict(target_name, tmp_dict)
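    # Illustrative example (hypothetical name): a 'dependencies!': ['base']
    # key is staged into tmp_dict together with 'dependencies', filtered
    # there, and the surviving lists (plus any generated
    # 'dependencies_excluded') are written back below.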
    # Write the results back to |target_dict|.
    for key in tmp_dict:
      target_dict[key] = tmp_dict[key]

  # Make sure every dependency appears at most once.
  RemoveDuplicateDependencies(targets)

  if circular_check:
    # Make sure that any targets in a.gyp don't contain dependencies in other
    # .gyp files that further depend on a.gyp.
    VerifyNoGYPFileCircularDependencies(targets)

  [dependency_nodes, flat_list] = BuildDependencyList(targets)

  if root_targets:
    # Remove, from |targets| and |flat_list|, the targets that are not deep
    # dependencies of the targets specified in |root_targets|.
    targets, flat_list = PruneUnwantedTargets(
        targets, flat_list, dependency_nodes, root_targets, data)

  # Check that no two targets in the same directory have the same name.
  VerifyNoCollidingTargets(flat_list)

  # Handle dependent settings of various types.
  for settings_type in ['all_dependent_settings',
                        'direct_dependent_settings',
                        'link_settings']:
    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

    # Take out the dependent settings now that they've been published to all
    # of the targets that require them.
    for target in flat_list:
      if settings_type in targets[target]:
        del targets[target][settings_type]

  # Make sure static libraries don't declare dependencies on other static
  # libraries, but that linkables depend on all unlinked static libraries
  # that they need so that their link steps will be correct.
  gii = generator_input_info
  if gii['generator_wants_static_library_dependencies_adjusted']:
    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
                                    gii['generator_wants_sorted_dependencies'])

  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATE, variables, build_file)

  # Move everything that can go into a "configurations" section into one.
  for target in flat_list:
    target_dict = targets[target]
    SetUpConfigurations(target, target_dict)

  # Apply exclude (!) and regex (/) list filters.
  for target in flat_list:
    target_dict = targets[target]
    ProcessListFiltersInDict(target, target_dict)

  # Apply "latelate" variable expansions and condition evaluations.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ProcessVariablesAndConditionsInDict(
        target_dict, PHASE_LATELATE, variables, build_file)

  # Make sure that the rules make sense, and build up rule_sources lists as
  # needed.  Not all generators will need to use the rule_sources lists, but
  # some may, and it seems best to build the list in a common spot.
  # Also validate actions and run_as elements in targets.
  for target in flat_list:
    target_dict = targets[target]
    build_file = gyp.common.BuildFile(target)
    ValidateTargetType(target, target_dict)
    ValidateSourcesInTarget(target, target_dict, build_file,
                            duplicate_basename_check)
    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
    ValidateRunAsInTarget(target, target_dict, build_file)
    ValidateActionsInTarget(target, target_dict, build_file)

  # Generators might not expect ints.  Turn them into strs.
  TurnIntIntoStrInDict(data)

  # TODO(mark): Return |data| for now because the generator needs a list of
  # build files that came in.  In the future, maybe it should just accept
  # a list, and not the whole data dict.
  return [flat_list, targets, data]