"Fossies" - the Fresh Open Source Software Archive  

Source code changes of the file "SCons/Tool/ninja/NinjaState.py" between
SCons-4.3.0.tar.gz and SCons-4.4.0.tar.gz

About: SCons is a software construction tool (a Python script and a set of modules that serve as a superior alternative to the classic "Make" build tool).

NinjaState.py (SCons-4.3.0) vs. NinjaState.py (SCons-4.4.0)
(lines removed in 4.4.0 are prefixed with "-", lines added with "+", unchanged context with a space)

skipping to change at line 27 (both files)
 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 import io
 import os
+import pathlib
+import signal
+import tempfile
 import shutil
 import sys
+import random
+import filecmp
 from os.path import splitext
 from tempfile import NamedTemporaryFile
 import ninja
+import hashlib
 import SCons
 from SCons.Script import COMMAND_LINE_TARGETS
-from SCons.Util import is_List
+from SCons.Util import wait_for_process_to_die
 from SCons.Errors import InternalError
 from .Globals import COMMAND_TYPES, NINJA_RULES, NINJA_POOLS, \
-    NINJA_CUSTOM_HANDLERS
+    NINJA_CUSTOM_HANDLERS, NINJA_DEFAULT_TARGETS
 from .Rules import _install_action_function, _mkdir_action_function, _lib_symlink_action_function, _copy_action_function
 from .Utils import get_path, alias_to_ninja_build, generate_depfile, ninja_noop, get_order_only, \
-    get_outputs, get_inputs, get_dependencies, get_rule, get_command_env, to_escaped_list
+    get_outputs, get_inputs, get_dependencies, get_rule, get_command_env, to_escaped_list, ninja_sorted_build
 from .Methods import get_command

 # pylint: disable=too-many-instance-attributes
 class NinjaState:
     """Maintains state of Ninja build system as it's translated from SCons."""

     def __init__(self, env, ninja_file, ninja_syntax):
         self.env = env
         self.ninja_file = ninja_file

skipping to change at line 66 (4.3.0) / line 72 (4.4.0)
         self.ninja_bin_path = os.path.abspath(os.path.join(
             ninja.__file__,
             os.pardir,
             'data',
             'bin',
             ninja_bin))
         if not os.path.exists(self.ninja_bin_path):
             # couldn't find it, just give the bin name and hope
             # its in the path later
             self.ninja_bin_path = ninja_bin
+        self.ninja_syntax = ninja_syntax
         self.writer_class = ninja_syntax.Writer
         self.__generated = False
         self.translator = SConsToNinjaTranslator(env)
         self.generated_suffixes = env.get("NINJA_GENERATED_SOURCE_SUFFIXES", [])

         # List of generated builds that will be written at a later stage
         self.builds = dict()

-        # List of targets for which we have generated a build. This
-        # allows us to take multiple Alias nodes as sources and to not
-        # fail to build if they have overlapping targets.
-        self.built = set()

         # SCons sets this variable to a function which knows how to do
         # shell quoting on whatever platform it's run on. Here we use it
         # to make the SCONS_INVOCATION variable properly quoted for things
         # like CCFLAGS
         scons_escape = env.get("ESCAPE", lambda x: x)

+        # The daemon port should be the same across runs, unless explicitly set
+        # or if the portfile is deleted. This ensures the ninja file is deterministic
+        # across regen's if nothings changed. The construction var should take preference,
+        # then portfile is next, and then otherwise create a new random port to persist in
+        # use.
+        scons_daemon_port = None
+        os.makedirs(get_path(self.env.get("NINJA_DIR")), exist_ok=True)
+        scons_daemon_port_file = str(pathlib.Path(get_path(self.env.get("NINJA_DIR"))) / "scons_daemon_portfile")
+
+        if env.get('NINJA_SCONS_DAEMON_PORT') is not None:
+            scons_daemon_port = int(env.get('NINJA_SCONS_DAEMON_PORT'))
+        elif os.path.exists(scons_daemon_port_file):
+            with open(scons_daemon_port_file) as f:
+                scons_daemon_port = int(f.read())
+        else:
+            scons_daemon_port = random.randint(10000, 60000)
+            with open(scons_daemon_port_file, 'w') as f:
+                f.write(str(scons_daemon_port))
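
For illustration, a minimal hypothetical SConstruct sketch that pins the daemon port handled above; only the NINJA_SCONS_DAEMON_PORT variable name comes from the code, the file names and port value are made up:

    # Hypothetical SConstruct fragment, illustrative only.
    env = Environment(tools=['default', 'ninja'])
    # Pin the daemon port so the generated build.ninja stays identical across machines.
    # Left unset, a random port is picked once and cached in NINJA_DIR/scons_daemon_portfile.
    env['NINJA_SCONS_DAEMON_PORT'] = 12345
    env.Program('hello', ['hello.c'])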
         # if SCons was invoked from python, we expect the first arg to be the scons.py
         # script, otherwise scons was invoked from the scons script
         python_bin = ''
         if os.path.basename(sys.argv[0]) == 'scons.py':
             python_bin = ninja_syntax.escape(scons_escape(sys.executable))
         self.variables = {
             "COPY": "cmd.exe /c 1>NUL copy" if sys.platform == "win32" else "cp",
+            'PORT': scons_daemon_port,
+            'NINJA_DIR_PATH': env.get('NINJA_DIR').abspath,
+            'PYTHON_BIN': sys.executable,
+            'NINJA_TOOL_DIR': pathlib.Path(__file__).parent,
+            'NINJA_SCONS_DAEMON_KEEP_ALIVE': str(env.get('NINJA_SCONS_DAEMON_KEEP_ALIVE')),
             "SCONS_INVOCATION": '{} {} --disable-ninja __NINJA_NO=1 $out'.format(
                 python_bin,
                 " ".join(
                     [ninja_syntax.escape(scons_escape(arg)) for arg in sys.argv if arg not in COMMAND_LINE_TARGETS]
                 ),
             ),
             "SCONS_INVOCATION_W_TARGETS": "{} {} NINJA_DISABLE_AUTO_RUN=1".format(
                 python_bin, " ".join([
                     ninja_syntax.escape(scons_escape(arg))
                     for arg in sys.argv

skipping to change at line 129 (4.3.0) / line 155 (4.4.0)
"GENERATED_CMD": { "GENERATED_CMD": {
"command": "cmd /c $env$cmd" if sys.platform == "win32" else "$e nv$cmd", "command": "cmd /c $env$cmd" if sys.platform == "win32" else "$e nv$cmd",
"description": "Building $out", "description": "Building $out",
"pool": "local_pool", "pool": "local_pool",
}, },
# We add the deps processing variables to this below. We # We add the deps processing variables to this below. We
# don't pipe these through cmd.exe on Windows because we # don't pipe these through cmd.exe on Windows because we
# use this to generate a compile_commands.json database # use this to generate a compile_commands.json database
# which can't use the shell command as it's compile # which can't use the shell command as it's compile
# command. # command.
"CC": { "CC_RSP": {
"command": "$env$CC @$out.rsp", "command": "$env$CC @$out.rsp",
"description": "Compiling $out", "description": "Compiling $out",
"rspfile": "$out.rsp", "rspfile": "$out.rsp",
"rspfile_content": "$rspc", "rspfile_content": "$rspc",
}, },
"CXX": { "CXX_RSP": {
"command": "$env$CXX @$out.rsp", "command": "$env$CXX @$out.rsp",
"description": "Compiling $out", "description": "Compiling $out",
"rspfile": "$out.rsp", "rspfile": "$out.rsp",
"rspfile_content": "$rspc", "rspfile_content": "$rspc",
}, },
"LINK": { "LINK_RSP": {
"command": "$env$LINK @$out.rsp", "command": "$env$LINK @$out.rsp",
"description": "Linking $out", "description": "Linking $out",
"rspfile": "$out.rsp", "rspfile": "$out.rsp",
"rspfile_content": "$rspc", "rspfile_content": "$rspc",
"pool": "local_pool", "pool": "local_pool",
}, },
# Ninja does not automatically delete the archive before # Ninja does not automatically delete the archive before
# invoking ar. The ar utility will append to an existing archive, wh ich # invoking ar. The ar utility will append to an existing archive, wh ich
# can cause duplicate symbols if the symbols moved between object fi les. # can cause duplicate symbols if the symbols moved between object fi les.
# Native SCons will perform this operation so we need to force ninja # Native SCons will perform this operation so we need to force ninja
# to do the same. See related for more info: # to do the same. See related for more info:
# https://jira.mongodb.org/browse/SERVER-49457 # https://jira.mongodb.org/browse/SERVER-49457
"AR": { "AR_RSP": {
"command": "{}$env$AR @$out.rsp".format( "command": "{}$env$AR @$out.rsp".format(
'' if sys.platform == "win32" else "rm -f $out && " '' if sys.platform == "win32" else "rm -f $out && "
), ),
"description": "Archiving $out", "description": "Archiving $out",
"rspfile": "$out.rsp", "rspfile": "$out.rsp",
"rspfile_content": "$rspc", "rspfile_content": "$rspc",
"pool": "local_pool", "pool": "local_pool",
}, },
"CC": {
"command": "$env$CC $rspc",
"description": "Compiling $out",
},
"CXX": {
"command": "$env$CXX $rspc",
"description": "Compiling $out",
},
"LINK": {
"command": "$env$LINK $rspc",
"description": "Linking $out",
"pool": "local_pool",
},
"AR": {
"command": "{}$env$AR $rspc".format(
'' if sys.platform == "win32" else "rm -f $out && "
),
"description": "Archiving $out",
"pool": "local_pool",
},
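
The response-file rules are kept under *_RSP names while the new plain variants expand $rspc directly on the command line, presumably so a generator can fall back to a response file only when the expanded command would be too long. A rough sketch of that selection idea, not the actual SCons implementation:

    # Illustrative sketch only, assuming a hypothetical 8000-character limit.
    def pick_rule(base_rule, command, max_len=8000):
        """Use the plain rule for short command lines, the _RSP variant otherwise."""
        return base_rule if len(command) < max_len else base_rule + "_RSP"

    assert pick_rule("CC", "gcc -c -o foo.o foo.c") == "CC"
    assert pick_rule("LINK", "link.exe " + "obj " * 4000) == "LINK_RSP"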
"SYMLINK": { "SYMLINK": {
"command": ( "command": (
"cmd /c mklink $out $in" "cmd /c mklink $out $in"
if sys.platform == "win32" if sys.platform == "win32"
else "ln -s $in $out" else "ln -s $in $out"
), ),
"description": "Symlink $in -> $out", "description": "Symlink $in -> $out",
}, },
"INSTALL": { "INSTALL": {
"command": "$COPY $in $out", "command": "$COPY $in $out",
skipping to change at line 186 skipping to change at line 232
# update the timestamp on the output file. This leads # update the timestamp on the output file. This leads
# to a stuck constant timestamp in the Ninja database # to a stuck constant timestamp in the Ninja database
# and needless rebuilds. # and needless rebuilds.
# #
# Adding restat here ensures that Ninja always checks # Adding restat here ensures that Ninja always checks
# the copy updated the timestamp and that Ninja has # the copy updated the timestamp and that Ninja has
# the correct information. # the correct information.
"restat": 1, "restat": 1,
}, },
"TEMPLATE": { "TEMPLATE": {
"command": "$SCONS_INVOCATION $out", "command": "$PYTHON_BIN $NINJA_TOOL_DIR/ninja_daemon_build.py $P
"description": "Rendering $SCONS_INVOCATION $out", ORT $NINJA_DIR_PATH $out",
"pool": "scons_pool", "description": "Defer to SCons to build $out",
"restat": 1, "pool": "local_pool",
"restat": 1
},
"EXIT_SCONS_DAEMON": {
"command": "$PYTHON_BIN $NINJA_TOOL_DIR/ninja_daemon_build.py $P
ORT $NINJA_DIR_PATH --exit",
"description": "Shutting down ninja scons daemon server",
"pool": "local_pool",
"restat": 1
}, },
"SCONS": { "SCONS": {
"command": "$SCONS_INVOCATION $out", "command": "$SCONS_INVOCATION $out",
"description": "$SCONS_INVOCATION $out", "description": "$SCONS_INVOCATION $out",
"pool": "scons_pool", "pool": "scons_pool",
# restat # restat
# if present, causes Ninja to re-stat the command's outputs # if present, causes Ninja to re-stat the command's outputs
# after execution of the command. Each output whose # after execution of the command. Each output whose
# modification time the command did not change will be # modification time the command did not change will be
# treated as though it had never needed to be built. This # treated as though it had never needed to be built. This
skipping to change at line 213 skipping to change at line 265
# We use restat any time we execute SCons because # We use restat any time we execute SCons because
# SCons calls in Ninja typically create multiple # SCons calls in Ninja typically create multiple
# targets. But since SCons is doing it's own up to # targets. But since SCons is doing it's own up to
# date-ness checks it may only update say one of # date-ness checks it may only update say one of
# them. Restat will find out which of the multiple # them. Restat will find out which of the multiple
# build targets did actually change then only rebuild # build targets did actually change then only rebuild
# those targets which depend specifically on that # those targets which depend specifically on that
# output. # output.
"restat": 1, "restat": 1,
}, },
"SCONS_DAEMON": {
"command": "$PYTHON_BIN $NINJA_TOOL_DIR/ninja_run_daemon.py $POR
T $NINJA_DIR_PATH $NINJA_SCONS_DAEMON_KEEP_ALIVE $SCONS_INVOCATION",
"description": "Starting scons daemon...",
"pool": "local_pool",
# restat
# if present, causes Ninja to re-stat the command's outputs
# after execution of the command. Each output whose
# modification time the command did not change will be
# treated as though it had never needed to be built. This
# may cause the output's reverse dependencies to be removed
# from the list of pending build actions.
#
# We use restat any time we execute SCons because
# SCons calls in Ninja typically create multiple
# targets. But since SCons is doing it's own up to
# date-ness checks it may only update say one of
# them. Restat will find out which of the multiple
# build targets did actually change then only rebuild
# those targets which depend specifically on that
# output.
"restat": 1,
},
"REGENERATE": { "REGENERATE": {
"command": "$SCONS_INVOCATION_W_TARGETS", "command": "$SCONS_INVOCATION_W_TARGETS",
"description": "Regenerating $self", "description": "Regenerating $self",
"generator": 1, "generator": 1,
"pool": "console", "pool": "console",
"restat": 1, "restat": 1,
}, },
} }
if env['PLATFORM'] == 'darwin' and env.get('AR', "") == 'ar': if env['PLATFORM'] == 'darwin' and env.get('AR', "") == 'ar':
self.rules["AR"] = { self.rules["AR"] = {
"command": "rm -f $out && $env$AR $rspc", "command": "rm -f $out && $env$AR $rspc",
"description": "Archiving $out", "description": "Archiving $out",
"pool": "local_pool", "pool": "local_pool",
} }
self.pools = {"scons_pool": 1}
num_jobs = self.env.get('NINJA_MAX_JOBS', self.env.GetOption("num_jobs")
)
self.pools = {
"local_pool": num_jobs,
"install_pool": num_jobs / 2,
"scons_pool": 1,
}
for rule in ["CC", "CXX"]:
if env["PLATFORM"] == "win32":
self.rules[rule]["deps"] = "msvc"
else:
self.rules[rule]["deps"] = "gcc"
self.rules[rule]["depfile"] = "$out.d"
     def add_build(self, node):
         if not node.has_builder():
             return False

+        if isinstance(node, SCons.Node.Python.Value):
+            return False
+
         if isinstance(node, SCons.Node.Alias.Alias):
             build = alias_to_ninja_build(node)
         else:
             build = self.translator.action_to_ninja_build(node)

         # Some things are unbuild-able or need not be built in Ninja
         if build is None:
             return False

         node_string = str(node)
         if node_string in self.builds:
-            raise InternalError("Node {} added to ninja build state more than once".format(node_string))
+            # TODO: If we work out a way to handle Alias() with same name as file this logic can be removed
+            # This works around adding Alias with the same name as a Node.
+            # It's not great way to workaround because it force renames the alias,
+            # but the alternative is broken ninja support.
+            warn_msg = f"Alias {node_string} name the same as File node, ninja does not support this. Renaming Alias {node_string} to {node_string}_alias."
+            if isinstance(node, SCons.Node.Alias.Alias):
+                for i, output in enumerate(build["outputs"]):
+                    if output == node_string:
+                        build["outputs"][i] += "_alias"
+                node_string += "_alias"
+                print(warn_msg)
+            elif self.builds[node_string]["rule"] == "phony":
+                for i, output in enumerate(self.builds[node_string]["outputs"]):
+                    if output == node_string:
+                        self.builds[node_string]["outputs"][i] += "_alias"
+                tmp_build = self.builds[node_string].copy()
+                del self.builds[node_string]
+                node_string += "_alias"
+                self.builds[node_string] = tmp_build
+                print(warn_msg)
+            else:
+                raise InternalError("Node {} added to ninja build state more than once".format(node_string))
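
The rename above fires when an Alias shares a name with a file target. A hypothetical SConstruct fragment that would presumably trigger it (all names are made up):

    # Hypothetical SConstruct fragment, illustrative only.
    env = Environment(tools=['default', 'ninja'])
    report = env.Command('report', ['data.csv'], 'python gen_report.py $SOURCE > $TARGET')
    env.Alias('report', report)  # Alias name collides with the file node "report"
    # 4.3.0 raised InternalError here; 4.4.0 emits the alias as "report_alias" and warns.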
         self.builds[node_string] = build
-        self.built.update(build["outputs"])
         return True

     # TODO: rely on SCons to tell us what is generated source
     # or some form of user scanner maybe (Github Issue #3624)
     def is_generated_source(self, output):
         """Check if output ends with a known generated suffix."""
         _, suffix = splitext(output)
         return suffix in self.generated_suffixes

     def has_generated_sources(self, output):

skipping to change at line 289 (4.3.0) / line 374 (4.4.0)
     # pylint: disable=too-many-branches,too-many-locals
     def generate(self):
         """
         Generate the build.ninja.

         This should only be called once for the lifetime of this object.
         """
         if self.__generated:
             return

-        self.rules.update(self.env.get(NINJA_RULES, {}))
+        num_jobs = self.env.get('NINJA_MAX_JOBS', self.env.GetOption("num_jobs"))
+        self.pools.update({
+            "local_pool": num_jobs,
+            "install_pool": num_jobs / 2,
+        })
+
+        deps_format = self.env.get("NINJA_DEPFILE_PARSE_FORMAT", 'msvc' if self.env['PLATFORM'] == 'win32' else 'gcc')
+        for rule in ["CC", "CXX"]:
+            if deps_format == "msvc":
+                self.rules[rule]["deps"] = "msvc"
+            elif deps_format == "gcc" or deps_format == "clang":
+                self.rules[rule]["deps"] = "gcc"
+                self.rules[rule]["depfile"] = "$out.d"
+            else:
+                raise Exception(f"Unknown 'NINJA_DEPFILE_PARSE_FORMAT'={self.env['NINJA_DEPFILE_PARSE_FORMAT']}, use 'mvsc', 'gcc', or 'clang'.")
+
+        for key, rule in self.env.get(NINJA_RULES, {}).items():
+            # make a non response file rule for users custom response file rules.
+            if rule.get('rspfile') is not None:
+                self.rules.update({key + '_RSP': rule})
+                non_rsp_rule = rule.copy()
+                del non_rsp_rule['rspfile']
+                del non_rsp_rule['rspfile_content']
+                self.rules.update({key: non_rsp_rule})
+            else:
+                self.rules.update({key: rule})
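
A minimal SConstruct sketch of the knobs introduced or relocated above; the variable names come from the code, the values are hypothetical:

    # Hypothetical SConstruct fragment, illustrative only.
    env = Environment(tools=['default', 'ninja'])
    env['NINJA_MAX_JOBS'] = 8                    # sizes local_pool (8) and install_pool (4)
    env['NINJA_DEPFILE_PARSE_FORMAT'] = 'clang'  # mapped to ninja's "gcc"-style deps parsing
    env.Program('hello', ['hello.c'])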
         self.pools.update(self.env.get(NINJA_POOLS, {}))

         content = io.StringIO()
         ninja = self.writer_class(content, width=100)

         ninja.comment("Generated by scons. DO NOT EDIT.")

         ninja.variable("builddir", get_path(self.env.Dir(self.env['NINJA_DIR']).path))

-        for pool_name, size in self.pools.items():
+        for pool_name, size in sorted(self.pools.items()):
             ninja.pool(pool_name, min(self.env.get('NINJA_MAX_JOBS', size), size))

-        for var, val in self.variables.items():
+        for var, val in sorted(self.variables.items()):
             ninja.variable(var, val)

-        for rule, kwargs in self.rules.items():
+        for rule, kwargs in sorted(self.rules.items()):
             if self.env.get('NINJA_MAX_JOBS') is not None and 'pool' not in kwargs:
                 kwargs['pool'] = 'local_pool'
             ninja.rule(rule, **kwargs)
-        generated_source_files = sorted({
-            output
-            # First find builds which have header files in their outputs.
-            for build in self.builds.values()
-            if self.has_generated_sources(build["outputs"])
-            for output in build["outputs"]
-            # Collect only the header files from the builds with them
-            # in their output. We do this because is_generated_source
-            # returns True if it finds a header in any of the outputs,
-            # here we need to filter so we only have the headers and
-            # not the other outputs.
-            if self.is_generated_source(output)
-        })
-
-        if generated_source_files:
-            ninja.build(
-                outputs="_generated_sources",
-                rule="phony",
-                implicit=generated_source_files
-            )
+        # If the user supplied an alias to determine generated sources, use that, otherwise
+        # determine what the generated sources are dynamically.
+        generated_sources_alias = self.env.get('NINJA_GENERATED_SOURCE_ALIAS_NAME')
+        generated_sources_build = None
+
+        if generated_sources_alias:
+            generated_sources_build = self.builds.get(generated_sources_alias)
+            if generated_sources_build is None or generated_sources_build["rule"] != 'phony':
+                raise Exception(
+                    "ERROR: 'NINJA_GENERATED_SOURCE_ALIAS_NAME' set, but no matching Alias object found."
+                )
+
+        if generated_sources_alias and generated_sources_build:
+            generated_source_files = sorted(
+                [] if not generated_sources_build else generated_sources_build['implicit']
+            )
+
+            def check_generated_source_deps(build):
+                return (
+                    build != generated_sources_build
+                    and set(build["outputs"]).isdisjoint(generated_source_files)
+                )
+        else:
+            generated_sources_build = None
+            generated_source_files = sorted({
+                output
+                # First find builds which have header files in their outputs.
+                for build in self.builds.values()
+                if self.has_generated_sources(build["outputs"])
+                for output in build["outputs"]
+                # Collect only the header files from the builds with them
+                # in their output. We do this because is_generated_source
+                # returns True if it finds a header in any of the outputs,
+                # here we need to filter so we only have the headers and
+                # not the other outputs.
+                if self.is_generated_source(output)
+            })
+
+            if generated_source_files:
+                generated_sources_alias = "_ninja_generated_sources"
+                ninja.build(
+                    outputs=generated_sources_alias,
+                    rule="phony",
+                    implicit=generated_source_files
+                )
+
+                def check_generated_source_deps(build):
+                    return (
+                        not build["rule"] == "INSTALL"
+                        and set(build["outputs"]).isdisjoint(generated_source_files)
+                        and set(build.get("implicit", [])).isdisjoint(generated_source_files)
+                    )
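
One way a build might feed the new NINJA_GENERATED_SOURCE_ALIAS_NAME hook instead of relying on suffix detection; the alias, file names, and command below are hypothetical:

    # Hypothetical SConstruct fragment, illustrative only.
    env = Environment(tools=['default', 'ninja'])
    headers = env.Command(['version.h'], ['version.h.in'], Copy('$TARGET', '$SOURCE'))
    env.Alias('generated-sources', headers)
    env['NINJA_GENERATED_SOURCE_ALIAS_NAME'] = 'generated-sources'
    env.Program('hello', ['hello.c'])  # hello.c may include version.h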
         template_builders = []
+        scons_compiledb = False
+
+        if SCons.Script._Get_Default_Targets == SCons.Script._Set_Default_Targets_Has_Not_Been_Called:
+            all_targets = set()
+        else:
+            all_targets = None

         for build in [self.builds[key] for key in sorted(self.builds.keys())]:
+            if "compile_commands.json" in build["outputs"]:
+                scons_compiledb = True
+
+            # this is for the no command line targets, no SCons default case. We want this default
+            # to just be all real files in the build.
+            if all_targets is not None and build['rule'] != 'phony':
+                all_targets = all_targets | set(build["outputs"])
+
             if build["rule"] == "TEMPLATE":
                 template_builders.append(build)
                 continue

             if "implicit" in build:
                 build["implicit"].sort()

             # Don't make generated sources depend on each other. We
             # have to check that none of the outputs are generated
             # sources and none of the direct implicit dependencies are
             # generated sources or else we will create a dependency
             # cycle.
             if (
                 generated_source_files
-                and not build["rule"] == "INSTALL"
-                and set(build["outputs"]).isdisjoint(generated_source_files)
-                and set(build.get("implicit", [])).isdisjoint(generated_source_files)
+                and check_generated_source_deps(build)
             ):
                 # Make all non-generated source targets depend on
                 # _generated_sources. We use order_only for generated
                 # sources so that we don't rebuild the world if one
                 # generated source was rebuilt. We just need to make
                 # sure that all of these sources are generated before
                 # other builds.
                 order_only = build.get("order_only", [])
-                order_only.append("_generated_sources")
+                order_only.append(generated_sources_alias)
                 build["order_only"] = order_only

             if "order_only" in build:
                 build["order_only"].sort()

             # When using a depfile Ninja can only have a single output
             # but SCons will usually have emitted an output for every
             # thing a command will create because it's caching is much
             # more complex than Ninja's. This includes things like DWO
             # files. Here we make sure that Ninja only ever sees one
             # target when using a depfile. It will still have a command

skipping to change at line 403 (4.3.0) / line 558 (4.4.0)
             # we would find that the dwo file becomes the
             # first_output, and this breaks, for instance, header
             # dependency scanning.
             if rule is not None and (rule.get("deps") or rule.get("rspfile")):
                 first_output, remaining_outputs = (
                     build["outputs"][0],
                     build["outputs"][1:],
                 )

                 if remaining_outputs:
-                    ninja.build(
-                        outputs=sorted(remaining_outputs), rule="phony", implicit=first_output,
+                    ninja_sorted_build(
+                        ninja,
+                        outputs=remaining_outputs, rule="phony", implicit=first_output,
                     )

                 build["outputs"] = first_output

             # Optionally a rule can specify a depfile, and SCons can generate implicit
             # dependencies into the depfile. This allows for dependencies to come and go
             # without invalidating the ninja file. The depfile was created in ninja specifically
             # for dealing with header files appearing and disappearing across rebuilds, but it can
             # be repurposed for anything, as long as you have a way to regenerate the depfile.
             # More specific info can be found here: https://ninja-build.org/manual.html#_depfile
             if rule is not None and rule.get('depfile') and build.get('deps_files'):
                 path = build['outputs'] if SCons.Util.is_List(build['outputs']) else [build['outputs']]
                 generate_depfile(self.env, path[0], build.pop('deps_files', []))

             if "inputs" in build:
                 build["inputs"].sort()

-            ninja.build(**build)
+            ninja_sorted_build(
+                ninja,
+                **build
+            )

-        template_builds = dict()
+        scons_daemon_dirty = str(pathlib.Path(get_path(self.env.get("NINJA_DIR"))) / "scons_daemon_dirty")
         for template_builder in template_builders:
-
-            # Special handling for outputs and implicit since we need to
-            # aggregate not replace for each builder.
-            for agg_key in ["outputs", "implicit", "inputs"]:
-                new_val = template_builds.get(agg_key, [])
-
-                # Use pop so the key is removed and so the update
-                # below will not overwrite our aggregated values.
-                cur_val = template_builder.pop(agg_key, [])
-                if is_List(cur_val):
-                    new_val += cur_val
-                else:
-                    new_val.append(cur_val)
-                template_builds[agg_key] = new_val
-
-            # Collect all other keys
-            template_builds.update(template_builder)
-
-        if template_builds.get("outputs", []):
-            # Try to clean up any dependency cycles. If we are passing an
-            # ouptut node to SCons, it will build any dependencys if ninja
-            # has not already.
-            for output in template_builds.get("outputs", []):
-                inputs = template_builds.get('inputs')
-                if inputs and output in inputs:
-                    inputs.remove(output)
-
-                implicits = template_builds.get('implicit')
-                if implicits and output in implicits:
-                    implicits.remove(output)
-
-            ninja.build(**template_builds)
+            template_builder["implicit"] += [scons_daemon_dirty]
+            ninja_sorted_build(
+                ninja,
+                **template_builder
+            )
         # We have to glob the SCons files here to teach the ninja file
         # how to regenerate itself. We'll never see ourselves in the
         # DAG walk so we can't rely on action_to_ninja_build to
         # generate this rule even though SCons should know we're
         # dependent on SCons files.
         ninja_file_path = self.env.File(self.ninja_file).path
         regenerate_deps = to_escaped_list(self.env, self.env['NINJA_REGENERATE_DEPS'])

-        ninja.build(
-            ninja_file_path,
+        ninja_sorted_build(
+            ninja,
+            outputs=ninja_file_path,
             rule="REGENERATE",
             implicit=regenerate_deps,
             variables={
-                "self": ninja_file_path,
+                "self": ninja_file_path
             }
         )

-        ninja.build(
-            regenerate_deps,
+        ninja_sorted_build(
+            ninja,
+            outputs=regenerate_deps,
             rule="phony",
             variables={
                 "self": ninja_file_path,
             }
         )

-        # If we ever change the name/s of the rules that include
-        # compile commands (i.e. something like CC) we will need to
-        # update this build to reflect that complete list.
-        ninja.build(
-            "compile_commands.json",
-            rule="CMD",
-            pool="console",
-            implicit=[str(self.ninja_file)],
-            variables={
-                "cmd": "{} -f {} -t compdb {}CC CXX > compile_commands.json".format(
-                    # NINJA_COMPDB_EXPAND - should only be true for ninja
-                    # This was added to ninja's compdb tool in version 1.9.0 (merged April 2018)
-                    # https://github.com/ninja-build/ninja/pull/1223
-                    # TODO: add check in generate to check version and enable this by default if it's available.
-                    self.ninja_bin_path, str(self.ninja_file),
-                    '-x ' if self.env.get('NINJA_COMPDB_EXPAND', True) else ''
-                )
-            },
-        )
-
-        ninja.build(
-            "compiledb", rule="phony", implicit=["compile_commands.json"],
-        )
+        if not scons_compiledb:
+            # If we ever change the name/s of the rules that include
+            # compile commands (i.e. something like CC) we will need to
+            # update this build to reflect that complete list.
+            ninja_sorted_build(
+                ninja,
+                outputs="compile_commands.json",
+                rule="CMD",
+                pool="console",
+                implicit=[str(self.ninja_file)],
+                variables={
+                    "cmd": "{} -f {} -t compdb {}CC CXX > compile_commands.json".format(
+                        # NINJA_COMPDB_EXPAND - should only be true for ninja
+                        # This was added to ninja's compdb tool in version 1.9.0 (merged April 2018)
+                        # https://github.com/ninja-build/ninja/pull/1223
+                        # TODO: add check in generate to check version and enable this by default if it's available.
+                        self.ninja_bin_path, str(self.ninja_file),
+                        '-x ' if self.env.get('NINJA_COMPDB_EXPAND', True) else ''
+                    )
+                },
+            )
+
+            ninja_sorted_build(
+                ninja,
+                outputs="compiledb", rule="phony", implicit=["compile_commands.json"],
+            )
+
+        ninja_sorted_build(
+            ninja,
+            outputs=["run_ninja_scons_daemon_phony", scons_daemon_dirty],
+            rule="SCONS_DAEMON",
+        )
+
+        ninja.build(
+            "shutdown_ninja_scons_daemon_phony",
+            rule="EXIT_SCONS_DAEMON",
+        )
-        # Look in SCons's list of DEFAULT_TARGETS, find the ones that
-        # we generated a ninja build rule for.
-        scons_default_targets = [
-            get_path(tgt)
-            for tgt in SCons.Script.DEFAULT_TARGETS
-            if get_path(tgt) in self.built
-        ]
-
-        # If we found an overlap between SCons's list of default
-        # targets and the targets we created ninja builds for then use
-        # those as ninja's default as well.
-        if scons_default_targets:
-            ninja.default(" ".join(scons_default_targets))
+        if all_targets is None:
+            # Look in SCons's list of DEFAULT_TARGETS, find the ones that
+            # we generated a ninja build rule for.
+            all_targets = [str(node) for node in NINJA_DEFAULT_TARGETS]
+        else:
+            all_targets = list(all_targets)
+
+        if len(all_targets) == 0:
+            all_targets = ["phony_default"]
+            ninja_sorted_build(
+                ninja,
+                outputs=all_targets,
+                rule="phony",
+            )
+
+        ninja.default([self.ninja_syntax.escape_path(path) for path in sorted(all_targets)])
         with NamedTemporaryFile(delete=False, mode='w') as temp_ninja_file:
             temp_ninja_file.write(content.getvalue())

-        shutil.move(temp_ninja_file.name, ninja_file_path)
+        if self.env.GetOption('skip_ninja_regen') and os.path.exists(ninja_file_path) and filecmp.cmp(temp_ninja_file.name, ninja_file_path):
+            os.unlink(temp_ninja_file.name)
+        else:
+            daemon_dir = pathlib.Path(tempfile.gettempdir()) / ('scons_daemon_' + str(hashlib.md5(str(get_path(self.env["NINJA_DIR"])).encode()).hexdigest()))
+            pidfile = None
+            if os.path.exists(scons_daemon_dirty):
+                pidfile = scons_daemon_dirty
+            elif os.path.exists(daemon_dir / 'pidfile'):
+                pidfile = daemon_dir / 'pidfile'
+
+            if pidfile:
+                with open(pidfile) as f:
+                    pid = int(f.readline())
+                    try:
+                        os.kill(pid, signal.SIGINT)
+                    except OSError:
+                        pass
+
+                # wait for the server process to fully killed
+                # TODO: update wait_for_process_to_die() to handle timeout and then catch exception
+                # here and do something smart.
+                wait_for_process_to_die(pid)
+
+            if os.path.exists(scons_daemon_dirty):
+                os.unlink(scons_daemon_dirty)
+
+            shutil.move(temp_ninja_file.name, ninja_file_path)

         self.__generated = True
 class SConsToNinjaTranslator:
     """Translates SCons Actions into Ninja build objects."""

     def __init__(self, env):
         self.env = env
         self.func_handlers = {
             # Skip conftest builders

skipping to change at line 589 (4.3.0) / line 767 (4.4.0)

             build = self.handle_func_action(node, action)
         elif isinstance(action, SCons.Action.LazyAction):
             # pylint: disable=protected-access
             action = action._generate_cache(env)
             build = self.action_to_ninja_build(node, action=action)
         elif isinstance(action, SCons.Action.ListAction):
             build = self.handle_list_action(node, action)
         elif isinstance(action, COMMAND_TYPES):
             build = get_command(env, node, action)
         else:
-            raise Exception("Got an unbuildable ListAction for: {}".format(str(node)))
+            return {
+                "rule": "TEMPLATE",
+                "order_only": get_order_only(node),
+                "outputs": get_outputs(node),
+                "inputs": get_inputs(node),
+                "implicit": get_dependencies(node, skip_sources=True),
+            }

         if build is not None:
             build["order_only"] = get_order_only(node)

         # TODO: WPD Is this testing the filename to verify it's a configure context generated file?
         if not node.is_conftest():
             node_callback = node.check_attributes("ninja_build_callback")
             if callable(node_callback):
                 node_callback(env, node, build)

skipping to change at line 658 (4.3.0) / line 842 (4.4.0)
             result for result in results if result is not None and result["outputs"]
         ]

         if not results:
             return None

         # No need to process the results if we only got a single result
         if len(results) == 1:
             return results[0]

         all_outputs = list({output for build in results for output in build["outputs"]})
-        dependencies = list({dep for build in results for dep in build["implicit"]})
+        dependencies = list({dep for build in results for dep in build.get("implicit", [])})

         if results[0]["rule"] == "CMD" or results[0]["rule"] == "GENERATED_CMD":
             cmdline = ""
             for cmd in results:

                 # Occasionally a command line will expand to a
                 # whitespace only string (i.e. ' '). Which is not a
                 # valid command but does not trigger the empty command
                 # condition if not cmdstr. So here we strip preceding
                 # and proceeding whitespace to make strings like the
                 # above become empty strings and so will be skipped.
+                if not cmd.get("variables") or not cmd["variables"].get("cmd"):
+                    continue
+
                 cmdstr = cmd["variables"]["cmd"].strip()
                 if not cmdstr:
                     continue

                 # Skip duplicate commands
                 if cmdstr in cmdline:
                     continue

                 if cmdline:
                     cmdline += " && "

                 cmdline += cmdstr

             # Remove all preceding and proceeding whitespace
             cmdline = cmdline.strip()

+            env = node.env if node.env else self.env
+            executor = node.get_executor()
+            if executor is not None:
+                targets = executor.get_all_targets()
+            else:
+                if hasattr(node, "target_peers"):
+                    targets = node.target_peers
+                else:
+                    targets = [node]
+
             # Make sure we didn't generate an empty cmdline
             if cmdline:
                 ninja_build = {
                     "outputs": all_outputs,
                     "rule": get_rule(node, "GENERATED_CMD"),
                     "variables": {
                         "cmd": cmdline,
-                        "env": get_command_env(node.env if node.env else self.env),
+                        "env": get_command_env(env, targets, node.sources),
                     },
                     "implicit": dependencies,
                 }

                 if node.env and node.env.get("NINJA_POOL", None) is not None:
                     ninja_build["pool"] = node.env["pool"]

                 return ninja_build

         elif results[0]["rule"] == "phony":

skipping to change at line 718 (4.3.0) / line 914 (4.4.0)

             }

         elif results[0]["rule"] == "INSTALL":
             return {
                 "outputs": all_outputs,
                 "rule": get_rule(node, "INSTALL"),
                 "inputs": get_inputs(node),
                 "implicit": dependencies,
             }

-        raise Exception("Unhandled list action with rule: " + results[0]["rule"])
+        return {
+            "rule": "TEMPLATE",
+            "order_only": get_order_only(node),
+            "outputs": get_outputs(node),
+            "inputs": get_inputs(node),
+            "implicit": get_dependencies(node, skip_sources=True),
+        }
 End of changes. 50 change blocks. 
145 lines changed or deleted, 368 lines changed or added.
