From 5ed270759fc9642ac96308ad0d4dea75d29b9bc8 Mon Sep 17 00:00:00 2001 From: Cesar Douady Date: Tue, 16 Apr 2024 14:20:46 +0200 Subject: [PATCH] fixed optional target flag + fixed Python2 support + fixed ldebug + improved build --- Makefile | 18 +++-- Manifest | 1 + TO_DO | 4 ++ _bin/sys_config | 27 +++++--- _lib/lmake/import_machinery.py | 18 +++-- _lib/lmake/rules.src.py | 15 +++-- _lib/lmake_dbg.py | 3 +- src/autodep/clmake.cc | 18 ++--- src/autodep/ldepend.cc | 12 ++-- src/autodep/ltarget.cc | 16 ++--- src/job_exec.cc | 8 ++- src/lmakeserver/cmd.cc | 9 ++- src/lmakeserver/job.cc | 25 ++++--- src/lmakeserver/node.cc | 2 +- src/lmakeserver/rule.cc | 120 ++++++++++++++++----------------- src/lmakeserver/rule.x.hh | 24 ++++--- src/rpc_job.cc | 11 +++ src/rpc_job.hh | 62 ++++++++--------- src/utils.hh | 4 +- unit_tests/bench.py | 61 +++++++++++++++++ unit_tests/link.py | 6 ++ unit_tests/python2.py | 5 +- unit_tests/star.py | 31 +++++++-- 23 files changed, 317 insertions(+), 183 deletions(-) create mode 100644 unit_tests/bench.py diff --git a/Makefile b/Makefile index 00429edd..43cf31ca 100644 --- a/Makefile +++ b/Makefile @@ -11,8 +11,12 @@ MAKEFLAGS += -r -R .DEFAULT_GOAL := DFLT -sys_config.log : _bin/sys_config - ./$< $(@:%.log=%.mk) $(@:%.log=%.h) 2>$@ || cat $@ +$(shell { echo CXX=$$CXX ; echo PYTHON2=$$PYTHON2 ; echo PYTHON=$$PYTHON ; } >sys_config_env.tmp ) +$(shell cmp sys_config_env sys_config_env.tmp 2>/dev/null || { cp sys_config_env.tmp sys_config_env ; echo new env : >&2 ; cat sys_config_env >&2 ; }) +$(shell rm -f sys_config_env.tmp ) + +sys_config.log : _bin/sys_config sys_config_env + . ./sys_config_env ; ./$< $(@:%.log=%.mk) $(@:%.log=%.h) 2>$@ || cat $@ sys_config.mk : sys_config.log ;+@[ -f $@ ] || { echo "cannot find $@" ; exit 1 ; } sys_config.h : sys_config.log ;+@[ -f $@ ] || { echo "cannot find $@" ; exit 1 ; } @@ -217,6 +221,8 @@ $(LIB)/%.py : $(SLIB)/%.src.py @mkdir -p $(@D) sed \ -e 's!\$$BASH!$(BASH)!' \ + -e 's!\$$PYTHON2!$(PYTHON2)!' \ + -e 's!\$$PYTHON!$(PYTHON)!' \ -e 's!\$$GIT!$(GIT)!' \ -e 's!\$$LD_LIBRARY_PATH!$(PY_LD_LIBRARY_PATH)!' \ -e 's!\$$STD_PATH!$(STD_PATH)!' \ @@ -649,8 +655,8 @@ UNIT_TESTS : UNIT_TESTS1 UNIT_TESTS2 @( cd $(@D) ; git clean -ffdxq >/dev/null 2>/dev/null ) ; : # keep $(@D) to ease debugging, ignore rc as old versions of git work but generate an error @for f in $$(grep '^$(UT_DIR)/base/' Manifest) ; do df=$(@D)/$${f#$(UT_DIR)/base/} ; mkdir -p $$(dirname $$df) ; cp $$f $$df ; done @cd $(@D) ; find . 
-type f -printf '%P\n' > Manifest - @ ( cd $(@D) ; PATH=$(ROOT_DIR)/bin:$(ROOT_DIR)/_bin:$$PATH $(ROOT_DIR)/$< ) >$@.out 2>$@.err \ - && mv $@.out $@ \ + @ ( cd $(@D) ; PATH=$(ROOT_DIR)/bin:$(ROOT_DIR)/_bin:$$PATH CXX=$(CXX) $(ROOT_DIR)/$< ) >$@.out 2>$@.err \ + && mv $@.out $@ \ || ( cat $@.out $@.err ; exit 1 ) %.dir/tok : %.py $(LMAKE_FILES) _lib/ut.py @@ -658,8 +664,8 @@ UNIT_TESTS : UNIT_TESTS1 UNIT_TESTS2 @mkdir -p $(@D) @( cd $(@D) ; git clean -ffdxq >/dev/null 2>/dev/null ) ; : # keep $(@D) to ease debugging, ignore rc as old versions of git work but generate an error @cp $< $(@D)/Lmakefile.py - @ ( cd $(@D) ; PATH=$(ROOT_DIR)/bin:$(ROOT_DIR)/_bin:$$PATH PYTHONPATH=$(ROOT_DIR)/lib:$(ROOT_DIR)/_lib HOME= $(PYTHON) Lmakefile.py ) >$@.out 2>$@.err \ - && mv $@.out $@ \ + @ ( cd $(@D) ; PATH=$(ROOT_DIR)/bin:$(ROOT_DIR)/_bin:$$PATH PYTHONPATH=$(ROOT_DIR)/lib:$(ROOT_DIR)/_lib HOME= CXX=$(CXX) $(PYTHON) Lmakefile.py ) >$@.out 2>$@.err \ + && mv $@.out $@ \ || ( cat $@.out $@.err ; exit 1 ) # diff --git a/Manifest b/Manifest index db674926..cf367518 100644 --- a/Manifest +++ b/Manifest @@ -153,6 +153,7 @@ unit_tests/base/Lmakefile.py unit_tests/base/hello.py unit_tests/base/src1 unit_tests/base/src2 +unit_tests/bench.py unit_tests/cache.py unit_tests/cargo.py unit_tests/chain.py diff --git a/TO_DO b/TO_DO index 8397050d..6ad3291b 100644 --- a/TO_DO +++ b/TO_DO @@ -108,6 +108,10 @@ items : - this way, no harm can be done outside repo * provide a reasonable default value when dynamic functions return None - at least, dont crash +* provide more options to stderr management : + - redirect to stdout + - hide it (still accessible in lshow -e) + ? rename allow_stderr to stderr * implement cache v2 (copy & link) : - 2 levels : disk level, global level - use link instead of copy diff --git a/_bin/sys_config b/_bin/sys_config index 59050531..c0b2f80c 100755 --- a/_bin/sys_config +++ b/_bin/sys_config @@ -14,12 +14,16 @@ START_DIR=$(pwd) mkdir -p trial cd trial +PYTHON2=${PYTHON2:-python2} ; PYTHON2=$(type -p $PYTHON2||:) # ok if we have no python2 +PYTHON=${PYTHON:-python3} ; PYTHON=$( type -p $PYTHON ) +CXX=${CXX:-g++} ; CXX=$( type -p $CXX ) + +echo /$PYTHON2/ /$PYTHON/ /$CXX/ + # # Python configuration (Python2 is optional) # -PYTHON2=${PYTHON2:-python2} ; PYTHON2=$(type -p $PYTHON2||:) # ok if we have no python2 -PYTHON=${PYTHON:-python3} ; PYTHON=$( type -p $PYTHON ) for python in $PYTHON2 $PYTHON ; do if [ -x $python ] ; then eval $($python -c 'if True : @@ -51,12 +55,19 @@ if [ "$PY2_LIB_DIR" -a "$PY_LIB_DIR" ] ; then PY_LD_LIBRARY_PATH=$PY_LIB_DIR:$PY else PY_LD_LIBRARY_PATH=$PY_LIB_DIR$PY2_LIB_DIR # only one is present fi # cancel Python2 if unusable as a library -{ [ -f $PY2_INCLUDEPY/Python.h ] && file -L $PY2_LIB_DIR/$PY2_LIB_BASE | grep -q shared ; } || PYTHON2= +cat <<"EOF" > python2.c + #include + int main() { + PyObject* none = Py_None ; + } +EOF +p2ld=$PY2_LIB_DIR +[ "$p2ld" ] && p2ld="-L$p2ld" +$CXX -o python2 -I$PY2_INCLUDEPY $p2ld python2.c -l:$PY2_LIB_BASE || PYTHON2= # # CXX configuration # -CXX=${CXX:-g++} [ "$CXX" != "${CXX%++*}" ] || { echo cannot recognize c++ compiler $CXX ; exit 1 ; } # ensure we can find "++" in cxx, for example gcc or clang will not work, we must use g++ and clang++ type -p $CXX >/dev/null || { echo cannot find c++ compiler $CXX ; exit 1 ; } @@ -99,7 +110,7 @@ STD_INC_DIRS="$(echo $(realpath $(sed -e '1,/<.*>.*search starts/d' -e '/End of # test whether we have seccomp : warning, include is not enough to test, check its content too # cat <<"EOF" > 
seccomp.c - #include + #include struct scmp_version v ; EOF # usually seccomp.h is in /usr/include (with an internal file /usr/include/linux/seccomp.h) but some systems have seccomp.h directly installed in /usr/include/linux @@ -222,8 +233,8 @@ fi # # addr2line # -[ "" ] || ADDR2LINE=$(find $(dirname $(realpath $(type -p $CXX))) -name llvm-addr2line) -[ -x "$ADDR2LINE" ] || ADDR2LINE=$(find $(dirname $(realpath $(type -p $CXX))) -name addr2line ) +[ "" ] || ADDR2LINE=$(find $(dirname $(realpath $CXX)) -name llvm-addr2line) +[ -x "$ADDR2LINE" ] || ADDR2LINE=$(find $(dirname $(realpath $CXX)) -name addr2line ) [ -x "$ADDR2LINE" ] || ADDR2LINE=$(type -p addr2line) # @@ -296,7 +307,7 @@ PY_INCLUDEPY := $PY_INCLUDEPY PY_LIB_DIR := $PY_LIB_DIR PY_LIB_BASE := $PY_LIB_BASE # -CXX := $(type -p $CXX) +CXX := $CXX CXX_FLAVOR := $CXX_FLAVOR LINK_LIB_PATH := $LINK_LIB_PATH STD_INC_DIRS := $STD_INC_DIRS diff --git a/_lib/lmake/import_machinery.py b/_lib/lmake/import_machinery.py index 4e5164b7..c273efae 100644 --- a/_lib/lmake/import_machinery.py +++ b/_lib/lmake/import_machinery.py @@ -19,9 +19,12 @@ def _mask_python_deps() : # - if a file can be generated, there will be no dep (as the file is not accessed), this may lead to a non-existing module without job rerun or worse, a following file may be used # - a lot of sibling files will be accessed, triggering potentially unwanted deps # to prevent that, autodep is deactivated during import - if _sys.version_info.major==2 : import __builtins__ as builtins - else : import builtins - orig_import = builtins.__import__ + if _sys.version_info.major==2 : + builtins_dct = __builtins__ + else : + import builtins + builtins_dct = builtins.__dict__ + orig_import = builtins_dct['__import__'] def new_import(*args,**kwds) : with Autodep(False) : return orig_import(*args,**kwds) @@ -35,13 +38,16 @@ def new_exec_module(*args,**kwds) : _LoaderBasics.exec_module = orig_exec_module except : raise RuntimeError('masking python deps during import is not available for python%d.%d'%_sys.version_info[:2]) - builtins.__import__ = new_import # wrap at the end to avoid wraping our own imports + builtins_dct['__import__'] = new_import # wrap at the end to avoid wraping our own imports def _maybe_local(file) : 'fast check for local files, avoiding full absolute path generation' return not file or file[0]!='/' or file.startswith(root_dir) -import importlib.machinery as _machinery -_std_suffixes = _machinery.all_suffixes()+['/__init__.py'] # account for packages, not included in all_suffixes() +if _sys.version_info.major==2 : + _std_suffixes = ['.py','.so','/__init__.py'] # standard suffixes are not available with Python2 +else : + import importlib.machinery as _machinery + _std_suffixes = _machinery.all_suffixes()+['/__init__.py'] # account for packages, not included in all_suffixes() def _gen_module_deps() : '''fixes imports so as to be sure all files needed to do an import is correctly reported (not merely those that exist)''' diff --git a/_lib/lmake/rules.src.py b/_lib/lmake/rules.src.py index d18f94ba..00f9ad6c 100644 --- a/_lib/lmake/rules.src.py +++ b/_lib/lmake/rules.src.py @@ -11,10 +11,10 @@ import lmake from . import has_ld_audit,pdict,root_dir # if not in an lmake repo, root_dir is not set to current dir -shell = '$BASH' # . -python = _sys.executable - -_std_path = '$STD_PATH' # substituted at installation time +shell = '$BASH' # substituted at installation time +python2 = '$PYTHON2' # . +python = '$PYTHON' # . +_std_path = '$STD_PATH' # . 
_ld_library_path = '$LD_LIBRARY_PATH' # . _lmake_dir = __file__.rsplit('/lib/',1)[0] @@ -88,8 +88,8 @@ class Rule(_RuleBase) : n_retries = 1 # number of retries in case of job lost. 1 is a reasonable value # n_tokens = 1 # number of jobs likely to run in parallel for this rule (used for ETA estimation) # prio = 0 # in case of ambiguity, rules are selected with highest prio first - python = (python,) # python used for callable cmd - shell = (shell ,) # shell used for str cmd (_sh is usually /bin/sh which may test for dir existence before chdir, which defeats auto_mkdir) + python = (python,) # python used for callable cmd + shell = (shell ,) # shell used for str cmd (_sh is usually /bin/sh which may test for dir existence before chdir, which defeats auto_mkdir) start_delay = 3 # delay before sending a start message if job is not done by then, 3 is a reasonable compromise max_stderr_len = 100 # maximum number of stderr lines shown in output (full content is accessible with lshow -e), 100 is a reasonable compromise # timeout = None # timeout allocated to job execution (in s), must be None or an int @@ -140,7 +140,7 @@ class _PyRule(Rule) : def cmd() : # this will be executed before cmd() of concrete subclasses as cmd() are chained in case of inheritance if gen_module_deps or mask_python_deps : # fast path :if nothing to do, do nothing try : import lmake - except ModuleNotFoundError : + except ImportError : import sys sys.path[0:0] = (_lmake_dir+'/lib',) from lmake.import_machinery import fix_import @@ -150,6 +150,7 @@ class Py2Rule(_PyRule) : 'base rule that handle pyc creation when importing modules in Python' side_targets = { '__PYC__' : ( r'{*:(.+/)?}{*:\w+}.pyc' , 'Incremental' ) } gen_module_deps = True + python = python2 class Py3Rule(_PyRule) : 'base rule that handle pyc creation when importing modules in Python' side_targets = { '__PYC__' : ( r'{*:(.+/)?}__pycache__/{*:\w+}.{*:\w+-\d+}.pyc' , 'Incremental' ) } diff --git a/_lib/lmake_dbg.py b/_lib/lmake_dbg.py index 3c4487a2..c67a8c87 100644 --- a/_lib/lmake_dbg.py +++ b/_lib/lmake_dbg.py @@ -79,7 +79,8 @@ def run_pdb(dbg_dir,redirected,func,*args,**kwds) : debugger.runcall(func,*args,**kwds) except BaseException as e : traceback.print_exc() - debugger.interaction(None,sys.exc_traceback) + debugger.reset() + debugger.interaction(None,sys.exc_info()[2]) def run_vscode(dbg_dir,redirected,func,*args,**kwds) : import json diff --git a/src/autodep/clmake.cc b/src/autodep/clmake.cc index ace759c7..016f44f2 100644 --- a/src/autodep/clmake.cc +++ b/src/autodep/clmake.cc @@ -68,11 +68,11 @@ static PyObject* depend( PyObject* /*null*/ , PyObject* args , PyObject* kwds ) AccessDigest ad { .accesses=~Accesses() , .dflags=Dflag::Required } ; if (py_kwds) { size_t n_kwds = py_kwds->size() ; - /**/ if ( const char* s="follow_symlinks" ; py_kwds->contains(s) ) { n_kwds-- ; no_follow = !(*py_kwds)[s] ; } - /**/ if ( const char* s="verbose" ; py_kwds->contains(s) ) { n_kwds-- ; verbose = +(*py_kwds)[s] ; } - /**/ if ( const char* s="read" ; py_kwds->contains(s) ) { n_kwds-- ; read = +(*py_kwds)[s] ; } - for( Dflag df : Dflag::NDyn ) if ( ::string s=snake_str(df ) ; py_kwds->contains(s) ) { n_kwds-- ; ad.dflags .set(df ,+(*py_kwds)[s]) ; } - for( ExtraDflag edf : All ) if ( ::string s=snake_str(edf) ; ExtraDflagChars[+edf] && py_kwds->contains(s) ) { n_kwds-- ; ad.extra_dflags.set(edf,+(*py_kwds)[s]) ; } + /**/ if ( const char* s="follow_symlinks" ; py_kwds->contains(s) ) { n_kwds-- ; no_follow = !(*py_kwds)[s] ; } + /**/ if ( const char* s="verbose" ; 
py_kwds->contains(s) ) { n_kwds-- ; verbose = +(*py_kwds)[s] ; } + /**/ if ( const char* s="read" ; py_kwds->contains(s) ) { n_kwds-- ; read = +(*py_kwds)[s] ; } + for( Dflag df : Dflag::NDyn ) if ( ::string s=snake_str(df ) ; py_kwds->contains(s) ) { n_kwds-- ; ad.dflags .set(df ,+(*py_kwds)[s]) ; } + for( ExtraDflag edf : All ) if ( ::string s=snake_str(edf) ; ExtraDflagChars[+edf].second && py_kwds->contains(s) ) { n_kwds-- ; ad.extra_dflags.set(edf,+(*py_kwds)[s]) ; } // if (n_kwds) return py_err_set(Exception::TypeErr,"unexpected keyword arg") ; } @@ -112,10 +112,10 @@ static PyObject* target( PyObject* /*null*/ , PyObject* args , PyObject* kwds ) AccessDigest ad { .write=Yes , .extra_tflags=ExtraTflag::Allow } ; if (py_kwds) { size_t n_kwds = py_kwds->size() ; - /**/ if ( const char* s="follow_symlinks" ; py_kwds->contains(s) ) { n_kwds-- ; no_follow = !(*py_kwds)[s] ; } - /**/ if ( const char* s="write" ; py_kwds->contains(s) ) { n_kwds-- ; ad.write = No | +(*py_kwds)[s] ; } - for( Tflag tf : Tflag::NDyn ) if ( ::string s=snake_str(tf ) ; py_kwds->contains(s) ) { n_kwds-- ; ad.tflags .set(tf ,+(*py_kwds)[s]) ; } - for( ExtraTflag etf : All ) if ( ::string s=snake_str(etf) ; ExtraTflagChars[+etf] && py_kwds->contains(s) ) { n_kwds-- ; ad.extra_tflags.set(etf,+(*py_kwds)[s]) ; } + /**/ if ( const char* s="follow_symlinks" ; py_kwds->contains(s) ) { n_kwds-- ; no_follow = !(*py_kwds)[s] ; } + /**/ if ( const char* s="write" ; py_kwds->contains(s) ) { n_kwds-- ; ad.write = No | +(*py_kwds)[s] ; } + for( Tflag tf : Tflag::NDyn ) if ( ::string s=snake_str(tf ) ; py_kwds->contains(s) ) { n_kwds-- ; ad.tflags .set(tf ,+(*py_kwds)[s]) ; } + for( ExtraTflag etf : All ) if ( ::string s=snake_str(etf) ; ExtraTflagChars[+etf].second && py_kwds->contains(s) ) { n_kwds-- ; ad.extra_tflags.set(etf,+(*py_kwds)[s]) ; } // if (n_kwds) return py_err_set(Exception::TypeErr,"unexpected keyword arg") ; } diff --git a/src/autodep/ldepend.cc b/src/autodep/ldepend.cc index c153d67b..10bc80f0 100644 --- a/src/autodep/ldepend.cc +++ b/src/autodep/ldepend.cc @@ -31,12 +31,12 @@ int main( int argc , char* argv[]) { , { Flag::Verbose , { .short_name='v' , .has_arg=false , .doc="write dep crcs on stdout" } } , { Flag::NoRead , { .short_name='R' , .has_arg=false , .doc="does not report a read, only flags" } } // - , { Flag::Critical , { .short_name=DflagChars [+Dflag ::Critical ] , .has_arg=false , .doc="report critical deps" } } - , { Flag::Essential , { .short_name=DflagChars [+Dflag ::Essential ] , .has_arg=false , .doc="ask that deps be seen in graphical flow" } } - , { Flag::IgnoreError , { .short_name=DflagChars [+Dflag ::IgnoreError ] , .has_arg=false , .doc="accept that deps are in error" } } - , { Flag::NoRequired , { .short_name=DflagChars [+Dflag ::Required ] , .has_arg=false , .doc="accept that deps cannot be built" } } - , { Flag::Ignore , { .short_name=ExtraDflagChars[+ExtraDflag::Ignore ] , .has_arg=false , .doc="ignore reads" } } - , { Flag::StatReadData , { .short_name=ExtraDflagChars[+ExtraDflag::StatReadData] , .has_arg=false , .doc="stat access implies access to full file content" } } + , { Flag::Critical , { .short_name=DflagChars [+Dflag ::Critical ].second , .has_arg=false , .doc="report critical deps" } } + , { Flag::Essential , { .short_name=DflagChars [+Dflag ::Essential ].second , .has_arg=false , .doc="ask that deps be seen in graphical flow" } } + , { Flag::IgnoreError , { .short_name=DflagChars [+Dflag ::IgnoreError ].second , .has_arg=false , .doc="accept that deps are in error" } } + 
, { Flag::NoRequired , { .short_name=DflagChars [+Dflag ::Required ].second , .has_arg=false , .doc="accept that deps cannot be built" } } + , { Flag::Ignore , { .short_name=ExtraDflagChars[+ExtraDflag::Ignore ].second , .has_arg=false , .doc="ignore reads" } } + , { Flag::StatReadData , { .short_name=ExtraDflagChars[+ExtraDflag::StatReadData].second , .has_arg=false , .doc="stat access implies access to full file content" } } }} ; CmdLine cmd_line { syntax , argc , argv } ; // diff --git a/src/autodep/ltarget.cc b/src/autodep/ltarget.cc index 2f5cd561..f5b318f9 100644 --- a/src/autodep/ltarget.cc +++ b/src/autodep/ltarget.cc @@ -28,14 +28,14 @@ int main( int argc , char* argv[]) { { Flag::FollowSymlinks , { .short_name='L' , .has_arg=false , .doc="Logical view, follow symbolic links" } } , { Flag::NoWrite , { .short_name='W' , .has_arg=false , .doc="does not report a write, only flags" } } // - , { Flag::Essential , { .short_name=TflagChars [+Tflag ::Essential ] , .has_arg=false , .doc="show when generating user oriented graphs" } } - , { Flag::Incremental , { .short_name=TflagChars [+Tflag ::Incremental] , .has_arg=false , .doc="do not rm file before job execution" } } - , { Flag::NoUniquify , { .short_name=TflagChars [+Tflag ::NoUniquify ] , .has_arg=false , .doc="do not uniquify target if incremental and several links point to it" } } - , { Flag::NoWarning , { .short_name=TflagChars [+Tflag ::NoWarning ] , .has_arg=false , .doc="do not warn user if uniquified or rm'ed while generated by another job" } } - , { Flag::Phony , { .short_name=TflagChars [+Tflag ::Phony ] , .has_arg=false , .doc="accept that target is not physically generated on disk" } } - , { Flag::Ignore , { .short_name=ExtraTflagChars[+ExtraTflag::Ignore ] , .has_arg=false , .doc="ignore writes" } } - , { Flag::NoAllow , { .short_name=ExtraTflagChars[+ExtraTflag::Allow ] , .has_arg=false , .doc="do not force target to be accepted, just inform writing to it" } } - , { Flag::SourceOk , { .short_name=ExtraTflagChars[+ExtraTflag::SourceOk ] , .has_arg=false , .doc="accept if target is actually a source" } } + , { Flag::Essential , { .short_name=TflagChars [+Tflag ::Essential ].second , .has_arg=false , .doc="show when generating user oriented graphs" } } + , { Flag::Incremental , { .short_name=TflagChars [+Tflag ::Incremental].second , .has_arg=false , .doc="do not rm file before job execution" } } + , { Flag::NoUniquify , { .short_name=TflagChars [+Tflag ::NoUniquify ].second , .has_arg=false , .doc="do not uniquify target if incremental and several links point to it" } } + , { Flag::NoWarning , { .short_name=TflagChars [+Tflag ::NoWarning ].second , .has_arg=false , .doc="do not warn user if uniquified or rm'ed while generated by another job" } } + , { Flag::Phony , { .short_name=TflagChars [+Tflag ::Phony ].second , .has_arg=false , .doc="accept that target is not physically generated on disk" } } + , { Flag::Ignore , { .short_name=ExtraTflagChars[+ExtraTflag::Ignore ].second , .has_arg=false , .doc="ignore writes" } } + , { Flag::NoAllow , { .short_name=ExtraTflagChars[+ExtraTflag::Allow ].second , .has_arg=false , .doc="do not force target to be accepted, just inform writing to it" } } + , { Flag::SourceOk , { .short_name=ExtraTflagChars[+ExtraTflag::SourceOk ].second , .has_arg=false , .doc="accept if target is actually a source" } } }} ; CmdLine cmd_line { syntax,argc,argv } ; // diff --git a/src/job_exec.cc b/src/job_exec.cc index 4085cf03..4a591f6a 100644 --- a/src/job_exec.cc +++ b/src/job_exec.cc @@ -191,10 +191,11 
@@ Digest analyze( bool at_end , bool killed=false ) { break ; DF} if ( td.tflags[Tflag::Target] && !td.tflags[Tflag::Phony] ) { - if (td.tflags[Tflag::Static]) { + if ( td.tflags[Tflag::Static] && !td.extra_tflags[ExtraTflag::Optional] ) { if (unlnk ) append_to_string( res.msg , "missing static target " , mk_file(file,No/*exists*/) , '\n' ) ; } else { - if (ad.write==Yes) { if (unlnk ) td.tflags &= ~Tflag::Target ; } // unless static or phony, a target loses its official status if not actually produced + // unless static and non-optional or phony, a target loses its official status if not actually produced + if (ad.write==Yes) { if (unlnk ) td.tflags &= ~Tflag::Target ; } else { if (!is_target(file)) td.tflags &= ~Tflag::Target ; } } } @@ -342,7 +343,8 @@ int main( int argc , char* argv[] ) { end_report.msg += wash_report.first ; if (!wash_report.second) { end_report.digest.status = Status::LateLostErr ; goto End ; } g_gather.new_deps( start_overhead , ::move(g_start_info.deps) , g_start_info.stdin ) ; - for( auto const& [t,f] : g_match_dct.knowns ) if (f.is_target==Yes) g_gather.new_unlnk(start_overhead,t) ; + // non-optional static targets must be reported in all cases + for( auto const& [t,f] : g_match_dct.knowns ) if ( f.is_target==Yes && !f.extra_tflags()[ExtraTflag::Optional] ) g_gather.new_unlnk(start_overhead,t) ; // Fd child_stdin ; if (+g_start_info.stdin) child_stdin = open_read(g_start_info.stdin) ; diff --git a/src/lmakeserver/cmd.cc b/src/lmakeserver/cmd.cc index 6e73972b..e8b6c3a8 100644 --- a/src/lmakeserver/cmd.cc +++ b/src/lmakeserver/cmd.cc @@ -829,7 +829,7 @@ R"({ ::string flags_str ; /**/ flags_str += t->crc==Crc::None ? 'U' : +t->crc ? 'W' : '-' ; /**/ flags_str += ' ' ; - for( Tflag tf : All ) flags_str += td.tflags[tf] ? TflagChars[+tf] : '-' ; + for( Tflag tf : All ) flags_str += td.tflags[tf] ? 
TflagChars[+tf].second : '-' ; // _send_node( fd , ro , verbose , Maybe|!td.tflags[Tflag::Target]/*hide*/ , flags_str , t , lvl ) ; } @@ -913,10 +913,9 @@ R"({ _show_job(fd,ro,job,lvl) ; break ; case ReqKey::Deps : { - bool always = ro.flags[ReqFlag::Verbose] ; - ::string uphill_name = dir_name(target->name()) ; - double prio = -Infinity ; - if (+uphill_name) _send_node( fd , ro , always , Maybe/*hide*/ , "U" , target->dir() , lvl ) ; + bool always = ro.flags[ReqFlag::Verbose] ; + double prio = -Infinity ; + if ( target->is_plain() && +target->dir() ) _send_node( fd , ro , always , Maybe/*hide*/ , "U" , target->dir() , lvl ) ; for( JobTgt jt : target->conform_job_tgts() ) { bool hide = !jt.produces(target) ; if (always) _send_job( fd , ro , Yes , hide , jt , lvl ) ; diff --git a/src/lmakeserver/job.cc b/src/lmakeserver/job.cc index fc351408..8fb8b1e9 100644 --- a/src/lmakeserver/job.cc +++ b/src/lmakeserver/job.cc @@ -34,9 +34,9 @@ namespace Engine { for( Target t : targets ) { FileActionTag fat = {}/*garbage*/ ; // - if (t->polluted ) fat = FileActionTag::Unlnk ; // wash pollution - else if (t->crc==Crc::None ) fat = FileActionTag::None ; // nothing to wash - else if (t->is_src_anti() ) fat = FileActionTag::Src ; // dont touch sources, not even integrity check + if (t->polluted ) fat = FileActionTag::Unlnk ; // wash pollution + else if (t->crc==Crc::None ) fat = FileActionTag::None ; // nothing to wash + else if (t->is_src_anti() ) fat = FileActionTag::Src ; // dont touch sources, not even integrity check else if (!t.tflags[Tflag::Incremental]) fat = FileActionTag::Unlnk ; else if ( t.tflags[Tflag::NoUniquify ]) fat = FileActionTag::None ; else fat = FileActionTag::Uniquify ; @@ -44,10 +44,10 @@ namespace Engine { // trace("wash_target",t,fa) ; switch (fat) { - case FileActionTag::Src : if (t->crc!=Crc::None) locked_dirs.insert(t->dir()) ; break ; // nothing to do in job_exec, not even integrity check - case FileActionTag::Uniquify : locked_dirs.insert(t->dir()) ; actions.emplace_back(t,fa) ; break ; - case FileActionTag::None : if (t->crc!=Crc::None) locked_dirs.insert(t->dir()) ; actions.emplace_back(t,fa) ; break ; // integrity check in job_exec - case FileActionTag::Unlnk : actions.emplace_back(t,fa) ; break ; + case FileActionTag::Src : if ( +t->dir() && t->crc!=Crc::None ) locked_dirs.insert(t->dir()) ; break ; // nothing to do, not even integrity check + case FileActionTag::Uniquify : if ( +t->dir() ) locked_dirs.insert(t->dir()) ; actions.emplace_back(t,fa) ; break ; + case FileActionTag::None : if ( +t->dir() && t->crc!=Crc::None ) locked_dirs.insert(t->dir()) ; actions.emplace_back(t,fa) ; break ; // integrity check + case FileActionTag::Unlnk : actions.emplace_back(t,fa) ; break ; if ( !t->has_actual_job(idx()) && t->has_actual_job() && !t.tflags[Tflag::NoWarning] ) warnings.push_back(t) ; if ( Node td=t->dir() ; +td ) { // @@ -449,8 +449,12 @@ namespace Engine { // bool has_new_deps = false ; if (fresh_deps) { - ::uset old_deps ; for( Dep const& d : (*this)->deps ) old_deps.insert(d) ; + ::uset old_deps ; ::vector deps ; deps.reserve(digest.deps.size()) ; + for( Dep const& d : (*this)->deps ) + if (d->is_plain()) + for( Node dd=d ; +dd ; dd=dd->dir() ) + if (!old_deps.insert(dd).second) break ; // record old deps and all uphill dirs as these are implicit deps for( auto const& [dn,dd] : digest.deps ) { Dep dep { Node(dn) , dd } ; if (!old_deps.contains(dep)) { @@ -459,13 +463,14 @@ namespace Engine { // because of disk date granularity (usually a few ms) and because of date 
discrepancy between executing host and disk server (usually a few ms when using NTP) // this means that the file could actually have been accessed before and have gotten wrong data. // if this occurs, consider dep as unstable if it was not a known dep (we know known deps have been finish before job started). - if (dep.hot) dep.crc({}) ; + if (dep.hot) { trace("reset",dep) ; dep.crc({}) ; } } if (!dep.is_crc) { dep->full_refresh(true/*report_no_file*/,running_reqs_,dn) ; dep.acquire_crc() ; // retry crc acquisition in case previous cleaning aligned the dates seen_dep_date |= dep.is_crc ; // if a dep has become a crc, we must fix ancillary file } else if (dep.never_match()) { + dep->set_buildable() ; if (dep->is_src_anti()) dep->refresh_src_anti(true/*report_no_file*/,running_reqs_,dn) ; // the goal is to detect overwritten unstable_dep = true ; } @@ -507,7 +512,7 @@ namespace Engine { } CoarseDelay old_exec_time = (*this)->best_exec_time().first ; MakeAction end_action = fresh_deps||ok==Maybe ? MakeAction::End : MakeAction::GiveUp ; - bool all_done = true ; + bool all_done = true ; JobReason err_reason ; for( Req req : running_reqs_ ) (*this)->req_info(req).step(Step::End) ; // ensure no confusion with previous run for( Req req : running_reqs_ ) { diff --git a/src/lmakeserver/node.cc b/src/lmakeserver/node.cc index 619e6ab4..5993e62e 100644 --- a/src/lmakeserver/node.cc +++ b/src/lmakeserver/node.cc @@ -735,7 +735,7 @@ namespace Engine { ::string Dep::dflags_str() const { ::string res ; res.reserve(N) ; - for( Dflag df : All ) res.push_back( dflags[df] ? DflagChars[+df] : '-' ) ; + for( Dflag df : All ) res.push_back( dflags[df] ? DflagChars[+df].second : '-' ) ; return res ; } diff --git a/src/lmakeserver/rule.cc b/src/lmakeserver/rule.cc index 5c5aeddd..f57bdb5d 100644 --- a/src/lmakeserver/rule.cc +++ b/src/lmakeserver/rule.cc @@ -266,10 +266,11 @@ namespace Engine { for( auto [vc,i] : ctx ) { ::vmap_ss dct ; switch (vc) { - case VarCmd::Stem : cb_str(vc,i,r->stems [i].first,stems ()[i] ) ; break ; - case VarCmd::Match : cb_str(vc,i,r->matches[i].first,matches()[i] ) ; break ; - case VarCmd::Dep : cb_str(vc,i,deps() [i].first,deps ()[i].second) ; break ; - case VarCmd::Rsrc : { auto it = rsrcs().find(rsrcs_spec[i].first) ; if (it!=rsrcs().end()) cb_str(vc,i,it->first ,it->second ) ; } break ; + case VarCmd::Stem : cb_str(vc,i,r->stems [i].first,stems ()[i] ) ; break ; + case VarCmd::StarMatch : + case VarCmd::Match : cb_str(vc,i,r->matches[i].first,matches()[i] ) ; break ; + case VarCmd::Dep : cb_str(vc,i,deps() [i].first,deps ()[i].second) ; break ; + case VarCmd::Rsrc : { auto it = rsrcs().find(rsrcs_spec[i].first) ; if (it!=rsrcs().end()) cb_str(vc,i,it->first ,it->second ) ; } break ; // case VarCmd::Stems : for( VarIdx j=0 ; jn_static_stems ; j++ ) dct.emplace_back(r->stems [j].first,stems ()[j]) ; cb_dct(vc,i,"stems" ,dct ) ; break ; case VarCmd::Targets : for( VarIdx j=0 ; jn_static_targets ; j++ ) dct.emplace_back(r->matches[j].first,matches()[j]) ; cb_dct(vc,i,"targets" ,dct ) ; break ; @@ -528,7 +529,7 @@ namespace Engine { ::string res ; eval_ctx( match , rsrcs , [&]( VarCmd vc , VarIdx i , ::string const& key , ::string const& val ) -> void { - if ( vc!=VarCmd::Match || in_statics ) { + if (vc!=VarCmd::StarMatch) { append_to_string(res,key," = ",mk_py_str(val),'\n') ; return ; } @@ -695,7 +696,7 @@ namespace Engine { stem_defs.emplace( ::string(py_k.as_a()) , ::string(py_v.as_a()) ) ; // // augment stems with definitions found in job_name and targets - size_t unnamed_star_idx 
= 1 ; // free running while walking over job_name + targets + size_t unnamed_star_idx = 1 ; // free running while walking over job_name + targets auto augment_stems = [&]( ::string const& k , bool star , ::string const* re , bool star_only ) -> void { if (re) { auto [it,inserted] = stem_defs.emplace(k,*re) ; @@ -703,7 +704,7 @@ namespace Engine { } if ( !star_only || star ) { auto [it,inserted] = stem_stars.emplace(k,No|star) ; - if ( !inserted && (No|star)!=it->second ) it->second = Maybe ; // stem is used both as static and star + if ( !inserted && (No|star)!=it->second ) it->second = Maybe ; // stem is used both as static and star } } ; field = "job_name" ; @@ -718,8 +719,8 @@ namespace Engine { ::string job_name_msg = "job_name" ; for( auto const& [py_k,py_tkfs] : dct[field].as_a() ) { field = py_k.as_a() ; - ::string target = py_tkfs.as_a()[0].as_a() ; // . - MatchKind kind = mk_enum(py_tkfs.as_a()[1].as_a()) ; // targets are a tuple (target_pattern,kind,flags...) + ::string target = py_tkfs.as_a()[0].as_a() ; // . + MatchKind kind = mk_enum(py_tkfs.as_a()[1].as_a()) ; // targets are a tuple (target_pattern,kind,flags...) // avoid processing target if it is identical to job_name : this is not an optimization, it is to ensure unnamed_star_idx's match if (target!=job_name) { _parse_py( target , &unnamed_star_idx , @@ -736,7 +737,7 @@ namespace Engine { // // gather job_name and targets field = "job_name" ; - unnamed_star_idx = 1 ; // reset free running at each pass over job_name+targets + unnamed_star_idx = 1 ; // reset free running at each pass over job_name+targets VarIdx n_static_unnamed_stems = 0 ; bool job_name_is_star = false ; auto stem_words = []( ::string const& k , bool star , bool unnamed ) -> ::string { @@ -752,15 +753,15 @@ namespace Engine { ) ; // field = "matches" ; - { ::vmap_s star_matches ; // defer star matches so that static targets are put first - ::vmap_s static_matches[N] ; // defer star matches so that static targets are put first + { ::vmap_s star_matches ; // defer star matches so that static targets are put first + ::vmap_s static_matches[N] ; // defer star matches so that static targets are put first bool seen_top = false ; bool seen_target = false ; - for( auto const& [py_k,py_tkfs] : dct[field].as_a() ) { // targets are a tuple (target_pattern,flags...) + for( auto const& [py_k,py_tkfs] : dct[field].as_a() ) { // targets are a tuple (target_pattern,flags...) field = py_k.as_a() ; Sequence const& pyseq_tkfs = py_tkfs.as_a() ; - ::string target = pyseq_tkfs[0].as_a() ; // . - MatchKind kind = mk_enum(pyseq_tkfs[1].as_a()) ; // targets are a tuple (target_pattern,kind,flags...) + ::string target = pyseq_tkfs[0].as_a() ; // . + MatchKind kind = mk_enum(pyseq_tkfs[1].as_a()) ; // targets are a tuple (target_pattern,kind,flags...) 
bool is_star = false ; ::set_s missing_stems ; bool is_target = kind!=MatchKind::SideDeps ; @@ -793,25 +794,22 @@ namespace Engine { } ) ; } - bool is_native_star = is_star ; - if ( is_official_target ) tflags |= Tflag::Target ; - if ( !is_star && is_official_target ) tflags |= Tflag::Essential ; // static targets are essential by default - if ( is_target ) _split_flags( snake_str(kind) , pyseq_tkfs , 2/*n_skip*/ , tflags , extra_tflags ) ; - else _split_flags( snake_str(kind) , pyseq_tkfs , 2/*n_skip*/ , dflags , extra_dflags ) ; - if ( extra_tflags[ExtraTflag::Optional] ) is_star = true ; - if ( !is_star ) tflags |= Tflag::Static ; - if (is_target ) flags = {tflags,extra_tflags} ; - else flags = {dflags,extra_dflags} ; + if ( is_official_target ) tflags |= Tflag::Target ; + if ( !is_star && is_official_target ) tflags |= Tflag::Essential ; // static targets are essential by default + if ( !is_star ) tflags |= Tflag::Static ; + if ( is_target ) { _split_flags( snake_str(kind) , pyseq_tkfs , 2/*n_skip*/ , tflags , extra_tflags ) ; flags = {tflags,extra_tflags} ; } + else { _split_flags( snake_str(kind) , pyseq_tkfs , 2/*n_skip*/ , dflags , extra_dflags ) ; flags = {dflags,extra_dflags} ; } // check - if ( target.starts_with(root_dir_s) ) throw to_string(snake(kind)," must be relative to root dir : " ,target) ; - if ( !is_lcl(target) ) throw to_string(snake(kind)," must be local : " ,target) ; - if ( !is_canon(target) ) throw to_string(snake(kind)," must be canonical : " ,target) ; - if ( +missing_stems ) throw to_string("missing stems ",missing_stems," in ",snake(kind)," : ",target) ; - if ( !is_official_target && is_special() ) throw "flags are meaningless for source and anti-rules"s ; - if ( is_star && is_special() ) throw to_string("star ",kind,"s are meaningless for source and anti-rules") ; - if ( is_star && is_stdout ) throw "stdout cannot be directed to a star target"s ; - if ( tflags [Tflag ::Incremental] && is_stdout ) throw "stdout cannot be directed to an incremental target"s ; - if ( extra_tflags[ExtraTflag::Optional ] && is_native_star ) throw "star targets are natively optional" ; + if ( target.starts_with(root_dir_s) ) throw to_string(snake(kind)," must be relative to root dir : " ,target) ; + if ( !is_lcl(target) ) throw to_string(snake(kind)," must be local : " ,target) ; + if ( !is_canon(target) ) throw to_string(snake(kind)," must be canonical : " ,target) ; + if ( +missing_stems ) throw to_string("missing stems ",missing_stems," in ",snake(kind)," : ",target) ; + if ( !is_official_target && is_special() ) throw "flags are meaningless for source and anti-rules"s ; + if ( is_star && is_special() ) throw to_string("star ",kind,"s are meaningless for source and anti-rules") ; + if ( is_star && is_stdout ) throw "stdout cannot be directed to a star target"s ; + if ( tflags [Tflag ::Incremental] && is_stdout ) throw "stdout cannot be directed to an incremental target"s ; + if ( extra_tflags[ExtraTflag::Optional ] && is_star ) throw "star targets are natively optional" ; + if ( extra_tflags[ExtraTflag::Optional ] && tflags[Tflag::Phony] ) throw "cannot be simultaneously optional and phony" ; bool is_top = is_target ? 
extra_tflags[ExtraTflag::Top] : extra_dflags[ExtraDflag::Top] ; seen_top |= is_top ; seen_target |= is_official_target ; @@ -876,14 +874,14 @@ namespace Engine { // // acquire fields linked to job execution // - field = "ete" ; if (dct.contains(field)) Attrs::acquire( exec_time , &dct[field] ) ; - field = "force" ; if (dct.contains(field)) Attrs::acquire( force , &dct[field] ) ; - field = "is_python" ; if (dct.contains(field)) Attrs::acquire( is_python , &dct[field] ) ; else throw "not found"s ; - field = "max_submit_count" ; if (dct.contains(field)) Attrs::acquire( n_submits , &dct[field] , uint8_t(1) ) ; - field = "n_tokens" ; if (dct.contains(field)) Attrs::acquire( n_tokens , &dct[field] ) ; + field = "ete" ; if (dct.contains(field)) Attrs::acquire( exec_time , &dct[field] ) ; + field = "force" ; if (dct.contains(field)) Attrs::acquire( force , &dct[field] ) ; + field = "is_python" ; if (dct.contains(field)) Attrs::acquire( is_python , &dct[field] ) ; else throw "not found"s ; + field = "max_submit_count" ; if (dct.contains(field)) Attrs::acquire( n_submits , &dct[field] , uint8_t(1) ) ; + field = "n_tokens" ; if (dct.contains(field)) Attrs::acquire( n_tokens , &dct[field] ) ; // - /**/ var_idxs["targets" ] = {VarCmd::Targets,0 } ; - for( VarIdx mi=0 ; mi() , var_idxs , *this } ; @@ -997,10 +995,11 @@ namespace Engine { VarIdx i = decode_int (&fstr[ci+1]) ; ci += sizeof(VarIdx) ; res += '{' ; switch (vc) { - case VarCmd::Stem : res += rd.stems [i].first ; break ; - case VarCmd::Match : res += rd.matches [i].first ; break ; - case VarCmd::Dep : res += rd.deps_attrs.spec.deps [i].first ; break ; - case VarCmd::Rsrc : res += rd.submit_rsrcs_attrs.spec.rsrcs[i].first ; break ; + case VarCmd::Stem : res += rd.stems [i].first ; break ; + case VarCmd::StarMatch : + case VarCmd::Match : res += rd.matches [i].first ; break ; + case VarCmd::Dep : res += rd.deps_attrs.spec.deps [i].first ; break ; + case VarCmd::Rsrc : res += rd.submit_rsrcs_attrs.spec.rsrcs[i].first ; break ; DF} res += '}' ; } break ; @@ -1300,14 +1299,15 @@ namespace Engine { ::vector_s RuleData::_list_ctx(::vector const& ctx) const { ::vector_s res ; for( auto [vc,i] : ctx ) switch (vc) { - case VarCmd::Stem : res.push_back(stems [i].first) ; break ; - case VarCmd::Match : res.push_back(matches [i].first) ; break ; - case VarCmd::Dep : res.push_back(deps_attrs.spec.deps [i].first) ; break ; - case VarCmd::Rsrc : res.push_back(submit_rsrcs_attrs.spec.rsrcs[i].first) ; break ; - case VarCmd::Stems : res.push_back("stems" ) ; break ; - case VarCmd::Targets : res.push_back("targets" ) ; break ; - case VarCmd::Deps : res.push_back("deps" ) ; break ; - case VarCmd::Rsrcs : res.push_back("resources" ) ; break ; + case VarCmd::Stem : res.push_back(stems [i].first) ; break ; + case VarCmd::StarMatch : + case VarCmd::Match : res.push_back(matches [i].first) ; break ; + case VarCmd::Dep : res.push_back(deps_attrs.spec.deps [i].first) ; break ; + case VarCmd::Rsrc : res.push_back(submit_rsrcs_attrs.spec.rsrcs[i].first) ; break ; + case VarCmd::Stems : res.push_back("stems" ) ; break ; + case VarCmd::Targets : res.push_back("targets" ) ; break ; + case VarCmd::Deps : res.push_back("deps" ) ; break ; + case VarCmd::Rsrcs : res.push_back("resources" ) ; break ; DF} return res ; } @@ -1320,27 +1320,27 @@ namespace Engine { // if the need arises, we will add an "id" artificial field entering in match_crc to distinguish them void RuleData::_set_crcs() { bool special = is_special() ; - Hash::Xxh h ; // each crc continues after the previous one, 
so they are standalone + Hash::Xxh h ; // each crc continues after the previous one, so they are standalone // - ::vector_s targets ; + ::vmap_s targets ; for( auto const& [k,me] : matches ) if ( me.flags.is_target==Yes && me.flags.tflags()[Tflag::Target] ) - targets.push_back(me.pattern) ; // keys and flags have no influence on matching + targets.emplace_back(me.pattern,me.flags.extra_tflags()[ExtraTflag::Optional]) ; // keys and flags have no influence on matching, except Optional h.update(special) ; h.update(stems ) ; h.update(cwd_s ) ; h.update(targets) ; if (special) { - h.update(allow_ext) ; // only exists for special rules + h.update(allow_ext) ; // only exists for special rules } else { - h.update(job_name) ; // job_name has no effect for source & anti as it is only used to store jobs and there are none - deps_attrs.update_hash(h) ; // no deps for source & anti + h.update(job_name) ; // job_name has no effect for source & anti as it is only used to store jobs and there are none + deps_attrs.update_hash(h) ; // no deps for source & anti } match_crc = h.digest() ; // - if (special) return ; // source & anti are only capable of matching + if (special) return ; // source & anti are only capable of matching // - h.update(matches ) ; // these define names and influence cmd execution, all is not necessary but simpler to code + h.update(matches ) ; // these define names and influence cmd execution, all is not necessary but simpler to code h.update(force ) ; h.update(is_python ) ; cmd .update_hash(h) ; diff --git a/src/lmakeserver/rule.x.hh b/src/lmakeserver/rule.x.hh index 35b1d090..9c6aaf34 100644 --- a/src/lmakeserver/rule.x.hh +++ b/src/lmakeserver/rule.x.hh @@ -18,13 +18,16 @@ #ifdef STRUCT_DECL +// START_OF_VERSIONING ENUM( VarCmd , Stems , Stem -, Targets , Match +, Targets , Match , StarMatch , Deps , Dep , Rsrcs , Rsrc ) +// END_OF_VERSIONING +// START_OF_VERSIONING ENUM_1( EnvFlag , Dflt = Rsrc // @@ -32,7 +35,9 @@ ENUM_1( EnvFlag , Rsrc // consider variable as a resource : upon modification, rebuild job if it was in error , Cmd // consider variable as a cmd : upon modification, rebuild job ) +// END_OF_VERSIONING +// START_OF_VERSIONING ENUM_2( Special , Shared = Infinite // <=Shared means there is a single such rule , HasJobs = Plain // <=HasJobs means jobs can refer to this rule @@ -45,6 +50,7 @@ ENUM_2( Special , Anti , GenericSrc ) +// END_OF_VERSIONING namespace Engine { @@ -682,14 +688,16 @@ namespace Engine { RuleTgt( ) = default ; RuleTgt( Rule r , VarIdx ti ) : Rule{r} , tgt_idx{ti} {} // accesses - Rep operator+ ( ) const { return (+Rule(*this)<) | tgt_idx ; } - bool operator== (RuleTgt const&) const = default ; - ::partial_ordering operator<=>(RuleTgt const&) const = default ; - ::string const& key ( ) const { return _matches().first ; } - ::string const& target ( ) const { SWEAR(_matches().second.flags.tflags()[Tflag::Target]) ; return _matches().second.pattern ; } + Rep operator+ ( ) const { return (+Rule(*this)<) | tgt_idx ; } + bool operator== (RuleTgt const&) const = default ; + ::partial_ordering operator<=> (RuleTgt const&) const = default ; // - Tflags tflags() const { return _matches().second.flags.tflags() ; } - bool sure () const { return tgt_idx<(*this)->n_static_targets || tflags()[Tflag::Phony] ; } + ::string const& key () const { return _matches().first ; } + ::string const& target () const { SWEAR(_matches().second.flags.tflags()[Tflag::Target]) ; return _matches().second.pattern ; } + Tflags tflags () const { return _matches().second.flags.tflags () ; 
} + ExtraTflags extra_tflags() const { return _matches().second.flags.extra_tflags() ; } + // + bool sure() const { return ( tgt_idx<(*this)->n_static_targets && !extra_tflags()[ExtraTflag::Optional] ) || tflags()[Tflag::Phony] ; } private : ::pair_s const& _matches() const { return (*this)->matches[tgt_idx] ; } // services diff --git a/src/rpc_job.cc b/src/rpc_job.cc index 6490b05f..58e5bd50 100644 --- a/src/rpc_job.cc +++ b/src/rpc_job.cc @@ -11,6 +11,17 @@ using namespace Disk ; using namespace Hash ; +template static constexpr bool _chk_flags_tab(::array<::pair,N> tab) { + bool res = true ; + for( E e=E(0) ; e!=All ; e++ ) res &= tab[+e].first==e ; + return res ; +} + +static_assert(_chk_flags_tab(DflagChars )) ; +static_assert(_chk_flags_tab(ExtraDflagChars)) ; +static_assert(_chk_flags_tab(TflagChars )) ; +static_assert(_chk_flags_tab(ExtraTflagChars)) ; + // // FileAction // diff --git a/src/rpc_job.hh b/src/rpc_job.hh index 505ac511..d8332898 100644 --- a/src/rpc_job.hh +++ b/src/rpc_job.hh @@ -59,14 +59,13 @@ ENUM_2( Dflag // flags for deps , Static // is static dep, for internal use only ) // END_OF_VERSIONING -static constexpr char DflagChars[] = { - 'E' // Essential -, 'c' // Critical -, 'e' // IgnoreError -, 'r' // Required -, 'S' // Static -} ; -static_assert(::size(DflagChars)==N) ; +static constexpr ::amap> DflagChars {{ + { Dflag::Critical , 'c' } +, { Dflag::Essential , 'E' } +, { Dflag::IgnoreError , 'e' } +, { Dflag::Required , 'r' } +, { Dflag::Static , 'S' } +}} ; using Dflags = BitMap ; // START_OF_VERSIONING @@ -77,12 +76,11 @@ ENUM_1( ExtraDflag , StatReadData ) // END_OF_VERSIONING -static constexpr char ExtraDflagChars[] = { - 0 // Top -, 'I' // Ignore -, 'd' // StatReadData -} ; -static_assert(::size(ExtraDflagChars)==N) ; +static constexpr ::amap> ExtraDflagChars {{ + { ExtraDflag::Top , 0 } +, { ExtraDflag::Ignore , 'I' } +, { ExtraDflag::StatReadData , 'd' } +}} ; using ExtraDflags = BitMap ; // START_OF_VERSIONING @@ -98,16 +96,15 @@ ENUM_2( Tflag // flags for targets , Target // is a target, for internal use only ) // END_OF_VERSIONING -static constexpr char TflagChars[] = { - 'E' // Essential -, 'i' // Incremental -, 'p' // Phony -, 'u' // NoUniquify -, 'w' // NoWarning -, 'S' // Static -, 'T' // Target -} ; -static_assert(::size(TflagChars)==N) ; +static constexpr ::amap> TflagChars {{ + { Tflag::Essential , 'E' } +, { Tflag::Incremental , 'i' } +, { Tflag::NoUniquify , 'u' } +, { Tflag::NoWarning , 'w' } +, { Tflag::Phony , 'p' } +, { Tflag::Static , 'S' } +, { Tflag::Target , 'T' } +}} ; using Tflags = BitMap ; inline bool static_phony(Tflags tf) { return tf[Tflag::Target] && (tf[Tflag::Static]||tf[Tflag::Phony]) ; @@ -124,15 +121,14 @@ ENUM_1( ExtraTflag , Wash // target was unlinked when washing before job execution ) // END_OF_VERSIONING -static constexpr char ExtraTflagChars[] = { - 0 // Top -, 'I' // Ignore -, 0 // Optional -, 's' // SourceOk -, 'a' // Allow -, 0 // Wash -} ; -static_assert(::size(ExtraTflagChars)==N) ; +static constexpr ::amap> ExtraTflagChars {{ + { ExtraTflag::Top , 0 } +, { ExtraTflag::Ignore , 'I' } +, { ExtraTflag::Optional , 0 } +, { ExtraTflag::SourceOk , 's' } +, { ExtraTflag::Allow , 'a' } +, { ExtraTflag::Wash , 0 } +}} ; using ExtraTflags = BitMap ; // START_OF_VERSIONING diff --git a/src/utils.hh b/src/utils.hh index c1c8ba0b..51039d20 100644 --- a/src/utils.hh +++ b/src/utils.hh @@ -854,8 +854,8 @@ template struct EnumIterator { E val ; } ; -template EnumIterator begin(E ) { return EnumIterator(E(0)) ; } -template 
EnumIterator end (E e) { return EnumIterator(e ) ; } +template constexpr EnumIterator begin(E ) { return EnumIterator(E(0)) ; } +template constexpr EnumIterator end (E e) { return EnumIterator(e ) ; } template ::ostream& operator<<( ::ostream& os , BitMap const bm ) { os <<'(' ; diff --git a/unit_tests/bench.py b/unit_tests/bench.py new file mode 100644 index 00000000..02d243f8 --- /dev/null +++ b/unit_tests/bench.py @@ -0,0 +1,61 @@ +# This file is part of the open-lmake distribution (git@github.com:cesar-douady/open-lmake.git) +# Copyright (c) 2023 Doliam +# This program is free software: you can redistribute/modify under the terms of the GPL-v3 (https://www.gnu.org/licenses/gpl-3.0.html). +# This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + +r = 10 # number of executables in regression +n = 10 # number of executables in sources +l = 10 # number of objects per executable +p = 5 # number of deps per object + +if __name__!='__main__' : + + import os + + import lmake + from lmake.rules import Rule,PyRule + + gxx = 'clang' + + lmake.manifest = ( + 'Lmakefile.py' + , *( f'exe_{e}.file_{o}.c' for e in range(n) for o in range(l) ) + , *( f'inc_{i}.h' for i in range(n*l+p)) + ) + + class Compile(Rule) : + targets = { 'OBJ' : r'{File:.*}.o' } + deps = { 'SRC' : '{File}.c' } + autodep = 'ld_preload' # clang seems to be hostile to ld_audit + cmd = '{gxx} -c -o {OBJ} -xc {SRC}' + + class Link(Rule) : + targets = { 'EXE' : r'{File:.*}.exe' } + autodep = 'ld_preload' # clang seems to be hostile to ld_audit + def cmd() : + lmake.depend(*(f'{File}.file_{o}.o' for o in range(l))) # fake link + cmd = "{gxx} -o {EXE} {' '.join((f'{File}.file_{o}.o' for o in range(l)))}" + + class All(PyRule) : + target = r'all_{N:\d+}' + def cmd() : + lmake.depend(*(f'exe_{e}.exe' for e in range(int(N)))) + +else : + + import ut + + from lmake import multi_strip + + nl = '\n' + + for e in range(n) : + for o in range(l) : + print(multi_strip(f''' + {nl.join(f'#include "inc_{e*o+i}.h"' for i in range(p))} + int {'main' if o==0 else f'foo_{o}'}() {{ return 0 ; }} + '''),file=open(f'exe_{e}.file_{o}.c','w')) + + for i in range(n*l+p) : open(f'inc_{i}.h','w') + + ut.lmake( f'all_{r}' , new=... , may_rerun=r+1 , done=r*l+r , steady=1 ) # lmake all_10000 to reproduce bench conditions of : https://david.rothlis.net/ninja-benchmark diff --git a/unit_tests/link.py b/unit_tests/link.py index bb7670d9..dde554af 100644 --- a/unit_tests/link.py +++ b/unit_tests/link.py @@ -3,11 +3,15 @@ # This program is free software: you can redistribute/modify under the terms of the GPL-v3 (https://www.gnu.org/licenses/gpl-3.0.html). # This program is distributed WITHOUT ANY WARRANTY, without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+date_prec = 1 + if __name__!='__main__' : import lmake from lmake.rules import Rule + lmake.config.disk_date_precision = date_prec + lmake.manifest = ( 'Lmakefile.py' , 'a/src' @@ -24,10 +28,12 @@ class Test(Rule) : else : import os + import time import ut os.makedirs('a',exist_ok=True) print('src',file=open('a/src','w')) + time.sleep(date_prec) # ensure source a/src is old enough ut.lmake( 'test' , may_rerun=1 , done=2 , new=1 ) diff --git a/unit_tests/python2.py b/unit_tests/python2.py index 64574917..f14f217a 100644 --- a/unit_tests/python2.py +++ b/unit_tests/python2.py @@ -20,7 +20,7 @@ import sys import lmake - from lmake.rules import PyRule + from lmake.rules import Py2Rule lmake.manifest = ( 'Lmakefile.py' @@ -29,7 +29,7 @@ , 'a_dep' ) - class Cat(PyRule) : + class Cat(Py2Rule) : stems = { 'File1' : r'.*' , 'File2' : r'.*' @@ -39,7 +39,6 @@ class Cat(PyRule) : 'FIRST' : '{File1}' , 'SECOND' : '{File2}' } - python = python2 def cmd() : lmake.depend('a_dep') sys.stdout.write(open(FIRST ).read()) diff --git a/unit_tests/star.py b/unit_tests/star.py index 80a0e596..5f937a46 100644 --- a/unit_tests/star.py +++ b/unit_tests/star.py @@ -13,11 +13,18 @@ lmake.manifest = ( 'Lmakefile.py' , 'hello' + , 'ok.opt.ref' + , 'ko.opt.ref' ) class Opt(Rule) : targets = { 'DST' : ('{File:.*}.opt','Optional') } - cmd = '[ {File} != ok ] || echo > {DST}' + cmd = '[ {File} != ok ] || echo 1 > {DST}' + + class Opt2(Rule) : + prio = -1 + targets = { 'DST' : '{File:.*}.opt' } + cmd = 'echo 2 > {DST}' class Star(Rule) : targets = { 'DST' : ('{File:.*}.star{D*:\\d+}',) } @@ -32,15 +39,25 @@ class Cpy(Rule) : dep = '{File}' cmd = 'cat' + class Chk(Rule) : + target = '{File:.*}.ok' + deps = { + 'DUT' : '{File}' + , 'REF' : '{File}.ref' + } + cmd = 'diff {REF} {DUT}' + else : import ut - print( 'hello' , file=open('hello','w') ) + print( 'hello' , file=open('hello' ,'w') ) + print( '1' , file=open('ok.opt.ref','w') ) + print( '2' , file=open('ko.opt.ref','w') ) - ut.lmake( 'hello.star1.cpy' , done=2 , new=1 ) - ut.lmake( 'hello.star2' , done=0 , new=0 ) - ut.lmake( 'hello.star3' , rc=1 , done=0 , new=0 ) - ut.lmake( 'ok.opt' , done=1 , new=0 ) - ut.lmake( 'ko.opt' , rc=1 , steady=1 , new=0 ) # no rule to make target + ut.lmake( 'hello.star1.cpy' , done=2 , new=1 ) + ut.lmake( 'hello.star2' , done=0 , new=0 ) + ut.lmake( 'hello.star3' , rc=1 , done=0 , new=0 ) + ut.lmake( 'ok.opt.ok' , done=2 , new=1 ) # select Opt + ut.lmake( 'ko.opt.ok' , steady=1 , done=2 , new=1 ) # select Opt2
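
For reference, a minimal sketch (not part of the patch) of how the `Optional` target flag fixed by this commit is meant to behave, distilled from the `unit_tests/star.py` changes above. Rule names and file names are illustrative only; the `Rule` attributes shown (`targets`, `prio`, `cmd`) are the ones exercised by the unit test itself.

```python
# Sketch only : mirrors unit_tests/star.py from this patch ; names are hypothetical.
import lmake
from lmake.rules import Rule

lmake.manifest = ( 'Lmakefile.py' , 'hello' )

class MayProduce(Rule) :
    # 'Optional' marks a static target the job is allowed not to produce :
    # if it is not written, it is not reported as missing and it loses its
    # official-target status, so another rule may be tried for the same file.
    targets = { 'DST' : ('{File:.*}.opt','Optional') }
    cmd     = '[ {File} != ok ] || echo 1 > {DST}'     # only produces ok.opt

class Fallback(Rule) :
    # lower-prio rule selected when the optional target above is not produced
    prio    = -1
    targets = { 'DST' : '{File:.*}.opt' }
    cmd     = 'echo 2 > {DST}'
```

As exercised by the new `ok.opt.ok` and `ko.opt.ok` checks in the test, `ok.opt` is produced by the optional rule while `ko.opt` falls through to the lower-priority fallback; per the `rule.cc` checks in this patch, `Optional` is rejected on star targets (which are natively optional) and cannot be combined with `Phony`.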