author    Alessandro Ghedini <ghedo@debian.org>  2018-10-03 10:00:52 +0100
committer James Cowgill <jcowgill@debian.org>    2018-10-03 10:00:52 +0100
commit    9cc8e9367297f0f83542647b587ec80e8a210e77 (patch)
tree      f8cde9b6aadd8e16b0029ea45e4eeb6344864eba
parent    736dfa105bfca6e0522a53710a0a8ecfdb8e2650 (diff)
Provide waf and related scripts
Note that, since upstream does not directly provide a compressed waf script,
there's no need for us to repack the upstream tarball.

Origin: vendor
Forwarded: not-needed
Last-Update: 2017-07-19

Gbp-Pq: Name 03_waf.patch
-rw-r--r--  waf  166
-rw-r--r--  waflib/Build.py  777
-rw-r--r--  waflib/ConfigSet.py  165
-rw-r--r--  waflib/Configure.py  368
-rw-r--r--  waflib/Context.py  406
-rw-r--r--  waflib/Errors.py  39
-rw-r--r--  waflib/Logs.py  203
-rw-r--r--  waflib/Node.py  478
-rw-r--r--  waflib/Options.py  200
-rw-r--r--  waflib/Runner.py  350
-rw-r--r--  waflib/Scripting.py  403
-rw-r--r--  waflib/Task.py  771
-rw-r--r--  waflib/TaskGen.py  471
-rw-r--r--  waflib/Tools/__init__.py  4
-rw-r--r--  waflib/Tools/ar.py  13
-rw-r--r--  waflib/Tools/asm.py  23
-rw-r--r--  waflib/Tools/bison.py  28
-rw-r--r--  waflib/Tools/c.py  26
-rw-r--r--  waflib/Tools/c_aliases.py  60
-rw-r--r--  waflib/Tools/c_config.py  805
-rw-r--r--  waflib/Tools/c_osx.py  121
-rw-r--r--  waflib/Tools/c_preproc.py  672
-rw-r--r--  waflib/Tools/c_tests.py  152
-rw-r--r--  waflib/Tools/ccroot.py  479
-rw-r--r--  waflib/Tools/clang.py  20
-rw-r--r--  waflib/Tools/clangxx.py  20
-rw-r--r--  waflib/Tools/compiler_c.py  44
-rw-r--r--  waflib/Tools/compiler_cxx.py  44
-rw-r--r--  waflib/Tools/compiler_d.py  41
-rw-r--r--  waflib/Tools/compiler_fc.py  43
-rw-r--r--  waflib/Tools/cs.py  113
-rw-r--r--  waflib/Tools/cxx.py  26
-rw-r--r--  waflib/Tools/d.py  54
-rw-r--r--  waflib/Tools/d_config.py  52
-rw-r--r--  waflib/Tools/d_scan.py  136
-rw-r--r--  waflib/Tools/dbus.py  29
-rw-r--r--  waflib/Tools/dmd.py  51
-rw-r--r--  waflib/Tools/errcheck.py  175
-rw-r--r--  waflib/Tools/fc.py  108
-rw-r--r--  waflib/Tools/fc_config.py  299
-rw-r--r--  waflib/Tools/fc_scan.py  64
-rw-r--r--  waflib/Tools/flex.py  38
-rw-r--r--  waflib/Tools/g95.py  54
-rw-r--r--  waflib/Tools/gas.py  12
-rw-r--r--  waflib/Tools/gcc.py  104
-rw-r--r--  waflib/Tools/gdc.py  35
-rw-r--r--  waflib/Tools/gfortran.py  71
-rw-r--r--  waflib/Tools/glib2.py  242
-rw-r--r--  waflib/Tools/gnu_dirs.py  66
-rw-r--r--  waflib/Tools/gxx.py  104
-rw-r--r--  waflib/Tools/icc.py  20
-rw-r--r--  waflib/Tools/icpc.py  20
-rw-r--r--  waflib/Tools/ifort.py  303
-rw-r--r--  waflib/Tools/intltool.py  101
-rw-r--r--  waflib/Tools/irixcc.py  51
-rw-r--r--  waflib/Tools/javaw.py  299
-rw-r--r--  waflib/Tools/ldc2.py  36
-rw-r--r--  waflib/Tools/lua.py  18
-rw-r--r--  waflib/Tools/md5_tstamp.py  24
-rw-r--r--  waflib/Tools/msvc.py  704
-rw-r--r--  waflib/Tools/nasm.py  16
-rw-r--r--  waflib/Tools/nobuild.py  11
-rw-r--r--  waflib/Tools/perl.py  85
-rw-r--r--  waflib/Tools/python.py  410
-rw-r--r--  waflib/Tools/qt5.py  497
-rw-r--r--  waflib/Tools/ruby.py  97
-rw-r--r--  waflib/Tools/suncc.py  48
-rw-r--r--  waflib/Tools/suncxx.py  48
-rw-r--r--  waflib/Tools/tex.py  327
-rw-r--r--  waflib/Tools/vala.py  218
-rw-r--r--  waflib/Tools/waf_unit_test.py  172
-rw-r--r--  waflib/Tools/winres.py  52
-rw-r--r--  waflib/Tools/xlc.py  44
-rw-r--r--  waflib/Tools/xlcxx.py  44
-rw-r--r--  waflib/Utils.py  615
-rw-r--r--  waflib/__init__.py  4
-rw-r--r--  waflib/ansiterm.py  238
-rw-r--r--  waflib/extras/__init__.py  4
-rw-r--r--  waflib/extras/compat15.py  305
-rw-r--r--  waflib/fixpy2.py  47
-rw-r--r--  waflib/processor.py  55
81 files changed, 14138 insertions, 0 deletions
diff --git a/waf b/waf
new file mode 100644
index 0000000..158e0cf
--- /dev/null
+++ b/waf
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# encoding: latin-1
+# Thomas Nagy, 2005-2018
+#
+"""
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import os, sys, inspect
+
+VERSION="2.0.9"
+REVISION="10a533182bd85c3f45a157fb5d62db50"
+GIT="c543921e7de1e319d9d3e425484d5a4d0794bb00"
+INSTALL=''
+C1='#1'
+C2='#,'
+C3='#*'
+cwd = os.getcwd()
+join = os.path.join
+
+
+WAF='waf'
+def b(x):
+ return x
+if sys.hexversion>0x300000f:
+ WAF='waf3'
+ def b(x):
+ return x.encode()
+
+def err(m):
+ print(('\033[91mError: %s\033[0m' % m))
+ sys.exit(1)
+
+def unpack_wafdir(dir, src):
+ f = open(src,'rb')
+ c = 'corrupt archive (%d)'
+ while 1:
+ line = f.readline()
+ if not line: err('run waf-light from a folder containing waflib')
+ if line == b('#==>\n'):
+ txt = f.readline()
+ if not txt: err(c % 1)
+ if f.readline() != b('#<==\n'): err(c % 2)
+ break
+ if not txt: err(c % 3)
+ txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
+
+ import shutil, tarfile
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ try:
+ for x in ('Tools', 'extras'):
+ os.makedirs(join(dir, 'waflib', x))
+ except OSError:
+ err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
+
+ os.chdir(dir)
+ tmp = 't.bz2'
+ t = open(tmp,'wb')
+ try: t.write(txt)
+ finally: t.close()
+
+ try:
+ t = tarfile.open(tmp)
+ except:
+ try:
+ os.system('bunzip2 t.bz2')
+ t = tarfile.open('t')
+ tmp = 't'
+ except:
+ os.chdir(cwd)
+ try: shutil.rmtree(dir)
+ except OSError: pass
+ err("Waf cannot be unpacked, check that bzip2 support is present")
+
+ try:
+ for x in t: t.extract(x)
+ finally:
+ t.close()
+
+ for x in ('Tools', 'extras'):
+ os.chmod(join('waflib',x), 493)
+
+ if sys.hexversion<0x300000f:
+ sys.path = [join(dir, 'waflib')] + sys.path
+ import fixpy2
+ fixpy2.fixdir(dir)
+
+ os.remove(tmp)
+ os.chdir(cwd)
+
+ try: dir = unicode(dir, 'mbcs')
+ except: pass
+ try:
+ from ctypes import windll
+ windll.kernel32.SetFileAttributesW(dir, 2)
+ except:
+ pass
+
+def test(dir):
+ try:
+ os.stat(join(dir, 'waflib'))
+ return os.path.abspath(dir)
+ except OSError:
+ pass
+
+def find_lib():
+ src = os.path.abspath(inspect.getfile(inspect.getmodule(err)))
+ base, name = os.path.split(src)
+
+ #devs use $WAFDIR
+ w=test(os.environ.get('WAFDIR', ''))
+ if w: return w
+
+ #waf-light
+ if name.endswith('waf-light'):
+ w = test(base)
+ if w: return w
+ err('waf-light requires waflib -> export WAFDIR=/folder')
+
+ dirname = '%s-%s-%s' % (WAF, VERSION, REVISION)
+ for i in (INSTALL,'/usr','/usr/local','/opt'):
+ w = test(i + '/lib/' + dirname)
+ if w: return w
+
+ #waf-local
+ dir = join(base, (sys.platform != 'win32' and '.' or '') + dirname)
+ w = test(dir)
+ if w: return w
+
+ #unpack
+ unpack_wafdir(dir, src)
+ return dir
+
+wafdir = find_lib()
+sys.path.insert(0, wafdir)
+
+if __name__ == '__main__':
+
+ from waflib import Scripting
+ Scripting.waf_entry_point(cwd, VERSION, wafdir)
+
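
The bootstrap script above locates (or unpacks) waflib and hands control to Scripting.waf_entry_point, which executes a project-level "wscript" file. As a rough, purely illustrative sketch (the project name, source file and directories below are assumptions, not part of this patch), such a wscript looks like:

    # wscript -- read by ./waf; APPNAME/VERSION/top/out are picked up by waflib.Context
    APPNAME = 'example'   # assumed project name
    VERSION = '1.0'
    top = '.'             # directory holding this wscript
    out = 'build'         # build/output directory

    def configure(conf):
        conf.load('compiler_c')            # compiler detection from waflib/Tools

    def build(bld):
        bld.program(source='main.c', target='example')   # 'main.c' is an assumed source

A typical run is "./waf configure" followed by "./waf build"; setting WAFDIR, as checked in find_lib() above, points the script at an external waflib directory.
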
diff --git a/waflib/Build.py b/waflib/Build.py
new file mode 100644
index 0000000..44c640a
--- /dev/null
+++ b/waflib/Build.py
@@ -0,0 +1,777 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,sys,errno,re,shutil,stat
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+from waflib import Node,Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
+CACHE_DIR='c4che'
+CACHE_SUFFIX='_cache.py'
+INSTALL=1337
+UNINSTALL=-1337
+SAVED_ATTRS='root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
+CFG_FILES='cfg_files'
+POST_AT_ONCE=0
+POST_LAZY=1
+PROTOCOL=-1
+if sys.platform=='cli':
+ PROTOCOL=0
+class BuildContext(Context.Context):
+ '''executes the build'''
+ cmd='build'
+ variant=''
+ def __init__(self,**kw):
+ super(BuildContext,self).__init__(**kw)
+ self.is_install=0
+ self.top_dir=kw.get('top_dir',Context.top_dir)
+ self.out_dir=kw.get('out_dir',Context.out_dir)
+ self.run_dir=kw.get('run_dir',Context.run_dir)
+ self.launch_dir=Context.launch_dir
+ self.post_mode=POST_LAZY
+ self.cache_dir=kw.get('cache_dir')
+ if not self.cache_dir:
+ self.cache_dir=os.path.join(self.out_dir,CACHE_DIR)
+ self.all_envs={}
+ self.node_sigs={}
+ self.task_sigs={}
+ self.imp_sigs={}
+ self.node_deps={}
+ self.raw_deps={}
+ self.task_gen_cache_names={}
+ self.jobs=Options.options.jobs
+ self.targets=Options.options.targets
+ self.keep=Options.options.keep
+ self.progress_bar=Options.options.progress_bar
+ self.deps_man=Utils.defaultdict(list)
+ self.current_group=0
+ self.groups=[]
+ self.group_names={}
+ for v in SAVED_ATTRS:
+ if not hasattr(self,v):
+ setattr(self,v,{})
+ def get_variant_dir(self):
+ if not self.variant:
+ return self.out_dir
+ return os.path.join(self.out_dir,os.path.normpath(self.variant))
+ variant_dir=property(get_variant_dir,None)
+ def __call__(self,*k,**kw):
+ kw['bld']=self
+ ret=TaskGen.task_gen(*k,**kw)
+ self.task_gen_cache_names={}
+ self.add_to_group(ret,group=kw.get('group'))
+ return ret
+ def __copy__(self):
+ raise Errors.WafError('build contexts cannot be copied')
+ def load_envs(self):
+ node=self.root.find_node(self.cache_dir)
+ if not node:
+ raise Errors.WafError('The project was not configured: run "waf configure" first!')
+ lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True)
+ if not lst:
+ raise Errors.WafError('The cache directory is empty: reconfigure the project')
+ for x in lst:
+ name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/')
+ env=ConfigSet.ConfigSet(x.abspath())
+ self.all_envs[name]=env
+ for f in env[CFG_FILES]:
+ newnode=self.root.find_resource(f)
+ if not newnode or not newnode.exists():
+ raise Errors.WafError('Missing configuration file %r, reconfigure the project!'%f)
+ def init_dirs(self):
+ if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)):
+ raise Errors.WafError('The project was not configured: run "waf configure" first!')
+ self.path=self.srcnode=self.root.find_dir(self.top_dir)
+ self.bldnode=self.root.make_node(self.variant_dir)
+ self.bldnode.mkdir()
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.execute_build()
+ def execute_build(self):
+ Logs.info("Waf: Entering directory `%s'",self.variant_dir)
+ self.recurse([self.run_dir])
+ self.pre_build()
+ self.timer=Utils.Timer()
+ try:
+ self.compile()
+ finally:
+ if self.progress_bar==1 and sys.stderr.isatty():
+ c=self.producer.processed or 1
+ m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL)
+ Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on})
+ Logs.info("Waf: Leaving directory `%s'",self.variant_dir)
+ try:
+ self.producer.bld=None
+ del self.producer
+ except AttributeError:
+ pass
+ self.post_build()
+ def restore(self):
+ try:
+ env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
+ except EnvironmentError:
+ pass
+ else:
+ if env.version<Context.HEXVERSION:
+ raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it')
+ for t in env.tools:
+ self.setup(**t)
+ dbfn=os.path.join(self.variant_dir,Context.DBFILE)
+ try:
+ data=Utils.readf(dbfn,'rb')
+ except(EnvironmentError,EOFError):
+ Logs.debug('build: Could not load the build cache %s (missing)',dbfn)
+ else:
+ try:
+ Node.pickle_lock.acquire()
+ Node.Nod3=self.node_class
+ try:
+ data=cPickle.loads(data)
+ except Exception as e:
+ Logs.debug('build: Could not pickle the build cache %s: %r',dbfn,e)
+ else:
+ for x in SAVED_ATTRS:
+ setattr(self,x,data.get(x,{}))
+ finally:
+ Node.pickle_lock.release()
+ self.init_dirs()
+ def store(self):
+ data={}
+ for x in SAVED_ATTRS:
+ data[x]=getattr(self,x)
+ db=os.path.join(self.variant_dir,Context.DBFILE)
+ try:
+ Node.pickle_lock.acquire()
+ Node.Nod3=self.node_class
+ x=cPickle.dumps(data,PROTOCOL)
+ finally:
+ Node.pickle_lock.release()
+ Utils.writef(db+'.tmp',x,m='wb')
+ try:
+ st=os.stat(db)
+ os.remove(db)
+ if not Utils.is_win32:
+ os.chown(db+'.tmp',st.st_uid,st.st_gid)
+ except(AttributeError,OSError):
+ pass
+ os.rename(db+'.tmp',db)
+ def compile(self):
+ Logs.debug('build: compile()')
+ self.producer=Runner.Parallel(self,self.jobs)
+ self.producer.biter=self.get_build_iterator()
+ try:
+ self.producer.start()
+ except KeyboardInterrupt:
+ if self.is_dirty():
+ self.store()
+ raise
+ else:
+ if self.is_dirty():
+ self.store()
+ if self.producer.error:
+ raise Errors.BuildError(self.producer.error)
+ def is_dirty(self):
+ return self.producer.dirty
+ def setup(self,tool,tooldir=None,funs=None):
+ if isinstance(tool,list):
+ for i in tool:
+ self.setup(i,tooldir)
+ return
+ module=Context.load_tool(tool,tooldir)
+ if hasattr(module,"setup"):
+ module.setup(self)
+ def get_env(self):
+ try:
+ return self.all_envs[self.variant]
+ except KeyError:
+ return self.all_envs['']
+ def set_env(self,val):
+ self.all_envs[self.variant]=val
+ env=property(get_env,set_env)
+ def add_manual_dependency(self,path,value):
+ if not path:
+ raise ValueError('Invalid input path %r'%path)
+ if isinstance(path,Node.Node):
+ node=path
+ elif os.path.isabs(path):
+ node=self.root.find_resource(path)
+ else:
+ node=self.path.find_resource(path)
+ if not node:
+ raise ValueError('Could not find the path %r'%path)
+ if isinstance(value,list):
+ self.deps_man[node].extend(value)
+ else:
+ self.deps_man[node].append(value)
+ def launch_node(self):
+ try:
+ return self.p_ln
+ except AttributeError:
+ self.p_ln=self.root.find_dir(self.launch_dir)
+ return self.p_ln
+ def hash_env_vars(self,env,vars_lst):
+ if not env.table:
+ env=env.parent
+ if not env:
+ return Utils.SIG_NIL
+ idx=str(id(env))+str(vars_lst)
+ try:
+ cache=self.cache_env
+ except AttributeError:
+ cache=self.cache_env={}
+ else:
+ try:
+ return self.cache_env[idx]
+ except KeyError:
+ pass
+ lst=[env[a]for a in vars_lst]
+ cache[idx]=ret=Utils.h_list(lst)
+ Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
+ return ret
+ def get_tgen_by_name(self,name):
+ cache=self.task_gen_cache_names
+ if not cache:
+ for g in self.groups:
+ for tg in g:
+ try:
+ cache[tg.name]=tg
+ except AttributeError:
+ pass
+ try:
+ return cache[name]
+ except KeyError:
+ raise Errors.WafError('Could not find a task generator for the name %r'%name)
+ def progress_line(self,idx,total,col1,col2):
+ if not sys.stderr.isatty():
+ return''
+ n=len(str(total))
+ Utils.rot_idx+=1
+ ind=Utils.rot_chr[Utils.rot_idx%4]
+ pc=(100.*idx)/total
+ fs="[%%%dd/%%d][%%s%%2d%%%%%%s][%s]["%(n,ind)
+ left=fs%(idx,total,col1,pc,col2)
+ right='][%s%s%s]'%(col1,self.timer,col2)
+ cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
+ if cols<7:
+ cols=7
+ ratio=((cols*idx)//total)-1
+ bar=('='*ratio+'>').ljust(cols)
+ msg=Logs.indicator%(left,bar,right)
+ return msg
+ def declare_chain(self,*k,**kw):
+ return TaskGen.declare_chain(*k,**kw)
+ def pre_build(self):
+ for m in getattr(self,'pre_funs',[]):
+ m(self)
+ def post_build(self):
+ for m in getattr(self,'post_funs',[]):
+ m(self)
+ def add_pre_fun(self,meth):
+ try:
+ self.pre_funs.append(meth)
+ except AttributeError:
+ self.pre_funs=[meth]
+ def add_post_fun(self,meth):
+ try:
+ self.post_funs.append(meth)
+ except AttributeError:
+ self.post_funs=[meth]
+ def get_group(self,x):
+ if not self.groups:
+ self.add_group()
+ if x is None:
+ return self.groups[self.current_group]
+ if x in self.group_names:
+ return self.group_names[x]
+ return self.groups[x]
+ def add_to_group(self,tgen,group=None):
+ assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.Task))
+ tgen.bld=self
+ self.get_group(group).append(tgen)
+ def get_group_name(self,g):
+ if not isinstance(g,list):
+ g=self.groups[g]
+ for x in self.group_names:
+ if id(self.group_names[x])==id(g):
+ return x
+ return''
+ def get_group_idx(self,tg):
+ se=id(tg)
+ for i,tmp in enumerate(self.groups):
+ for t in tmp:
+ if id(t)==se:
+ return i
+ return None
+ def add_group(self,name=None,move=True):
+ if name and name in self.group_names:
+ raise Errors.WafError('add_group: name %s already present',name)
+ g=[]
+ self.group_names[name]=g
+ self.groups.append(g)
+ if move:
+ self.current_group=len(self.groups)-1
+ def set_group(self,idx):
+ if isinstance(idx,str):
+ g=self.group_names[idx]
+ for i,tmp in enumerate(self.groups):
+ if id(g)==id(tmp):
+ self.current_group=i
+ break
+ else:
+ self.current_group=idx
+ def total(self):
+ total=0
+ for group in self.groups:
+ for tg in group:
+ try:
+ total+=len(tg.tasks)
+ except AttributeError:
+ total+=1
+ return total
+ def get_targets(self):
+ to_post=[]
+ min_grp=0
+ for name in self.targets.split(','):
+ tg=self.get_tgen_by_name(name)
+ m=self.get_group_idx(tg)
+ if m>min_grp:
+ min_grp=m
+ to_post=[tg]
+ elif m==min_grp:
+ to_post.append(tg)
+ return(min_grp,to_post)
+ def get_all_task_gen(self):
+ lst=[]
+ for g in self.groups:
+ lst.extend(g)
+ return lst
+ def post_group(self):
+ def tgpost(tg):
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ if self.targets=='*':
+ for tg in self.groups[self.current_group]:
+ tgpost(tg)
+ elif self.targets:
+ if self.current_group<self._min_grp:
+ for tg in self.groups[self.current_group]:
+ tgpost(tg)
+ else:
+ for tg in self._exact_tg:
+ tg.post()
+ else:
+ ln=self.launch_node()
+ if ln.is_child_of(self.bldnode):
+ Logs.warn('Building from the build directory, forcing --targets=*')
+ ln=self.srcnode
+ elif not ln.is_child_of(self.srcnode):
+ Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)',ln.abspath(),self.srcnode.abspath())
+ ln=self.srcnode
+ for tg in self.groups[self.current_group]:
+ try:
+ p=tg.path
+ except AttributeError:
+ pass
+ else:
+ if p.is_child_of(ln):
+ tgpost(tg)
+ def get_tasks_group(self,idx):
+ tasks=[]
+ for tg in self.groups[idx]:
+ try:
+ tasks.extend(tg.tasks)
+ except AttributeError:
+ tasks.append(tg)
+ return tasks
+ def get_build_iterator(self):
+ if self.targets and self.targets!='*':
+ (self._min_grp,self._exact_tg)=self.get_targets()
+ if self.post_mode!=POST_LAZY:
+ for self.current_group,_ in enumerate(self.groups):
+ self.post_group()
+ for self.current_group,_ in enumerate(self.groups):
+ if self.post_mode!=POST_AT_ONCE:
+ self.post_group()
+ tasks=self.get_tasks_group(self.current_group)
+ Task.set_file_constraints(tasks)
+ Task.set_precedence_constraints(tasks)
+ self.cur_tasks=tasks
+ if tasks:
+ yield tasks
+ while 1:
+ yield[]
+ def install_files(self,dest,files,**kw):
+ assert(dest)
+ tg=self(features='install_task',install_to=dest,install_from=files,**kw)
+ tg.dest=tg.install_to
+ tg.type='install_files'
+ if not kw.get('postpone',True):
+ tg.post()
+ return tg
+ def install_as(self,dest,srcfile,**kw):
+ assert(dest)
+ tg=self(features='install_task',install_to=dest,install_from=srcfile,**kw)
+ tg.dest=tg.install_to
+ tg.type='install_as'
+ if not kw.get('postpone',True):
+ tg.post()
+ return tg
+ def symlink_as(self,dest,src,**kw):
+ assert(dest)
+ tg=self(features='install_task',install_to=dest,install_from=src,**kw)
+ tg.dest=tg.install_to
+ tg.type='symlink_as'
+ tg.link=src
+ if not kw.get('postpone',True):
+ tg.post()
+ return tg
+@TaskGen.feature('install_task')
+@TaskGen.before_method('process_rule','process_source')
+def process_install_task(self):
+ self.add_install_task(**self.__dict__)
+@TaskGen.taskgen_method
+def add_install_task(self,**kw):
+ if not self.bld.is_install:
+ return
+ if not kw['install_to']:
+ return
+ if kw['type']=='symlink_as'and Utils.is_win32:
+ if kw.get('win32_install'):
+ kw['type']='install_as'
+ else:
+ return
+ tsk=self.install_task=self.create_task('inst')
+ tsk.chmod=kw.get('chmod',Utils.O644)
+ tsk.link=kw.get('link','')or kw.get('install_from','')
+ tsk.relative_trick=kw.get('relative_trick',False)
+ tsk.type=kw['type']
+ tsk.install_to=tsk.dest=kw['install_to']
+ tsk.install_from=kw['install_from']
+ tsk.relative_base=kw.get('cwd')or kw.get('relative_base',self.path)
+ tsk.install_user=kw.get('install_user')
+ tsk.install_group=kw.get('install_group')
+ tsk.init_files()
+ if not kw.get('postpone',True):
+ tsk.run_now()
+ return tsk
+@TaskGen.taskgen_method
+def add_install_files(self,**kw):
+ kw['type']='install_files'
+ return self.add_install_task(**kw)
+@TaskGen.taskgen_method
+def add_install_as(self,**kw):
+ kw['type']='install_as'
+ return self.add_install_task(**kw)
+@TaskGen.taskgen_method
+def add_symlink_as(self,**kw):
+ kw['type']='symlink_as'
+ return self.add_install_task(**kw)
+class inst(Task.Task):
+ def __str__(self):
+ return''
+ def uid(self):
+ lst=self.inputs+self.outputs+[self.link,self.generator.path.abspath()]
+ return Utils.h_list(lst)
+ def init_files(self):
+ if self.type=='symlink_as':
+ inputs=[]
+ else:
+ inputs=self.generator.to_nodes(self.install_from)
+ if self.type=='install_as':
+ assert len(inputs)==1
+ self.set_inputs(inputs)
+ dest=self.get_install_path()
+ outputs=[]
+ if self.type=='symlink_as':
+ if self.relative_trick:
+ self.link=os.path.relpath(self.link,os.path.dirname(dest))
+ outputs.append(self.generator.bld.root.make_node(dest))
+ elif self.type=='install_as':
+ outputs.append(self.generator.bld.root.make_node(dest))
+ else:
+ for y in inputs:
+ if self.relative_trick:
+ destfile=os.path.join(dest,y.path_from(self.relative_base))
+ else:
+ destfile=os.path.join(dest,y.name)
+ outputs.append(self.generator.bld.root.make_node(destfile))
+ self.set_outputs(outputs)
+ def runnable_status(self):
+ ret=super(inst,self).runnable_status()
+ if ret==Task.SKIP_ME and self.generator.bld.is_install:
+ return Task.RUN_ME
+ return ret
+ def post_run(self):
+ pass
+ def get_install_path(self,destdir=True):
+ if isinstance(self.install_to,Node.Node):
+ dest=self.install_to.abspath()
+ else:
+ dest=Utils.subst_vars(self.install_to,self.env)
+ if destdir and Options.options.destdir:
+ dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep))
+ return dest
+ def copy_fun(self,src,tgt):
+ if Utils.is_win32 and len(tgt)>259 and not tgt.startswith('\\\\?\\'):
+ tgt='\\\\?\\'+tgt
+ shutil.copy2(src,tgt)
+ self.fix_perms(tgt)
+ def rm_empty_dirs(self,tgt):
+ while tgt:
+ tgt=os.path.dirname(tgt)
+ try:
+ os.rmdir(tgt)
+ except OSError:
+ break
+ def run(self):
+ is_install=self.generator.bld.is_install
+ if not is_install:
+ return
+ for x in self.outputs:
+ if is_install==INSTALL:
+ x.parent.mkdir()
+ if self.type=='symlink_as':
+ fun=is_install==INSTALL and self.do_link or self.do_unlink
+ fun(self.link,self.outputs[0].abspath())
+ else:
+ fun=is_install==INSTALL and self.do_install or self.do_uninstall
+ launch_node=self.generator.bld.launch_node()
+ for x,y in zip(self.inputs,self.outputs):
+ fun(x.abspath(),y.abspath(),x.path_from(launch_node))
+ def run_now(self):
+ status=self.runnable_status()
+ if status not in(Task.RUN_ME,Task.SKIP_ME):
+ raise Errors.TaskNotReady('Could not process %r: status %r'%(self,status))
+ self.run()
+ self.hasrun=Task.SUCCESS
+ def do_install(self,src,tgt,lbl,**kw):
+ if not Options.options.force:
+ try:
+ st1=os.stat(tgt)
+ st2=os.stat(src)
+ except OSError:
+ pass
+ else:
+ if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
+ if not self.generator.bld.progress_bar:
+ Logs.info('- install %s (from %s)',tgt,lbl)
+ return False
+ if not self.generator.bld.progress_bar:
+ Logs.info('+ install %s (from %s)',tgt,lbl)
+ try:
+ os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode))
+ except EnvironmentError:
+ pass
+ try:
+ os.remove(tgt)
+ except OSError:
+ pass
+ try:
+ self.copy_fun(src,tgt)
+ except EnvironmentError as e:
+ if not os.path.exists(src):
+ Logs.error('File %r does not exist',src)
+ elif not os.path.isfile(src):
+ Logs.error('Input %r is not a file',src)
+ raise Errors.WafError('Could not install the file %r'%tgt,e)
+ def fix_perms(self,tgt):
+ if not Utils.is_win32:
+ user=getattr(self,'install_user',None)or getattr(self.generator,'install_user',None)
+ group=getattr(self,'install_group',None)or getattr(self.generator,'install_group',None)
+ if user or group:
+ Utils.lchown(tgt,user or-1,group or-1)
+ if not os.path.islink(tgt):
+ os.chmod(tgt,self.chmod)
+ def do_link(self,src,tgt,**kw):
+ if os.path.islink(tgt)and os.readlink(tgt)==src:
+ if not self.generator.bld.progress_bar:
+ Logs.info('- symlink %s (to %s)',tgt,src)
+ else:
+ try:
+ os.remove(tgt)
+ except OSError:
+ pass
+ if not self.generator.bld.progress_bar:
+ Logs.info('+ symlink %s (to %s)',tgt,src)
+ os.symlink(src,tgt)
+ self.fix_perms(tgt)
+ def do_uninstall(self,src,tgt,lbl,**kw):
+ if not self.generator.bld.progress_bar:
+ Logs.info('- remove %s',tgt)
+ try:
+ os.remove(tgt)
+ except OSError as e:
+ if e.errno!=errno.ENOENT:
+ if not getattr(self,'uninstall_error',None):
+ self.uninstall_error=True
+ Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
+ if Logs.verbose>1:
+ Logs.warn('Could not remove %s (error code %r)',e.filename,e.errno)
+ self.rm_empty_dirs(tgt)
+ def do_unlink(self,src,tgt,**kw):
+ try:
+ if not self.generator.bld.progress_bar:
+ Logs.info('- remove %s',tgt)
+ os.remove(tgt)
+ except OSError:
+ pass
+ self.rm_empty_dirs(tgt)
+class InstallContext(BuildContext):
+ '''installs the targets on the system'''
+ cmd='install'
+ def __init__(self,**kw):
+ super(InstallContext,self).__init__(**kw)
+ self.is_install=INSTALL
+class UninstallContext(InstallContext):
+ '''removes the targets installed'''
+ cmd='uninstall'
+ def __init__(self,**kw):
+ super(UninstallContext,self).__init__(**kw)
+ self.is_install=UNINSTALL
+class CleanContext(BuildContext):
+ '''cleans the project'''
+ cmd='clean'
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+ try:
+ self.clean()
+ finally:
+ self.store()
+ def clean(self):
+ Logs.debug('build: clean called')
+ if hasattr(self,'clean_files'):
+ for n in self.clean_files:
+ n.delete()
+ elif self.bldnode!=self.srcnode:
+ lst=[]
+ for env in self.all_envs.values():
+ lst.extend(self.root.find_or_declare(f)for f in env[CFG_FILES])
+ for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True):
+ if n in lst:
+ continue
+ n.delete()
+ self.root.children={}
+ for v in SAVED_ATTRS:
+ if v=='root':
+ continue
+ setattr(self,v,{})
+class ListContext(BuildContext):
+ '''lists the targets to execute'''
+ cmd='list'
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
+ self.pre_build()
+ self.timer=Utils.Timer()
+ for g in self.groups:
+ for tg in g:
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ try:
+ self.get_tgen_by_name('')
+ except Errors.WafError:
+ pass
+ targets=sorted(self.task_gen_cache_names)
+ line_just=max(len(t)for t in targets)if targets else 0
+ for target in targets:
+ tgen=self.task_gen_cache_names[target]
+ descript=getattr(tgen,'description','')
+ if descript:
+ target=target.ljust(line_just)
+ descript=': %s'%descript
+ Logs.pprint('GREEN',target,label=descript)
+class StepContext(BuildContext):
+ '''executes tasks in a step-by-step fashion, for debugging'''
+ cmd='step'
+ def __init__(self,**kw):
+ super(StepContext,self).__init__(**kw)
+ self.files=Options.options.files
+ def compile(self):
+ if not self.files:
+ Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
+ BuildContext.compile(self)
+ return
+ targets=[]
+ if self.targets and self.targets!='*':
+ targets=self.targets.split(',')
+ for g in self.groups:
+ for tg in g:
+ if targets and tg.name not in targets:
+ continue
+ try:
+ f=tg.post
+ except AttributeError:
+ pass
+ else:
+ f()
+ for pat in self.files.split(','):
+ matcher=self.get_matcher(pat)
+ for tg in g:
+ if isinstance(tg,Task.Task):
+ lst=[tg]
+ else:
+ lst=tg.tasks
+ for tsk in lst:
+ do_exec=False
+ for node in tsk.inputs:
+ if matcher(node,output=False):
+ do_exec=True
+ break
+ for node in tsk.outputs:
+ if matcher(node,output=True):
+ do_exec=True
+ break
+ if do_exec:
+ ret=tsk.run()
+ Logs.info('%s -> exit %r',tsk,ret)
+ def get_matcher(self,pat):
+ inn=True
+ out=True
+ if pat.startswith('in:'):
+ out=False
+ pat=pat.replace('in:','')
+ elif pat.startswith('out:'):
+ inn=False
+ pat=pat.replace('out:','')
+ anode=self.root.find_node(pat)
+ pattern=None
+ if not anode:
+ if not pat.startswith('^'):
+ pat='^.+?%s'%pat
+ if not pat.endswith('$'):
+ pat='%s$'%pat
+ pattern=re.compile(pat)
+ def match(node,output):
+ if output and not out:
+ return False
+ if not output and not inn:
+ return False
+ if anode:
+ return anode==node
+ else:
+ return pattern.match(node.abspath())
+ return match
+class EnvContext(BuildContext):
+ fun=cmd=None
+ def execute(self):
+ self.restore()
+ if not self.all_envs:
+ self.load_envs()
+ self.recurse([self.run_dir])
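
Build.py above supplies the BuildContext (and its install/uninstall/clean/list/step variants) that a wscript's build() function receives. A hedged sketch of the installation helpers defined here, with illustrative paths and file names that are not part of this patch:

    def build(bld):
        # copy files below ${PREFIX} when running "waf install"
        bld.install_files('${PREFIX}/share/doc/example', ['README'])
        # install a single file under a different name and mode
        bld.install_as('${PREFIX}/bin/example-run', 'run.sh', chmod=0o755)
        # create a symlink at install time (skipped on win32 unless win32_install is set)
        bld.symlink_as('${PREFIX}/bin/example', 'example-run')

        def summary(ctx):
            # ctx is the BuildContext; functions registered with add_post_fun run after compile()
            print('build finished in %s' % str(ctx.timer))
        bld.add_post_fun(summary)
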
diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py
new file mode 100644
index 0000000..8212586
--- /dev/null
+++ b/waflib/ConfigSet.py
@@ -0,0 +1,165 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import copy,re,os
+from waflib import Logs,Utils
+re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M)
+class ConfigSet(object):
+ __slots__=('table','parent')
+ def __init__(self,filename=None):
+ self.table={}
+ if filename:
+ self.load(filename)
+ def __contains__(self,key):
+ if key in self.table:
+ return True
+ try:
+ return self.parent.__contains__(key)
+ except AttributeError:
+ return False
+ def keys(self):
+ keys=set()
+ cur=self
+ while cur:
+ keys.update(cur.table.keys())
+ cur=getattr(cur,'parent',None)
+ keys=list(keys)
+ keys.sort()
+ return keys
+ def __iter__(self):
+ return iter(self.keys())
+ def __str__(self):
+ return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()])
+ def __getitem__(self,key):
+ try:
+ while 1:
+ x=self.table.get(key)
+ if not x is None:
+ return x
+ self=self.parent
+ except AttributeError:
+ return[]
+ def __setitem__(self,key,value):
+ self.table[key]=value
+ def __delitem__(self,key):
+ self[key]=[]
+ def __getattr__(self,name):
+ if name in self.__slots__:
+ return object.__getattribute__(self,name)
+ else:
+ return self[name]
+ def __setattr__(self,name,value):
+ if name in self.__slots__:
+ object.__setattr__(self,name,value)
+ else:
+ self[name]=value
+ def __delattr__(self,name):
+ if name in self.__slots__:
+ object.__delattr__(self,name)
+ else:
+ del self[name]
+ def derive(self):
+ newenv=ConfigSet()
+ newenv.parent=self
+ return newenv
+ def detach(self):
+ tbl=self.get_merged_dict()
+ try:
+ delattr(self,'parent')
+ except AttributeError:
+ pass
+ else:
+ keys=tbl.keys()
+ for x in keys:
+ tbl[x]=copy.deepcopy(tbl[x])
+ self.table=tbl
+ return self
+ def get_flat(self,key):
+ s=self[key]
+ if isinstance(s,str):
+ return s
+ return' '.join(s)
+ def _get_list_value_for_modification(self,key):
+ try:
+ value=self.table[key]
+ except KeyError:
+ try:
+ value=self.parent[key]
+ except AttributeError:
+ value=[]
+ else:
+ if isinstance(value,list):
+ value=value[:]
+ else:
+ value=[value]
+ self.table[key]=value
+ else:
+ if not isinstance(value,list):
+ self.table[key]=value=[value]
+ return value
+ def append_value(self,var,val):
+ if isinstance(val,str):
+ val=[val]
+ current_value=self._get_list_value_for_modification(var)
+ current_value.extend(val)
+ def prepend_value(self,var,val):
+ if isinstance(val,str):
+ val=[val]
+ self.table[var]=val+self._get_list_value_for_modification(var)
+ def append_unique(self,var,val):
+ if isinstance(val,str):
+ val=[val]
+ current_value=self._get_list_value_for_modification(var)
+ for x in val:
+ if x not in current_value:
+ current_value.append(x)
+ def get_merged_dict(self):
+ table_list=[]
+ env=self
+ while 1:
+ table_list.insert(0,env.table)
+ try:
+ env=env.parent
+ except AttributeError:
+ break
+ merged_table={}
+ for table in table_list:
+ merged_table.update(table)
+ return merged_table
+ def store(self,filename):
+ try:
+ os.makedirs(os.path.split(filename)[0])
+ except OSError:
+ pass
+ buf=[]
+ merged_table=self.get_merged_dict()
+ keys=list(merged_table.keys())
+ keys.sort()
+ try:
+ fun=ascii
+ except NameError:
+ fun=repr
+ for k in keys:
+ if k!='undo_stack':
+ buf.append('%s = %s\n'%(k,fun(merged_table[k])))
+ Utils.writef(filename,''.join(buf))
+ def load(self,filename):
+ tbl=self.table
+ code=Utils.readf(filename,m='rU')
+ for m in re_imp.finditer(code):
+ g=m.group
+ tbl[g(2)]=eval(g(3))
+ Logs.debug('env: %s',self.table)
+ def update(self,d):
+ self.table.update(d)
+ def stash(self):
+ orig=self.table
+ tbl=self.table=self.table.copy()
+ for x in tbl.keys():
+ tbl[x]=copy.deepcopy(tbl[x])
+ self.undo_stack=self.undo_stack+[orig]
+ def commit(self):
+ self.undo_stack.pop(-1)
+ def revert(self):
+ self.table=self.undo_stack.pop(-1)
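
ConfigSet objects behave like dictionaries of lists with parent/child derivation and a small undo stack, and they serialize to the *_cache.py files that Build.load_envs() reads back. A standalone sketch of the API defined above (the temporary file path is an assumption):

    from waflib.ConfigSet import ConfigSet

    env = ConfigSet()
    env.CFLAGS = ['-O2']                     # attribute access maps to __setitem__
    env.append_value('CFLAGS', ['-g'])
    env.append_unique('CFLAGS', ['-O2'])     # duplicate value, nothing is added

    child = env.derive()                     # reads fall through to the parent
    child.DEFINES = ['NDEBUG']               # writes stay in the child table
    child.detach()                           # deep-copy the merged table, cut the link

    env.stash()                              # push a copy of the current table
    env.append_value('CFLAGS', ['-Werror'])
    env.revert()                             # drop the change, restore the copy

    env.store('/tmp/example_cache.py')       # written as "KEY = value" lines
    restored = ConfigSet('/tmp/example_cache.py')
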
diff --git a/waflib/Configure.py b/waflib/Configure.py
new file mode 100644
index 0000000..0e16fd2
--- /dev/null
+++ b/waflib/Configure.py
@@ -0,0 +1,368 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,shlex,shutil,sys,time,traceback
+from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors
+WAF_CONFIG_LOG='config.log'
+autoconfig=False
+conf_template='''# project %(app)s configured on %(now)s by
+# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
+# using %(args)s
+#'''
+class ConfigurationContext(Context.Context):
+ '''configures the project'''
+ cmd='configure'
+ error_handlers=[]
+ def __init__(self,**kw):
+ super(ConfigurationContext,self).__init__(**kw)
+ self.environ=dict(os.environ)
+ self.all_envs={}
+ self.top_dir=None
+ self.out_dir=None
+ self.tools=[]
+ self.hash=0
+ self.files=[]
+ self.tool_cache=[]
+ self.setenv('')
+ def setenv(self,name,env=None):
+ if name not in self.all_envs or env:
+ if not env:
+ env=ConfigSet.ConfigSet()
+ self.prepare_env(env)
+ else:
+ env=env.derive()
+ self.all_envs[name]=env
+ self.variant=name
+ def get_env(self):
+ return self.all_envs[self.variant]
+ def set_env(self,val):
+ self.all_envs[self.variant]=val
+ env=property(get_env,set_env)
+ def init_dirs(self):
+ top=self.top_dir
+ if not top:
+ top=Options.options.top
+ if not top:
+ top=getattr(Context.g_module,Context.TOP,None)
+ if not top:
+ top=self.path.abspath()
+ top=os.path.abspath(top)
+ self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top)
+ assert(self.srcnode)
+ out=self.out_dir
+ if not out:
+ out=Options.options.out
+ if not out:
+ out=getattr(Context.g_module,Context.OUT,None)
+ if not out:
+ out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','')
+ out=os.path.realpath(out)
+ self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out)
+ self.bldnode.mkdir()
+ if not os.path.isdir(self.bldnode.abspath()):
+ conf.fatal('Could not create the build directory %s'%self.bldnode.abspath())
+ def execute(self):
+ self.init_dirs()
+ self.cachedir=self.bldnode.make_node(Build.CACHE_DIR)
+ self.cachedir.mkdir()
+ path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG)
+ self.logger=Logs.make_logger(path,'cfg')
+ app=getattr(Context.g_module,'APPNAME','')
+ if app:
+ ver=getattr(Context.g_module,'VERSION','')
+ if ver:
+ app="%s (%s)"%(app,ver)
+ params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app}
+ self.to_log(conf_template%params)
+ self.msg('Setting top to',self.srcnode.abspath())
+ self.msg('Setting out to',self.bldnode.abspath())
+ if id(self.srcnode)==id(self.bldnode):
+ Logs.warn('Setting top == out')
+ elif id(self.path)!=id(self.srcnode):
+ if self.srcnode.is_child_of(self.path):
+ Logs.warn('Are you certain that you do not want to set top="." ?')
+ super(ConfigurationContext,self).execute()
+ self.store()
+ Context.top_dir=self.srcnode.abspath()
+ Context.out_dir=self.bldnode.abspath()
+ env=ConfigSet.ConfigSet()
+ env.argv=sys.argv
+ env.options=Options.options.__dict__
+ env.config_cmd=self.cmd
+ env.run_dir=Context.run_dir
+ env.top_dir=Context.top_dir
+ env.out_dir=Context.out_dir
+ env.hash=self.hash
+ env.files=self.files
+ env.environ=dict(self.environ)
+ if not(self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN')or getattr(Options.options,'no_lock_in_run')):
+ env.store(os.path.join(Context.run_dir,Options.lockfile))
+ if not(self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP')or getattr(Options.options,'no_lock_in_top')):
+ env.store(os.path.join(Context.top_dir,Options.lockfile))
+ if not(self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT')or getattr(Options.options,'no_lock_in_out')):
+ env.store(os.path.join(Context.out_dir,Options.lockfile))
+ def prepare_env(self,env):
+ if not env.PREFIX:
+ if Options.options.prefix or Utils.is_win32:
+ env.PREFIX=Options.options.prefix
+ else:
+ env.PREFIX='/'
+ if not env.BINDIR:
+ if Options.options.bindir:
+ env.BINDIR=Options.options.bindir
+ else:
+ env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env)
+ if not env.LIBDIR:
+ if Options.options.libdir:
+ env.LIBDIR=Options.options.libdir
+ else:
+ env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env)
+ def store(self):
+ n=self.cachedir.make_node('build.config.py')
+ n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools))
+ if not self.all_envs:
+ self.fatal('nothing to store in the configuration context!')
+ for key in self.all_envs:
+ tmpenv=self.all_envs[key]
+ tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX))
+ def load(self,tool_list,tooldir=None,funs=None,with_sys_path=True,cache=False):
+ tools=Utils.to_list(tool_list)
+ if tooldir:
+ tooldir=Utils.to_list(tooldir)
+ for tool in tools:
+ if cache:
+ mag=(tool,id(self.env),tooldir,funs)
+ if mag in self.tool_cache:
+ self.to_log('(tool %s is already loaded, skipping)'%tool)
+ continue
+ self.tool_cache.append(mag)
+ module=None
+ try:
+ module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path)
+ except ImportError as e:
+ self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,getattr(e,'waf_sys_path',sys.path),e))
+ except Exception as e:
+ self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs))
+ self.to_log(traceback.format_exc())
+ raise
+ if funs is not None:
+ self.eval_rules(funs)
+ else:
+ func=getattr(module,'configure',None)
+ if func:
+ if type(func)is type(Utils.readf):
+ func(self)
+ else:
+ self.eval_rules(func)
+ self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs})
+ def post_recurse(self,node):
+ super(ConfigurationContext,self).post_recurse(node)
+ self.hash=Utils.h_list((self.hash,node.read('rb')))
+ self.files.append(node.abspath())
+ def eval_rules(self,rules):
+ self.rules=Utils.to_list(rules)
+ for x in self.rules:
+ f=getattr(self,x)
+ if not f:
+ self.fatal('No such configuration function %r'%x)
+ f()
+def conf(f):
+ def fun(*k,**kw):
+ mandatory=kw.pop('mandatory',True)
+ try:
+ return f(*k,**kw)
+ except Errors.ConfigurationError:
+ if mandatory:
+ raise
+ fun.__name__=f.__name__
+ setattr(ConfigurationContext,f.__name__,fun)
+ setattr(Build.BuildContext,f.__name__,fun)
+ return f
+@conf
+def add_os_flags(self,var,dest=None,dup=False):
+ try:
+ flags=shlex.split(self.environ[var])
+ except KeyError:
+ return
+ if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])):
+ self.env.append_value(dest or var,flags)
+@conf
+def cmd_to_list(self,cmd):
+ if isinstance(cmd,str):
+ if os.path.isfile(cmd):
+ return[cmd]
+ if os.sep=='/':
+ return shlex.split(cmd)
+ else:
+ try:
+ return shlex.split(cmd,posix=False)
+ except TypeError:
+ return shlex.split(cmd)
+ return cmd
+@conf
+def check_waf_version(self,mini='1.9.99',maxi='2.1.0',**kw):
+ self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw)
+ ver=Context.HEXVERSION
+ if Utils.num2ver(mini)>ver:
+ self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver))
+ if Utils.num2ver(maxi)<ver:
+ self.fatal('waf version should be at most %r (%r found)'%(Utils.num2ver(maxi),ver))
+ self.end_msg('ok',**kw)
+@conf
+def find_file(self,filename,path_list=[]):
+ for n in Utils.to_list(filename):
+ for d in Utils.to_list(path_list):
+ p=os.path.expanduser(os.path.join(d,n))
+ if os.path.exists(p):
+ return p
+ self.fatal('Could not find %r'%filename)
+@conf
+def find_program(self,filename,**kw):
+ exts=kw.get('exts',Utils.is_win32 and'.exe,.com,.bat,.cmd'or',.sh,.pl,.py')
+ environ=kw.get('environ',getattr(self,'environ',os.environ))
+ ret=''
+ filename=Utils.to_list(filename)
+ msg=kw.get('msg',', '.join(filename))
+ var=kw.get('var','')
+ if not var:
+ var=re.sub(r'[-.]','_',filename[0].upper())
+ path_list=kw.get('path_list','')
+ if path_list:
+ path_list=Utils.to_list(path_list)
+ else:
+ path_list=environ.get('PATH','').split(os.pathsep)
+ if kw.get('value'):
+ ret=self.cmd_to_list(kw['value'])
+ elif environ.get(var):
+ ret=self.cmd_to_list(environ[var])
+ elif self.env[var]:
+ ret=self.cmd_to_list(self.env[var])
+ else:
+ if not ret:
+ ret=self.find_binary(filename,exts.split(','),path_list)
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER,filename)
+ if not ret and Utils.winreg:
+ ret=Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE,filename)
+ ret=self.cmd_to_list(ret)
+ if ret:
+ if len(ret)==1:
+ retmsg=ret[0]
+ else:
+ retmsg=ret
+ else:
+ retmsg=False
+ self.msg('Checking for program %r'%msg,retmsg,**kw)
+ if not kw.get('quiet'):
+ self.to_log('find program=%r paths=%r var=%r -> %r'%(filename,path_list,var,ret))
+ if not ret:
+ self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename)
+ interpreter=kw.get('interpreter')
+ if interpreter is None:
+ if not Utils.check_exe(ret[0],env=environ):
+ self.fatal('Program %r is not executable'%ret)
+ self.env[var]=ret
+ else:
+ self.env[var]=self.env[interpreter]+ret
+ return ret
+@conf
+def find_binary(self,filenames,exts,paths):
+ for f in filenames:
+ for ext in exts:
+ exe_name=f+ext
+ if os.path.isabs(exe_name):
+ if os.path.isfile(exe_name):
+ return exe_name
+ else:
+ for path in paths:
+ x=os.path.expanduser(os.path.join(path,exe_name))
+ if os.path.isfile(x):
+ return x
+ return None
+@conf
+def run_build(self,*k,**kw):
+ lst=[str(v)for(p,v)in kw.items()if p!='env']
+ h=Utils.h_list(lst)
+ dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
+ try:
+ os.makedirs(dir)
+ except OSError:
+ pass
+ try:
+ os.stat(dir)
+ except OSError:
+ self.fatal('cannot use the configuration test folder %r'%dir)
+ cachemode=getattr(Options.options,'confcache',None)
+ if cachemode==1:
+ try:
+ proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build'))
+ except EnvironmentError:
+ pass
+ else:
+ ret=proj['cache_run_build']
+ if isinstance(ret,str)and ret.startswith('Test does not build'):
+ self.fatal(ret)
+ return ret
+ bdir=os.path.join(dir,'testbuild')
+ if not os.path.exists(bdir):
+ os.makedirs(bdir)
+ cls_name=kw.get('run_build_cls')or getattr(self,'run_build_cls','build')
+ self.test_bld=bld=Context.create_context(cls_name,top_dir=dir,out_dir=bdir)
+ bld.init_dirs()
+ bld.progress_bar=0
+ bld.targets='*'
+ bld.logger=self.logger
+ bld.all_envs.update(self.all_envs)
+ bld.env=kw['env']
+ bld.kw=kw
+ bld.conf=self
+ kw['build_fun'](bld)
+ ret=-1
+ try:
+ try:
+ bld.compile()
+ except Errors.WafError:
+ ret='Test does not build: %s'%traceback.format_exc()
+ self.fatal(ret)
+ else:
+ ret=getattr(bld,'retval',0)
+ finally:
+ if cachemode==1:
+ proj=ConfigSet.ConfigSet()
+ proj['cache_run_build']=ret
+ proj.store(os.path.join(dir,'cache_run_build'))
+ else:
+ shutil.rmtree(dir)
+ return ret
+@conf
+def ret_msg(self,msg,args):
+ if isinstance(msg,str):
+ return msg
+ return msg(args)
+@conf
+def test(self,*k,**kw):
+ if not'env'in kw:
+ kw['env']=self.env.derive()
+ if kw.get('validate'):
+ kw['validate'](kw)
+ self.start_msg(kw['msg'],**kw)
+ ret=None
+ try:
+ ret=self.run_build(*k,**kw)
+ except self.errors.ConfigurationError:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('The configuration failed')
+ else:
+ kw['success']=ret
+ if kw.get('post_check'):
+ ret=kw['post_check'](kw)
+ if ret:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ self.fatal('The configuration failed %r'%ret)
+ else:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+ return ret
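
Configure.py defines the ConfigurationContext plus the @conf decorator, which attaches helper functions to both the configuration and build contexts. A hedged sketch of a configure() function using the helpers added above (the program name and version bound are illustrative):

    def configure(conf):
        conf.check_waf_version(mini='2.0.0')
        # look up an executable on PATH; the result is stored in conf.env.PKGCONFIG
        conf.find_program('pkg-config', var='PKGCONFIG', mandatory=False)
        # copy CFLAGS from the calling environment into conf.env, if set
        conf.add_os_flags('CFLAGS')
        # register a derived copy of the current env as the 'debug' variant
        conf.setenv('debug', env=conf.env)
        conf.env.append_value('CFLAGS', ['-g'])
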
diff --git a/waflib/Context.py b/waflib/Context.py
new file mode 100644
index 0000000..b583930
--- /dev/null
+++ b/waflib/Context.py
@@ -0,0 +1,406 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,imp,sys
+from waflib import Utils,Errors,Logs
+import waflib.Node
+HEXVERSION=0x2000900
+WAFVERSION="2.0.9"
+WAFREVISION="8a950e7bca9a3a9b1ae62aae039ef76e2adc4177"
+ABI=20
+DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
+APPNAME='APPNAME'
+VERSION='VERSION'
+TOP='top'
+OUT='out'
+WSCRIPT_FILE='wscript'
+launch_dir=''
+run_dir=''
+top_dir=''
+out_dir=''
+waf_dir=''
+default_encoding=Utils.console_encoding()
+g_module=None
+STDOUT=1
+STDERR=-1
+BOTH=0
+classes=[]
+def create_context(cmd_name,*k,**kw):
+ for x in classes:
+ if x.cmd==cmd_name:
+ return x(*k,**kw)
+ ctx=Context(*k,**kw)
+ ctx.fun=cmd_name
+ return ctx
+class store_context(type):
+ def __init__(cls,name,bases,dct):
+ super(store_context,cls).__init__(name,bases,dct)
+ name=cls.__name__
+ if name in('ctx','Context'):
+ return
+ try:
+ cls.cmd
+ except AttributeError:
+ raise Errors.WafError('Missing command for the context class %r (cmd)'%name)
+ if not getattr(cls,'fun',None):
+ cls.fun=cls.cmd
+ classes.insert(0,cls)
+ctx=store_context('ctx',(object,),{})
+class Context(ctx):
+ errors=Errors
+ tools={}
+ def __init__(self,**kw):
+ try:
+ rd=kw['run_dir']
+ except KeyError:
+ rd=run_dir
+ self.node_class=type('Nod3',(waflib.Node.Node,),{})
+ self.node_class.__module__='waflib.Node'
+ self.node_class.ctx=self
+ self.root=self.node_class('',None)
+ self.cur_script=None
+ self.path=self.root.find_dir(rd)
+ self.stack_path=[]
+ self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self}
+ self.logger=None
+ def finalize(self):
+ try:
+ logger=self.logger
+ except AttributeError:
+ pass
+ else:
+ Logs.free_logger(logger)
+ delattr(self,'logger')
+ def load(self,tool_list,*k,**kw):
+ tools=Utils.to_list(tool_list)
+ path=Utils.to_list(kw.get('tooldir',''))
+ with_sys_path=kw.get('with_sys_path',True)
+ for t in tools:
+ module=load_tool(t,path,with_sys_path=with_sys_path)
+ fun=getattr(module,kw.get('name',self.fun),None)
+ if fun:
+ fun(self)
+ def execute(self):
+ self.recurse([os.path.dirname(g_module.root_path)])
+ def pre_recurse(self,node):
+ self.stack_path.append(self.cur_script)
+ self.cur_script=node
+ self.path=node.parent
+ def post_recurse(self,node):
+ self.cur_script=self.stack_path.pop()
+ if self.cur_script:
+ self.path=self.cur_script.parent
+ def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None):
+ try:
+ cache=self.recurse_cache
+ except AttributeError:
+ cache=self.recurse_cache={}
+ for d in Utils.to_list(dirs):
+ if not os.path.isabs(d):
+ d=os.path.join(self.path.abspath(),d)
+ WSCRIPT=os.path.join(d,WSCRIPT_FILE)
+ WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun)
+ node=self.root.find_node(WSCRIPT_FUN)
+ if node and(not once or node not in cache):
+ cache[node]=True
+ self.pre_recurse(node)
+ try:
+ function_code=node.read('rU',encoding)
+ exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
+ finally:
+ self.post_recurse(node)
+ elif not node:
+ node=self.root.find_node(WSCRIPT)
+ tup=(node,name or self.fun)
+ if node and(not once or tup not in cache):
+ cache[tup]=True
+ self.pre_recurse(node)
+ try:
+ wscript_module=load_module(node.abspath(),encoding=encoding)
+ user_function=getattr(wscript_module,(name or self.fun),None)
+ if not user_function:
+ if not mandatory:
+ continue
+ raise Errors.WafError('No function %r defined in %s'%(name or self.fun,node.abspath()))
+ user_function(self)
+ finally:
+ self.post_recurse(node)
+ elif not node:
+ if not mandatory:
+ continue
+ try:
+ os.listdir(d)
+ except OSError:
+ raise Errors.WafError('Cannot read the folder %r'%d)
+ raise Errors.WafError('No wscript file in directory %s'%d)
+ def log_command(self,cmd,kw):
+ if Logs.verbose:
+ fmt=os.environ.get('WAF_CMD_FORMAT')
+ if fmt=='string':
+ if not isinstance(cmd,str):
+ cmd=Utils.shell_escape(cmd)
+ Logs.debug('runner: %r',cmd)
+ Logs.debug('runner_env: kw=%s',kw)
+ def exec_command(self,cmd,**kw):
+ subprocess=Utils.subprocess
+ kw['shell']=isinstance(cmd,str)
+ self.log_command(cmd,kw)
+ if self.logger:
+ self.logger.info(cmd)
+ if'stdout'not in kw:
+ kw['stdout']=subprocess.PIPE
+ if'stderr'not in kw:
+ kw['stderr']=subprocess.PIPE
+ if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError('Program %s not found!'%cmd[0])
+ cargs={}
+ if'timeout'in kw:
+ if sys.hexversion>=0x3030000:
+ cargs['timeout']=kw['timeout']
+ if not'start_new_session'in kw:
+ kw['start_new_session']=True
+ del kw['timeout']
+ if'input'in kw:
+ if kw['input']:
+ cargs['input']=kw['input']
+ kw['stdin']=subprocess.PIPE
+ del kw['input']
+ if'cwd'in kw:
+ if not isinstance(kw['cwd'],str):
+ kw['cwd']=kw['cwd'].abspath()
+ encoding=kw.pop('decode_as',default_encoding)
+ try:
+ ret,out,err=Utils.run_process(cmd,kw,cargs)
+ except Exception as e:
+ raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
+ if out:
+ if not isinstance(out,str):
+ out=out.decode(encoding,errors='replace')
+ if self.logger:
+ self.logger.debug('out: %s',out)
+ else:
+ Logs.info(out,extra={'stream':sys.stdout,'c1':''})
+ if err:
+ if not isinstance(err,str):
+ err=err.decode(encoding,errors='replace')
+ if self.logger:
+ self.logger.error('err: %s'%err)
+ else:
+ Logs.info(err,extra={'stream':sys.stderr,'c1':''})
+ return ret
+ def cmd_and_log(self,cmd,**kw):
+ subprocess=Utils.subprocess
+ kw['shell']=isinstance(cmd,str)
+ self.log_command(cmd,kw)
+ quiet=kw.pop('quiet',None)
+ to_ret=kw.pop('output',STDOUT)
+ if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
+ raise Errors.WafError('Program %r not found!'%cmd[0])
+ kw['stdout']=kw['stderr']=subprocess.PIPE
+ if quiet is None:
+ self.to_log(cmd)
+ cargs={}
+ if'timeout'in kw:
+ if sys.hexversion>=0x3030000:
+ cargs['timeout']=kw['timeout']
+ if not'start_new_session'in kw:
+ kw['start_new_session']=True
+ del kw['timeout']
+ if'input'in kw:
+ if kw['input']:
+ cargs['input']=kw['input']
+ kw['stdin']=subprocess.PIPE
+ del kw['input']
+ if'cwd'in kw:
+ if not isinstance(kw['cwd'],str):
+ kw['cwd']=kw['cwd'].abspath()
+ encoding=kw.pop('decode_as',default_encoding)
+ try:
+ ret,out,err=Utils.run_process(cmd,kw,cargs)
+ except Exception as e:
+ raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
+ if not isinstance(out,str):
+ out=out.decode(encoding,errors='replace')
+ if not isinstance(err,str):
+ err=err.decode(encoding,errors='replace')
+ if out and quiet!=STDOUT and quiet!=BOTH:
+ self.to_log('out: %s'%out)
+ if err and quiet!=STDERR and quiet!=BOTH:
+ self.to_log('err: %s'%err)
+ if ret:
+ e=Errors.WafError('Command %r returned %r'%(cmd,ret))
+ e.returncode=ret
+ e.stderr=err
+ e.stdout=out
+ raise e
+ if to_ret==BOTH:
+ return(out,err)
+ elif to_ret==STDERR:
+ return err
+ return out
+ def fatal(self,msg,ex=None):
+ if self.logger:
+ self.logger.info('from %s: %s'%(self.path.abspath(),msg))
+ try:
+ logfile=self.logger.handlers[0].baseFilename
+ except AttributeError:
+ pass
+ else:
+ if os.environ.get('WAF_PRINT_FAILURE_LOG'):
+ msg='Log from (%s):\n%s\n'%(logfile,Utils.readf(logfile))
+ else:
+ msg='%s\n(complete log in %s)'%(msg,logfile)
+ raise self.errors.ConfigurationError(msg,ex=ex)
+ def to_log(self,msg):
+ if not msg:
+ return
+ if self.logger:
+ self.logger.info(msg)
+ else:
+ sys.stderr.write(str(msg))
+ sys.stderr.flush()
+ def msg(self,*k,**kw):
+ try:
+ msg=kw['msg']
+ except KeyError:
+ msg=k[0]
+ self.start_msg(msg,**kw)
+ try:
+ result=kw['result']
+ except KeyError:
+ result=k[1]
+ color=kw.get('color')
+ if not isinstance(color,str):
+ color=result and'GREEN'or'YELLOW'
+ self.end_msg(result,color,**kw)
+ def start_msg(self,*k,**kw):
+ if kw.get('quiet'):
+ return
+ msg=kw.get('msg')or k[0]
+ try:
+ if self.in_msg:
+ self.in_msg+=1
+ return
+ except AttributeError:
+ self.in_msg=0
+ self.in_msg+=1
+ try:
+ self.line_just=max(self.line_just,len(msg))
+ except AttributeError:
+ self.line_just=max(40,len(msg))
+ for x in(self.line_just*'-',msg):
+ self.to_log(x)
+ Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
+ def end_msg(self,*k,**kw):
+ if kw.get('quiet'):
+ return
+ self.in_msg-=1
+ if self.in_msg:
+ return
+ result=kw.get('result')or k[0]
+ defcolor='GREEN'
+ if result is True:
+ msg='ok'
+ elif not result:
+ msg='not found'
+ defcolor='YELLOW'
+ else:
+ msg=str(result)
+ self.to_log(msg)
+ try:
+ color=kw['color']
+ except KeyError:
+ if len(k)>1 and k[1]in Logs.colors_lst:
+ color=k[1]
+ else:
+ color=defcolor
+ Logs.pprint(color,msg)
+ def load_special_tools(self,var,ban=[]):
+ if os.path.isdir(waf_dir):
+ lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
+ for x in lst:
+ if not x.name in ban:
+ load_tool(x.name.replace('.py',''))
+ else:
+ from zipfile import PyZipFile
+ waflibs=PyZipFile(waf_dir)
+ lst=waflibs.namelist()
+ for x in lst:
+ if not re.match('waflib/extras/%s'%var.replace('*','.*'),var):
+ continue
+ f=os.path.basename(x)
+ doban=False
+ for b in ban:
+ r=b.replace('*','.*')
+ if re.match(r,f):
+ doban=True
+ if not doban:
+ f=f.replace('.py','')
+ load_tool(f)
+cache_modules={}
+def load_module(path,encoding=None):
+ try:
+ return cache_modules[path]
+ except KeyError:
+ pass
+ module=imp.new_module(WSCRIPT_FILE)
+ try:
+ code=Utils.readf(path,m='rU',encoding=encoding)
+ except EnvironmentError:
+ raise Errors.WafError('Could not read the file %r'%path)
+ module_dir=os.path.dirname(path)
+ sys.path.insert(0,module_dir)
+ try:
+ exec(compile(code,path,'exec'),module.__dict__)
+ finally:
+ sys.path.remove(module_dir)
+ cache_modules[path]=module
+ return module
+def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True):
+ if tool=='java':
+ tool='javaw'
+ else:
+ tool=tool.replace('++','xx')
+ if not with_sys_path:
+ back_path=sys.path
+ sys.path=[]
+ try:
+ if tooldir:
+ assert isinstance(tooldir,list)
+ sys.path=tooldir+sys.path
+ try:
+ __import__(tool)
+ except ImportError as e:
+ e.waf_sys_path=list(sys.path)
+ raise
+ finally:
+ for d in tooldir:
+ sys.path.remove(d)
+ ret=sys.modules[tool]
+ Context.tools[tool]=ret
+ return ret
+ else:
+ if not with_sys_path:
+ sys.path.insert(0,waf_dir)
+ try:
+ for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'):
+ try:
+ __import__(x%tool)
+ break
+ except ImportError:
+ x=None
+ else:
+ __import__(tool)
+ except ImportError as e:
+ e.waf_sys_path=list(sys.path)
+ raise
+ finally:
+ if not with_sys_path:
+ sys.path.remove(waf_dir)
+ ret=sys.modules[x%tool]
+ Context.tools[tool]=ret
+ return ret
+ finally:
+ if not with_sys_path:
+ sys.path+=back_path
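
Context.py supplies the base Context class, the store_context metaclass that registers every command class by its cmd attribute, wscript recursion, and the exec_command/cmd_and_log process helpers. A sketch of a custom command defined in a wscript, with illustrative names (the command and the program being run are assumptions, not part of this patch):

    from waflib.Context import Context

    class DumpContext(Context):
        '''prints the python version (example command)'''
        cmd = 'dump'   # picked up automatically by the store_context metaclass
        fun = 'dump'   # wscript function run by the default execute()

    def dump(ctx):
        # cmd_and_log returns captured stdout and raises WafError on a non-zero exit
        out = ctx.cmd_and_log(['python3', '--version'])
        ctx.to_log(out)

Running "./waf dump" would then build a DumpContext via create_context() and execute the dump() function.
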
diff --git a/waflib/Errors.py b/waflib/Errors.py
new file mode 100644
index 0000000..3ef76fc
--- /dev/null
+++ b/waflib/Errors.py
@@ -0,0 +1,39 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import traceback,sys
+class WafError(Exception):
+ def __init__(self,msg='',ex=None):
+ Exception.__init__(self)
+ self.msg=msg
+ assert not isinstance(msg,Exception)
+ self.stack=[]
+ if ex:
+ if not msg:
+ self.msg=str(ex)
+ if isinstance(ex,WafError):
+ self.stack=ex.stack
+ else:
+ self.stack=traceback.extract_tb(sys.exc_info()[2])
+ self.stack+=traceback.extract_stack()[:-1]
+ self.verbose_msg=''.join(traceback.format_list(self.stack))
+ def __str__(self):
+ return str(self.msg)
+class BuildError(WafError):
+ def __init__(self,error_tasks=[]):
+ self.tasks=error_tasks
+ WafError.__init__(self,self.format_error())
+ def format_error(self):
+ lst=['Build failed']
+ for tsk in self.tasks:
+ txt=tsk.format_error()
+ if txt:
+ lst.append(txt)
+ return'\n'.join(lst)
+class ConfigurationError(WafError):
+ pass
+class TaskRescan(WafError):
+ pass
+class TaskNotReady(WafError):
+ pass
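WafError above can wrap an original exception: the constructor keeps the wrapped traceback in verbose_msg and reuses str(ex) as the message only when no message is given, while BuildError aggregates per-task messages through format_error(). A short sketch, assuming the vendored waflib is importable (not part of the patch):

    # Illustrative sketch, not part of the patch: wrapping a low-level error.
    from waflib import Errors

    try:
        raise OSError('disk on fire')
    except OSError as e:
        err = Errors.WafError('Could not read the wscript', ex=e)

    print(err)               # 'Could not read the wscript' (explicit msg wins over ex)
    print(err.verbose_msg)   # formatted stack recorded at construction time
    print(isinstance(Errors.ConfigurationError('bad'), Errors.WafError))  # True
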
diff --git a/waflib/Logs.py b/waflib/Logs.py
new file mode 100644
index 0000000..4a1f7f8
--- /dev/null
+++ b/waflib/Logs.py
@@ -0,0 +1,203 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,traceback,sys
+from waflib import Utils,ansiterm
+if not os.environ.get('NOSYNC',False):
+ if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__):
+ sys.stdout=ansiterm.AnsiTerm(sys.stdout)
+ if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__):
+ sys.stderr=ansiterm.AnsiTerm(sys.stderr)
+import logging
+LOG_FORMAT=os.environ.get('WAF_LOG_FORMAT','%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
+HOUR_FORMAT=os.environ.get('WAF_HOUR_FORMAT','%H:%M:%S')
+zones=[]
+verbose=0
+colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','GREY':'\x1b[37m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
+indicator='\r\x1b[K%s%s%s'
+try:
+ unicode
+except NameError:
+ unicode=None
+def enable_colors(use):
+ if use==1:
+ if not(sys.stderr.isatty()or sys.stdout.isatty()):
+ use=0
+ if Utils.is_win32 and os.name!='java':
+ term=os.environ.get('TERM','')
+ else:
+ term=os.environ.get('TERM','dumb')
+ if term in('dumb','emacs'):
+ use=0
+ if use>=1:
+ os.environ['TERM']='vt100'
+ colors_lst['USE']=use
+try:
+ get_term_cols=ansiterm.get_term_cols
+except AttributeError:
+ def get_term_cols():
+ return 80
+get_term_cols.__doc__="""
+ Returns the console width in characters.
+
+ :return: the number of characters per line
+ :rtype: int
+ """
+def get_color(cl):
+ if colors_lst['USE']:
+ return colors_lst.get(cl,'')
+ return''
+class color_dict(object):
+ def __getattr__(self,a):
+ return get_color(a)
+ def __call__(self,a):
+ return get_color(a)
+colors=color_dict()
+re_log=re.compile(r'(\w+): (.*)',re.M)
+class log_filter(logging.Filter):
+ def __init__(self,name=''):
+ logging.Filter.__init__(self,name)
+ def filter(self,rec):
+ rec.zone=rec.module
+ if rec.levelno>=logging.INFO:
+ return True
+ m=re_log.match(rec.msg)
+ if m:
+ rec.zone=m.group(1)
+ rec.msg=m.group(2)
+ if zones:
+ return getattr(rec,'zone','')in zones or'*'in zones
+ elif not verbose>2:
+ return False
+ return True
+class log_handler(logging.StreamHandler):
+ def emit(self,record):
+ try:
+ try:
+ self.stream=record.stream
+ except AttributeError:
+ if record.levelno>=logging.WARNING:
+ record.stream=self.stream=sys.stderr
+ else:
+ record.stream=self.stream=sys.stdout
+ self.emit_override(record)
+ self.flush()
+ except(KeyboardInterrupt,SystemExit):
+ raise
+ except:
+ self.handleError(record)
+ def emit_override(self,record,**kw):
+ self.terminator=getattr(record,'terminator','\n')
+ stream=self.stream
+ if unicode:
+ msg=self.formatter.format(record)
+ fs='%s'+self.terminator
+ try:
+ if(isinstance(msg,unicode)and getattr(stream,'encoding',None)):
+ fs=fs.decode(stream.encoding)
+ try:
+ stream.write(fs%msg)
+ except UnicodeEncodeError:
+ stream.write((fs%msg).encode(stream.encoding))
+ else:
+ stream.write(fs%msg)
+ except UnicodeError:
+ stream.write((fs%msg).encode('utf-8'))
+ else:
+ logging.StreamHandler.emit(self,record)
+class formatter(logging.Formatter):
+ def __init__(self):
+ logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
+ def format(self,rec):
+ try:
+ msg=rec.msg.decode('utf-8')
+ except Exception:
+ msg=rec.msg
+ use=colors_lst['USE']
+ if(use==1 and rec.stream.isatty())or use==2:
+ c1=getattr(rec,'c1',None)
+ if c1 is None:
+ c1=''
+ if rec.levelno>=logging.ERROR:
+ c1=colors.RED
+ elif rec.levelno>=logging.WARNING:
+ c1=colors.YELLOW
+ elif rec.levelno>=logging.INFO:
+ c1=colors.GREEN
+ c2=getattr(rec,'c2',colors.NORMAL)
+ msg='%s%s%s'%(c1,msg,c2)
+ else:
+ msg=re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))','',msg)
+ if rec.levelno>=logging.INFO:
+ if rec.args:
+ return msg%rec.args
+ return msg
+ rec.msg=msg
+ rec.c1=colors.PINK
+ rec.c2=colors.NORMAL
+ return logging.Formatter.format(self,rec)
+log=None
+def debug(*k,**kw):
+ if verbose:
+ k=list(k)
+ k[0]=k[0].replace('\n',' ')
+ log.debug(*k,**kw)
+def error(*k,**kw):
+ log.error(*k,**kw)
+ if verbose>2:
+ st=traceback.extract_stack()
+ if st:
+ st=st[:-1]
+ buf=[]
+ for filename,lineno,name,line in st:
+ buf.append(' File %r, line %d, in %s'%(filename,lineno,name))
+ if line:
+ buf.append(' %s'%line.strip())
+ if buf:
+ log.error('\n'.join(buf))
+def warn(*k,**kw):
+ log.warn(*k,**kw)
+def info(*k,**kw):
+ log.info(*k,**kw)
+def init_log():
+ global log
+ log=logging.getLogger('waflib')
+ log.handlers=[]
+ log.filters=[]
+ hdlr=log_handler()
+ hdlr.setFormatter(formatter())
+ log.addHandler(hdlr)
+ log.addFilter(log_filter())
+ log.setLevel(logging.DEBUG)
+def make_logger(path,name):
+ logger=logging.getLogger(name)
+ if sys.hexversion>0x3000000:
+ encoding=sys.stdout.encoding
+ else:
+ encoding=None
+ hdlr=logging.FileHandler(path,'w',encoding=encoding)
+ formatter=logging.Formatter('%(message)s')
+ hdlr.setFormatter(formatter)
+ logger.addHandler(hdlr)
+ logger.setLevel(logging.DEBUG)
+ return logger
+def make_mem_logger(name,to_log,size=8192):
+ from logging.handlers import MemoryHandler
+ logger=logging.getLogger(name)
+ hdlr=MemoryHandler(size,target=to_log)
+ formatter=logging.Formatter('%(message)s')
+ hdlr.setFormatter(formatter)
+ logger.addHandler(hdlr)
+ logger.memhandler=hdlr
+ logger.setLevel(logging.DEBUG)
+ return logger
+def free_logger(logger):
+ try:
+ for x in logger.handlers:
+ x.close()
+ logger.removeHandler(x)
+ except Exception:
+ pass
+def pprint(col,msg,label='',sep='\n'):
+ info('%s%s%s %s',colors(col),msg,colors.NORMAL,label,extra={'terminator':sep})
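The logging layer above filters debug output by "zone" (the prefix before ':' in a message, falling back to the module name) and only colorizes when the stream is a TTY or colors are forced. A minimal sketch of driving it directly, assuming init_log() has not been called yet (not part of the patch):

    # Illustrative sketch, not part of the patch: zones and colored output.
    from waflib import Logs

    Logs.init_log()                  # installs handler, filter and formatter
    Logs.enable_colors(2)            # 0=off, 1=auto (TTY only), 2=force
    Logs.pprint('YELLOW', 'hello from waflib.Logs')

    Logs.verbose = 1
    Logs.zones = ['runner']          # keep only debug messages tagged 'runner:'
    Logs.debug('runner: this one is printed')
    Logs.debug('deps: this one is filtered out')
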
diff --git a/waflib/Node.py b/waflib/Node.py
new file mode 100644
index 0000000..dc979d6
--- /dev/null
+++ b/waflib/Node.py
@@ -0,0 +1,478 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,sys,shutil
+from waflib import Utils,Errors
+exclude_regs='''
+**/*~
+**/#*#
+**/.#*
+**/%*%
+**/._*
+**/*.swp
+**/CVS
+**/CVS/**
+**/.cvsignore
+**/SCCS
+**/SCCS/**
+**/vssver.scc
+**/.svn
+**/.svn/**
+**/BitKeeper
+**/.git
+**/.git/**
+**/.gitignore
+**/.bzr
+**/.bzrignore
+**/.bzr/**
+**/.hg
+**/.hg/**
+**/_MTN
+**/_MTN/**
+**/.arch-ids
+**/{arch}
+**/_darcs
+**/_darcs/**
+**/.intlcache
+**/.DS_Store'''
+def ant_matcher(s,ignorecase):
+ reflags=re.I if ignorecase else 0
+ ret=[]
+ for x in Utils.to_list(s):
+ x=x.replace('\\','/').replace('//','/')
+ if x.endswith('/'):
+ x+='**'
+ accu=[]
+ for k in x.split('/'):
+ if k=='**':
+ accu.append(k)
+ else:
+ k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+')
+ k='^%s$'%k
+ try:
+ exp=re.compile(k,flags=reflags)
+ except Exception as e:
+ raise Errors.WafError('Invalid pattern: %s'%k,e)
+ else:
+ accu.append(exp)
+ ret.append(accu)
+ return ret
+def ant_sub_filter(name,nn):
+ ret=[]
+ for lst in nn:
+ if not lst:
+ pass
+ elif lst[0]=='**':
+ ret.append(lst)
+ if len(lst)>1:
+ if lst[1].match(name):
+ ret.append(lst[2:])
+ else:
+ ret.append([])
+ elif lst[0].match(name):
+ ret.append(lst[1:])
+ return ret
+def ant_sub_matcher(name,pats):
+ nacc=ant_sub_filter(name,pats[0])
+ nrej=ant_sub_filter(name,pats[1])
+ if[]in nrej:
+ nacc=[]
+ return[nacc,nrej]
+class Node(object):
+ dict_class=dict
+ __slots__=('name','parent','children','cache_abspath','cache_isdir')
+ def __init__(self,name,parent):
+ self.name=name
+ self.parent=parent
+ if parent:
+ if name in parent.children:
+ raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent))
+ parent.children[name]=self
+ def __setstate__(self,data):
+ self.name=data[0]
+ self.parent=data[1]
+ if data[2]is not None:
+ self.children=self.dict_class(data[2])
+ def __getstate__(self):
+ return(self.name,self.parent,getattr(self,'children',None))
+ def __str__(self):
+ return self.abspath()
+ def __repr__(self):
+ return self.abspath()
+ def __copy__(self):
+ raise Errors.WafError('nodes are not supposed to be copied')
+ def read(self,flags='r',encoding='latin-1'):
+ return Utils.readf(self.abspath(),flags,encoding)
+ def write(self,data,flags='w',encoding='latin-1'):
+ Utils.writef(self.abspath(),data,flags,encoding)
+ def read_json(self,convert=True,encoding='utf-8'):
+ import json
+ object_pairs_hook=None
+ if convert and sys.hexversion<0x3000000:
+ try:
+ _type=unicode
+ except NameError:
+ _type=str
+ def convert(value):
+ if isinstance(value,list):
+ return[convert(element)for element in value]
+ elif isinstance(value,_type):
+ return str(value)
+ else:
+ return value
+ def object_pairs(pairs):
+ return dict((str(pair[0]),convert(pair[1]))for pair in pairs)
+ object_pairs_hook=object_pairs
+ return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook)
+ def write_json(self,data,pretty=True):
+ import json
+ indent=2
+ separators=(',',': ')
+ sort_keys=pretty
+ newline=os.linesep
+ if not pretty:
+ indent=None
+ separators=(',',':')
+ newline=''
+ output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline
+ self.write(output,encoding='utf-8')
+ def exists(self):
+ return os.path.exists(self.abspath())
+ def isdir(self):
+ return os.path.isdir(self.abspath())
+ def chmod(self,val):
+ os.chmod(self.abspath(),val)
+ def delete(self,evict=True):
+ try:
+ try:
+ if os.path.isdir(self.abspath()):
+ shutil.rmtree(self.abspath())
+ else:
+ os.remove(self.abspath())
+ except OSError:
+ if os.path.exists(self.abspath()):
+ raise
+ finally:
+ if evict:
+ self.evict()
+ def evict(self):
+ del self.parent.children[self.name]
+ def suffix(self):
+ k=max(0,self.name.rfind('.'))
+ return self.name[k:]
+ def height(self):
+ d=self
+ val=-1
+ while d:
+ d=d.parent
+ val+=1
+ return val
+ def listdir(self):
+ lst=Utils.listdir(self.abspath())
+ lst.sort()
+ return lst
+ def mkdir(self):
+ if self.isdir():
+ return
+ try:
+ self.parent.mkdir()
+ except OSError:
+ pass
+ if self.name:
+ try:
+ os.makedirs(self.abspath())
+ except OSError:
+ pass
+ if not self.isdir():
+ raise Errors.WafError('Could not create the directory %r'%self)
+ try:
+ self.children
+ except AttributeError:
+ self.children=self.dict_class()
+ def find_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in Utils.split_path(lst)if x and x!='.']
+ if lst and lst[0].startswith('\\\\')and not self.parent:
+ node=self.ctx.root.make_node(lst[0])
+ node.cache_isdir=True
+ return node.find_node(lst[1:])
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ continue
+ try:
+ ch=cur.children
+ except AttributeError:
+ cur.children=self.dict_class()
+ else:
+ try:
+ cur=ch[x]
+ continue
+ except KeyError:
+ pass
+ cur=self.__class__(x,cur)
+ if not cur.exists():
+ cur.evict()
+ return None
+ if not cur.exists():
+ cur.evict()
+ return None
+ return cur
+ def make_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in Utils.split_path(lst)if x and x!='.']
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ continue
+ try:
+ cur=cur.children[x]
+ except AttributeError:
+ cur.children=self.dict_class()
+ except KeyError:
+ pass
+ else:
+ continue
+ cur=self.__class__(x,cur)
+ return cur
+ def search_node(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in Utils.split_path(lst)if x and x!='.']
+ cur=self
+ for x in lst:
+ if x=='..':
+ cur=cur.parent or cur
+ else:
+ try:
+ cur=cur.children[x]
+ except(AttributeError,KeyError):
+ return None
+ return cur
+ def path_from(self,node):
+ c1=self
+ c2=node
+ c1h=c1.height()
+ c2h=c2.height()
+ lst=[]
+ up=0
+ while c1h>c2h:
+ lst.append(c1.name)
+ c1=c1.parent
+ c1h-=1
+ while c2h>c1h:
+ up+=1
+ c2=c2.parent
+ c2h-=1
+ while not c1 is c2:
+ lst.append(c1.name)
+ up+=1
+ c1=c1.parent
+ c2=c2.parent
+ if c1.parent:
+ lst.extend(['..']*up)
+ lst.reverse()
+ return os.sep.join(lst)or'.'
+ else:
+ return self.abspath()
+ def abspath(self):
+ try:
+ return self.cache_abspath
+ except AttributeError:
+ pass
+ if not self.parent:
+ val=os.sep
+ elif not self.parent.name:
+ val=os.sep+self.name
+ else:
+ val=self.parent.abspath()+os.sep+self.name
+ self.cache_abspath=val
+ return val
+ if Utils.is_win32:
+ def abspath(self):
+ try:
+ return self.cache_abspath
+ except AttributeError:
+ pass
+ if not self.parent:
+ val=''
+ elif not self.parent.name:
+ val=self.name+os.sep
+ else:
+ val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name
+ self.cache_abspath=val
+ return val
+ def is_child_of(self,node):
+ p=self
+ diff=self.height()-node.height()
+ while diff>0:
+ diff-=1
+ p=p.parent
+ return p is node
+ def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False):
+ dircont=self.listdir()
+ dircont.sort()
+ try:
+ lst=set(self.children.keys())
+ except AttributeError:
+ self.children=self.dict_class()
+ else:
+ if remove:
+ for x in lst-set(dircont):
+ self.children[x].evict()
+ for name in dircont:
+ npats=accept(name,pats)
+ if npats and npats[0]:
+ accepted=[]in npats[0]
+ node=self.make_node([name])
+ isdir=node.isdir()
+ if accepted:
+ if isdir:
+ if dir:
+ yield node
+ elif src:
+ yield node
+ if isdir:
+ node.cache_isdir=True
+ if maxdepth:
+ for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove,quiet=quiet):
+ yield k
+ def ant_glob(self,*k,**kw):
+ src=kw.get('src',True)
+ dir=kw.get('dir')
+ excl=kw.get('excl',exclude_regs)
+ incl=k and k[0]or kw.get('incl','**')
+ remove=kw.get('remove',True)
+ maxdepth=kw.get('maxdepth',25)
+ ignorecase=kw.get('ignorecase',False)
+ quiet=kw.get('quiet',False)
+ pats=(ant_matcher(incl,ignorecase),ant_matcher(excl,ignorecase))
+ if kw.get('generator'):
+ return Utils.lazy_generator(self.ant_iter,(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet))
+ it=self.ant_iter(ant_sub_matcher,maxdepth,pats,dir,src,remove,quiet)
+ if kw.get('flat'):
+ return' '.join(x.path_from(self)for x in it)
+ return list(it)
+ def is_src(self):
+ cur=self
+ x=self.ctx.srcnode
+ y=self.ctx.bldnode
+ while cur.parent:
+ if cur is y:
+ return False
+ if cur is x:
+ return True
+ cur=cur.parent
+ return False
+ def is_bld(self):
+ cur=self
+ y=self.ctx.bldnode
+ while cur.parent:
+ if cur is y:
+ return True
+ cur=cur.parent
+ return False
+ def get_src(self):
+ cur=self
+ x=self.ctx.srcnode
+ y=self.ctx.bldnode
+ lst=[]
+ while cur.parent:
+ if cur is y:
+ lst.reverse()
+ return x.make_node(lst)
+ if cur is x:
+ return self
+ lst.append(cur.name)
+ cur=cur.parent
+ return self
+ def get_bld(self):
+ cur=self
+ x=self.ctx.srcnode
+ y=self.ctx.bldnode
+ lst=[]
+ while cur.parent:
+ if cur is y:
+ return self
+ if cur is x:
+ lst.reverse()
+ return self.ctx.bldnode.make_node(lst)
+ lst.append(cur.name)
+ cur=cur.parent
+ lst.reverse()
+ if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'):
+ lst[0]=lst[0][0]
+ return self.ctx.bldnode.make_node(['__root__']+lst)
+ def find_resource(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in Utils.split_path(lst)if x and x!='.']
+ node=self.get_bld().search_node(lst)
+ if not node:
+ node=self.get_src().find_node(lst)
+ if node and node.isdir():
+ return None
+ return node
+ def find_or_declare(self,lst):
+ if isinstance(lst,str)and os.path.isabs(lst):
+ node=self.ctx.root.make_node(lst)
+ else:
+ node=self.get_bld().make_node(lst)
+ node.parent.mkdir()
+ return node
+ def find_dir(self,lst):
+ if isinstance(lst,str):
+ lst=[x for x in Utils.split_path(lst)if x and x!='.']
+ node=self.find_node(lst)
+ if node and not node.isdir():
+ return None
+ return node
+ def change_ext(self,ext,ext_in=None):
+ name=self.name
+ if ext_in is None:
+ k=name.rfind('.')
+ if k>=0:
+ name=name[:k]+ext
+ else:
+ name=name+ext
+ else:
+ name=name[:-len(ext_in)]+ext
+ return self.parent.find_or_declare([name])
+ def bldpath(self):
+ return self.path_from(self.ctx.bldnode)
+ def srcpath(self):
+ return self.path_from(self.ctx.srcnode)
+ def relpath(self):
+ cur=self
+ x=self.ctx.bldnode
+ while cur.parent:
+ if cur is x:
+ return self.bldpath()
+ cur=cur.parent
+ return self.srcpath()
+ def bld_dir(self):
+ return self.parent.bldpath()
+ def h_file(self):
+ return Utils.h_file(self.abspath())
+ def get_bld_sig(self):
+ try:
+ cache=self.ctx.cache_sig
+ except AttributeError:
+ cache=self.ctx.cache_sig={}
+ try:
+ ret=cache[self]
+ except KeyError:
+ p=self.abspath()
+ try:
+ ret=cache[self]=self.h_file()
+ except EnvironmentError:
+ if self.isdir():
+ st=os.stat(p)
+ ret=cache[self]=Utils.h_list([p,st.st_ino,st.st_mode])
+ return ret
+ raise
+ return ret
+pickle_lock=Utils.threading.Lock()
+class Nod3(Node):
+ pass
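Nodes are normally obtained from a build context rather than constructed directly; ant_glob() walks the tree with Ant-style patterns and applies the exclude_regs defaults (VCS metadata, editor backups, and so on) on top of any excl argument. A sketch of typical use from a wscript, where bld.path is the Node of the script's directory and the file names are made up (not part of the patch):

    # Illustrative sketch, not part of the patch: globbing from a wscript.
    def build(bld):
        sources = bld.path.ant_glob('src/**/*.c', excl=['src/experimental/**'])
        for node in sources:                      # ant_glob returns Node objects
            print(node.path_from(bld.path))       # path relative to this wscript
        flat = bld.path.ant_glob('include/**/*.h', flat=True)  # space-joined string
        out = bld.path.find_or_declare('generated/version.h')  # placed in the build dir
        out.write('#define DEMO_VERSION "0.1"\n') # Node.write goes through Utils.writef
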
diff --git a/waflib/Options.py b/waflib/Options.py
new file mode 100644
index 0000000..b61c60a
--- /dev/null
+++ b/waflib/Options.py
@@ -0,0 +1,200 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,tempfile,optparse,sys,re
+from waflib import Logs,Utils,Context,Errors
+options=optparse.Values()
+commands=[]
+envvars=[]
+lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform)
+class opt_parser(optparse.OptionParser):
+ def __init__(self,ctx,allow_unknown=False):
+ optparse.OptionParser.__init__(self,conflict_handler='resolve',add_help_option=False,version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION))
+ self.formatter.width=Logs.get_term_cols()
+ self.ctx=ctx
+ self.allow_unknown=allow_unknown
+ def _process_args(self,largs,rargs,values):
+ while rargs:
+ try:
+ optparse.OptionParser._process_args(self,largs,rargs,values)
+ except(optparse.BadOptionError,optparse.AmbiguousOptionError)as e:
+ if self.allow_unknown:
+ largs.append(e.opt_str)
+ else:
+ self.error(str(e))
+ def print_usage(self,file=None):
+ return self.print_help(file)
+ def get_usage(self):
+ cmds_str={}
+ for cls in Context.classes:
+ if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'):
+ continue
+ s=cls.__doc__ or''
+ cmds_str[cls.cmd]=s
+ if Context.g_module:
+ for(k,v)in Context.g_module.__dict__.items():
+ if k in('options','init','shutdown'):
+ continue
+ if type(v)is type(Context.create_context):
+ if v.__doc__ and not k.startswith('_'):
+ cmds_str[k]=v.__doc__
+ just=0
+ for k in cmds_str:
+ just=max(just,len(k))
+ lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()]
+ lst.sort()
+ ret='\n'.join(lst)
+ return'''waf [commands] [options]
+
+Main commands (example: ./waf build -j4)
+%s
+'''%ret
+class OptionsContext(Context.Context):
+ cmd='options'
+ fun='options'
+ def __init__(self,**kw):
+ super(OptionsContext,self).__init__(**kw)
+ self.parser=opt_parser(self)
+ self.option_groups={}
+ jobs=self.jobs()
+ p=self.add_option
+ color=os.environ.get('NOCOLOR','')and'no'or'auto'
+ if os.environ.get('CLICOLOR','')=='0':
+ color='no'
+ elif os.environ.get('CLICOLOR_FORCE','')=='1':
+ color='yes'
+ p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto'))
+ p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs)
+ p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)')
+ p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]')
+ p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)')
+ p('--profile',dest='profile',default=0,action='store_true',help=optparse.SUPPRESS_HELP)
+ p('--pdb',dest='pdb',default=0,action='store_true',help=optparse.SUPPRESS_HELP)
+ p('-h','--help',dest='whelp',default=0,action='store_true',help="show this help message and exit")
+ gr=self.add_option_group('Configuration options')
+ self.option_groups['configure options']=gr
+ gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out')
+ gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top')
+ gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run')
+ gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out')
+ gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top')
+ default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX'))
+ if not default_prefix:
+ if Utils.unversioned_sys_platform()=='win32':
+ d=tempfile.gettempdir()
+ default_prefix=d[0].upper()+d[1:]
+ else:
+ default_prefix='/usr/local/'
+ gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix)
+ gr.add_option('--bindir',dest='bindir',help='bindir')
+ gr.add_option('--libdir',dest='libdir',help='libdir')
+ gr=self.add_option_group('Build and installation options')
+ self.option_groups['build and install options']=gr
+ gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output')
+ gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"')
+ gr=self.add_option_group('Step options')
+ self.option_groups['step options']=gr
+ gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
+ default_destdir=os.environ.get('DESTDIR','')
+ gr=self.add_option_group('Installation and uninstallation options')
+ self.option_groups['install/uninstall options']=gr
+ gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir')
+ gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation')
+ gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store')
+ def jobs(self):
+ count=int(os.environ.get('JOBS',0))
+ if count<1:
+ if'NUMBER_OF_PROCESSORS'in os.environ:
+ count=int(os.environ.get('NUMBER_OF_PROCESSORS',1))
+ else:
+ if hasattr(os,'sysconf_names'):
+ if'SC_NPROCESSORS_ONLN'in os.sysconf_names:
+ count=int(os.sysconf('SC_NPROCESSORS_ONLN'))
+ elif'SC_NPROCESSORS_CONF'in os.sysconf_names:
+ count=int(os.sysconf('SC_NPROCESSORS_CONF'))
+ if not count and os.name not in('nt','java'):
+ try:
+ tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0)
+ except Errors.WafError:
+ pass
+ else:
+ if re.match('^[0-9]+$',tmp):
+ count=int(tmp)
+ if count<1:
+ count=1
+ elif count>1024:
+ count=1024
+ return count
+ def add_option(self,*k,**kw):
+ return self.parser.add_option(*k,**kw)
+ def add_option_group(self,*k,**kw):
+ try:
+ gr=self.option_groups[k[0]]
+ except KeyError:
+ gr=self.parser.add_option_group(*k,**kw)
+ self.option_groups[k[0]]=gr
+ return gr
+ def get_option_group(self,opt_str):
+ try:
+ return self.option_groups[opt_str]
+ except KeyError:
+ for group in self.parser.option_groups:
+ if group.title==opt_str:
+ return group
+ return None
+ def sanitize_path(self,path,cwd=None):
+ if not cwd:
+ cwd=Context.launch_dir
+ p=os.path.expanduser(path)
+ p=os.path.join(cwd,p)
+ p=os.path.normpath(p)
+ p=os.path.abspath(p)
+ return p
+ def parse_cmd_args(self,_args=None,cwd=None,allow_unknown=False):
+ self.parser.allow_unknown=allow_unknown
+ (options,leftover_args)=self.parser.parse_args(args=_args)
+ envvars=[]
+ commands=[]
+ for arg in leftover_args:
+ if'='in arg:
+ envvars.append(arg)
+ elif arg!='options':
+ commands.append(arg)
+ for name in'top out destdir prefix bindir libdir'.split():
+ if getattr(options,name,None):
+ path=self.sanitize_path(getattr(options,name),cwd)
+ setattr(options,name,path)
+ return options,commands,envvars
+ def init_module_vars(self,arg_options,arg_commands,arg_envvars):
+ options.__dict__.clear()
+ del commands[:]
+ del envvars[:]
+ options.__dict__.update(arg_options.__dict__)
+ commands.extend(arg_commands)
+ envvars.extend(arg_envvars)
+ for var in envvars:
+ (name,value)=var.split('=',1)
+ os.environ[name.strip()]=value
+ def init_logs(self,options,commands,envvars):
+ Logs.verbose=options.verbose
+ if options.verbose>=1:
+ self.load('errcheck')
+ colors={'yes':2,'auto':1,'no':0}[options.colors]
+ Logs.enable_colors(colors)
+ if options.zones:
+ Logs.zones=options.zones.split(',')
+ if not Logs.verbose:
+ Logs.verbose=1
+ elif Logs.verbose>0:
+ Logs.zones=['runner']
+ if Logs.verbose>2:
+ Logs.zones=['*']
+ def parse_args(self,_args=None):
+ options,commands,envvars=self.parse_cmd_args()
+ self.init_logs(options,commands,envvars)
+ self.init_module_vars(options,commands,envvars)
+ def execute(self):
+ super(OptionsContext,self).execute()
+ self.parse_args()
+ Utils.alloc_process_pool(options.jobs)
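Project options are declared in the wscript's options() function; OptionsContext.add_option forwards to the underlying optparse parser, get_option_group() finds the predefined groups (for example 'Configuration options') by title, and parsed values are published on the module-level Options.options. A hedged sketch with made-up option names (not part of the patch):

    # Illustrative sketch, not part of the patch: adding project options.
    def options(opt):
        opt.add_option('--with-docs', action='store_true', default=False,
                       help='also build the documentation')
        grp = opt.get_option_group('Configuration options')
        if grp:
            grp.add_option('--docdir', action='store', default='',
                           help='where to install the documentation')

    def configure(conf):
        from waflib import Options
        conf.env.WITH_DOCS = Options.options.with_docs  # same object as conf.options
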
diff --git a/waflib/Runner.py b/waflib/Runner.py
new file mode 100644
index 0000000..c6480a3
--- /dev/null
+++ b/waflib/Runner.py
@@ -0,0 +1,350 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import heapq,traceback
+try:
+ from queue import Queue,PriorityQueue
+except ImportError:
+ from Queue import Queue
+ try:
+ from Queue import PriorityQueue
+ except ImportError:
+ class PriorityQueue(Queue):
+ def _init(self,maxsize):
+ self.maxsize=maxsize
+ self.queue=[]
+ def _put(self,item):
+ heapq.heappush(self.queue,item)
+ def _get(self):
+ return heapq.heappop(self.queue)
+from waflib import Utils,Task,Errors,Logs
+GAP=5
+class PriorityTasks(object):
+ def __init__(self):
+ self.lst=[]
+ def __len__(self):
+ return len(self.lst)
+ def __iter__(self):
+ return iter(self.lst)
+ def clear(self):
+ self.lst=[]
+ def append(self,task):
+ heapq.heappush(self.lst,task)
+ def appendleft(self,task):
+ heapq.heappush(self.lst,task)
+ def pop(self):
+ return heapq.heappop(self.lst)
+ def extend(self,lst):
+ if self.lst:
+ for x in lst:
+ self.append(x)
+ else:
+ if isinstance(lst,list):
+ self.lst=lst
+ heapq.heapify(lst)
+ else:
+ self.lst=lst.lst
+class Consumer(Utils.threading.Thread):
+ def __init__(self,spawner,task):
+ Utils.threading.Thread.__init__(self)
+ self.task=task
+ self.spawner=spawner
+ self.setDaemon(1)
+ self.start()
+ def run(self):
+ try:
+ if not self.spawner.master.stop:
+ self.spawner.master.process_task(self.task)
+ finally:
+ self.spawner.sem.release()
+ self.spawner.master.out.put(self.task)
+ self.task=None
+ self.spawner=None
+class Spawner(Utils.threading.Thread):
+ def __init__(self,master):
+ Utils.threading.Thread.__init__(self)
+ self.master=master
+ self.sem=Utils.threading.Semaphore(master.numjobs)
+ self.setDaemon(1)
+ self.start()
+ def run(self):
+ try:
+ self.loop()
+ except Exception:
+ pass
+ def loop(self):
+ master=self.master
+ while 1:
+ task=master.ready.get()
+ self.sem.acquire()
+ if not master.stop:
+ task.log_display(task.generator.bld)
+ Consumer(self,task)
+class Parallel(object):
+ def __init__(self,bld,j=2):
+ self.numjobs=j
+ self.bld=bld
+ self.outstanding=PriorityTasks()
+ self.postponed=PriorityTasks()
+ self.incomplete=set()
+ self.ready=PriorityQueue(0)
+ self.out=Queue(0)
+ self.count=0
+ self.processed=0
+ self.stop=False
+ self.error=[]
+ self.biter=None
+ self.dirty=False
+ self.revdeps=Utils.defaultdict(set)
+ self.spawner=Spawner(self)
+ def get_next_task(self):
+ if not self.outstanding:
+ return None
+ return self.outstanding.pop()
+ def postpone(self,tsk):
+ self.postponed.append(tsk)
+ def refill_task_list(self):
+ while self.count>self.numjobs*GAP:
+ self.get_out()
+ while not self.outstanding:
+ if self.count:
+ self.get_out()
+ if self.outstanding:
+ break
+ elif self.postponed:
+ try:
+ cond=self.deadlock==self.processed
+ except AttributeError:
+ pass
+ else:
+ if cond:
+ lst=[]
+ for tsk in self.postponed:
+ deps=[id(x)for x in tsk.run_after if not x.hasrun]
+ lst.append('%s\t-> %r'%(repr(tsk),deps))
+ if not deps:
+ lst.append('\n task %r dependencies are done, check its *runnable_status*?'%id(tsk))
+ raise Errors.WafError('Deadlock detected: check the task build order%s'%''.join(lst))
+ self.deadlock=self.processed
+ if self.postponed:
+ self.outstanding.extend(self.postponed)
+ self.postponed.clear()
+ elif not self.count:
+ if self.incomplete:
+ for x in self.incomplete:
+ for k in x.run_after:
+ if not k.hasrun:
+ break
+ else:
+ self.incomplete.remove(x)
+ self.outstanding.append(x)
+ break
+ else:
+ raise Errors.WafError('Broken revdeps detected on %r'%self.incomplete)
+ else:
+ tasks=next(self.biter)
+ ready,waiting=self.prio_and_split(tasks)
+ self.outstanding.extend(ready)
+ self.incomplete.update(waiting)
+ self.total=self.bld.total()
+ break
+ def add_more_tasks(self,tsk):
+ if getattr(tsk,'more_tasks',None):
+ more=set(tsk.more_tasks)
+ groups_done=set()
+ def iteri(a,b):
+ for x in a:
+ yield x
+ for x in b:
+ yield x
+ for x in iteri(self.outstanding,self.incomplete):
+ for k in x.run_after:
+ if isinstance(k,Task.TaskGroup):
+ if k not in groups_done:
+ groups_done.add(k)
+ for j in k.prev&more:
+ self.revdeps[j].add(k)
+ elif k in more:
+ self.revdeps[k].add(x)
+ ready,waiting=self.prio_and_split(tsk.more_tasks)
+ self.outstanding.extend(ready)
+ self.incomplete.update(waiting)
+ self.total+=len(tsk.more_tasks)
+ def mark_finished(self,tsk):
+ def try_unfreeze(x):
+ if x in self.incomplete:
+ for k in x.run_after:
+ if not k.hasrun:
+ break
+ else:
+ self.incomplete.remove(x)
+ self.outstanding.append(x)
+ if tsk in self.revdeps:
+ for x in self.revdeps[tsk]:
+ if isinstance(x,Task.TaskGroup):
+ x.prev.remove(tsk)
+ if not x.prev:
+ for k in x.next:
+ k.run_after.remove(x)
+ try_unfreeze(k)
+ x.next=[]
+ else:
+ try_unfreeze(x)
+ del self.revdeps[tsk]
+ def get_out(self):
+ tsk=self.out.get()
+ if not self.stop:
+ self.add_more_tasks(tsk)
+ self.mark_finished(tsk)
+ self.count-=1
+ self.dirty=True
+ return tsk
+ def add_task(self,tsk):
+ self.ready.put(tsk)
+ def process_task(self,tsk):
+ tsk.process()
+ if tsk.hasrun!=Task.SUCCESS:
+ self.error_handler(tsk)
+ def skip(self,tsk):
+ tsk.hasrun=Task.SKIPPED
+ self.mark_finished(tsk)
+ def cancel(self,tsk):
+ tsk.hasrun=Task.CANCELED
+ self.mark_finished(tsk)
+ def error_handler(self,tsk):
+ if not self.bld.keep:
+ self.stop=True
+ self.error.append(tsk)
+ def task_status(self,tsk):
+ try:
+ return tsk.runnable_status()
+ except Exception:
+ self.processed+=1
+ tsk.err_msg=traceback.format_exc()
+ if not self.stop and self.bld.keep:
+ self.skip(tsk)
+ if self.bld.keep==1:
+ if Logs.verbose>1 or not self.error:
+ self.error.append(tsk)
+ self.stop=True
+ else:
+ if Logs.verbose>1:
+ self.error.append(tsk)
+ return Task.EXCEPTION
+ tsk.hasrun=Task.EXCEPTION
+ self.error_handler(tsk)
+ return Task.EXCEPTION
+ def start(self):
+ self.total=self.bld.total()
+ while not self.stop:
+ self.refill_task_list()
+ tsk=self.get_next_task()
+ if not tsk:
+ if self.count:
+ continue
+ else:
+ break
+ if tsk.hasrun:
+ self.processed+=1
+ continue
+ if self.stop:
+ break
+ st=self.task_status(tsk)
+ if st==Task.RUN_ME:
+ self.count+=1
+ self.processed+=1
+ if self.numjobs==1:
+ tsk.log_display(tsk.generator.bld)
+ try:
+ self.process_task(tsk)
+ finally:
+ self.out.put(tsk)
+ else:
+ self.add_task(tsk)
+ elif st==Task.ASK_LATER:
+ self.postpone(tsk)
+ elif st==Task.SKIP_ME:
+ self.processed+=1
+ self.skip(tsk)
+ self.add_more_tasks(tsk)
+ elif st==Task.CANCEL_ME:
+ if Logs.verbose>1:
+ self.error.append(tsk)
+ self.processed+=1
+ self.cancel(tsk)
+ while self.error and self.count:
+ self.get_out()
+ self.ready.put(None)
+ if not self.stop:
+ assert not self.count
+ assert not self.postponed
+ assert not self.incomplete
+ def prio_and_split(self,tasks):
+ for x in tasks:
+ x.visited=0
+ reverse=self.revdeps
+ groups_done=set()
+ for x in tasks:
+ for k in x.run_after:
+ if isinstance(k,Task.TaskGroup):
+ if k not in groups_done:
+ groups_done.add(k)
+ for j in k.prev:
+ reverse[j].add(k)
+ else:
+ reverse[k].add(x)
+ def visit(n):
+ if isinstance(n,Task.TaskGroup):
+ return sum(visit(k)for k in n.next)
+ if n.visited==0:
+ n.visited=1
+ if n in reverse:
+ rev=reverse[n]
+ n.prio_order=n.tree_weight+len(rev)+sum(visit(k)for k in rev)
+ else:
+ n.prio_order=n.tree_weight
+ n.visited=2
+ elif n.visited==1:
+ raise Errors.WafError('Dependency cycle found!')
+ return n.prio_order
+ for x in tasks:
+ if x.visited!=0:
+ continue
+ try:
+ visit(x)
+ except Errors.WafError:
+ self.debug_cycles(tasks,reverse)
+ ready=[]
+ waiting=[]
+ for x in tasks:
+ for k in x.run_after:
+ if not k.hasrun:
+ waiting.append(x)
+ break
+ else:
+ ready.append(x)
+ return(ready,waiting)
+ def debug_cycles(self,tasks,reverse):
+ tmp={}
+ for x in tasks:
+ tmp[x]=0
+ def visit(n,acc):
+ if isinstance(n,Task.TaskGroup):
+ for k in n.next:
+ visit(k,acc)
+ return
+ if tmp[n]==0:
+ tmp[n]=1
+ for k in reverse.get(n,[]):
+ visit(k,[n]+acc)
+ tmp[n]=2
+ elif tmp[n]==1:
+ lst=[]
+ for tsk in acc:
+ lst.append(repr(tsk))
+ if tsk is n:
+ break
+ raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s'%''.join(lst))
+ for x in tasks:
+ visit(x,[])
diff --git a/waflib/Scripting.py b/waflib/Scripting.py
new file mode 100644
index 0000000..f7a3809
--- /dev/null
+++ b/waflib/Scripting.py
@@ -0,0 +1,403 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from __future__ import with_statement
+import os,shlex,shutil,traceback,errno,sys,stat
+from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node
+build_dir_override=None
+no_climb_commands=['configure']
+default_cmd="build"
+def waf_entry_point(current_directory,version,wafdir):
+ Logs.init_log()
+ if Context.WAFVERSION!=version:
+ Logs.error('Waf script %r and library %r do not match (directory %r)',version,Context.WAFVERSION,wafdir)
+ sys.exit(1)
+ Context.waf_dir=wafdir
+ Context.run_dir=Context.launch_dir=current_directory
+ start_dir=current_directory
+ no_climb=os.environ.get('NOCLIMB')
+ if len(sys.argv)>1:
+ potential_wscript=os.path.join(current_directory,sys.argv[1])
+ if os.path.basename(potential_wscript)==Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
+ path=os.path.normpath(os.path.dirname(potential_wscript))
+ start_dir=os.path.abspath(path)
+ no_climb=True
+ sys.argv.pop(1)
+ ctx=Context.create_context('options')
+ (options,commands,env)=ctx.parse_cmd_args(allow_unknown=True)
+ if options.top:
+ start_dir=Context.run_dir=Context.top_dir=options.top
+ no_climb=True
+ if options.out:
+ Context.out_dir=options.out
+ if not no_climb:
+ for k in no_climb_commands:
+ for y in commands:
+ if y.startswith(k):
+ no_climb=True
+ break
+ cur=start_dir
+ while cur:
+ try:
+ lst=os.listdir(cur)
+ except OSError:
+ lst=[]
+ Logs.error('Directory %r is unreadable!',cur)
+ if Options.lockfile in lst:
+ env=ConfigSet.ConfigSet()
+ try:
+ env.load(os.path.join(cur,Options.lockfile))
+ ino=os.stat(cur)[stat.ST_INO]
+ except EnvironmentError:
+ pass
+ else:
+ for x in(env.run_dir,env.top_dir,env.out_dir):
+ if not x:
+ continue
+ if Utils.is_win32:
+ if cur==x:
+ load=True
+ break
+ else:
+ try:
+ ino2=os.stat(x)[stat.ST_INO]
+ except OSError:
+ pass
+ else:
+ if ino==ino2:
+ load=True
+ break
+ else:
+ Logs.warn('invalid lock file in %s',cur)
+ load=False
+ if load:
+ Context.run_dir=env.run_dir
+ Context.top_dir=env.top_dir
+ Context.out_dir=env.out_dir
+ break
+ if not Context.run_dir:
+ if Context.WSCRIPT_FILE in lst:
+ Context.run_dir=cur
+ next=os.path.dirname(cur)
+ if next==cur:
+ break
+ cur=next
+ if no_climb:
+ break
+ if not Context.run_dir:
+ if options.whelp:
+ Logs.warn('These are the generic options (no wscript/project found)')
+ ctx.parser.print_help()
+ sys.exit(0)
+ Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)',Context.WSCRIPT_FILE)
+ sys.exit(1)
+ try:
+ os.chdir(Context.run_dir)
+ except OSError:
+ Logs.error('Waf: The folder %r is unreadable',Context.run_dir)
+ sys.exit(1)
+ try:
+ set_main_module(os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE)))
+ except Errors.WafError as e:
+ Logs.pprint('RED',e.verbose_msg)
+ Logs.error(str(e))
+ sys.exit(1)
+ except Exception as e:
+ Logs.error('Waf: The wscript in %r is unreadable',Context.run_dir)
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(2)
+ if options.profile:
+ import cProfile,pstats
+ cProfile.runctx('from waflib import Scripting; Scripting.run_commands()',{},{},'profi.txt')
+ p=pstats.Stats('profi.txt')
+ p.sort_stats('time').print_stats(75)
+ else:
+ try:
+ try:
+ run_commands()
+ except:
+ if options.pdb:
+ import pdb
+ type,value,tb=sys.exc_info()
+ traceback.print_exc()
+ pdb.post_mortem(tb)
+ else:
+ raise
+ except Errors.WafError as e:
+ if Logs.verbose>1:
+ Logs.pprint('RED',e.verbose_msg)
+ Logs.error(e.msg)
+ sys.exit(1)
+ except SystemExit:
+ raise
+ except Exception as e:
+ traceback.print_exc(file=sys.stdout)
+ sys.exit(2)
+ except KeyboardInterrupt:
+ Logs.pprint('RED','Interrupted')
+ sys.exit(68)
+def set_main_module(file_path):
+ Context.g_module=Context.load_module(file_path)
+ Context.g_module.root_path=file_path
+ def set_def(obj):
+ name=obj.__name__
+ if not name in Context.g_module.__dict__:
+ setattr(Context.g_module,name,obj)
+ for k in(dist,distclean,distcheck):
+ set_def(k)
+ if not'init'in Context.g_module.__dict__:
+ Context.g_module.init=Utils.nada
+ if not'shutdown'in Context.g_module.__dict__:
+ Context.g_module.shutdown=Utils.nada
+ if not'options'in Context.g_module.__dict__:
+ Context.g_module.options=Utils.nada
+def parse_options():
+ ctx=Context.create_context('options')
+ ctx.execute()
+ if not Options.commands:
+ Options.commands.append(default_cmd)
+ if Options.options.whelp:
+ ctx.parser.print_help()
+ sys.exit(0)
+def run_command(cmd_name):
+ ctx=Context.create_context(cmd_name)
+ ctx.log_timer=Utils.Timer()
+ ctx.options=Options.options
+ ctx.cmd=cmd_name
+ try:
+ ctx.execute()
+ finally:
+ ctx.finalize()
+ return ctx
+def run_commands():
+ parse_options()
+ run_command('init')
+ while Options.commands:
+ cmd_name=Options.commands.pop(0)
+ ctx=run_command(cmd_name)
+ Logs.info('%r finished successfully (%s)',cmd_name,ctx.log_timer)
+ run_command('shutdown')
+def distclean_dir(dirname):
+ for(root,dirs,files)in os.walk(dirname):
+ for f in files:
+ if f.endswith(('.o','.moc','.exe')):
+ fname=os.path.join(root,f)
+ try:
+ os.remove(fname)
+ except OSError:
+ Logs.warn('Could not remove %r',fname)
+ for x in(Context.DBFILE,'config.log'):
+ try:
+ os.remove(x)
+ except OSError:
+ pass
+ try:
+ shutil.rmtree('c4che')
+ except OSError:
+ pass
+def distclean(ctx):
+ '''removes build folders and data'''
+ def remove_and_log(k,fun):
+ try:
+ fun(k)
+ except EnvironmentError as e:
+ if e.errno!=errno.ENOENT:
+ Logs.warn('Could not remove %r',k)
+ if not Options.commands:
+ for k in os.listdir('.'):
+ for x in'.waf-2 waf-2 .waf3-2 waf3-2'.split():
+ if k.startswith(x):
+ remove_and_log(k,shutil.rmtree)
+ cur='.'
+ if ctx.options.no_lock_in_top:
+ cur=ctx.options.out
+ try:
+ lst=os.listdir(cur)
+ except OSError:
+ Logs.warn('Could not read %r',cur)
+ return
+ if Options.lockfile in lst:
+ f=os.path.join(cur,Options.lockfile)
+ try:
+ env=ConfigSet.ConfigSet(f)
+ except EnvironmentError:
+ Logs.warn('Could not read %r',f)
+ return
+ if not env.out_dir or not env.top_dir:
+ Logs.warn('Invalid lock file %r',f)
+ return
+ if env.out_dir==env.top_dir:
+ distclean_dir(env.out_dir)
+ else:
+ remove_and_log(env.out_dir,shutil.rmtree)
+ for k in(env.out_dir,env.top_dir,env.run_dir):
+ p=os.path.join(k,Options.lockfile)
+ remove_and_log(p,os.remove)
+class Dist(Context.Context):
+ '''creates an archive containing the project source code'''
+ cmd='dist'
+ fun='dist'
+ algo='tar.bz2'
+ ext_algo={}
+ def execute(self):
+ self.recurse([os.path.dirname(Context.g_module.root_path)])
+ self.archive()
+ def archive(self):
+ import tarfile
+ arch_name=self.get_arch_name()
+ try:
+ self.base_path
+ except AttributeError:
+ self.base_path=self.path
+ node=self.base_path.make_node(arch_name)
+ try:
+ node.delete()
+ except OSError:
+ pass
+ files=self.get_files()
+ if self.algo.startswith('tar.'):
+ tar=tarfile.open(node.abspath(),'w:'+self.algo.replace('tar.',''))
+ for x in files:
+ self.add_tar_file(x,tar)
+ tar.close()
+ elif self.algo=='zip':
+ import zipfile
+ zip=zipfile.ZipFile(node.abspath(),'w',compression=zipfile.ZIP_DEFLATED)
+ for x in files:
+ archive_name=self.get_base_name()+'/'+x.path_from(self.base_path)
+ zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED)
+ zip.close()
+ else:
+ self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
+ try:
+ from hashlib import sha256
+ except ImportError:
+ digest=''
+ else:
+ digest=' (sha256=%r)'%sha256(node.read(flags='rb')).hexdigest()
+ Logs.info('New archive created: %s%s',self.arch_name,digest)
+ def get_tar_path(self,node):
+ return node.abspath()
+ def add_tar_file(self,x,tar):
+ p=self.get_tar_path(x)
+ tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path))
+ tinfo.uid=0
+ tinfo.gid=0
+ tinfo.uname='root'
+ tinfo.gname='root'
+ if os.path.isfile(p):
+ with open(p,'rb')as f:
+ tar.addfile(tinfo,fileobj=f)
+ else:
+ tar.addfile(tinfo)
+ def get_tar_prefix(self):
+ try:
+ return self.tar_prefix
+ except AttributeError:
+ return self.get_base_name()
+ def get_arch_name(self):
+ try:
+ self.arch_name
+ except AttributeError:
+ self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo)
+ return self.arch_name
+ def get_base_name(self):
+ try:
+ self.base_name
+ except AttributeError:
+ appname=getattr(Context.g_module,Context.APPNAME,'noname')
+ version=getattr(Context.g_module,Context.VERSION,'1.0')
+ self.base_name=appname+'-'+version
+ return self.base_name
+ def get_excl(self):
+ try:
+ return self.excl
+ except AttributeError:
+ self.excl=Node.exclude_regs+' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
+ if Context.out_dir:
+ nd=self.root.find_node(Context.out_dir)
+ if nd:
+ self.excl+=' '+nd.path_from(self.base_path)
+ return self.excl
+ def get_files(self):
+ try:
+ files=self.files
+ except AttributeError:
+ files=self.base_path.ant_glob('**/*',excl=self.get_excl())
+ return files
+def dist(ctx):
+ '''makes a tarball for redistributing the sources'''
+ pass
+class DistCheck(Dist):
+ fun='distcheck'
+ cmd='distcheck'
+ def execute(self):
+ self.recurse([os.path.dirname(Context.g_module.root_path)])
+ self.archive()
+ self.check()
+ def make_distcheck_cmd(self,tmpdir):
+ cfg=[]
+ if Options.options.distcheck_args:
+ cfg=shlex.split(Options.options.distcheck_args)
+ else:
+ cfg=[x for x in sys.argv if x.startswith('-')]
+ cmd=[sys.executable,sys.argv[0],'configure','build','install','uninstall','--destdir='+tmpdir]+cfg
+ return cmd
+ def check(self):
+ import tempfile,tarfile
+ with tarfile.open(self.get_arch_name())as t:
+ for x in t:
+ t.extract(x)
+ instdir=tempfile.mkdtemp('.inst',self.get_base_name())
+ cmd=self.make_distcheck_cmd(instdir)
+ ret=Utils.subprocess.Popen(cmd,cwd=self.get_base_name()).wait()
+ if ret:
+ raise Errors.WafError('distcheck failed with code %r'%ret)
+ if os.path.exists(instdir):
+ raise Errors.WafError('distcheck succeeded, but files were left in %s'%instdir)
+ shutil.rmtree(self.get_base_name())
+def distcheck(ctx):
+ '''checks if the project compiles (tarball from 'dist')'''
+ pass
+def autoconfigure(execute_method):
+ def execute(self):
+ if not Configure.autoconfig:
+ return execute_method(self)
+ env=ConfigSet.ConfigSet()
+ do_config=False
+ try:
+ env.load(os.path.join(Context.top_dir,Options.lockfile))
+ except EnvironmentError:
+ Logs.warn('Configuring the project')
+ do_config=True
+ else:
+ if env.run_dir!=Context.run_dir:
+ do_config=True
+ else:
+ h=0
+ for f in env.files:
+ try:
+ h=Utils.h_list((h,Utils.readf(f,'rb')))
+ except EnvironmentError:
+ do_config=True
+ break
+ else:
+ do_config=h!=env.hash
+ if do_config:
+ cmd=env.config_cmd or'configure'
+ if Configure.autoconfig=='clobber':
+ tmp=Options.options.__dict__
+ if env.options:
+ Options.options.__dict__=env.options
+ try:
+ run_command(cmd)
+ finally:
+ Options.options.__dict__=tmp
+ else:
+ run_command(cmd)
+ run_command(self.cmd)
+ else:
+ return execute_method(self)
+ return execute
+Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute)
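The Dist/DistCheck contexts read a few optional attributes from the project's wscript (archive algorithm, exclusion patterns, base name), falling back to APPNAME-VERSION and tar.bz2, and the autoconfigure() wrapper re-runs 'configure' when the recorded configuration inputs change, provided Configure.autoconfig is set. A sketch of a wscript using both, with illustrative values (not part of the patch):

    # Illustrative sketch, not part of the patch: tuning 'dist' and enabling
    # automatic reconfiguration from a project wscript.
    from waflib import Configure
    Configure.autoconfig = True        # or 'clobber' to also restore the saved options

    APPNAME = 'demo'
    VERSION = '0.1'

    def dist(ctx):
        ctx.algo = 'tar.gz'            # default is tar.bz2; 'zip' is also handled
        ctx.base_name = '%s-%s' % (APPNAME, VERSION)
        ctx.excl = '**/.waf-2* **/build/** **/*.pyc'

With such a wscript, running ./waf dist would produce demo-0.1.tar.gz and log its sha256, as archive() above does.
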
diff --git a/waflib/Task.py b/waflib/Task.py
new file mode 100644
index 0000000..400910f
--- /dev/null
+++ b/waflib/Task.py
@@ -0,0 +1,771 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,sys,tempfile,traceback
+from waflib import Utils,Logs,Errors
+NOT_RUN=0
+MISSING=1
+CRASHED=2
+EXCEPTION=3
+CANCELED=4
+SKIPPED=8
+SUCCESS=9
+ASK_LATER=-1
+SKIP_ME=-2
+RUN_ME=-3
+CANCEL_ME=-4
+COMPILE_TEMPLATE_SHELL='''
+def f(tsk):
+ env = tsk.env
+ gen = tsk.generator
+ bld = gen.bld
+ cwdx = tsk.get_cwd()
+ p = env.get_flat
+ tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
+ return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None)
+'''
+COMPILE_TEMPLATE_NOSHELL='''
+def f(tsk):
+ env = tsk.env
+ gen = tsk.generator
+ bld = gen.bld
+ cwdx = tsk.get_cwd()
+ def to_list(xx):
+ if isinstance(xx, str): return [xx]
+ return xx
+ def merge(lst1, lst2):
+ if lst1 and lst2:
+ return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:]
+ return lst1 + lst2
+ lst = []
+ %s
+ if '' in lst:
+ lst = [x for x in lst if x]
+ tsk.last_cmd = lst
+ return tsk.exec_command(lst, cwd=cwdx, env=env.env or None)
+'''
+COMPILE_TEMPLATE_SIG_VARS='''
+def f(tsk):
+ super(tsk.__class__, tsk).sig_vars()
+ env = tsk.env
+ gen = tsk.generator
+ bld = gen.bld
+ cwdx = tsk.get_cwd()
+ p = env.get_flat
+ buf = []
+ %s
+ tsk.m.update(repr(buf).encode())
+'''
+classes={}
+class store_task_type(type):
+ def __init__(cls,name,bases,dict):
+ super(store_task_type,cls).__init__(name,bases,dict)
+ name=cls.__name__
+ if name!='evil'and name!='Task':
+ if getattr(cls,'run_str',None):
+ (f,dvars)=compile_fun(cls.run_str,cls.shell)
+ cls.hcode=Utils.h_cmd(cls.run_str)
+ cls.orig_run_str=cls.run_str
+ cls.run_str=None
+ cls.run=f
+ cls.vars=list(set(cls.vars+dvars))
+ cls.vars.sort()
+ if cls.vars:
+ fun=compile_sig_vars(cls.vars)
+ if fun:
+ cls.sig_vars=fun
+ elif getattr(cls,'run',None)and not'hcode'in cls.__dict__:
+ cls.hcode=Utils.h_cmd(cls.run)
+ getattr(cls,'register',classes)[name]=cls
+evil=store_task_type('evil',(object,),{})
+class Task(evil):
+ vars=[]
+ always_run=False
+ shell=False
+ color='GREEN'
+ ext_in=[]
+ ext_out=[]
+ before=[]
+ after=[]
+ hcode=Utils.SIG_NIL
+ keep_last_cmd=False
+ weight=0
+ tree_weight=0
+ prio_order=0
+ __slots__=('hasrun','generator','env','inputs','outputs','dep_nodes','run_after')
+ def __init__(self,*k,**kw):
+ self.hasrun=NOT_RUN
+ try:
+ self.generator=kw['generator']
+ except KeyError:
+ self.generator=self
+ self.env=kw['env']
+ self.inputs=[]
+ self.outputs=[]
+ self.dep_nodes=[]
+ self.run_after=set()
+ def __lt__(self,other):
+ return self.priority()>other.priority()
+ def __le__(self,other):
+ return self.priority()>=other.priority()
+ def __gt__(self,other):
+ return self.priority()<other.priority()
+ def __ge__(self,other):
+ return self.priority()<=other.priority()
+ def get_cwd(self):
+ bld=self.generator.bld
+ ret=getattr(self,'cwd',None)or getattr(bld,'cwd',bld.bldnode)
+ if isinstance(ret,str):
+ if os.path.isabs(ret):
+ ret=bld.root.make_node(ret)
+ else:
+ ret=self.generator.path.make_node(ret)
+ return ret
+ def quote_flag(self,x):
+ old=x
+ if'\\'in x:
+ x=x.replace('\\','\\\\')
+ if'"'in x:
+ x=x.replace('"','\\"')
+ if old!=x or' 'in x or'\t'in x or"'"in x:
+ x='"%s"'%x
+ return x
+ def priority(self):
+ return(self.weight+self.prio_order,-getattr(self.generator,'tg_idx_count',0))
+ def split_argfile(self,cmd):
+ return([cmd[0]],[self.quote_flag(x)for x in cmd[1:]])
+ def exec_command(self,cmd,**kw):
+ if not'cwd'in kw:
+ kw['cwd']=self.get_cwd()
+ if hasattr(self,'timeout'):
+ kw['timeout']=self.timeout
+ if self.env.PATH:
+ env=kw['env']=dict(kw.get('env')or self.env.env or os.environ)
+ env['PATH']=self.env.PATH if isinstance(self.env.PATH,str)else os.pathsep.join(self.env.PATH)
+ if hasattr(self,'stdout'):
+ kw['stdout']=self.stdout
+ if hasattr(self,'stderr'):
+ kw['stderr']=self.stderr
+ if not isinstance(cmd,str)and(len(repr(cmd))>=8192 if Utils.is_win32 else len(cmd)>200000):
+ cmd,args=self.split_argfile(cmd)
+ try:
+ (fd,tmp)=tempfile.mkstemp()
+ os.write(fd,'\r\n'.join(args).encode())
+ os.close(fd)
+ if Logs.verbose:
+ Logs.debug('argfile: @%r -> %r',tmp,args)
+ return self.generator.bld.exec_command(cmd+['@'+tmp],**kw)
+ finally:
+ try:
+ os.remove(tmp)
+ except OSError:
+ pass
+ else:
+ return self.generator.bld.exec_command(cmd,**kw)
+ def process(self):
+ try:
+ del self.generator.bld.task_sigs[self.uid()]
+ except KeyError:
+ pass
+ try:
+ ret=self.run()
+ except Exception:
+ self.err_msg=traceback.format_exc()
+ self.hasrun=EXCEPTION
+ else:
+ if ret:
+ self.err_code=ret
+ self.hasrun=CRASHED
+ else:
+ try:
+ self.post_run()
+ except Errors.WafError:
+ pass
+ except Exception:
+ self.err_msg=traceback.format_exc()
+ self.hasrun=EXCEPTION
+ else:
+ self.hasrun=SUCCESS
+ if self.hasrun!=SUCCESS and self.scan:
+ try:
+ del self.generator.bld.imp_sigs[self.uid()]
+ except KeyError:
+ pass
+ def log_display(self,bld):
+ if self.generator.bld.progress_bar==3:
+ return
+ s=self.display()
+ if s:
+ if bld.logger:
+ logger=bld.logger
+ else:
+ logger=Logs
+ if self.generator.bld.progress_bar==1:
+ c1=Logs.colors.cursor_off
+ c2=Logs.colors.cursor_on
+ logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2})
+ else:
+ logger.info(s,extra={'terminator':'','c1':'','c2':''})
+ def display(self):
+ col1=Logs.colors(self.color)
+ col2=Logs.colors.NORMAL
+ master=self.generator.bld.producer
+ def cur():
+ return master.processed-master.ready.qsize()
+ if self.generator.bld.progress_bar==1:
+ return self.generator.bld.progress_line(cur(),master.total,col1,col2)
+ if self.generator.bld.progress_bar==2:
+ ela=str(self.generator.bld.timer)
+ try:
+ ins=','.join([n.name for n in self.inputs])
+ except AttributeError:
+ ins=''
+ try:
+ outs=','.join([n.name for n in self.outputs])
+ except AttributeError:
+ outs=''
+ return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela)
+ s=str(self)
+ if not s:
+ return None
+ total=master.total
+ n=len(str(total))
+ fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n)
+ kw=self.keyword()
+ if kw:
+ kw+=' '
+ return fs%(cur(),total,kw,col1,s,col2)
+ def hash_constraints(self):
+ return(tuple(self.before),tuple(self.after),tuple(self.ext_in),tuple(self.ext_out),self.__class__.__name__,self.hcode)
+ def format_error(self):
+ if Logs.verbose:
+ msg=': %r\n%r'%(self,getattr(self,'last_cmd',''))
+ else:
+ msg=' (run with -v to display more information)'
+ name=getattr(self.generator,'name','')
+ if getattr(self,"err_msg",None):
+ return self.err_msg
+ elif not self.hasrun:
+ return'task in %r was not executed for some reason: %r'%(name,self)
+ elif self.hasrun==CRASHED:
+ try:
+ return' -> task in %r failed with exit status %r%s'%(name,self.err_code,msg)
+ except AttributeError:
+ return' -> task in %r failed%s'%(name,msg)
+ elif self.hasrun==MISSING:
+ return' -> missing files in %r%s'%(name,msg)
+ elif self.hasrun==CANCELED:
+ return' -> %r canceled because of missing dependencies'%name
+ else:
+ return'invalid status for task in %r: %r'%(name,self.hasrun)
+ def colon(self,var1,var2):
+ tmp=self.env[var1]
+ if not tmp:
+ return[]
+ if isinstance(var2,str):
+ it=self.env[var2]
+ else:
+ it=var2
+ if isinstance(tmp,str):
+ return[tmp%x for x in it]
+ else:
+ lst=[]
+ for y in it:
+ lst.extend(tmp)
+ lst.append(y)
+ return lst
+ def __str__(self):
+ name=self.__class__.__name__
+ if self.outputs:
+ if name.endswith(('lib','program'))or not self.inputs:
+ node=self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
+ if not(self.inputs or self.outputs):
+ return self.__class__.__name__
+ if len(self.inputs)==1:
+ node=self.inputs[0]
+ return node.path_from(node.ctx.launch_node())
+ src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs])
+ tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs])
+ if self.outputs:
+ sep=' -> '
+ else:
+ sep=''
+ return'%s: %s%s%s'%(self.__class__.__name__,src_str,sep,tgt_str)
+ def keyword(self):
+ name=self.__class__.__name__
+ if name.endswith(('lib','program')):
+ return'Linking'
+ if len(self.inputs)==1 and len(self.outputs)==1:
+ return'Compiling'
+ if not self.inputs:
+ if self.outputs:
+ return'Creating'
+ else:
+ return'Running'
+ return'Processing'
+ def __repr__(self):
+ try:
+ ins=",".join([x.name for x in self.inputs])
+ outs=",".join([x.name for x in self.outputs])
+ except AttributeError:
+ ins=",".join([str(x)for x in self.inputs])
+ outs=",".join([str(x)for x in self.outputs])
+ return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}'])
+ def uid(self):
+ try:
+ return self.uid_
+ except AttributeError:
+ m=Utils.md5(self.__class__.__name__)
+ up=m.update
+ for x in self.inputs+self.outputs:
+ up(x.abspath())
+ self.uid_=m.digest()
+ return self.uid_
+ def set_inputs(self,inp):
+ if isinstance(inp,list):
+ self.inputs+=inp
+ else:
+ self.inputs.append(inp)
+ def set_outputs(self,out):
+ if isinstance(out,list):
+ self.outputs+=out
+ else:
+ self.outputs.append(out)
+ def set_run_after(self,task):
+ assert isinstance(task,Task)
+ self.run_after.add(task)
+ def signature(self):
+ try:
+ return self.cache_sig
+ except AttributeError:
+ pass
+ self.m=Utils.md5(self.hcode)
+ self.sig_explicit_deps()
+ self.sig_vars()
+ if self.scan:
+ try:
+ self.sig_implicit_deps()
+ except Errors.TaskRescan:
+ return self.signature()
+ ret=self.cache_sig=self.m.digest()
+ return ret
+ def runnable_status(self):
+ bld=self.generator.bld
+ if bld.is_install<0:
+ return SKIP_ME
+ for t in self.run_after:
+ if not t.hasrun:
+ return ASK_LATER
+ elif t.hasrun<SKIPPED:
+ return CANCEL_ME
+ try:
+ new_sig=self.signature()
+ except Errors.TaskNotReady:
+ return ASK_LATER
+ key=self.uid()
+ try:
+ prev_sig=bld.task_sigs[key]
+ except KeyError:
+ Logs.debug('task: task %r must run: it was never run before or the task code changed',self)
+ return RUN_ME
+ if new_sig!=prev_sig:
+ Logs.debug('task: task %r must run: the task signature changed',self)
+ return RUN_ME
+ for node in self.outputs:
+ sig=bld.node_sigs.get(node)
+ if not sig:
+ Logs.debug('task: task %r must run: an output node has no signature',self)
+ return RUN_ME
+ if sig!=key:
+ Logs.debug('task: task %r must run: an output node was produced by another task',self)
+ return RUN_ME
+ if not node.exists():
+ Logs.debug('task: task %r must run: an output node does not exist',self)
+ return RUN_ME
+ return(self.always_run and RUN_ME)or SKIP_ME
+ def post_run(self):
+ bld=self.generator.bld
+ for node in self.outputs:
+ if not node.exists():
+ self.hasrun=MISSING
+ self.err_msg='-> missing file: %r'%node.abspath()
+ raise Errors.WafError(self.err_msg)
+ bld.node_sigs[node]=self.uid()
+ bld.task_sigs[self.uid()]=self.signature()
+ if not self.keep_last_cmd:
+ try:
+ del self.last_cmd
+ except AttributeError:
+ pass
+ def sig_explicit_deps(self):
+ bld=self.generator.bld
+ upd=self.m.update
+ for x in self.inputs+self.dep_nodes:
+ upd(x.get_bld_sig())
+ if bld.deps_man:
+ additional_deps=bld.deps_man
+ for x in self.inputs+self.outputs:
+ try:
+ d=additional_deps[x]
+ except KeyError:
+ continue
+ for v in d:
+ try:
+ v=v.get_bld_sig()
+ except AttributeError:
+ if hasattr(v,'__call__'):
+ v=v()
+ upd(v)
+ def sig_deep_inputs(self):
+ bld=self.generator.bld
+ lst=[bld.task_sigs[bld.node_sigs[node]]for node in(self.inputs+self.dep_nodes)if node.is_bld()]
+ self.m.update(Utils.h_list(lst))
+ def sig_vars(self):
+ sig=self.generator.bld.hash_env_vars(self.env,self.vars)
+ self.m.update(sig)
+ scan=None
+ def sig_implicit_deps(self):
+ bld=self.generator.bld
+ key=self.uid()
+ prev=bld.imp_sigs.get(key,[])
+ if prev:
+ try:
+ if prev==self.compute_sig_implicit_deps():
+ return prev
+ except Errors.TaskNotReady:
+ raise
+ except EnvironmentError:
+ for x in bld.node_deps.get(self.uid(),[]):
+ if not x.is_bld()and not x.exists():
+ try:
+ del x.parent.children[x.name]
+ except KeyError:
+ pass
+ del bld.imp_sigs[key]
+ raise Errors.TaskRescan('rescan')
+ (bld.node_deps[key],bld.raw_deps[key])=self.scan()
+ if Logs.verbose:
+ Logs.debug('deps: scanner for %s: %r; unresolved: %r',self,bld.node_deps[key],bld.raw_deps[key])
+ try:
+ bld.imp_sigs[key]=self.compute_sig_implicit_deps()
+ except EnvironmentError:
+ for k in bld.node_deps.get(self.uid(),[]):
+ if not k.exists():
+ Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!',k,self)
+ raise
+ def compute_sig_implicit_deps(self):
+ upd=self.m.update
+ self.are_implicit_nodes_ready()
+ for k in self.generator.bld.node_deps.get(self.uid(),[]):
+ upd(k.get_bld_sig())
+ return self.m.digest()
+ def are_implicit_nodes_ready(self):
+ bld=self.generator.bld
+ try:
+ cache=bld.dct_implicit_nodes
+ except AttributeError:
+ bld.dct_implicit_nodes=cache={}
+ try:
+ dct=cache[bld.current_group]
+ except KeyError:
+ dct=cache[bld.current_group]={}
+ for tsk in bld.cur_tasks:
+ for x in tsk.outputs:
+ dct[x]=tsk
+ modified=False
+ for x in bld.node_deps.get(self.uid(),[]):
+ if x in dct:
+ self.run_after.add(dct[x])
+ modified=True
+ if modified:
+ for tsk in self.run_after:
+ if not tsk.hasrun:
+ raise Errors.TaskNotReady('not ready')
+if sys.hexversion>0x3000000:
+ def uid(self):
+ try:
+ return self.uid_
+ except AttributeError:
+ m=Utils.md5(self.__class__.__name__.encode('latin-1','xmlcharrefreplace'))
+ up=m.update
+ for x in self.inputs+self.outputs:
+ up(x.abspath().encode('latin-1','xmlcharrefreplace'))
+ self.uid_=m.digest()
+ return self.uid_
+ uid.__doc__=Task.uid.__doc__
+ Task.uid=uid
+def is_before(t1,t2):
+ to_list=Utils.to_list
+ for k in to_list(t2.ext_in):
+ if k in to_list(t1.ext_out):
+ return 1
+ if t1.__class__.__name__ in to_list(t2.after):
+ return 1
+ if t2.__class__.__name__ in to_list(t1.before):
+ return 1
+ return 0
+def set_file_constraints(tasks):
+ ins=Utils.defaultdict(set)
+ outs=Utils.defaultdict(set)
+ for x in tasks:
+ for a in x.inputs:
+ ins[a].add(x)
+ for a in x.dep_nodes:
+ ins[a].add(x)
+ for a in x.outputs:
+ outs[a].add(x)
+ links=set(ins.keys()).intersection(outs.keys())
+ for k in links:
+ for a in ins[k]:
+ a.run_after.update(outs[k])
+class TaskGroup(object):
+ def __init__(self,prev,next):
+ self.prev=prev
+ self.next=next
+ self.done=False
+ def get_hasrun(self):
+ for k in self.prev:
+ if not k.hasrun:
+ return NOT_RUN
+ return SUCCESS
+ hasrun=property(get_hasrun,None)
+def set_precedence_constraints(tasks):
+ cstr_groups=Utils.defaultdict(list)
+ for x in tasks:
+ h=x.hash_constraints()
+ cstr_groups[h].append(x)
+ keys=list(cstr_groups.keys())
+ maxi=len(keys)
+ for i in range(maxi):
+ t1=cstr_groups[keys[i]][0]
+ for j in range(i+1,maxi):
+ t2=cstr_groups[keys[j]][0]
+ if is_before(t1,t2):
+ a=i
+ b=j
+ elif is_before(t2,t1):
+ a=j
+ b=i
+ else:
+ continue
+ a=cstr_groups[keys[a]]
+ b=cstr_groups[keys[b]]
+ if len(a)<2 or len(b)<2:
+ for x in b:
+ x.run_after.update(a)
+ else:
+ group=TaskGroup(set(a),set(b))
+ for x in b:
+ x.run_after.add(group)
+def funex(c):
+ dc={}
+ exec(c,dc)
+ return dc['f']
+re_cond=re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
+re_novar=re.compile(r'^(SRC|TGT)\W+.*?$')
+reg_act=re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})',re.M)
+def compile_fun_shell(line):
+ extr=[]
+ def repl(match):
+ g=match.group
+ if g('dollar'):
+ return"$"
+ elif g('backslash'):
+ return'\\\\'
+ elif g('subst'):
+ extr.append((g('var'),g('code')))
+ return"%s"
+ return None
+ line=reg_act.sub(repl,line)or line
+ dvars=[]
+ def add_dvar(x):
+ if x not in dvars:
+ dvars.append(x)
+ def replc(m):
+ if m.group('and'):
+ return' and '
+ elif m.group('or'):
+ return' or '
+ else:
+ x=m.group('var')
+ add_dvar(x)
+ return'env[%r]'%x
+ parm=[]
+ app=parm.append
+ for(var,meth)in extr:
+ if var=='SRC':
+ if meth:
+ app('tsk.inputs%s'%meth)
+ else:
+ app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
+ elif var=='TGT':
+ if meth:
+ app('tsk.outputs%s'%meth)
+ else:
+ app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
+ elif meth:
+ if meth.startswith(':'):
+ add_dvar(var)
+ m=meth[1:]
+ if m=='SRC':
+ m='[a.path_from(cwdx) for a in tsk.inputs]'
+ elif m=='TGT':
+ m='[a.path_from(cwdx) for a in tsk.outputs]'
+ elif re_novar.match(m):
+ m='[tsk.inputs%s]'%m[3:]
+ elif re_novar.match(m):
+ m='[tsk.outputs%s]'%m[3:]
+ else:
+ add_dvar(m)
+ if m[:3]not in('tsk','gen','bld'):
+ m='%r'%m
+ app('" ".join(tsk.colon(%r, %s))'%(var,m))
+ elif meth.startswith('?'):
+ expr=re_cond.sub(replc,meth[1:])
+ app('p(%r) if (%s) else ""'%(var,expr))
+ else:
+ call='%s%s'%(var,meth)
+ add_dvar(call)
+ app(call)
+ else:
+ add_dvar(var)
+ app("p('%s')"%var)
+ if parm:
+ parm="%% (%s) "%(',\n\t\t'.join(parm))
+ else:
+ parm=''
+ c=COMPILE_TEMPLATE_SHELL%(line,parm)
+ Logs.debug('action: %s',c.strip().splitlines())
+ return(funex(c),dvars)
+reg_act_noshell=re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)",re.M)
+def compile_fun_noshell(line):
+ buf=[]
+ dvars=[]
+ merge=False
+ app=buf.append
+ def add_dvar(x):
+ if x not in dvars:
+ dvars.append(x)
+ def replc(m):
+ if m.group('and'):
+ return' and '
+ elif m.group('or'):
+ return' or '
+ else:
+ x=m.group('var')
+ add_dvar(x)
+ return'env[%r]'%x
+ for m in reg_act_noshell.finditer(line):
+ if m.group('space'):
+ merge=False
+ continue
+ elif m.group('text'):
+ app('[%r]'%m.group('text').replace('$$','$'))
+ elif m.group('subst'):
+ var=m.group('var')
+ code=m.group('code')
+ if var=='SRC':
+ if code:
+ app('[tsk.inputs%s]'%code)
+ else:
+ app('[a.path_from(cwdx) for a in tsk.inputs]')
+ elif var=='TGT':
+ if code:
+ app('[tsk.outputs%s]'%code)
+ else:
+ app('[a.path_from(cwdx) for a in tsk.outputs]')
+ elif code:
+ if code.startswith(':'):
+ add_dvar(var)
+ m=code[1:]
+ if m=='SRC':
+ m='[a.path_from(cwdx) for a in tsk.inputs]'
+ elif m=='TGT':
+ m='[a.path_from(cwdx) for a in tsk.outputs]'
+ elif re_novar.match(m):
+ m='[tsk.inputs%s]'%m[3:]
+ elif re_novar.match(m):
+ m='[tsk.outputs%s]'%m[3:]
+ else:
+ add_dvar(m)
+ if m[:3]not in('tsk','gen','bld'):
+ m='%r'%m
+ app('tsk.colon(%r, %s)'%(var,m))
+ elif code.startswith('?'):
+ expr=re_cond.sub(replc,code[1:])
+ app('to_list(env[%r] if (%s) else [])'%(var,expr))
+ else:
+ call='%s%s'%(var,code)
+ add_dvar(call)
+ app('gen.to_list(%s)'%call)
+ else:
+ app('to_list(env[%r])'%var)
+ add_dvar(var)
+ if merge:
+ tmp='merge(%s, %s)'%(buf[-2],buf[-1])
+ del buf[-1]
+ buf[-1]=tmp
+ merge=True
+ buf=['lst.extend(%s)'%x for x in buf]
+ fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf)
+ Logs.debug('action: %s',fun.strip().splitlines())
+ return(funex(fun),dvars)
+def compile_fun(line,shell=False):
+ if isinstance(line,str):
+ if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0:
+ shell=True
+ else:
+ dvars_lst=[]
+ funs_lst=[]
+ for x in line:
+ if isinstance(x,str):
+ fun,dvars=compile_fun(x,shell)
+ dvars_lst+=dvars
+ funs_lst.append(fun)
+ else:
+ funs_lst.append(x)
+ def composed_fun(task):
+ for x in funs_lst:
+ ret=x(task)
+ if ret:
+ return ret
+ return None
+ return composed_fun,dvars_lst
+ if shell:
+ return compile_fun_shell(line)
+ else:
+ return compile_fun_noshell(line)
+def compile_sig_vars(vars):
+ buf=[]
+ for x in sorted(vars):
+ if x[:3]in('tsk','gen','bld'):
+ buf.append('buf.append(%s)'%x)
+ if buf:
+ return funex(COMPILE_TEMPLATE_SIG_VARS%'\n\t'.join(buf))
+ return None
+def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None):
+ params={'vars':vars or[],'color':color,'name':name,'shell':shell,'scan':scan,}
+ if isinstance(func,str)or isinstance(func,tuple):
+ params['run_str']=func
+ else:
+ params['run']=func
+ cls=type(Task)(name,(Task,),params)
+ classes[name]=cls
+ if ext_in:
+ cls.ext_in=Utils.to_list(ext_in)
+ if ext_out:
+ cls.ext_out=Utils.to_list(ext_out)
+ if before:
+ cls.before=Utils.to_list(before)
+ if after:
+ cls.after=Utils.to_list(after)
+ return cls
+def deep_inputs(cls):
+ def sig_explicit_deps(self):
+ Task.sig_explicit_deps(self)
+ Task.sig_deep_inputs(self)
+ cls.sig_explicit_deps=sig_explicit_deps
+ return cls
+TaskBase=Task
diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py
new file mode 100644
index 0000000..b857eec
--- /dev/null
+++ b/waflib/TaskGen.py
@@ -0,0 +1,471 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import copy,re,os,functools
+from waflib import Task,Utils,Logs,Errors,ConfigSet,Node
+feats=Utils.defaultdict(set)
+HEADER_EXTS=['.h','.hpp','.hxx','.hh']
+class task_gen(object):
+ mappings=Utils.ordered_iter_dict()
+ prec=Utils.defaultdict(set)
+ def __init__(self,*k,**kw):
+ self.source=[]
+ self.target=''
+ self.meths=[]
+ self.features=[]
+ self.tasks=[]
+ if not'bld'in kw:
+ self.env=ConfigSet.ConfigSet()
+ self.idx=0
+ self.path=None
+ else:
+ self.bld=kw['bld']
+ self.env=self.bld.env.derive()
+ self.path=self.bld.path
+ path=self.path.abspath()
+ try:
+ self.idx=self.bld.idx[path]=self.bld.idx.get(path,0)+1
+ except AttributeError:
+ self.bld.idx={}
+ self.idx=self.bld.idx[path]=1
+ try:
+ self.tg_idx_count=self.bld.tg_idx_count=self.bld.tg_idx_count+1
+ except AttributeError:
+ self.tg_idx_count=self.bld.tg_idx_count=1
+ for key,val in kw.items():
+ setattr(self,key,val)
+ def __str__(self):
+ return"<task_gen %r declared in %s>"%(self.name,self.path.abspath())
+ def __repr__(self):
+ lst=[]
+ for x in self.__dict__:
+ if x not in('env','bld','compiled_tasks','tasks'):
+ lst.append("%s=%s"%(x,repr(getattr(self,x))))
+ return"bld(%s) in %s"%(", ".join(lst),self.path.abspath())
+ def get_cwd(self):
+ return self.bld.bldnode
+ def get_name(self):
+ try:
+ return self._name
+ except AttributeError:
+ if isinstance(self.target,list):
+ lst=[str(x)for x in self.target]
+ name=self._name=','.join(lst)
+ else:
+ name=self._name=str(self.target)
+ return name
+ def set_name(self,name):
+ self._name=name
+ name=property(get_name,set_name)
+ def to_list(self,val):
+ if isinstance(val,str):
+ return val.split()
+ else:
+ return val
+ def post(self):
+ if getattr(self,'posted',None):
+ return False
+ self.posted=True
+ keys=set(self.meths)
+ keys.update(feats['*'])
+ self.features=Utils.to_list(self.features)
+ for x in self.features:
+ st=feats[x]
+ if st:
+ keys.update(st)
+ elif not x in Task.classes:
+ Logs.warn('feature %r does not exist - bind at least one method to it?',x)
+ prec={}
+ prec_tbl=self.prec
+ for x in prec_tbl:
+ if x in keys:
+ prec[x]=prec_tbl[x]
+ tmp=[]
+ for a in keys:
+ for x in prec.values():
+ if a in x:
+ break
+ else:
+ tmp.append(a)
+ tmp.sort(reverse=True)
+ out=[]
+ while tmp:
+ e=tmp.pop()
+ if e in keys:
+ out.append(e)
+ try:
+ nlst=prec[e]
+ except KeyError:
+ pass
+ else:
+ del prec[e]
+ for x in nlst:
+ for y in prec:
+ if x in prec[y]:
+ break
+ else:
+ tmp.append(x)
+ tmp.sort(reverse=True)
+ if prec:
+ buf=['Cycle detected in the method execution:']
+ for k,v in prec.items():
+ buf.append('- %s after %s'%(k,[x for x in v if x in prec]))
+ raise Errors.WafError('\n'.join(buf))
+ self.meths=out
+ Logs.debug('task_gen: posting %s %d',self,id(self))
+ for x in out:
+ try:
+ v=getattr(self,x)
+ except AttributeError:
+ raise Errors.WafError('%r is not a valid task generator method'%x)
+ Logs.debug('task_gen: -> %s (%d)',x,id(self))
+ v()
+ Logs.debug('task_gen: posted %s',self.name)
+ return True
+ def get_hook(self,node):
+ name=node.name
+ for k in self.mappings:
+ try:
+ if name.endswith(k):
+ return self.mappings[k]
+ except TypeError:
+ if k.match(name):
+ return self.mappings[k]
+ keys=list(self.mappings.keys())
+ raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)"%(node,keys))
+ def create_task(self,name,src=None,tgt=None,**kw):
+ task=Task.classes[name](env=self.env.derive(),generator=self)
+ if src:
+ task.set_inputs(src)
+ if tgt:
+ task.set_outputs(tgt)
+ task.__dict__.update(kw)
+ self.tasks.append(task)
+ return task
+ def clone(self,env):
+ newobj=self.bld()
+ for x in self.__dict__:
+ if x in('env','bld'):
+ continue
+ elif x in('path','features'):
+ setattr(newobj,x,getattr(self,x))
+ else:
+ setattr(newobj,x,copy.copy(getattr(self,x)))
+ newobj.posted=False
+ if isinstance(env,str):
+ newobj.env=self.bld.all_envs[env].derive()
+ else:
+ newobj.env=env.derive()
+ return newobj
+def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False):
+ ext_in=Utils.to_list(ext_in)
+ ext_out=Utils.to_list(ext_out)
+ if not name:
+ name=rule
+ cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell)
+ def x_file(self,node):
+ if ext_in:
+ _ext_in=ext_in[0]
+ tsk=self.create_task(name,node)
+ cnt=0
+ ext=decider(self,node)if decider else cls.ext_out
+ for x in ext:
+ k=node.change_ext(x,ext_in=_ext_in)
+ tsk.outputs.append(k)
+ if reentrant!=None:
+ if cnt<int(reentrant):
+ self.source.append(k)
+ else:
+ for y in self.mappings:
+ if k.name.endswith(y):
+ self.source.append(k)
+ break
+ cnt+=1
+ if install_path:
+ self.install_task=self.add_install_files(install_to=install_path,install_from=tsk.outputs)
+ return tsk
+ for x in cls.ext_in:
+ task_gen.mappings[x]=x_file
+ return x_file
+def taskgen_method(func):
+ setattr(task_gen,func.__name__,func)
+ return func
+def feature(*k):
+ def deco(func):
+ setattr(task_gen,func.__name__,func)
+ for name in k:
+ feats[name].update([func.__name__])
+ return func
+ return deco
+def before_method(*k):
+ def deco(func):
+ setattr(task_gen,func.__name__,func)
+ for fun_name in k:
+ task_gen.prec[func.__name__].add(fun_name)
+ return func
+ return deco
+before=before_method
+def after_method(*k):
+ def deco(func):
+ setattr(task_gen,func.__name__,func)
+ for fun_name in k:
+ task_gen.prec[fun_name].add(func.__name__)
+ return func
+ return deco
+after=after_method
+def extension(*k):
+ def deco(func):
+ setattr(task_gen,func.__name__,func)
+ for x in k:
+ task_gen.mappings[x]=func
+ return func
+ return deco
+@taskgen_method
+def to_nodes(self,lst,path=None):
+ tmp=[]
+ path=path or self.path
+ find=path.find_resource
+ if isinstance(lst,Node.Node):
+ lst=[lst]
+ for x in Utils.to_list(lst):
+ if isinstance(x,str):
+ node=find(x)
+ elif hasattr(x,'name'):
+ node=x
+ else:
+ tmp.extend(self.to_nodes(x))
+ continue
+ if not node:
+ raise Errors.WafError('source not found: %r in %r'%(x,self))
+ tmp.append(node)
+ return tmp
+@feature('*')
+def process_source(self):
+ self.source=self.to_nodes(getattr(self,'source',[]))
+ for node in self.source:
+ self.get_hook(node)(self,node)
+@feature('*')
+@before_method('process_source')
+def process_rule(self):
+ if not getattr(self,'rule',None):
+ return
+ name=str(getattr(self,'name',None)or self.target or getattr(self.rule,'__name__',self.rule))
+ try:
+ cache=self.bld.cache_rule_attr
+ except AttributeError:
+ cache=self.bld.cache_rule_attr={}
+ chmod=getattr(self,'chmod',None)
+ shell=getattr(self,'shell',True)
+ color=getattr(self,'color','BLUE')
+ scan=getattr(self,'scan',None)
+ _vars=getattr(self,'vars',[])
+ cls_str=getattr(self,'cls_str',None)
+ cls_keyword=getattr(self,'cls_keyword',None)
+ use_cache=getattr(self,'cache_rule','True')
+ deep_inputs=getattr(self,'deep_inputs',False)
+ scan_val=has_deps=hasattr(self,'deps')
+ if scan:
+ scan_val=id(scan)
+ key=Utils.h_list((name,self.rule,chmod,shell,color,cls_str,cls_keyword,scan_val,_vars,deep_inputs))
+ cls=None
+ if use_cache:
+ try:
+ cls=cache[key]
+ except KeyError:
+ pass
+ if not cls:
+ rule=self.rule
+ if chmod is not None:
+ def chmod_fun(tsk):
+ for x in tsk.outputs:
+ os.chmod(x.abspath(),tsk.generator.chmod)
+ if isinstance(rule,tuple):
+ rule=list(rule)
+ rule.append(chmod_fun)
+ rule=tuple(rule)
+ else:
+ rule=(rule,chmod_fun)
+ cls=Task.task_factory(name,rule,_vars,shell=shell,color=color)
+ if cls_str:
+ setattr(cls,'__str__',self.cls_str)
+ if cls_keyword:
+ setattr(cls,'keyword',self.cls_keyword)
+ if deep_inputs:
+ Task.deep_inputs(cls)
+ if scan:
+ cls.scan=self.scan
+ elif has_deps:
+ def scan(self):
+ nodes=[]
+ for x in self.generator.to_list(getattr(self.generator,'deps',None)):
+ node=self.generator.path.find_resource(x)
+ if not node:
+ self.generator.bld.fatal('Could not find %r (was it declared?)'%x)
+ nodes.append(node)
+ return[nodes,[]]
+ cls.scan=scan
+ if use_cache:
+ cache[key]=cls
+ tsk=self.create_task(name)
+ for x in('after','before','ext_in','ext_out'):
+ setattr(tsk,x,getattr(self,x,[]))
+ if hasattr(self,'stdout'):
+ tsk.stdout=self.stdout
+ if hasattr(self,'stderr'):
+ tsk.stderr=self.stderr
+ if getattr(self,'timeout',None):
+ tsk.timeout=self.timeout
+ if getattr(self,'always',None):
+ tsk.always_run=True
+ if getattr(self,'target',None):
+ if isinstance(self.target,str):
+ self.target=self.target.split()
+ if not isinstance(self.target,list):
+ self.target=[self.target]
+ for x in self.target:
+ if isinstance(x,str):
+ tsk.outputs.append(self.path.find_or_declare(x))
+ else:
+ x.parent.mkdir()
+ tsk.outputs.append(x)
+ if getattr(self,'install_path',None):
+ self.install_task=self.add_install_files(install_to=self.install_path,install_from=tsk.outputs,chmod=getattr(self,'chmod',Utils.O644))
+ if getattr(self,'source',None):
+ tsk.inputs=self.to_nodes(self.source)
+ self.source=[]
+ if getattr(self,'cwd',None):
+ tsk.cwd=self.cwd
+ if isinstance(tsk.run,functools.partial):
+ tsk.run=functools.partial(tsk.run,tsk)
+@feature('seq')
+def sequence_order(self):
+ if self.meths and self.meths[-1]!='sequence_order':
+ self.meths.append('sequence_order')
+ return
+ if getattr(self,'seq_start',None):
+ return
+ if getattr(self.bld,'prev',None):
+ self.bld.prev.post()
+ for x in self.bld.prev.tasks:
+ for y in self.tasks:
+ y.set_run_after(x)
+ self.bld.prev=self
+re_m4=re.compile(r'@(\w+)@',re.M)
+class subst_pc(Task.Task):
+ def force_permissions(self):
+ if getattr(self.generator,'chmod',None):
+ for x in self.outputs:
+ os.chmod(x.abspath(),self.generator.chmod)
+ def run(self):
+ if getattr(self.generator,'is_copy',None):
+ for i,x in enumerate(self.outputs):
+ x.write(self.inputs[i].read('rb'),'wb')
+ stat=os.stat(self.inputs[i].abspath())
+ os.utime(self.outputs[i].abspath(),(stat.st_atime,stat.st_mtime))
+ self.force_permissions()
+ return None
+ if getattr(self.generator,'fun',None):
+ ret=self.generator.fun(self)
+ if not ret:
+ self.force_permissions()
+ return ret
+ code=self.inputs[0].read(encoding=getattr(self.generator,'encoding','latin-1'))
+ if getattr(self.generator,'subst_fun',None):
+ code=self.generator.subst_fun(self,code)
+ if code is not None:
+ self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','latin-1'))
+ self.force_permissions()
+ return None
+ code=code.replace('%','%%')
+ lst=[]
+ def repl(match):
+ g=match.group
+ if g(1):
+ lst.append(g(1))
+ return"%%(%s)s"%g(1)
+ return''
+ code=getattr(self.generator,'re_m4',re_m4).sub(repl,code)
+ try:
+ d=self.generator.dct
+ except AttributeError:
+ d={}
+ for x in lst:
+ tmp=getattr(self.generator,x,'')or self.env[x]or self.env[x.upper()]
+ try:
+ tmp=''.join(tmp)
+ except TypeError:
+ tmp=str(tmp)
+ d[x]=tmp
+ code=code%d
+ self.outputs[0].write(code,encoding=getattr(self.generator,'encoding','latin-1'))
+ self.generator.bld.raw_deps[self.uid()]=lst
+ try:
+ delattr(self,'cache_sig')
+ except AttributeError:
+ pass
+ self.force_permissions()
+ def sig_vars(self):
+ bld=self.generator.bld
+ env=self.env
+ upd=self.m.update
+ if getattr(self.generator,'fun',None):
+ upd(Utils.h_fun(self.generator.fun).encode())
+ if getattr(self.generator,'subst_fun',None):
+ upd(Utils.h_fun(self.generator.subst_fun).encode())
+ vars=self.generator.bld.raw_deps.get(self.uid(),[])
+ act_sig=bld.hash_env_vars(env,vars)
+ upd(act_sig)
+ lst=[getattr(self.generator,x,'')for x in vars]
+ upd(Utils.h_list(lst))
+ return self.m.digest()
+@extension('.pc.in')
+def add_pcfile(self,node):
+ tsk=self.create_task('subst_pc',node,node.change_ext('.pc','.pc.in'))
+ self.install_task=self.add_install_files(install_to=getattr(self,'install_path','${LIBDIR}/pkgconfig/'),install_from=tsk.outputs)
+class subst(subst_pc):
+ pass
+@feature('subst')
+@before_method('process_source','process_rule')
+def process_subst(self):
+ src=Utils.to_list(getattr(self,'source',[]))
+ if isinstance(src,Node.Node):
+ src=[src]
+ tgt=Utils.to_list(getattr(self,'target',[]))
+ if isinstance(tgt,Node.Node):
+ tgt=[tgt]
+ if len(src)!=len(tgt):
+ raise Errors.WafError('invalid number of source/target for %r'%self)
+ for x,y in zip(src,tgt):
+ if not x or not y:
+ raise Errors.WafError('null source or target for %r'%self)
+ a,b=None,None
+ if isinstance(x,str)and isinstance(y,str)and x==y:
+ a=self.path.find_node(x)
+ b=self.path.get_bld().make_node(y)
+ if not os.path.isfile(b.abspath()):
+ b.parent.mkdir()
+ else:
+ if isinstance(x,str):
+ a=self.path.find_resource(x)
+ elif isinstance(x,Node.Node):
+ a=x
+ if isinstance(y,str):
+ b=self.path.find_or_declare(y)
+ elif isinstance(y,Node.Node):
+ b=y
+ if not a:
+ raise Errors.WafError('could not find %r for %r'%(x,self))
+ tsk=self.create_task('subst',a,b)
+ for k in('after','before','ext_in','ext_out'):
+ val=getattr(self,k,None)
+ if val:
+ setattr(tsk,k,val)
+ for xt in HEADER_EXTS:
+ if b.name.endswith(xt):
+ tsk.ext_in=tsk.ext_in+['.h']
+ break
+ inst_to=getattr(self,'install_path',None)
+ if inst_to:
+ self.install_task=self.add_install_files(install_to=inst_to,install_from=b,chmod=getattr(self,'chmod',Utils.O644))
+ self.source=[]
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
new file mode 100644
index 0000000..55e850d
--- /dev/null
+++ b/waflib/Tools/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py
new file mode 100644
index 0000000..5921ce1
--- /dev/null
+++ b/waflib/Tools/ar.py
@@ -0,0 +1,13 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Configure import conf
+@conf
+def find_ar(conf):
+ conf.load('ar')
+def configure(conf):
+ conf.find_program('ar',var='AR')
+ conf.add_os_flags('ARFLAGS')
+ if not conf.env.ARFLAGS:
+ conf.env.ARFLAGS=['rcs']
diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py
new file mode 100644
index 0000000..d6a6d45
--- /dev/null
+++ b/waflib/Tools/asm.py
@@ -0,0 +1,23 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task
+from waflib.Tools.ccroot import link_task,stlink_task
+from waflib.TaskGen import extension
+class asm(Task.Task):
+ color='BLUE'
+ run_str='${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+@extension('.s','.S','.asm','.ASM','.spp','.SPP')
+def asm_hook(self,node):
+ return self.create_compiled_task('asm',node)
+class asmprogram(link_task):
+ run_str='${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
+ ext_out=['.bin']
+ inst_to='${BINDIR}'
+class asmshlib(asmprogram):
+ inst_to='${LIBDIR}'
+class asmstlib(stlink_task):
+ pass
+def configure(conf):
+ conf.env.ASMPATH_ST='-I%s'
diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py
new file mode 100644
index 0000000..146921f
--- /dev/null
+++ b/waflib/Tools/bison.py
@@ -0,0 +1,28 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task
+from waflib.TaskGen import extension
+class bison(Task.Task):
+ color='BLUE'
+ run_str='${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
+ ext_out=['.h']
+@extension('.y','.yc','.yy')
+def big_bison(self,node):
+ has_h='-d'in self.env.BISONFLAGS
+ outs=[]
+ if node.name.endswith('.yc'):
+ outs.append(node.change_ext('.tab.cc'))
+ if has_h:
+ outs.append(node.change_ext('.tab.hh'))
+ else:
+ outs.append(node.change_ext('.tab.c'))
+ if has_h:
+ outs.append(node.change_ext('.tab.h'))
+ tsk=self.create_task('bison',node,outs)
+ tsk.cwd=node.parent.get_bld()
+ self.source.append(outs[0])
+def configure(conf):
+ conf.find_program('bison',var='BISON')
+ conf.env.BISONFLAGS=['-d']
diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py
new file mode 100644
index 0000000..7c794f1
--- /dev/null
+++ b/waflib/Tools/c.py
@@ -0,0 +1,26 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import TaskGen,Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task,stlink_task
+@TaskGen.extension('.c')
+def c_hook(self,node):
+ if not self.env.CC and self.env.CXX:
+ return self.create_compiled_task('cxx',node)
+ return self.create_compiled_task('c',node)
+class c(Task.Task):
+ run_str='${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+ vars=['CCDEPS']
+ ext_in=['.h']
+ scan=c_preproc.scan
+class cprogram(link_task):
+ run_str='${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
+ ext_out=['.bin']
+ vars=['LINKDEPS']
+ inst_to='${BINDIR}'
+class cshlib(cprogram):
+ inst_to='${LIBDIR}'
+class cstlib(stlink_task):
+ pass
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
new file mode 100644
index 0000000..b1c1031
--- /dev/null
+++ b/waflib/Tools/c_aliases.py
@@ -0,0 +1,60 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Utils,Errors
+from waflib.Configure import conf
+def get_extensions(lst):
+ ret=[]
+ for x in Utils.to_list(lst):
+ if not isinstance(x,str):
+ x=x.name
+ ret.append(x[x.rfind('.')+1:])
+ return ret
+def sniff_features(**kw):
+ exts=get_extensions(kw['source'])
+ typ=kw['typ']
+ feats=[]
+ for x in'cxx cpp c++ cc C'.split():
+ if x in exts:
+ feats.append('cxx')
+ break
+ if'c'in exts or'vala'in exts or'gs'in exts:
+ feats.append('c')
+ for x in'f f90 F F90 for FOR'.split():
+ if x in exts:
+ feats.append('fc')
+ break
+ if'd'in exts:
+ feats.append('d')
+ if'java'in exts:
+ feats.append('java')
+ return'java'
+ if typ in('program','shlib','stlib'):
+ will_link=False
+ for x in feats:
+ if x in('cxx','d','fc','c'):
+ feats.append(x+typ)
+ will_link=True
+ if not will_link and not kw.get('features',[]):
+ raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?'%kw)
+ return feats
+def set_features(kw,typ):
+ kw['typ']=typ
+ kw['features']=Utils.to_list(kw.get('features',[]))+Utils.to_list(sniff_features(**kw))
+@conf
+def program(bld,*k,**kw):
+ set_features(kw,'program')
+ return bld(*k,**kw)
+@conf
+def shlib(bld,*k,**kw):
+ set_features(kw,'shlib')
+ return bld(*k,**kw)
+@conf
+def stlib(bld,*k,**kw):
+ set_features(kw,'stlib')
+ return bld(*k,**kw)
+@conf
+def objects(bld,*k,**kw):
+ set_features(kw,'objects')
+ return bld(*k,**kw)
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
new file mode 100644
index 0000000..6347171
--- /dev/null
+++ b/waflib/Tools/c_config.py
@@ -0,0 +1,805 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from __future__ import with_statement
+import os,re,shlex
+from waflib import Build,Utils,Task,Options,Logs,Errors,Runner
+from waflib.TaskGen import after_method,feature
+from waflib.Configure import conf
+WAF_CONFIG_H='config.h'
+DEFKEYS='define_key'
+INCKEYS='include_key'
+SNIP_EMPTY_PROGRAM='''
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return 0;
+}
+'''
+MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'}
+MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh','__xtensa__':'xtensa',}
+@conf
+def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None):
+ assert(isinstance(line,str))
+ env=env or self.env
+ if posix is None:
+ posix=True
+ if'\\'in line:
+ posix=('\\ 'in line)or('\\\\'in line)
+ lex=shlex.shlex(line,posix=posix)
+ lex.whitespace_split=True
+ lex.commenters=''
+ lst=list(lex)
+ uselib=uselib_store
+ def app(var,val):
+ env.append_value('%s_%s'%(var,uselib),val)
+ def appu(var,val):
+ env.append_unique('%s_%s'%(var,uselib),val)
+ static=False
+ while lst:
+ x=lst.pop(0)
+ st=x[:2]
+ ot=x[2:]
+ if st=='-I'or st=='/I':
+ if not ot:
+ ot=lst.pop(0)
+ appu('INCLUDES',ot)
+ elif st=='-i':
+ tmp=[x,lst.pop(0)]
+ app('CFLAGS',tmp)
+ app('CXXFLAGS',tmp)
+ elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'):
+ if not ot:
+ ot=lst.pop(0)
+ app('DEFINES',ot)
+ elif st=='-l':
+ if not ot:
+ ot=lst.pop(0)
+ prefix='STLIB'if(force_static or static)else'LIB'
+ app(prefix,ot)
+ elif st=='-L':
+ if not ot:
+ ot=lst.pop(0)
+ prefix='STLIBPATH'if(force_static or static)else'LIBPATH'
+ appu(prefix,ot)
+ elif x.startswith('/LIBPATH:'):
+ prefix='STLIBPATH'if(force_static or static)else'LIBPATH'
+ appu(prefix,x.replace('/LIBPATH:',''))
+ elif x.startswith('-std='):
+ prefix='CXXFLAGS'if'++'in x else'CFLAGS'
+ app(prefix,x)
+ elif x.startswith('+')or x in('-pthread','-fPIC','-fpic','-fPIE','-fpie'):
+ app('CFLAGS',x)
+ app('CXXFLAGS',x)
+ app('LINKFLAGS',x)
+ elif x=='-framework':
+ appu('FRAMEWORK',lst.pop(0))
+ elif x.startswith('-F'):
+ appu('FRAMEWORKPATH',x[2:])
+ elif x=='-Wl,-rpath'or x=='-Wl,-R':
+ app('RPATH',lst.pop(0).lstrip('-Wl,'))
+ elif x.startswith('-Wl,-R,'):
+ app('RPATH',x[7:])
+ elif x.startswith('-Wl,-R'):
+ app('RPATH',x[6:])
+ elif x.startswith('-Wl,-rpath,'):
+ app('RPATH',x[11:])
+ elif x=='-Wl,-Bstatic'or x=='-Bstatic':
+ static=True
+ elif x=='-Wl,-Bdynamic'or x=='-Bdynamic':
+ static=False
+ elif x.startswith('-Wl')or x in('-rdynamic','-pie'):
+ app('LINKFLAGS',x)
+ elif x.startswith(('-m','-f','-dynamic','-O','-g')):
+ app('CFLAGS',x)
+ app('CXXFLAGS',x)
+ elif x.startswith('-bundle'):
+ app('LINKFLAGS',x)
+ elif x.startswith(('-undefined','-Xlinker')):
+ arg=lst.pop(0)
+ app('LINKFLAGS',[x,arg])
+ elif x.startswith(('-arch','-isysroot')):
+ tmp=[x,lst.pop(0)]
+ app('CFLAGS',tmp)
+ app('CXXFLAGS',tmp)
+ app('LINKFLAGS',tmp)
+ elif x.endswith(('.a','.so','.dylib','.lib')):
+ appu('LINKFLAGS',x)
+ else:
+ self.to_log('Unhandled flag %r'%x)
+@conf
+def validate_cfg(self,kw):
+ if not'path'in kw:
+ if not self.env.PKGCONFIG:
+ self.find_program('pkg-config',var='PKGCONFIG')
+ kw['path']=self.env.PKGCONFIG
+ s=('atleast_pkgconfig_version'in kw)+('modversion'in kw)+('package'in kw)
+ if s!=1:
+ raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
+ if not'msg'in kw:
+ if'atleast_pkgconfig_version'in kw:
+ kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version']
+ elif'modversion'in kw:
+ kw['msg']='Checking for %r version'%kw['modversion']
+ else:
+ kw['msg']='Checking for %r'%(kw['package'])
+ if not'okmsg'in kw and not'modversion'in kw:
+ kw['okmsg']='yes'
+ if not'errmsg'in kw:
+ kw['errmsg']='not found'
+ if'atleast_pkgconfig_version'in kw:
+ pass
+ elif'modversion'in kw:
+ if not'uselib_store'in kw:
+ kw['uselib_store']=kw['modversion']
+ if not'define_name'in kw:
+ kw['define_name']='%s_VERSION'%Utils.quote_define_name(kw['uselib_store'])
+ else:
+ if not'uselib_store'in kw:
+ kw['uselib_store']=Utils.to_list(kw['package'])[0].upper()
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define(kw['uselib_store'])
+@conf
+def exec_cfg(self,kw):
+ path=Utils.to_list(kw['path'])
+ env=self.env.env or None
+ if kw.get('pkg_config_path'):
+ if not env:
+ env=dict(self.environ)
+ env['PKG_CONFIG_PATH']=kw['pkg_config_path']
+ def define_it():
+ define_name=kw['define_name']
+ if kw.get('global_define',1):
+ self.define(define_name,1,False)
+ else:
+ self.env.append_unique('DEFINES_%s'%kw['uselib_store'],"%s=1"%define_name)
+ if kw.get('add_have_to_env',1):
+ self.env[define_name]=1
+ if'atleast_pkgconfig_version'in kw:
+ cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
+ self.cmd_and_log(cmd,env=env)
+ return
+ if'modversion'in kw:
+ version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip()
+ if not'okmsg'in kw:
+ kw['okmsg']=version
+ self.define(kw['define_name'],version)
+ return version
+ lst=[]+path
+ defi=kw.get('define_variable')
+ if not defi:
+ defi=self.env.PKG_CONFIG_DEFINES or{}
+ for key,val in defi.items():
+ lst.append('--define-variable=%s=%s'%(key,val))
+ static=kw.get('force_static',False)
+ if'args'in kw:
+ args=Utils.to_list(kw['args'])
+ if'--static'in args or'--static-libs'in args:
+ static=True
+ lst+=args
+ lst.extend(Utils.to_list(kw['package']))
+ if'variables'in kw:
+ v_env=kw.get('env',self.env)
+ vars=Utils.to_list(kw['variables'])
+ for v in vars:
+ val=self.cmd_and_log(lst+['--variable='+v],env=env).strip()
+ var='%s_%s'%(kw['uselib_store'],v)
+ v_env[var]=val
+ return
+ ret=self.cmd_and_log(lst,env=env)
+ define_it()
+ self.parse_flags(ret,kw['uselib_store'],kw.get('env',self.env),force_static=static,posix=kw.get('posix'))
+ return ret
+@conf
+def check_cfg(self,*k,**kw):
+ self.validate_cfg(kw)
+ if'msg'in kw:
+ self.start_msg(kw['msg'],**kw)
+ ret=None
+ try:
+ ret=self.exec_cfg(kw)
+ except self.errors.WafError:
+ if'errmsg'in kw:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('The configuration failed')
+ else:
+ if not ret:
+ ret=True
+ kw['success']=ret
+ if'okmsg'in kw:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+ return ret
+def build_fun(bld):
+ if bld.kw['compile_filename']:
+ node=bld.srcnode.make_node(bld.kw['compile_filename'])
+ node.write(bld.kw['code'])
+ o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog')
+ for k,v in bld.kw.items():
+ setattr(o,k,v)
+ if not bld.kw.get('quiet'):
+ bld.conf.to_log("==>\n%s\n<=="%bld.kw['code'])
+@conf
+def validate_c(self,kw):
+ for x in('type_name','field_name','function_name'):
+ if x in kw:
+ Logs.warn('Invalid argument %r in test'%x)
+ if not'build_fun'in kw:
+ kw['build_fun']=build_fun
+ if not'env'in kw:
+ kw['env']=self.env.derive()
+ env=kw['env']
+ if not'compiler'in kw and not'features'in kw:
+ kw['compiler']='c'
+ if env.CXX_NAME and Task.classes.get('cxx'):
+ kw['compiler']='cxx'
+ if not self.env.CXX:
+ self.fatal('a c++ compiler is required')
+ else:
+ if not self.env.CC:
+ self.fatal('a c compiler is required')
+ if not'compile_mode'in kw:
+ kw['compile_mode']='c'
+ if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler')=='cxx':
+ kw['compile_mode']='cxx'
+ if not'type'in kw:
+ kw['type']='cprogram'
+ if not'features'in kw:
+ if not'header_name'in kw or kw.get('link_header_test',True):
+ kw['features']=[kw['compile_mode'],kw['type']]
+ else:
+ kw['features']=[kw['compile_mode']]
+ else:
+ kw['features']=Utils.to_list(kw['features'])
+ if not'compile_filename'in kw:
+ kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
+ def to_header(dct):
+ if'header_name'in dct:
+ dct=Utils.to_list(dct['header_name'])
+ return''.join(['#include <%s>\n'%x for x in dct])
+ return''
+ if'framework_name'in kw:
+ fwkname=kw['framework_name']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=fwkname.upper()
+ if not kw.get('no_header'):
+ fwk='%s/%s.h'%(fwkname,fwkname)
+ if kw.get('remove_dot_h'):
+ fwk=fwk[:-2]
+ val=kw.get('header_name',[])
+ kw['header_name']=Utils.to_list(val)+[fwk]
+ kw['msg']='Checking for framework %s'%fwkname
+ kw['framework']=fwkname
+ elif'header_name'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for header %s'%kw['header_name']
+ l=Utils.to_list(kw['header_name'])
+ assert len(l),'list of headers in header_name is empty'
+ kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM
+ if not'uselib_store'in kw:
+ kw['uselib_store']=l[0].upper()
+ if not'define_name'in kw:
+ kw['define_name']=self.have_define(l[0])
+ if'lib'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for library %s'%kw['lib']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=kw['lib'].upper()
+ if'stlib'in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for static library %s'%kw['stlib']
+ if not'uselib_store'in kw:
+ kw['uselib_store']=kw['stlib'].upper()
+ if'fragment'in kw:
+ kw['code']=kw['fragment']
+ if not'msg'in kw:
+ kw['msg']='Checking for code snippet'
+ if not'errmsg'in kw:
+ kw['errmsg']='no'
+ for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')):
+ if flagsname in kw:
+ if not'msg'in kw:
+ kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
+ if not'errmsg'in kw:
+ kw['errmsg']='no'
+ if not'execute'in kw:
+ kw['execute']=False
+ if kw['execute']:
+ kw['features'].append('test_exec')
+ kw['chmod']=Utils.O755
+ if not'errmsg'in kw:
+ kw['errmsg']='not found'
+ if not'okmsg'in kw:
+ kw['okmsg']='yes'
+ if not'code'in kw:
+ kw['code']=SNIP_EMPTY_PROGRAM
+ if self.env[INCKEYS]:
+ kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code']
+ if kw.get('merge_config_header')or env.merge_config_header:
+ kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code'])
+ env.DEFINES=[]
+ if not kw.get('success'):
+ kw['success']=None
+ if'define_name'in kw:
+ self.undefine(kw['define_name'])
+ if not'msg'in kw:
+ self.fatal('missing "msg" in conf.check(...)')
+@conf
+def post_check(self,*k,**kw):
+ is_success=0
+ if kw['execute']:
+ if kw['success']is not None:
+ if kw.get('define_ret'):
+ is_success=kw['success']
+ else:
+ is_success=(kw['success']==0)
+ else:
+ is_success=(kw['success']==0)
+ if kw.get('define_name'):
+ comment=kw.get('comment','')
+ define_name=kw['define_name']
+ if kw['execute']and kw.get('define_ret')and isinstance(is_success,str):
+ if kw.get('global_define',1):
+ self.define(define_name,is_success,quote=kw.get('quote',1),comment=comment)
+ else:
+ if kw.get('quote',1):
+ succ='"%s"'%is_success
+ else:
+ succ=int(is_success)
+ val='%s=%s'%(define_name,succ)
+ var='DEFINES_%s'%kw['uselib_store']
+ self.env.append_value(var,val)
+ else:
+ if kw.get('global_define',1):
+ self.define_cond(define_name,is_success,comment=comment)
+ else:
+ var='DEFINES_%s'%kw['uselib_store']
+ self.env.append_value(var,'%s=%s'%(define_name,int(is_success)))
+ if kw.get('add_have_to_env',1):
+ if kw.get('uselib_store'):
+ self.env[self.have_define(kw['uselib_store'])]=1
+ elif kw['execute']and kw.get('define_ret'):
+ self.env[define_name]=is_success
+ else:
+ self.env[define_name]=int(is_success)
+ if'header_name'in kw:
+ if kw.get('auto_add_header_name'):
+ self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
+ if is_success and'uselib_store'in kw:
+ from waflib.Tools import ccroot
+ _vars=set()
+ for x in kw['features']:
+ if x in ccroot.USELIB_VARS:
+ _vars|=ccroot.USELIB_VARS[x]
+ for k in _vars:
+ x=k.lower()
+ if x in kw:
+ self.env.append_value(k+'_'+kw['uselib_store'],kw[x])
+ return is_success
+@conf
+def check(self,*k,**kw):
+ self.validate_c(kw)
+ self.start_msg(kw['msg'],**kw)
+ ret=None
+ try:
+ ret=self.run_build(*k,**kw)
+ except self.errors.ConfigurationError:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ if Logs.verbose>1:
+ raise
+ else:
+ self.fatal('The configuration failed')
+ else:
+ kw['success']=ret
+ ret=self.post_check(*k,**kw)
+ if not ret:
+ self.end_msg(kw['errmsg'],'YELLOW',**kw)
+ self.fatal('The configuration failed %r'%ret)
+ else:
+ self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw)
+ return ret
+class test_exec(Task.Task):
+ color='PINK'
+ def run(self):
+ if getattr(self.generator,'rpath',None):
+ if getattr(self.generator,'define_ret',False):
+ self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+ else:
+ self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
+ else:
+ env=self.env.env or{}
+ env.update(dict(os.environ))
+ for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
+ env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
+ if getattr(self.generator,'define_ret',False):
+ self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
+ else:
+ self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
+@feature('test_exec')
+@after_method('apply_link')
+def test_exec_fun(self):
+ self.create_task('test_exec',self.link_task.outputs[0])
+@conf
+def check_cxx(self,*k,**kw):
+ kw['compiler']='cxx'
+ return self.check(*k,**kw)
+@conf
+def check_cc(self,*k,**kw):
+ kw['compiler']='c'
+ return self.check(*k,**kw)
+@conf
+def set_define_comment(self,key,comment):
+ coms=self.env.DEFINE_COMMENTS
+ if not coms:
+ coms=self.env.DEFINE_COMMENTS={}
+ coms[key]=comment or''
+@conf
+def get_define_comment(self,key):
+ coms=self.env.DEFINE_COMMENTS or{}
+ return coms.get(key,'')
+@conf
+def define(self,key,val,quote=True,comment=''):
+ assert isinstance(key,str)
+ if not key:
+ return
+ if val is True:
+ val=1
+ elif val in(False,None):
+ val=0
+ if isinstance(val,int)or isinstance(val,float):
+ s='%s=%s'
+ else:
+ s=quote and'%s="%s"'or'%s=%s'
+ app=s%(key,str(val))
+ ban=key+'='
+ lst=self.env.DEFINES
+ for x in lst:
+ if x.startswith(ban):
+ lst[lst.index(x)]=app
+ break
+ else:
+ self.env.append_value('DEFINES',app)
+ self.env.append_unique(DEFKEYS,key)
+ self.set_define_comment(key,comment)
+@conf
+def undefine(self,key,comment=''):
+ assert isinstance(key,str)
+ if not key:
+ return
+ ban=key+'='
+ lst=[x for x in self.env.DEFINES if not x.startswith(ban)]
+ self.env.DEFINES=lst
+ self.env.append_unique(DEFKEYS,key)
+ self.set_define_comment(key,comment)
+@conf
+def define_cond(self,key,val,comment=''):
+ assert isinstance(key,str)
+ if not key:
+ return
+ if val:
+ self.define(key,1,comment=comment)
+ else:
+ self.undefine(key,comment=comment)
+@conf
+def is_defined(self,key):
+ assert key and isinstance(key,str)
+ ban=key+'='
+ for x in self.env.DEFINES:
+ if x.startswith(ban):
+ return True
+ return False
+@conf
+def get_define(self,key):
+ assert key and isinstance(key,str)
+ ban=key+'='
+ for x in self.env.DEFINES:
+ if x.startswith(ban):
+ return x[len(ban):]
+ return None
+@conf
+def have_define(self,key):
+ return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key)
+@conf
+def write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''):
+ if not configfile:
+ configfile=WAF_CONFIG_H
+ waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile)
+ node=top and self.bldnode or self.path.get_bld()
+ node=node.make_node(configfile)
+ node.parent.mkdir()
+ lst=['/* WARNING! All changes made to this file will be lost! */\n']
+ lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
+ lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix))
+ lst.append('\n#endif /* %s */\n'%waf_guard)
+ node.write('\n'.join(lst))
+ self.env.append_unique(Build.CFG_FILES,[node.abspath()])
+ if remove:
+ for key in self.env[DEFKEYS]:
+ self.undefine(key)
+ self.env[DEFKEYS]=[]
+@conf
+def get_config_header(self,defines=True,headers=False,define_prefix=''):
+ lst=[]
+ if self.env.WAF_CONFIG_H_PRELUDE:
+ lst.append(self.env.WAF_CONFIG_H_PRELUDE)
+ if headers:
+ for x in self.env[INCKEYS]:
+ lst.append('#include <%s>'%x)
+ if defines:
+ tbl={}
+ for k in self.env.DEFINES:
+ a,_,b=k.partition('=')
+ tbl[a]=b
+ for k in self.env[DEFKEYS]:
+ caption=self.get_define_comment(k)
+ if caption:
+ caption=' /* %s */'%caption
+ try:
+ txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption)
+ except KeyError:
+ txt='/* #undef %s%s */%s'%(define_prefix,k,caption)
+ lst.append(txt)
+ return"\n".join(lst)
+@conf
+def cc_add_flags(conf):
+ conf.add_os_flags('CPPFLAGS',dup=False)
+ conf.add_os_flags('CFLAGS',dup=False)
+@conf
+def cxx_add_flags(conf):
+ conf.add_os_flags('CPPFLAGS',dup=False)
+ conf.add_os_flags('CXXFLAGS',dup=False)
+@conf
+def link_add_flags(conf):
+ conf.add_os_flags('LINKFLAGS',dup=False)
+ conf.add_os_flags('LDFLAGS',dup=False)
+@conf
+def cc_load_tools(conf):
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=Utils.unversioned_sys_platform()
+ conf.load('c')
+@conf
+def cxx_load_tools(conf):
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=Utils.unversioned_sys_platform()
+ conf.load('cxx')
+@conf
+def get_cc_version(conf,cc,gcc=False,icc=False,clang=False):
+ cmd=cc+['-dM','-E','-']
+ env=conf.env.env or None
+ try:
+ out,err=conf.cmd_and_log(cmd,output=0,input='\n'.encode(),env=env)
+ except Errors.WafError:
+ conf.fatal('Could not determine the compiler version %r'%cmd)
+ if gcc:
+ if out.find('__INTEL_COMPILER')>=0:
+ conf.fatal('The intel compiler pretends to be gcc')
+ if out.find('__GNUC__')<0 and out.find('__clang__')<0:
+ conf.fatal('Could not determine the compiler type')
+ if icc and out.find('__INTEL_COMPILER')<0:
+ conf.fatal('Not icc/icpc')
+ if clang and out.find('__clang__')<0:
+ conf.fatal('Not clang/clang++')
+ if not clang and out.find('__clang__')>=0:
+ conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
+ k={}
+ if icc or gcc or clang:
+ out=out.splitlines()
+ for line in out:
+ lst=shlex.split(line)
+ if len(lst)>2:
+ key=lst[1]
+ val=lst[2]
+ k[key]=val
+ def isD(var):
+ return var in k
+ if not conf.env.DEST_OS:
+ conf.env.DEST_OS=''
+ for i in MACRO_TO_DESTOS:
+ if isD(i):
+ conf.env.DEST_OS=MACRO_TO_DESTOS[i]
+ break
+ else:
+ if isD('__APPLE__')and isD('__MACH__'):
+ conf.env.DEST_OS='darwin'
+ elif isD('__unix__'):
+ conf.env.DEST_OS='generic'
+ if isD('__ELF__'):
+ conf.env.DEST_BINFMT='elf'
+ elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'):
+ conf.env.DEST_BINFMT='pe'
+ if not conf.env.IMPLIBDIR:
+ conf.env.IMPLIBDIR=conf.env.LIBDIR
+ conf.env.LIBDIR=conf.env.BINDIR
+ elif isD('__APPLE__'):
+ conf.env.DEST_BINFMT='mac-o'
+ if not conf.env.DEST_BINFMT:
+ conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS)
+ for i in MACRO_TO_DEST_CPU:
+ if isD(i):
+ conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i]
+ break
+ Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
+ if icc:
+ ver=k['__INTEL_COMPILER']
+ conf.env.CC_VERSION=(ver[:-2],ver[-2],ver[-1])
+ else:
+ if isD('__clang__')and isD('__clang_major__'):
+ conf.env.CC_VERSION=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__'])
+ else:
+ conf.env.CC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0'))
+ return k
+@conf
+def get_xlc_version(conf,cc):
+ cmd=cc+['-qversion']
+ try:
+ out,err=conf.cmd_and_log(cmd,output=0)
+ except Errors.WafError:
+ conf.fatal('Could not find xlc %r'%cmd)
+ for v in(r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
+ version_re=re.compile(v,re.I).search
+ match=version_re(out or err)
+ if match:
+ k=match.groupdict()
+ conf.env.CC_VERSION=(k['major'],k['minor'])
+ break
+ else:
+ conf.fatal('Could not determine the XLC version.')
+@conf
+def get_suncc_version(conf,cc):
+ cmd=cc+['-V']
+ try:
+ out,err=conf.cmd_and_log(cmd,output=0)
+ except Errors.WafError as e:
+ if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')):
+ conf.fatal('Could not find suncc %r'%cmd)
+ out=e.stdout
+ err=e.stderr
+ version=(out or err)
+ version=version.splitlines()[0]
+ version_re=re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)',re.I).search
+ match=version_re(version)
+ if match:
+ k=match.groupdict()
+ conf.env.CC_VERSION=(k['major'],k['minor'])
+ else:
+ conf.fatal('Could not determine the suncc version.')
+@conf
+def add_as_needed(self):
+ if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
+ self.env.append_unique('LINKFLAGS','-Wl,--as-needed')
+class cfgtask(Task.Task):
+ def __init__(self,*k,**kw):
+ Task.Task.__init__(self,*k,**kw)
+ self.run_after=set()
+ def display(self):
+ return''
+ def runnable_status(self):
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+ return Task.RUN_ME
+ def uid(self):
+ return Utils.SIG_NIL
+ def signature(self):
+ return Utils.SIG_NIL
+ def run(self):
+ conf=self.conf
+ bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
+ bld.env=conf.env
+ bld.init_dirs()
+ bld.in_msg=1
+ bld.logger=self.logger
+ bld.multicheck_task=self
+ args=self.args
+ try:
+ if'func'in args:
+ bld.test(build_fun=args['func'],msg=args.get('msg',''),okmsg=args.get('okmsg',''),errmsg=args.get('errmsg',''),)
+ else:
+ args['multicheck_mandatory']=args.get('mandatory',True)
+ args['mandatory']=True
+ try:
+ bld.check(**args)
+ finally:
+ args['mandatory']=args['multicheck_mandatory']
+ except Exception:
+ return 1
+ def process(self):
+ Task.Task.process(self)
+ if'msg'in self.args:
+ with self.generator.bld.multicheck_lock:
+ self.conf.start_msg(self.args['msg'])
+ if self.hasrun==Task.NOT_RUN:
+ self.conf.end_msg('test cancelled','YELLOW')
+ elif self.hasrun!=Task.SUCCESS:
+ self.conf.end_msg(self.args.get('errmsg','no'),'YELLOW')
+ else:
+ self.conf.end_msg(self.args.get('okmsg','yes'),'GREEN')
+@conf
+def multicheck(self,*k,**kw):
+ self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw)
+ for var in('DEFINES',DEFKEYS):
+ self.env.append_value(var,[])
+ self.env.DEFINE_COMMENTS=self.env.DEFINE_COMMENTS or{}
+ class par(object):
+ def __init__(self):
+ self.keep=False
+ self.task_sigs={}
+ self.progress_bar=0
+ def total(self):
+ return len(tasks)
+ def to_log(self,*k,**kw):
+ return
+ bld=par()
+ bld.keep=kw.get('run_all_tests',True)
+ bld.imp_sigs={}
+ tasks=[]
+ id_to_task={}
+ for dct in k:
+ x=Task.classes['cfgtask'](bld=bld,env=None)
+ tasks.append(x)
+ x.args=dct
+ x.bld=bld
+ x.conf=self
+ x.args=dct
+ x.logger=Logs.make_mem_logger(str(id(x)),self.logger)
+ if'id'in dct:
+ id_to_task[dct['id']]=x
+ for x in tasks:
+ for key in Utils.to_list(x.args.get('before_tests',[])):
+ tsk=id_to_task[key]
+ if not tsk:
+ raise ValueError('No test named %r'%key)
+ tsk.run_after.add(x)
+ for key in Utils.to_list(x.args.get('after_tests',[])):
+ tsk=id_to_task[key]
+ if not tsk:
+ raise ValueError('No test named %r'%key)
+ x.run_after.add(tsk)
+ def it():
+ yield tasks
+ while 1:
+ yield[]
+ bld.producer=p=Runner.Parallel(bld,Options.options.jobs)
+ bld.multicheck_lock=Utils.threading.Lock()
+ p.biter=it()
+ self.end_msg('started')
+ p.start()
+ for x in tasks:
+ x.logger.memhandler.flush()
+ self.start_msg('-> processing test results')
+ if p.error:
+ for x in p.error:
+ if getattr(x,'err_msg',None):
+ self.to_log(x.err_msg)
+ self.end_msg('fail',color='RED')
+ raise Errors.WafError('There is an error in the library, read config.log for more information')
+ failure_count=0
+ for x in tasks:
+ if x.hasrun not in(Task.SUCCESS,Task.NOT_RUN):
+ failure_count+=1
+ if failure_count:
+ self.end_msg(kw.get('errmsg','%s test failed'%failure_count),color='YELLOW',**kw)
+ else:
+ self.end_msg('all ok',**kw)
+ for x in tasks:
+ if x.hasrun!=Task.SUCCESS:
+ if x.args.get('mandatory',True):
+ self.fatal(kw.get('fatalmsg')or'One of the tests has failed, read config.log for more information')
+@conf
+def check_gcc_o_space(self,mode='c'):
+ if int(self.env.CC_VERSION[0])>4:
+ return
+ self.env.stash()
+ if mode=='c':
+ self.env.CCLNK_TGT_F=['-o','']
+ elif mode=='cxx':
+ self.env.CXXLNK_TGT_F=['-o','']
+ features='%s %sshlib'%(mode,mode)
+ try:
+ self.check(msg='Checking if the -o link must be split from arguments',fragment=SNIP_EMPTY_PROGRAM,features=features)
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ else:
+ self.env.commit()
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
new file mode 100644
index 0000000..847b433
--- /dev/null
+++ b/waflib/Tools/c_osx.py
@@ -0,0 +1,121 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,shutil,platform
+from waflib import Task,Utils
+from waflib.TaskGen import taskgen_method,feature,after_method,before_method
+app_info='''
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
+<plist version="0.9">
+<dict>
+ <key>CFBundlePackageType</key>
+ <string>APPL</string>
+ <key>CFBundleGetInfoString</key>
+ <string>Created by Waf</string>
+ <key>CFBundleSignature</key>
+ <string>????</string>
+ <key>NOTE</key>
+ <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
+ <key>CFBundleExecutable</key>
+ <string>{app_name}</string>
+</dict>
+</plist>
+'''
+@feature('c','cxx')
+def set_macosx_deployment_target(self):
+ if self.env.MACOSX_DEPLOYMENT_TARGET:
+ os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env.MACOSX_DEPLOYMENT_TARGET
+ elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ:
+ if Utils.unversioned_sys_platform()=='darwin':
+ os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2])
+@taskgen_method
+def create_bundle_dirs(self,name,out):
+ dir=out.parent.find_or_declare(name)
+ dir.mkdir()
+ macos=dir.find_or_declare(['Contents','MacOS'])
+ macos.mkdir()
+ return dir
+def bundle_name_for_output(out):
+ name=out.name
+ k=name.rfind('.')
+ if k>=0:
+ name=name[:k]+'.app'
+ else:
+ name=name+'.app'
+ return name
+@feature('cprogram','cxxprogram')
+@after_method('apply_link')
+def create_task_macapp(self):
+ if self.env.MACAPP or getattr(self,'mac_app',False):
+ out=self.link_task.outputs[0]
+ name=bundle_name_for_output(out)
+ dir=self.create_bundle_dirs(name,out)
+ n1=dir.find_or_declare(['Contents','MacOS',out.name])
+ self.apptask=self.create_task('macapp',self.link_task.outputs,n1)
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name
+ self.add_install_files(install_to=inst_to,install_from=n1,chmod=Utils.O755)
+ if getattr(self,'mac_files',None):
+ mac_files_root=getattr(self,'mac_files_root',None)
+ if isinstance(mac_files_root,str):
+ mac_files_root=self.path.find_node(mac_files_root)
+ if not mac_files_root:
+ self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root)
+ res_dir=n1.parent.parent.make_node('Resources')
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name
+ for node in self.to_nodes(self.mac_files):
+ relpath=node.path_from(mac_files_root or node.parent)
+ self.create_task('macapp',node,res_dir.make_node(relpath))
+ self.add_install_as(install_to=os.path.join(inst_to,relpath),install_from=node)
+ if getattr(self.bld,'is_install',None):
+ self.install_task.hasrun=Task.SKIP_ME
+@feature('cprogram','cxxprogram')
+@after_method('apply_link')
+def create_task_macplist(self):
+ if self.env.MACAPP or getattr(self,'mac_app',False):
+ out=self.link_task.outputs[0]
+ name=bundle_name_for_output(out)
+ dir=self.create_bundle_dirs(name,out)
+ n1=dir.find_or_declare(['Contents','Info.plist'])
+ self.plisttask=plisttask=self.create_task('macplist',[],n1)
+ plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env}
+ plist_ctx=getattr(self,'plist_context',None)
+ if(plist_ctx):
+ plisttask.context.update(plist_ctx)
+ if getattr(self,'mac_plist',False):
+ node=self.path.find_resource(self.mac_plist)
+ if node:
+ plisttask.inputs.append(node)
+ else:
+ plisttask.code=self.mac_plist
+ else:
+ plisttask.code=app_info
+ inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name
+ self.add_install_files(install_to=inst_to,install_from=n1)
+@feature('cshlib','cxxshlib')
+@before_method('apply_link','propagate_uselib_vars')
+def apply_bundle(self):
+ if self.env.MACBUNDLE or getattr(self,'mac_bundle',False):
+ self.env.LINKFLAGS_cshlib=self.env.LINKFLAGS_cxxshlib=[]
+ self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN
+ use=self.use=self.to_list(getattr(self,'use',[]))
+ if not'MACBUNDLE'in use:
+ use.append('MACBUNDLE')
+app_dirs=['Contents','Contents/MacOS','Contents/Resources']
+class macapp(Task.Task):
+ color='PINK'
+ def run(self):
+ self.outputs[0].parent.mkdir()
+ shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath())
+class macplist(Task.Task):
+ color='PINK'
+ ext_in=['.bin']
+ def run(self):
+ if getattr(self,'code',None):
+ txt=self.code
+ else:
+ txt=self.inputs[0].read()
+ context=getattr(self,'context',{})
+ txt=txt.format(**context)
+ self.outputs[0].write(txt)
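
The c_osx tool above is driven entirely by task generator attributes (mac_app, mac_plist, mac_files, mac_bundle, or the MACAPP/MACBUNDLE env flags). A minimal wscript sketch of how a build could opt in, assuming the usual program alias from c_aliases; the file and target names are hypothetical:

def build(bld):
    bld.program(
        source='main.c',
        target='MyApp',
        mac_app=True,                    # triggers create_task_macapp and create_task_macplist
        mac_plist='Info.plist',          # optional: used as the Info.plist template by the macplist task
        mac_files=['icons/MyApp.icns'],  # optional: extra files placed under Contents/Resources
    )
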
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
new file mode 100644
index 0000000..8781b73
--- /dev/null
+++ b/waflib/Tools/c_preproc.py
@@ -0,0 +1,672 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re,string,traceback
+from waflib import Logs,Utils,Errors
+class PreprocError(Errors.WafError):
+ pass
+FILE_CACHE_SIZE=100000
+LINE_CACHE_SIZE=100000
+POPFILE='-'
+recursion_limit=150
+go_absolute=False
+standard_includes=['/usr/local/include','/usr/include']
+if Utils.is_win32:
+ standard_includes=[]
+use_trigraphs=0
+strict_quotes=0
+g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',}
+re_lines=re.compile('^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE)
+re_mac=re.compile("^[a-zA-Z_]\w*")
+re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
+re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE)
+re_nl=re.compile('\\\\\r*\n',re.MULTILINE)
+re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE)
+trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')]
+chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39}
+NUM='i'
+OP='O'
+IDENT='T'
+STR='s'
+CHAR='c'
+tok_types=[NUM,STR,IDENT,OP]
+exp_types=[r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',]
+re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M)
+accepted='a'
+ignored='i'
+undefined='u'
+skipped='s'
+def repl(m):
+ s=m.group()
+ if s[0]=='/':
+ return' '
+ return s
+prec={}
+ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',',']
+for x,syms in enumerate(ops):
+ for u in syms.split():
+ prec[u]=x
+def reduce_nums(val_1,val_2,val_op):
+ try:
+ a=0+val_1
+ except TypeError:
+ a=int(val_1)
+ try:
+ b=0+val_2
+ except TypeError:
+ b=int(val_2)
+ d=val_op
+ if d=='%':
+ c=a%b
+ elif d=='+':
+ c=a+b
+ elif d=='-':
+ c=a-b
+ elif d=='*':
+ c=a*b
+ elif d=='/':
+ c=a/b
+ elif d=='^':
+ c=a^b
+ elif d=='==':
+ c=int(a==b)
+ elif d=='|'or d=='bitor':
+ c=a|b
+ elif d=='||'or d=='or':
+ c=int(a or b)
+ elif d=='&'or d=='bitand':
+ c=a&b
+ elif d=='&&'or d=='and':
+ c=int(a and b)
+ elif d=='!='or d=='not_eq':
+ c=int(a!=b)
+ elif d=='^'or d=='xor':
+ c=int(a^b)
+ elif d=='<=':
+ c=int(a<=b)
+ elif d=='<':
+ c=int(a<b)
+ elif d=='>':
+ c=int(a>b)
+ elif d=='>=':
+ c=int(a>=b)
+ elif d=='<<':
+ c=a<<b
+ elif d=='>>':
+ c=a>>b
+ else:
+ c=0
+ return c
+def get_num(lst):
+ if not lst:
+ raise PreprocError('empty list for get_num')
+ (p,v)=lst[0]
+ if p==OP:
+ if v=='(':
+ count_par=1
+ i=1
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==OP:
+ if v==')':
+ count_par-=1
+ if count_par==0:
+ break
+ elif v=='(':
+ count_par+=1
+ i+=1
+ else:
+ raise PreprocError('rparen expected %r'%lst)
+ (num,_)=get_term(lst[1:i])
+ return(num,lst[i+1:])
+ elif v=='+':
+ return get_num(lst[1:])
+ elif v=='-':
+ num,lst=get_num(lst[1:])
+ return(reduce_nums('-1',num,'*'),lst)
+ elif v=='!':
+ num,lst=get_num(lst[1:])
+ return(int(not int(num)),lst)
+ elif v=='~':
+ num,lst=get_num(lst[1:])
+ return(~int(num),lst)
+ else:
+ raise PreprocError('Invalid op token %r for get_num'%lst)
+ elif p==NUM:
+ return v,lst[1:]
+ elif p==IDENT:
+ return 0,lst[1:]
+ else:
+ raise PreprocError('Invalid token %r for get_num'%lst)
+def get_term(lst):
+ if not lst:
+ raise PreprocError('empty list for get_term')
+ num,lst=get_num(lst)
+ if not lst:
+ return(num,[])
+ (p,v)=lst[0]
+ if p==OP:
+ if v==',':
+ return get_term(lst[1:])
+ elif v=='?':
+ count_par=0
+ i=1
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==OP:
+ if v==')':
+ count_par-=1
+ elif v=='(':
+ count_par+=1
+ elif v==':':
+ if count_par==0:
+ break
+ i+=1
+ else:
+ raise PreprocError('rparen expected %r'%lst)
+ if int(num):
+ return get_term(lst[1:i])
+ else:
+ return get_term(lst[i+1:])
+ else:
+ num2,lst=get_num(lst[1:])
+ if not lst:
+ num2=reduce_nums(num,num2,v)
+ return get_term([(NUM,num2)]+lst)
+ p2,v2=lst[0]
+ if p2!=OP:
+ raise PreprocError('op expected %r'%lst)
+ if prec[v2]>=prec[v]:
+ num2=reduce_nums(num,num2,v)
+ return get_term([(NUM,num2)]+lst)
+ else:
+ num3,lst=get_num(lst[1:])
+ num3=reduce_nums(num2,num3,v2)
+ return get_term([(NUM,num),(p,v),(NUM,num3)]+lst)
+ raise PreprocError('cannot reduce %r'%lst)
+def reduce_eval(lst):
+ num,lst=get_term(lst)
+ return(NUM,num)
+def stringize(lst):
+ lst=[str(v2)for(p2,v2)in lst]
+ return"".join(lst)
+def paste_tokens(t1,t2):
+ p1=None
+ if t1[0]==OP and t2[0]==OP:
+ p1=OP
+ elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM):
+ p1=IDENT
+ elif t1[0]==NUM and t2[0]==NUM:
+ p1=NUM
+ if not p1:
+ raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2))
+ return(p1,t1[1]+t2[1])
+def reduce_tokens(lst,defs,ban=[]):
+ i=0
+ while i<len(lst):
+ (p,v)=lst[i]
+ if p==IDENT and v=="defined":
+ del lst[i]
+ if i<len(lst):
+ (p2,v2)=lst[i]
+ if p2==IDENT:
+ if v2 in defs:
+ lst[i]=(NUM,1)
+ else:
+ lst[i]=(NUM,0)
+ elif p2==OP and v2=='(':
+ del lst[i]
+ (p2,v2)=lst[i]
+ del lst[i]
+ if v2 in defs:
+ lst[i]=(NUM,1)
+ else:
+ lst[i]=(NUM,0)
+ else:
+ raise PreprocError('Invalid define expression %r'%lst)
+ elif p==IDENT and v in defs:
+ if isinstance(defs[v],str):
+ a,b=extract_macro(defs[v])
+ defs[v]=b
+ macro_def=defs[v]
+ to_add=macro_def[1]
+ if isinstance(macro_def[0],list):
+ del lst[i]
+ accu=to_add[:]
+ reduce_tokens(accu,defs,ban+[v])
+ for tmp in accu:
+ lst.insert(i,tmp)
+ i+=1
+ else:
+ args=[]
+ del lst[i]
+ if i>=len(lst):
+ raise PreprocError('expected ( after %r (got nothing)'%v)
+ (p2,v2)=lst[i]
+ if p2!=OP or v2!='(':
+ raise PreprocError('expected ( after %r'%v)
+ del lst[i]
+ one_param=[]
+ count_paren=0
+ while i<len(lst):
+ p2,v2=lst[i]
+ del lst[i]
+ if p2==OP and count_paren==0:
+ if v2=='(':
+ one_param.append((p2,v2))
+ count_paren+=1
+ elif v2==')':
+ if one_param:
+ args.append(one_param)
+ break
+ elif v2==',':
+ if not one_param:
+ raise PreprocError('empty param in funcall %r'%v)
+ args.append(one_param)
+ one_param=[]
+ else:
+ one_param.append((p2,v2))
+ else:
+ one_param.append((p2,v2))
+ if v2=='(':
+ count_paren+=1
+ elif v2==')':
+ count_paren-=1
+ else:
+ raise PreprocError('malformed macro')
+ accu=[]
+ arg_table=macro_def[0]
+ j=0
+ while j<len(to_add):
+ (p2,v2)=to_add[j]
+ if p2==OP and v2=='#':
+ if j+1<len(to_add)and to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
+ toks=args[arg_table[to_add[j+1][1]]]
+ accu.append((STR,stringize(toks)))
+ j+=1
+ else:
+ accu.append((p2,v2))
+ elif p2==OP and v2=='##':
+ if accu and j+1<len(to_add):
+ t1=accu[-1]
+ if to_add[j+1][0]==IDENT and to_add[j+1][1]in arg_table:
+ toks=args[arg_table[to_add[j+1][1]]]
+ if toks:
+ accu[-1]=paste_tokens(t1,toks[0])
+ accu.extend(toks[1:])
+ else:
+ accu.append((p2,v2))
+ accu.extend(toks)
+ elif to_add[j+1][0]==IDENT and to_add[j+1][1]=='__VA_ARGS__':
+ va_toks=[]
+ st=len(macro_def[0])
+ pt=len(args)
+ for x in args[pt-st+1:]:
+ va_toks.extend(x)
+ va_toks.append((OP,','))
+ if va_toks:
+ va_toks.pop()
+ if len(accu)>1:
+ (p3,v3)=accu[-1]
+ (p4,v4)=accu[-2]
+ if v3=='##':
+ accu.pop()
+ if v4==','and pt<st:
+ accu.pop()
+ accu+=va_toks
+ else:
+ accu[-1]=paste_tokens(t1,to_add[j+1])
+ j+=1
+ else:
+ accu.append((p2,v2))
+ elif p2==IDENT and v2 in arg_table:
+ toks=args[arg_table[v2]]
+ reduce_tokens(toks,defs,ban+[v])
+ accu.extend(toks)
+ else:
+ accu.append((p2,v2))
+ j+=1
+ reduce_tokens(accu,defs,ban+[v])
+ for x in range(len(accu)-1,-1,-1):
+ lst.insert(i,accu[x])
+ i+=1
+def eval_macro(lst,defs):
+ reduce_tokens(lst,defs,[])
+ if not lst:
+ raise PreprocError('missing tokens to evaluate')
+ if lst:
+ p,v=lst[0]
+ if p==IDENT and v not in defs:
+ raise PreprocError('missing macro %r'%lst)
+ p,v=reduce_eval(lst)
+ return int(v)!=0
+def extract_macro(txt):
+ t=tokenize(txt)
+ if re_fun.search(txt):
+ p,name=t[0]
+ p,v=t[1]
+ if p!=OP:
+ raise PreprocError('expected (')
+ i=1
+ pindex=0
+ params={}
+ prev='('
+ while 1:
+ i+=1
+ p,v=t[i]
+ if prev=='(':
+ if p==IDENT:
+ params[v]=pindex
+ pindex+=1
+ prev=p
+ elif p==OP and v==')':
+ break
+ else:
+ raise PreprocError('unexpected token (3)')
+ elif prev==IDENT:
+ if p==OP and v==',':
+ prev=v
+ elif p==OP and v==')':
+ break
+ else:
+ raise PreprocError('comma or ... expected')
+ elif prev==',':
+ if p==IDENT:
+ params[v]=pindex
+ pindex+=1
+ prev=p
+ elif p==OP and v=='...':
+ raise PreprocError('not implemented (1)')
+ else:
+ raise PreprocError('comma or ... expected (2)')
+ elif prev=='...':
+ raise PreprocError('not implemented (2)')
+ else:
+ raise PreprocError('unexpected else')
+ return(name,[params,t[i+1:]])
+ else:
+ (p,v)=t[0]
+ if len(t)>1:
+ return(v,[[],t[1:]])
+ else:
+ return(v,[[],[('T','')]])
+re_include=re.compile('^\s*(<(?:.*)>|"(?:.*)")')
+def extract_include(txt,defs):
+ m=re_include.search(txt)
+ if m:
+ txt=m.group(1)
+ return txt[0],txt[1:-1]
+ toks=tokenize(txt)
+ reduce_tokens(toks,defs,['waf_include'])
+ if not toks:
+ raise PreprocError('could not parse include %r'%txt)
+ if len(toks)==1:
+ if toks[0][0]==STR:
+ return'"',toks[0][1]
+ else:
+ if toks[0][1]=='<'and toks[-1][1]=='>':
+ ret='<',stringize(toks).lstrip('<').rstrip('>')
+ return ret
+ raise PreprocError('could not parse include %r'%txt)
+def parse_char(txt):
+ if not txt:
+ raise PreprocError('attempted to parse a null char')
+ if txt[0]!='\\':
+ return ord(txt)
+ c=txt[1]
+ if c=='x':
+ if len(txt)==4 and txt[3]in string.hexdigits:
+ return int(txt[2:],16)
+ return int(txt[2:],16)
+ elif c.isdigit():
+ if c=='0'and len(txt)==2:
+ return 0
+ for i in 3,2,1:
+ if len(txt)>i and txt[1:1+i].isdigit():
+ return(1+i,int(txt[1:1+i],8))
+ else:
+ try:
+ return chr_esc[c]
+ except KeyError:
+ raise PreprocError('could not parse char literal %r'%txt)
+def tokenize(s):
+ return tokenize_private(s)[:]
+def tokenize_private(s):
+ ret=[]
+ for match in re_clexer.finditer(s):
+ m=match.group
+ for name in tok_types:
+ v=m(name)
+ if v:
+ if name==IDENT:
+ if v in g_optrans:
+ name=OP
+ elif v.lower()=="true":
+ v=1
+ name=NUM
+ elif v.lower()=="false":
+ v=0
+ name=NUM
+ elif name==NUM:
+ if m('oct'):
+ v=int(v,8)
+ elif m('hex'):
+ v=int(m('hex'),16)
+ elif m('n0'):
+ v=m('n0')
+ else:
+ v=m('char')
+ if v:
+ v=parse_char(v)
+ else:
+ v=m('n2')or m('n4')
+ elif name==OP:
+ if v=='%:':
+ v='#'
+ elif v=='%:%:':
+ v='##'
+ elif name==STR:
+ v=v[1:-1]
+ ret.append((name,v))
+ break
+ return ret
+def format_defines(lst):
+ ret=[]
+ for y in lst:
+ if y:
+ pos=y.find('=')
+ if pos==-1:
+ ret.append(y)
+ elif pos>0:
+ ret.append('%s %s'%(y[:pos],y[pos+1:]))
+ else:
+ raise ValueError('Invalid define expression %r'%y)
+ return ret
+class c_parser(object):
+ def __init__(self,nodepaths=None,defines=None):
+ self.lines=[]
+ if defines is None:
+ self.defs={}
+ else:
+ self.defs=dict(defines)
+ self.state=[]
+ self.count_files=0
+ self.currentnode_stack=[]
+ self.nodepaths=nodepaths or[]
+ self.nodes=[]
+ self.names=[]
+ self.curfile=''
+ self.ban_includes=set()
+ self.listed=set()
+ def cached_find_resource(self,node,filename):
+ try:
+ cache=node.ctx.preproc_cache_node
+ except AttributeError:
+ cache=node.ctx.preproc_cache_node=Utils.lru_cache(FILE_CACHE_SIZE)
+ key=(node,filename)
+ try:
+ return cache[key]
+ except KeyError:
+ ret=node.find_resource(filename)
+ if ret:
+ if getattr(ret,'children',None):
+ ret=None
+ elif ret.is_child_of(node.ctx.bldnode):
+ tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
+ if tmp and getattr(tmp,'children',None):
+ ret=None
+ cache[key]=ret
+ return ret
+ def tryfind(self,filename,kind='"',env=None):
+ if filename.endswith('.moc'):
+ self.names.append(filename)
+ return None
+ self.curfile=filename
+ found=None
+ if kind=='"':
+ if env.MSVC_VERSION:
+ for n in reversed(self.currentnode_stack):
+ found=self.cached_find_resource(n,filename)
+ if found:
+ break
+ else:
+ found=self.cached_find_resource(self.currentnode_stack[-1],filename)
+ if not found:
+ for n in self.nodepaths:
+ found=self.cached_find_resource(n,filename)
+ if found:
+ break
+ listed=self.listed
+ if found and not found in self.ban_includes:
+ if found not in listed:
+ listed.add(found)
+ self.nodes.append(found)
+ self.addlines(found)
+ else:
+ if filename not in listed:
+ listed.add(filename)
+ self.names.append(filename)
+ return found
+ def filter_comments(self,node):
+ code=node.read()
+ if use_trigraphs:
+ for(a,b)in trig_def:
+ code=code.split(a).join(b)
+ code=re_nl.sub('',code)
+ code=re_cpp.sub(repl,code)
+ return re_lines.findall(code)
+ def parse_lines(self,node):
+ try:
+ cache=node.ctx.preproc_cache_lines
+ except AttributeError:
+ cache=node.ctx.preproc_cache_lines=Utils.lru_cache(LINE_CACHE_SIZE)
+ try:
+ return cache[node]
+ except KeyError:
+ cache[node]=lines=self.filter_comments(node)
+ lines.append((POPFILE,''))
+ lines.reverse()
+ return lines
+ def addlines(self,node):
+ self.currentnode_stack.append(node.parent)
+ self.count_files+=1
+ if self.count_files>recursion_limit:
+ raise PreprocError('recursion limit exceeded')
+ if Logs.verbose:
+ Logs.debug('preproc: reading file %r',node)
+ try:
+ lines=self.parse_lines(node)
+ except EnvironmentError:
+ raise PreprocError('could not read the file %r'%node)
+ except Exception:
+ if Logs.verbose>0:
+ Logs.error('parsing %r failed %s',node,traceback.format_exc())
+ else:
+ self.lines.extend(lines)
+ def start(self,node,env):
+ Logs.debug('preproc: scanning %s (in %s)',node.name,node.parent.name)
+ self.current_file=node
+ self.addlines(node)
+ if env.DEFINES:
+ lst=format_defines(env.DEFINES)
+ lst.reverse()
+ self.lines.extend([('define',x)for x in lst])
+ while self.lines:
+ (token,line)=self.lines.pop()
+ if token==POPFILE:
+ self.count_files-=1
+ self.currentnode_stack.pop()
+ continue
+ try:
+ state=self.state
+ if token[:2]=='if':
+ state.append(undefined)
+ elif token=='endif':
+ state.pop()
+ if token[0]!='e':
+ if skipped in self.state or ignored in self.state:
+ continue
+ if token=='if':
+ ret=eval_macro(tokenize(line),self.defs)
+ if ret:
+ state[-1]=accepted
+ else:
+ state[-1]=ignored
+ elif token=='ifdef':
+ m=re_mac.match(line)
+ if m and m.group()in self.defs:
+ state[-1]=accepted
+ else:
+ state[-1]=ignored
+ elif token=='ifndef':
+ m=re_mac.match(line)
+ if m and m.group()in self.defs:
+ state[-1]=ignored
+ else:
+ state[-1]=accepted
+ elif token=='include'or token=='import':
+ (kind,inc)=extract_include(line,self.defs)
+ self.current_file=self.tryfind(inc,kind,env)
+ if token=='import':
+ self.ban_includes.add(self.current_file)
+ elif token=='elif':
+ if state[-1]==accepted:
+ state[-1]=skipped
+ elif state[-1]==ignored:
+ if eval_macro(tokenize(line),self.defs):
+ state[-1]=accepted
+ elif token=='else':
+ if state[-1]==accepted:
+ state[-1]=skipped
+ elif state[-1]==ignored:
+ state[-1]=accepted
+ elif token=='define':
+ try:
+ self.defs[self.define_name(line)]=line
+ except AttributeError:
+ raise PreprocError('Invalid define line %r'%line)
+ elif token=='undef':
+ m=re_mac.match(line)
+ if m and m.group()in self.defs:
+ self.defs.__delitem__(m.group())
+ elif token=='pragma':
+ if re_pragma_once.match(line.lower()):
+ self.ban_includes.add(self.current_file)
+ except Exception as e:
+ if Logs.verbose:
+ Logs.debug('preproc: line parsing failed (%s): %s %s',e,line,traceback.format_exc())
+ def define_name(self,line):
+ return re_mac.match(line).group()
+def scan(task):
+ try:
+ incn=task.generator.includes_nodes
+ except AttributeError:
+ raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator)
+ if go_absolute:
+ nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes]
+ else:
+ nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)]
+ tmp=c_parser(nodepaths)
+ tmp.start(task.inputs[0],task.env)
+ return(tmp.nodes,tmp.names)
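
The scanner above is what the c/cxx task classes use through c_preproc.scan; its behaviour is controlled by the module-level knobs near the top (go_absolute, standard_includes, recursion_limit). A hedged sketch of how a wscript could tune them before building — the extra include path is hypothetical:

from waflib.Tools import c_preproc

def build(bld):
    c_preproc.go_absolute = True                        # also follow headers outside the project tree
    c_preproc.standard_includes.append('/opt/include')  # extra system path used when go_absolute is set
    bld.program(source='main.c', target='app')
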
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
new file mode 100644
index 0000000..30b9f38
--- /dev/null
+++ b/waflib/Tools/c_tests.py
@@ -0,0 +1,152 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task
+from waflib.Configure import conf
+from waflib.TaskGen import feature,before_method,after_method
+LIB_CODE='''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllexport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void) { return 9; }
+'''
+MAIN_CODE='''
+#ifdef _MSC_VER
+#define testEXPORT __declspec(dllimport)
+#else
+#define testEXPORT
+#endif
+testEXPORT int lib_func(void);
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return !(lib_func() == 9);
+}
+'''
+@feature('link_lib_test')
+@before_method('process_source')
+def link_lib_test_fun(self):
+ def write_test_file(task):
+ task.outputs[0].write(task.generator.code)
+ rpath=[]
+ if getattr(self,'add_rpath',False):
+ rpath=[self.bld.path.get_bld().abspath()]
+ mode=self.mode
+ m='%s %s'%(mode,mode)
+ ex=self.test_exec and'test_exec'or''
+ bld=self.bld
+ bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE)
+ bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE)
+ bld(features='%sshlib'%m,source='test.'+mode,target='test')
+ bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath)
+@conf
+def check_library(self,mode=None,test_exec=True):
+ if not mode:
+ mode='c'
+ if self.env.CXX:
+ mode='cxx'
+ self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec)
+INLINE_CODE='''
+typedef int foo_t;
+static %s foo_t static_foo () {return 0; }
+%s foo_t foo () {
+ return 0;
+}
+'''
+INLINE_VALUES=['inline','__inline__','__inline']
+@conf
+def check_inline(self,**kw):
+ self.start_msg('Checking for inline')
+ if not'define_name'in kw:
+ kw['define_name']='INLINE_MACRO'
+ if not'features'in kw:
+ if self.env.CXX:
+ kw['features']=['cxx']
+ else:
+ kw['features']=['c']
+ for x in INLINE_VALUES:
+ kw['fragment']=INLINE_CODE%(x,x)
+ try:
+ self.check(**kw)
+ except self.errors.ConfigurationError:
+ continue
+ else:
+ self.end_msg(x)
+ if x!='inline':
+ self.define('inline',x,quote=False)
+ return x
+ self.fatal('could not use inline functions')
+LARGE_FRAGMENT='''#include <unistd.h>
+int main(int argc, char **argv) {
+ (void)argc; (void)argv;
+ return !(sizeof(off_t) >= 8);
+}
+'''
+@conf
+def check_large_file(self,**kw):
+ if not'define_name'in kw:
+ kw['define_name']='HAVE_LARGEFILE'
+ if not'execute'in kw:
+ kw['execute']=True
+ if not'features'in kw:
+ if self.env.CXX:
+ kw['features']=['cxx','cxxprogram']
+ else:
+ kw['features']=['c','cprogram']
+ kw['fragment']=LARGE_FRAGMENT
+ kw['msg']='Checking for large file support'
+ ret=True
+ try:
+ if self.env.DEST_BINFMT!='pe':
+ ret=self.check(**kw)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ if ret:
+ return True
+ kw['msg']='Checking for -D_FILE_OFFSET_BITS=64'
+ kw['defines']=['_FILE_OFFSET_BITS=64']
+ try:
+ ret=self.check(**kw)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ self.define('_FILE_OFFSET_BITS',64)
+ return ret
+ self.fatal('There is no support for large files')
+ENDIAN_FRAGMENT='''
+short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
+short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
+int use_ascii (int i) {
+ return ascii_mm[i] + ascii_ii[i];
+}
+short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
+short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
+int use_ebcdic (int i) {
+ return ebcdic_mm[i] + ebcdic_ii[i];
+}
+extern int foo;
+'''
+class grep_for_endianness(Task.Task):
+ color='PINK'
+ def run(self):
+ txt=self.inputs[0].read(flags='rb').decode('latin-1')
+ if txt.find('LiTTleEnDian')>-1:
+ self.generator.tmp.append('little')
+ elif txt.find('BIGenDianSyS')>-1:
+ self.generator.tmp.append('big')
+ else:
+ return-1
+@feature('grep_for_endianness')
+@after_method('process_source')
+def grep_for_endianness_fun(self):
+ self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0])
+@conf
+def check_endianness(self):
+ tmp=[]
+ def check_msg(self):
+ return tmp[0]
+ self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg='Checking for endianness',define='ENDIANNESS',tmp=tmp,okmsg=check_msg)
+ return tmp[0]
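
The configuration helpers defined above (check_library, check_inline, check_large_file, check_endianness) are @conf methods, so they are called on the configuration context. A minimal sketch of a configure() using them:

def configure(conf):
    conf.load('compiler_c')
    conf.check_inline()                  # tries inline/__inline__/__inline, defines INLINE_MACRO
    conf.check_large_file()              # defines HAVE_LARGEFILE, falls back to _FILE_OFFSET_BITS=64
    endian = conf.check_endianness()     # returns 'little' or 'big'
    conf.check_library(test_exec=True)   # builds and runs a small shlib/program pair
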
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
new file mode 100644
index 0000000..bb807ec
--- /dev/null
+++ b/waflib/Tools/ccroot.py
@@ -0,0 +1,479 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Task,Utils,Node,Errors,Logs
+from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension
+from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests
+from waflib.Configure import conf
+SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib']
+USELIB_VARS=Utils.defaultdict(set)
+USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH'])
+USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH'])
+USELIB_VARS['d']=set(['INCLUDES','DFLAGS'])
+USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH'])
+USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS'])
+USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS'])
+USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS'])
+USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS'])
+USELIB_VARS['asm']=set(['ASFLAGS'])
+@taskgen_method
+def create_compiled_task(self,name,node):
+ out='%s.%d.o'%(node.name,self.idx)
+ task=self.create_task(name,node,node.parent.find_or_declare(out))
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks=[task]
+ return task
+@taskgen_method
+def to_incnodes(self,inlst):
+ lst=[]
+ seen=set()
+ for x in self.to_list(inlst):
+ if x in seen or not x:
+ continue
+ seen.add(x)
+ if isinstance(x,Node.Node):
+ lst.append(x)
+ else:
+ if os.path.isabs(x):
+ lst.append(self.bld.root.make_node(x)or x)
+ else:
+ if x[0]=='#':
+ p=self.bld.bldnode.make_node(x[1:])
+ v=self.bld.srcnode.make_node(x[1:])
+ else:
+ p=self.path.get_bld().make_node(x)
+ v=self.path.make_node(x)
+ if p.is_child_of(self.bld.bldnode):
+ p.mkdir()
+ lst.append(p)
+ lst.append(v)
+ return lst
+@feature('c','cxx','d','asm','fc','includes')
+@after_method('propagate_uselib_vars','process_source')
+def apply_incpaths(self):
+ lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env.INCLUDES)
+ self.includes_nodes=lst
+ cwd=self.get_cwd()
+ self.env.INCPATHS=[x.path_from(cwd)for x in lst]
+class link_task(Task.Task):
+ color='YELLOW'
+ weight=3
+ inst_to=None
+ chmod=Utils.O755
+ def add_target(self,target):
+ if isinstance(target,str):
+ base=self.generator.path
+ if target.startswith('#'):
+ target=target[1:]
+ base=self.generator.bld.bldnode
+ pattern=self.env[self.__class__.__name__+'_PATTERN']
+ if not pattern:
+ pattern='%s'
+ folder,name=os.path.split(target)
+ if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None):
+ nums=self.generator.vnum.split('.')
+ if self.env.DEST_BINFMT=='pe':
+ name=name+'-'+nums[0]
+ elif self.env.DEST_OS=='openbsd':
+ pattern='%s.%s'%(pattern,nums[0])
+ if len(nums)>=2:
+ pattern+='.%s'%nums[1]
+ if folder:
+ tmp=folder+os.sep+pattern%name
+ else:
+ tmp=pattern%name
+ target=base.find_or_declare(tmp)
+ self.set_outputs(target)
+ def exec_command(self,*k,**kw):
+ ret=super(link_task,self).exec_command(*k,**kw)
+ if not ret and self.env.DO_MANIFEST:
+ ret=self.exec_mf()
+ return ret
+ def exec_mf(self):
+ if not self.env.MT:
+ return 0
+ manifest=None
+ for out_node in self.outputs:
+ if out_node.name.endswith('.manifest'):
+ manifest=out_node.abspath()
+ break
+ else:
+ return 0
+ mode=''
+ for x in Utils.to_list(self.generator.features):
+ if x in('cprogram','cxxprogram','fcprogram','fcprogram_test'):
+ mode=1
+ elif x in('cshlib','cxxshlib','fcshlib'):
+ mode=2
+ Logs.debug('msvc: embedding manifest in mode %r',mode)
+ lst=[]+self.env.MT
+ lst.extend(Utils.to_list(self.env.MTFLAGS))
+ lst.extend(['-manifest',manifest])
+ lst.append('-outputresource:%s;%s'%(self.outputs[0].abspath(),mode))
+ return super(link_task,self).exec_command(lst)
+class stlink_task(link_task):
+ run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
+ chmod=Utils.O644
+def rm_tgt(cls):
+ old=cls.run
+ def wrap(self):
+ try:
+ os.remove(self.outputs[0].abspath())
+ except OSError:
+ pass
+ return old(self)
+ setattr(cls,'run',wrap)
+rm_tgt(stlink_task)
+@feature('c','cxx','d','fc','asm')
+@after_method('process_source')
+def apply_link(self):
+ for x in self.features:
+ if x=='cprogram'and'cxx'in self.features:
+ x='cxxprogram'
+ elif x=='cshlib'and'cxx'in self.features:
+ x='cxxshlib'
+ if x in Task.classes:
+ if issubclass(Task.classes[x],link_task):
+ link=x
+ break
+ else:
+ return
+ objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])]
+ self.link_task=self.create_task(link,objs)
+ self.link_task.add_target(self.target)
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to=self.link_task.inst_to
+ if inst_to:
+ self.install_task=self.add_install_files(install_to=inst_to,install_from=self.link_task.outputs[:],chmod=self.link_task.chmod,task=self.link_task)
+@taskgen_method
+def use_rec(self,name,**kw):
+ if name in self.tmp_use_not or name in self.tmp_use_seen:
+ return
+ try:
+ y=self.bld.get_tgen_by_name(name)
+ except Errors.WafError:
+ self.uselib.append(name)
+ self.tmp_use_not.add(name)
+ return
+ self.tmp_use_seen.append(name)
+ y.post()
+ y.tmp_use_objects=objects=kw.get('objects',True)
+ y.tmp_use_stlib=stlib=kw.get('stlib',True)
+ try:
+ link_task=y.link_task
+ except AttributeError:
+ y.tmp_use_var=''
+ else:
+ objects=False
+ if not isinstance(link_task,stlink_task):
+ stlib=False
+ y.tmp_use_var='LIB'
+ else:
+ y.tmp_use_var='STLIB'
+ p=self.tmp_use_prec
+ for x in self.to_list(getattr(y,'use',[])):
+ if self.env["STLIB_"+x]:
+ continue
+ try:
+ p[x].append(name)
+ except KeyError:
+ p[x]=[name]
+ self.use_rec(x,objects=objects,stlib=stlib)
+@feature('c','cxx','d','use','fc')
+@before_method('apply_incpaths','propagate_uselib_vars')
+@after_method('apply_link','process_source')
+def process_use(self):
+ use_not=self.tmp_use_not=set()
+ self.tmp_use_seen=[]
+ use_prec=self.tmp_use_prec={}
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ self.includes=self.to_list(getattr(self,'includes',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ for x in names:
+ self.use_rec(x)
+ for x in use_not:
+ if x in use_prec:
+ del use_prec[x]
+ out=self.tmp_use_sorted=[]
+ tmp=[]
+ for x in self.tmp_use_seen:
+ for k in use_prec.values():
+ if x in k:
+ break
+ else:
+ tmp.append(x)
+ while tmp:
+ e=tmp.pop()
+ out.append(e)
+ try:
+ nlst=use_prec[e]
+ except KeyError:
+ pass
+ else:
+ del use_prec[e]
+ for x in nlst:
+ for y in use_prec:
+ if x in use_prec[y]:
+ break
+ else:
+ tmp.append(x)
+ if use_prec:
+ raise Errors.WafError('Cycle detected in the use processing %r'%use_prec)
+ out.reverse()
+ link_task=getattr(self,'link_task',None)
+ for x in out:
+ y=self.bld.get_tgen_by_name(x)
+ var=y.tmp_use_var
+ if var and link_task:
+ if var=='LIB'or y.tmp_use_stlib or x in names:
+ self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]])
+ self.link_task.dep_nodes.extend(y.link_task.outputs)
+ tmp_path=y.link_task.outputs[0].parent.path_from(self.get_cwd())
+ self.env.append_unique(var+'PATH',[tmp_path])
+ else:
+ if y.tmp_use_objects:
+ self.add_objects_from_tgen(y)
+ if getattr(y,'export_includes',None):
+ self.includes=self.includes+y.to_incnodes(y.export_includes)
+ if getattr(y,'export_defines',None):
+ self.env.append_value('DEFINES',self.to_list(y.export_defines))
+ for x in names:
+ try:
+ y=self.bld.get_tgen_by_name(x)
+ except Errors.WafError:
+ if not self.env['STLIB_'+x]and not x in self.uselib:
+ self.uselib.append(x)
+ else:
+ for k in self.to_list(getattr(y,'use',[])):
+ if not self.env['STLIB_'+k]and not k in self.uselib:
+ self.uselib.append(k)
+@taskgen_method
+def accept_node_to_link(self,node):
+ return not node.name.endswith('.pdb')
+@taskgen_method
+def add_objects_from_tgen(self,tg):
+ try:
+ link_task=self.link_task
+ except AttributeError:
+ pass
+ else:
+ for tsk in getattr(tg,'compiled_tasks',[]):
+ for x in tsk.outputs:
+ if self.accept_node_to_link(x):
+ link_task.inputs.append(x)
+@taskgen_method
+def get_uselib_vars(self):
+ _vars=set()
+ for x in self.features:
+ if x in USELIB_VARS:
+ _vars|=USELIB_VARS[x]
+ return _vars
+@feature('c','cxx','d','fc','javac','cs','uselib','asm')
+@after_method('process_use')
+def propagate_uselib_vars(self):
+ _vars=self.get_uselib_vars()
+ env=self.env
+ app=env.append_value
+ feature_uselib=self.features+self.to_list(getattr(self,'uselib',[]))
+ for var in _vars:
+ y=var.lower()
+ val=getattr(self,y,[])
+ if val:
+ app(var,self.to_list(val))
+ for x in feature_uselib:
+ val=env['%s_%s'%(var,x)]
+ if val:
+ app(var,val)
+@feature('cshlib','cxxshlib','fcshlib')
+@after_method('apply_link')
+def apply_implib(self):
+ if not self.env.DEST_BINFMT=='pe':
+ return
+ dll=self.link_task.outputs[0]
+ if isinstance(self.target,Node.Node):
+ name=self.target.name
+ else:
+ name=os.path.split(self.target)[1]
+ implib=self.env.implib_PATTERN%name
+ implib=dll.parent.find_or_declare(implib)
+ self.env.append_value('LINKFLAGS',self.env.IMPLIB_ST%implib.bldpath())
+ self.link_task.outputs.append(implib)
+ if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe':
+ node=self.path.find_resource(self.defs)
+ if not node:
+ raise Errors.WafError('invalid def file %r'%self.defs)
+ if self.env.def_PATTERN:
+ self.env.append_value('LINKFLAGS',self.env.def_PATTERN%node.path_from(self.get_cwd()))
+ self.link_task.dep_nodes.append(node)
+ else:
+ self.link_task.inputs.append(node)
+ if getattr(self,'install_task',None):
+ try:
+ inst_to=self.install_path_implib
+ except AttributeError:
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to='${IMPLIBDIR}'
+ self.install_task.install_to='${BINDIR}'
+ if not self.env.IMPLIBDIR:
+ self.env.IMPLIBDIR=self.env.LIBDIR
+ self.implib_install_task=self.add_install_files(install_to=inst_to,install_from=implib,chmod=self.link_task.chmod,task=self.link_task)
+re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
+@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum')
+@after_method('apply_link','propagate_uselib_vars')
+def apply_vnum(self):
+ if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'):
+ return
+ link=self.link_task
+ if not re_vnum.match(self.vnum):
+ raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self)))
+ nums=self.vnum.split('.')
+ node=link.outputs[0]
+ cnum=getattr(self,'cnum',str(nums[0]))
+ cnums=cnum.split('.')
+ if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums:
+ raise Errors.WafError('invalid compatibility version %s'%cnum)
+ libname=node.name
+ if libname.endswith('.dylib'):
+ name3=libname.replace('.dylib','.%s.dylib'%self.vnum)
+ name2=libname.replace('.dylib','.%s.dylib'%cnum)
+ else:
+ name3=libname+'.'+self.vnum
+ name2=libname+'.'+cnum
+ if self.env.SONAME_ST:
+ v=self.env.SONAME_ST%name2
+ self.env.append_value('LINKFLAGS',v.split())
+ if self.env.DEST_OS!='openbsd':
+ outs=[node.parent.make_node(name3)]
+ if name2!=name3:
+ outs.append(node.parent.make_node(name2))
+ self.create_task('vnum',node,outs)
+ if getattr(self,'install_task',None):
+ self.install_task.hasrun=Task.SKIPPED
+ path=self.install_task.install_to
+ if self.env.DEST_OS=='openbsd':
+ libname=self.link_task.outputs[0].name
+ t1=self.add_install_as(install_to='%s/%s'%(path,libname),install_from=node,chmod=self.link_task.chmod)
+ self.vnum_install_task=(t1,)
+ else:
+ t1=self.add_install_as(install_to=path+os.sep+name3,install_from=node,chmod=self.link_task.chmod)
+ t3=self.add_symlink_as(install_to=path+os.sep+libname,install_from=name3)
+ if name2!=name3:
+ t2=self.add_symlink_as(install_to=path+os.sep+name2,install_from=name3)
+ self.vnum_install_task=(t1,t2,t3)
+ else:
+ self.vnum_install_task=(t1,t3)
+ if'-dynamiclib'in self.env.LINKFLAGS:
+ try:
+ inst_to=self.install_path
+ except AttributeError:
+ inst_to=self.link_task.inst_to
+ if inst_to:
+ p=Utils.subst_vars(inst_to,self.env)
+ path=os.path.join(p,name2)
+ self.env.append_value('LINKFLAGS',['-install_name',path])
+ self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum)
+ self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum)
+class vnum(Task.Task):
+ color='CYAN'
+ ext_in=['.bin']
+ def keyword(self):
+ return'Symlinking'
+ def run(self):
+ for x in self.outputs:
+ path=x.abspath()
+ try:
+ os.remove(path)
+ except OSError:
+ pass
+ try:
+ os.symlink(self.inputs[0].name,path)
+ except OSError:
+ return 1
+class fake_shlib(link_task):
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ return Task.SKIP_ME
+class fake_stlib(stlink_task):
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ return Task.SKIP_ME
+@conf
+def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]):
+ return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines)
+@conf
+def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]):
+ return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines)
+lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],}
+@feature('fake_lib')
+def process_lib(self):
+ node=None
+ names=[x%self.name for x in lib_patterns[self.lib_type]]
+ for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS:
+ if not isinstance(x,Node.Node):
+ x=self.bld.root.find_node(x)or self.path.find_node(x)
+ if not x:
+ continue
+ for y in names:
+ node=x.find_node(y)
+ if node:
+ try:
+ Utils.h_file(node.abspath())
+ except EnvironmentError:
+ raise ValueError('Could not read %r'%y)
+ break
+ else:
+ continue
+ break
+ else:
+ raise Errors.WafError('could not find library %r'%self.name)
+ self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node])
+ self.target=self.name
+class fake_o(Task.Task):
+ def runnable_status(self):
+ return Task.SKIP_ME
+@extension('.o','.obj')
+def add_those_o_files(self,node):
+ tsk=self.create_task('fake_o',[],node)
+ try:
+ self.compiled_tasks.append(tsk)
+ except AttributeError:
+ self.compiled_tasks=[tsk]
+@feature('fake_obj')
+@before_method('process_source')
+def process_objs(self):
+ for node in self.to_nodes(self.source):
+ self.add_those_o_files(node)
+ self.source=[]
+@conf
+def read_object(self,obj):
+ if not isinstance(obj,self.path.__class__):
+ obj=self.path.find_resource(obj)
+ return self(features='fake_obj',source=obj,name=obj.name)
+@feature('cxxprogram','cprogram')
+@after_method('apply_link','process_use')
+def set_full_paths_hpux(self):
+ if self.env.DEST_OS!='hp-ux':
+ return
+ base=self.bld.bldnode.abspath()
+ for var in['LIBPATH','STLIBPATH']:
+ lst=[]
+ for x in self.env[var]:
+ if x.startswith('/'):
+ lst.append(x)
+ else:
+ lst.append(os.path.normpath(os.path.join(base,x)))
+ self.env[var]=lst
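
Beyond the compile/link plumbing, ccroot adds read_shlib/read_stlib/read_object for pre-built binaries and the vnum feature for versioned shared libraries. A sketch of typical wscript usage, assuming the program/shlib aliases from c_aliases; the library and source names are hypothetical:

def build(bld):
    # declare an existing shared library so it can be pulled in through 'use'
    bld.read_shlib('foo', paths=['libs'], export_includes=['libs/include'])
    # versioned library: apply_vnum installs libbar.so.1.2.3 plus the libbar.so.1 and libbar.so symlinks
    bld.shlib(source='bar.c', target='bar', vnum='1.2.3')
    bld.program(source='main.c', target='app', use='foo bar')
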
diff --git a/waflib/Tools/clang.py b/waflib/Tools/clang.py
new file mode 100644
index 0000000..9379f5a
--- /dev/null
+++ b/waflib/Tools/clang.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar,gcc
+from waflib.Configure import conf
+@conf
+def find_clang(conf):
+ cc=conf.find_program('clang',var='CC')
+ conf.get_cc_version(cc,clang=True)
+ conf.env.CC_NAME='clang'
+def configure(conf):
+ conf.find_clang()
+ conf.find_program(['llvm-ar','ar'],var='AR')
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/clangxx.py b/waflib/Tools/clangxx.py
new file mode 100644
index 0000000..fc97135
--- /dev/null
+++ b/waflib/Tools/clangxx.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar,gxx
+from waflib.Configure import conf
+@conf
+def find_clangxx(conf):
+ cxx=conf.find_program('clang++',var='CXX')
+ conf.get_cc_version(cxx,clang=True)
+ conf.env.CXX_NAME='clang'
+def configure(conf):
+ conf.find_clangxx()
+ conf.find_program(['llvm-ar','ar'],var='AR')
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py
new file mode 100644
index 0000000..ee607be
--- /dev/null
+++ b/waflib/Tools/compiler_c.py
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['clang','gcc'],}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=c_compiler.get(build_platform,c_compiler['default'])
+ return' '.join(possible_compiler_list)
+def configure(conf):
+ try:
+ test_for_compiler=conf.options.check_c_compiler or default_compilers()
+ except AttributeError:
+ conf.fatal("Add options(opt): opt.load('compiler_c')")
+ for compiler in re.split('[ ,]+',test_for_compiler):
+ conf.env.stash()
+ conf.start_msg('Checking for %r (C compiler)'%compiler)
+ try:
+ conf.load(compiler)
+ except conf.errors.ConfigurationError as e:
+ conf.env.revert()
+ conf.end_msg(False)
+ debug('compiler_c: %r',e)
+ else:
+ if conf.env.CC:
+ conf.end_msg(conf.env.get_flat('CC'))
+ conf.env.COMPILER_CC=compiler
+ conf.env.commit()
+ break
+ conf.env.revert()
+ conf.end_msg(False)
+ else:
+ conf.fatal('could not configure a C compiler!')
+def options(opt):
+ test_for_compiler=default_compilers()
+ opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
+ cc_compiler_opts=opt.add_option_group('Configuration options')
+ cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler")
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
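
compiler_c tries each candidate from the platform list above and keeps the first C compiler that configures successfully; the tool must also be loaded in options(), otherwise configure() aborts with the hint seen in the code. Minimal usage:

def options(opt):
    opt.load('compiler_c')

def configure(conf):
    conf.load('compiler_c')

The candidate list can be narrowed on the command line, e.g. ./waf configure --check-c-compiler=clang (the option defined above).
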
diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py
new file mode 100644
index 0000000..cbd267f
--- /dev/null
+++ b/waflib/Tools/compiler_cxx.py
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib.Tools import ccroot
+from waflib import Utils
+from waflib.Logs import debug
+cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['clang++','g++']}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default'])
+ return' '.join(possible_compiler_list)
+def configure(conf):
+ try:
+ test_for_compiler=conf.options.check_cxx_compiler or default_compilers()
+ except AttributeError:
+ conf.fatal("Add options(opt): opt.load('compiler_cxx')")
+ for compiler in re.split('[ ,]+',test_for_compiler):
+ conf.env.stash()
+ conf.start_msg('Checking for %r (C++ compiler)'%compiler)
+ try:
+ conf.load(compiler)
+ except conf.errors.ConfigurationError as e:
+ conf.env.revert()
+ conf.end_msg(False)
+ debug('compiler_cxx: %r',e)
+ else:
+ if conf.env.CXX:
+ conf.end_msg(conf.env.get_flat('CXX'))
+ conf.env.COMPILER_CXX=compiler
+ conf.env.commit()
+ break
+ conf.env.revert()
+ conf.end_msg(False)
+ else:
+ conf.fatal('could not configure a C++ compiler!')
+def options(opt):
+ test_for_compiler=default_compilers()
+ opt.load_special_tools('cxx_*.py')
+ cxx_compiler_opts=opt.add_option_group('Configuration options')
+ cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler")
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py
new file mode 100644
index 0000000..2ca7e26
--- /dev/null
+++ b/waflib/Tools/compiler_d.py
@@ -0,0 +1,41 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils,Logs
+d_compiler={'default':['gdc','dmd','ldc2']}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=d_compiler.get(build_platform,d_compiler['default'])
+ return' '.join(possible_compiler_list)
+def configure(conf):
+ try:
+ test_for_compiler=conf.options.check_d_compiler or default_compilers()
+ except AttributeError:
+ conf.fatal("Add options(opt): opt.load('compiler_d')")
+ for compiler in re.split('[ ,]+',test_for_compiler):
+ conf.env.stash()
+ conf.start_msg('Checking for %r (D compiler)'%compiler)
+ try:
+ conf.load(compiler)
+ except conf.errors.ConfigurationError as e:
+ conf.env.revert()
+ conf.end_msg(False)
+ Logs.debug('compiler_d: %r',e)
+ else:
+ if conf.env.D:
+ conf.end_msg(conf.env.get_flat('D'))
+ conf.env.COMPILER_D=compiler
+ conf.env.commit()
+ break
+ conf.env.revert()
+ conf.end_msg(False)
+ else:
+ conf.fatal('could not configure a D compiler!')
+def options(opt):
+ test_for_compiler=default_compilers()
+ d_compiler_opts=opt.add_option_group('Configuration options')
+ d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler')
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py
new file mode 100644
index 0000000..8b23a2b
--- /dev/null
+++ b/waflib/Tools/compiler_fc.py
@@ -0,0 +1,43 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils,Logs
+from waflib.Tools import fc
+fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']}
+def default_compilers():
+ build_platform=Utils.unversioned_sys_platform()
+ possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default'])
+ return' '.join(possible_compiler_list)
+def configure(conf):
+ try:
+ test_for_compiler=conf.options.check_fortran_compiler or default_compilers()
+ except AttributeError:
+ conf.fatal("Add options(opt): opt.load('compiler_fc')")
+ for compiler in re.split('[ ,]+',test_for_compiler):
+ conf.env.stash()
+ conf.start_msg('Checking for %r (Fortran compiler)'%compiler)
+ try:
+ conf.load(compiler)
+ except conf.errors.ConfigurationError as e:
+ conf.env.revert()
+ conf.end_msg(False)
+ Logs.debug('compiler_fortran: %r',e)
+ else:
+ if conf.env.FC:
+ conf.end_msg(conf.env.get_flat('FC'))
+ conf.env.COMPILER_FORTRAN=compiler
+ conf.env.commit()
+ break
+ conf.env.revert()
+ conf.end_msg(False)
+ else:
+ conf.fatal('could not configure a Fortran compiler!')
+def options(opt):
+ test_for_compiler=default_compilers()
+ opt.load_special_tools('fc_*.py')
+ fortran_compiler_opts=opt.add_option_group('Configuration options')
+ fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler")
+ for x in test_for_compiler.split():
+ opt.load('%s'%x)
diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py
new file mode 100644
index 0000000..df73c94
--- /dev/null
+++ b/waflib/Tools/cs.py
@@ -0,0 +1,113 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Utils,Task,Options,Errors
+from waflib.TaskGen import before_method,after_method,feature
+from waflib.Tools import ccroot
+from waflib.Configure import conf
+ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES'])
+ccroot.lib_patterns['csshlib']=['%s']
+@feature('cs')
+@before_method('process_source')
+def apply_cs(self):
+ cs_nodes=[]
+ no_nodes=[]
+ for x in self.to_nodes(self.source):
+ if x.name.endswith('.cs'):
+ cs_nodes.append(x)
+ else:
+ no_nodes.append(x)
+ self.source=no_nodes
+ bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe')
+ self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen))
+ tsk.env.CSTYPE='/target:%s'%bintype
+ tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath()
+ self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu'))
+ inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}')
+ if inst_to:
+ mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644)
+ self.install_task=self.add_install_files(install_to=inst_to,install_from=self.cs_task.outputs[:],chmod=mod)
+@feature('cs')
+@after_method('apply_cs')
+def use_cs(self):
+ names=self.to_list(getattr(self,'use',[]))
+ get=self.bld.get_tgen_by_name
+ for x in names:
+ try:
+ y=get(x)
+ except Errors.WafError:
+ self.env.append_value('CSFLAGS','/reference:%s'%x)
+ continue
+ y.post()
+ tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None)
+ if not tsk:
+ self.bld.fatal('cs task has no link task for use %r'%self)
+ self.cs_task.dep_nodes.extend(tsk.outputs)
+ self.cs_task.set_run_after(tsk)
+ self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath())
+@feature('cs')
+@after_method('apply_cs','use_cs')
+def debug_cs(self):
+ csdebug=getattr(self,'csdebug',self.env.CSDEBUG)
+ if not csdebug:
+ return
+ node=self.cs_task.outputs[0]
+ if self.env.CS_NAME=='mono':
+ out=node.parent.find_or_declare(node.name+'.mdb')
+ else:
+ out=node.change_ext('.pdb')
+ self.cs_task.outputs.append(out)
+ if getattr(self,'install_task',None):
+ self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out)
+ if csdebug=='pdbonly':
+ val=['/debug+','/debug:pdbonly']
+ elif csdebug=='full':
+ val=['/debug+','/debug:full']
+ else:
+ val=['/debug-']
+ self.env.append_value('CSFLAGS',val)
+@feature('cs')
+@after_method('debug_cs')
+def doc_cs(self):
+ csdoc=getattr(self,'csdoc',self.env.CSDOC)
+ if not csdoc:
+ return
+ node=self.cs_task.outputs[0]
+ out=node.change_ext('.xml')
+ self.cs_task.outputs.append(out)
+ if getattr(self,'install_task',None):
+ self.doc_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=out)
+ self.env.append_value('CSFLAGS','/doc:%s'%out.abspath())
+class mcs(Task.Task):
+ color='YELLOW'
+ run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
+ def split_argfile(self,cmd):
+ inline=[cmd[0]]
+ infile=[]
+ for x in cmd[1:]:
+ if x.lower()=='/noconfig':
+ inline.append(x)
+ else:
+ infile.append(self.quote_flag(x))
+ return(inline,infile)
+def configure(conf):
+ csc=getattr(Options.options,'cscbinary',None)
+ if csc:
+ conf.env.MCS=csc
+ conf.find_program(['csc','mcs','gmcs'],var='MCS')
+ conf.env.ASS_ST='/r:%s'
+ conf.env.RES_ST='/resource:%s'
+ conf.env.CS_NAME='csc'
+ if str(conf.env.MCS).lower().find('mcs')>-1:
+ conf.env.CS_NAME='mono'
+def options(opt):
+ opt.add_option('--with-csc-binary',type='string',dest='cscbinary')
+class fake_csshlib(Task.Task):
+ color='YELLOW'
+ inst_to=None
+ def runnable_status(self):
+ return Task.SKIP_ME
+@conf
+def read_csshlib(self,name,paths=[]):
+ return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib')
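
The cs feature compiles the .cs sources of a task generator into the assembly named by gen; the /target type is inferred from the extension, and dependencies on other assemblies go through use. A sketch with hypothetical file names (the explicit name= on the library makes it addressable from use=):

def configure(conf):
    conf.load('cs')

def build(bld):
    bld(features='cs', source='util.cs', gen='util.dll', name='util')   # library, from the .dll extension
    bld(features='cs', source='main.cs', gen='hello.exe', use='util')   # pulled in via /reference:
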
diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py
new file mode 100644
index 0000000..e63ad8b
--- /dev/null
+++ b/waflib/Tools/cxx.py
@@ -0,0 +1,26 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import TaskGen,Task
+from waflib.Tools import c_preproc
+from waflib.Tools.ccroot import link_task,stlink_task
+@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
+def cxx_hook(self,node):
+ return self.create_compiled_task('cxx',node)
+if not'.c'in TaskGen.task_gen.mappings:
+ TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
+class cxx(Task.Task):
+ run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
+ vars=['CXXDEPS']
+ ext_in=['.h']
+ scan=c_preproc.scan
+class cxxprogram(link_task):
+ run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
+ vars=['LINKDEPS']
+ ext_out=['.bin']
+ inst_to='${BINDIR}'
+class cxxshlib(cxxprogram):
+ inst_to='${LIBDIR}'
+class cxxstlib(stlink_task):
+ pass
diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py
new file mode 100644
index 0000000..6d1c3c6
--- /dev/null
+++ b/waflib/Tools/d.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Utils,Task,Errors
+from waflib.TaskGen import taskgen_method,feature,extension
+from waflib.Tools import d_scan,d_config
+from waflib.Tools.ccroot import link_task,stlink_task
+class d(Task.Task):
+ color='GREEN'
+ run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
+ scan=d_scan.scan
+class d_with_header(d):
+ run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
+class d_header(Task.Task):
+ color='BLUE'
+ run_str='${D} ${D_HEADER} ${SRC}'
+class dprogram(link_task):
+ run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
+ inst_to='${BINDIR}'
+class dshlib(dprogram):
+ inst_to='${LIBDIR}'
+class dstlib(stlink_task):
+ pass
+@extension('.d','.di','.D')
+def d_hook(self,node):
+ ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o'
+ out='%s.%d.%s'%(node.name,self.idx,ext)
+ def create_compiled_task(self,name,node):
+ task=self.create_task(name,node,node.parent.find_or_declare(out))
+ try:
+ self.compiled_tasks.append(task)
+ except AttributeError:
+ self.compiled_tasks=[task]
+ return task
+ if getattr(self,'generate_headers',None):
+ tsk=create_compiled_task(self,'d_with_header',node)
+ tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
+ else:
+ tsk=create_compiled_task(self,'d',node)
+ return tsk
+@taskgen_method
+def generate_header(self,filename):
+ try:
+ self.header_lst.append([filename,self.install_path])
+ except AttributeError:
+ self.header_lst=[[filename,self.install_path]]
+@feature('d')
+def process_header(self):
+ for i in getattr(self,'header_lst',[]):
+ node=self.path.find_resource(i[0])
+ if not node:
+ raise Errors.WafError('file %r not found on d obj'%i[0])
+ self.create_task('d_header',node,node.change_ext('.di'))
diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py
new file mode 100644
index 0000000..3b4bdf0
--- /dev/null
+++ b/waflib/Tools/d_config.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Utils
+from waflib.Configure import conf
+@conf
+def d_platform_flags(self):
+ v=self.env
+ if not v.DEST_OS:
+ v.DEST_OS=Utils.unversioned_sys_platform()
+ binfmt=Utils.destos_to_binfmt(self.env.DEST_OS)
+ if binfmt=='pe':
+ v.dprogram_PATTERN='%s.exe'
+ v.dshlib_PATTERN='lib%s.dll'
+ v.dstlib_PATTERN='lib%s.a'
+ elif binfmt=='mac-o':
+ v.dprogram_PATTERN='%s'
+ v.dshlib_PATTERN='lib%s.dylib'
+ v.dstlib_PATTERN='lib%s.a'
+ else:
+ v.dprogram_PATTERN='%s'
+ v.dshlib_PATTERN='lib%s.so'
+ v.dstlib_PATTERN='lib%s.a'
+DLIB='''
+version(D_Version2) {
+ import std.stdio;
+ int main() {
+ writefln("phobos2");
+ return 0;
+ }
+} else {
+ version(Tango) {
+ import tango.stdc.stdio;
+ int main() {
+ printf("tango");
+ return 0;
+ }
+ } else {
+ import std.stdio;
+ int main() {
+ writefln("phobos1");
+ return 0;
+ }
+ }
+}
+'''
+@conf
+def check_dlibrary(self,execute=True):
+ ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True)
+ if execute:
+ self.env.DLIBRARY=ret.strip()
diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py
new file mode 100644
index 0000000..09ccfa9
--- /dev/null
+++ b/waflib/Tools/d_scan.py
@@ -0,0 +1,136 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+def filter_comments(filename):
+ txt=Utils.readf(filename)
+ i=0
+ buf=[]
+ max=len(txt)
+ begin=0
+ while i<max:
+ c=txt[i]
+ if c=='"'or c=="'":
+ buf.append(txt[begin:i])
+ delim=c
+ i+=1
+ while i<max:
+ c=txt[i]
+ if c==delim:
+ break
+ elif c=='\\':
+ i+=1
+ i+=1
+ i+=1
+ begin=i
+ elif c=='/':
+ buf.append(txt[begin:i])
+ i+=1
+ if i==max:
+ break
+ c=txt[i]
+ if c=='+':
+ i+=1
+ nesting=1
+ c=None
+ while i<max:
+ prev=c
+ c=txt[i]
+ if prev=='/'and c=='+':
+ nesting+=1
+ c=None
+ elif prev=='+'and c=='/':
+ nesting-=1
+ if nesting==0:
+ break
+ c=None
+ i+=1
+ elif c=='*':
+ i+=1
+ c=None
+ while i<max:
+ prev=c
+ c=txt[i]
+ if prev=='*'and c=='/':
+ break
+ i+=1
+ elif c=='/':
+ i+=1
+ while i<max and txt[i]!='\n':
+ i+=1
+ else:
+ begin=i-1
+ continue
+ i+=1
+ begin=i
+ buf.append(' ')
+ else:
+ i+=1
+ buf.append(txt[begin:])
+ return buf
+class d_parser(object):
+ def __init__(self,env,incpaths):
+ self.allnames=[]
+ self.re_module=re.compile("module\s+([^;]+)")
+ self.re_import=re.compile("import\s+([^;]+)")
+ self.re_import_bindings=re.compile("([^:]+):(.*)")
+ self.re_import_alias=re.compile("[^=]+=(.+)")
+ self.env=env
+ self.nodes=[]
+ self.names=[]
+ self.incpaths=incpaths
+ def tryfind(self,filename):
+ found=0
+ for n in self.incpaths:
+ found=n.find_resource(filename.replace('.','/')+'.d')
+ if found:
+ self.nodes.append(found)
+ self.waiting.append(found)
+ break
+ if not found:
+ if not filename in self.names:
+ self.names.append(filename)
+ def get_strings(self,code):
+ self.module=''
+ lst=[]
+ mod_name=self.re_module.search(code)
+ if mod_name:
+ self.module=re.sub('\s+','',mod_name.group(1))
+ import_iterator=self.re_import.finditer(code)
+ if import_iterator:
+ for import_match in import_iterator:
+ import_match_str=re.sub('\s+','',import_match.group(1))
+ bindings_match=self.re_import_bindings.match(import_match_str)
+ if bindings_match:
+ import_match_str=bindings_match.group(1)
+ matches=import_match_str.split(',')
+ for match in matches:
+ alias_match=self.re_import_alias.match(match)
+ if alias_match:
+ match=alias_match.group(1)
+ lst.append(match)
+ return lst
+ def start(self,node):
+ self.waiting=[node]
+ while self.waiting:
+ nd=self.waiting.pop(0)
+ self.iter(nd)
+ def iter(self,node):
+ path=node.abspath()
+ code="".join(filter_comments(path))
+ names=self.get_strings(code)
+ for x in names:
+ if x in self.allnames:
+ continue
+ self.allnames.append(x)
+ self.tryfind(x)
+def scan(self):
+ env=self.env
+ gruik=d_parser(env,self.generator.includes_nodes)
+ node=self.inputs[0]
+ gruik.start(node)
+ nodes=gruik.nodes
+ names=gruik.names
+ return(nodes,names)
diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py
new file mode 100644
index 0000000..c54ab7a
--- /dev/null
+++ b/waflib/Tools/dbus.py
@@ -0,0 +1,29 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task,Errors
+from waflib.TaskGen import taskgen_method,before_method
+@taskgen_method
+def add_dbus_file(self,filename,prefix,mode):
+ if not hasattr(self,'dbus_lst'):
+ self.dbus_lst=[]
+ if not'process_dbus'in self.meths:
+ self.meths.append('process_dbus')
+ self.dbus_lst.append([filename,prefix,mode])
+@before_method('process_source')
+def process_dbus(self):
+ for filename,prefix,mode in getattr(self,'dbus_lst',[]):
+ node=self.path.find_resource(filename)
+ if not node:
+ raise Errors.WafError('file not found '+filename)
+ tsk=self.create_task('dbus_binding_tool',node,node.change_ext('.h'))
+ tsk.env.DBUS_BINDING_TOOL_PREFIX=prefix
+ tsk.env.DBUS_BINDING_TOOL_MODE=mode
+class dbus_binding_tool(Task.Task):
+ color='BLUE'
+ ext_out=['.h']
+ run_str='${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
+ shell=True
+def configure(conf):
+ conf.find_program('dbus-binding-tool',var='DBUS_BINDING_TOOL')
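The tool above attaches an add_dbus_file(filename, prefix, mode) method to task generators; process_dbus then runs dbus-binding-tool to produce a header next to the source before compilation. A minimal wscript sketch, assuming a C compiler is configured; the XML file, prefix and target names are hypothetical:

    def configure(conf):
        conf.load('gcc dbus')

    def build(bld):
        tg = bld.program(source='main.c', target='app')
        # schedules dbus-binding-tool to generate the matching .h before main.c is built
        tg.add_dbus_file('com.example.Service.xml', 'example_service', 'glib-server')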
diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py
new file mode 100644
index 0000000..2711628
--- /dev/null
+++ b/waflib/Tools/dmd.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+@conf
+def find_dmd(conf):
+ conf.find_program(['dmd','dmd2','ldc'],var='D')
+ out=conf.cmd_and_log(conf.env.D+['--help'])
+ if out.find("D Compiler v")==-1:
+ out=conf.cmd_and_log(conf.env.D+['-version'])
+ if out.find("based on DMD v1.")==-1:
+ conf.fatal("detected compiler is not dmd/ldc")
+@conf
+def common_flags_ldc(conf):
+ v=conf.env
+ v.DFLAGS=['-d-version=Posix']
+ v.LINKFLAGS=[]
+ v.DFLAGS_dshlib=['-relocation-model=pic']
+@conf
+def common_flags_dmd(conf):
+ v=conf.env
+ v.D_SRC_F=['-c']
+ v.D_TGT_F='-of%s'
+ v.D_LINKER=v.D
+ v.DLNK_SRC_F=''
+ v.DLNK_TGT_F='-of%s'
+ v.DINC_ST='-I%s'
+ v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+ v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s'
+ v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s'
+ v.LINKFLAGS_dprogram=['-quiet']
+ v.DFLAGS_dshlib=['-fPIC']
+ v.LINKFLAGS_dshlib=['-L-shared']
+ v.DHEADER_ext='.di'
+ v.DFLAGS_d_with_header=['-H','-Hf']
+ v.D_HDR_F='%s'
+def configure(conf):
+ conf.find_dmd()
+ if sys.platform=='win32':
+ out=conf.cmd_and_log(conf.env.D+['--help'])
+ if out.find('D Compiler v2.')>-1:
+ conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
+ conf.load('ar')
+ conf.load('d')
+ conf.common_flags_dmd()
+ conf.d_platform_flags()
+ if str(conf.env.D).find('ldc')>-1:
+ conf.common_flags_ldc()
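dmd.py locates dmd/dmd2/ldc, loads the ar and d tools and fills in the compile/link flag templates above. A minimal usage sketch; hello.d is a hypothetical source file:

    def configure(conf):
        conf.load('dmd')

    def build(bld):
        bld(features='d dprogram', source='hello.d', target='hello')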
diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py
new file mode 100644
index 0000000..f993e58
--- /dev/null
+++ b/waflib/Tools/errcheck.py
@@ -0,0 +1,175 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy','uses':'use',}
+meths_typos=['__call__','program','shlib','stlib','objects']
+import sys
+from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils
+from waflib.Tools import ccroot
+def check_same_targets(self):
+ mp=Utils.defaultdict(list)
+ uids={}
+ def check_task(tsk):
+ if not isinstance(tsk,Task.Task):
+ return
+ if hasattr(tsk,'no_errcheck_out'):
+ return
+ for node in tsk.outputs:
+ mp[node].append(tsk)
+ try:
+ uids[tsk.uid()].append(tsk)
+ except KeyError:
+ uids[tsk.uid()]=[tsk]
+ for g in self.groups:
+ for tg in g:
+ try:
+ for tsk in tg.tasks:
+ check_task(tsk)
+ except AttributeError:
+ check_task(tg)
+ dupe=False
+ for(k,v)in mp.items():
+ if len(v)>1:
+ dupe=True
+ msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"")
+ Logs.error(msg)
+ for x in v:
+ if Logs.verbose>1:
+ Logs.error(' %d. %r',1+v.index(x),x.generator)
+ else:
+ Logs.error(' %d. %r in %r',1+v.index(x),x.generator.name,getattr(x.generator,'path',None))
+ Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
+ if not dupe:
+ for(k,v)in uids.items():
+ if len(v)>1:
+ Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
+ tg_details=tsk.generator.name
+ if Logs.verbose>2:
+ tg_details=tsk.generator
+ for tsk in v:
+ Logs.error(' - object %r (%r) defined in %r',tsk.__class__.__name__,tsk,tg_details)
+def check_invalid_constraints(self):
+ feat=set()
+ for x in list(TaskGen.feats.values()):
+ feat.union(set(x))
+ for(x,y)in TaskGen.task_gen.prec.items():
+ feat.add(x)
+ feat.union(set(y))
+ ext=set()
+ for x in TaskGen.task_gen.mappings.values():
+ ext.add(x.__name__)
+ invalid=ext&feat
+ if invalid:
+ Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method',list(invalid))
+ for cls in list(Task.classes.values()):
+ if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str):
+ raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)'%(cls,cls.hcode))
+ for x in('before','after'):
+ for y in Utils.to_list(getattr(cls,x,[])):
+ if not Task.classes.get(y):
+ Logs.error('Erroneous order constraint %r=%r on task class %r',x,y,cls.__name__)
+ if getattr(cls,'rule',None):
+ Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")',cls.__name__)
+def replace(m):
+ oldcall=getattr(Build.BuildContext,m)
+ def call(self,*k,**kw):
+ ret=oldcall(self,*k,**kw)
+ for x in typos:
+ if x in kw:
+ if x=='iscopy'and'subst'in getattr(self,'features',''):
+ continue
+ Logs.error('Fix the typo %r -> %r on %r',x,typos[x],ret)
+ return ret
+ setattr(Build.BuildContext,m,call)
+def enhance_lib():
+ for m in meths_typos:
+ replace(m)
+ def ant_glob(self,*k,**kw):
+ if k:
+ lst=Utils.to_list(k[0])
+ for pat in lst:
+ sp=pat.split('/')
+ if'..'in sp:
+ Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'",k[0])
+ if'.'in sp:
+ Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'",k[0])
+ return self.old_ant_glob(*k,**kw)
+ Node.Node.old_ant_glob=Node.Node.ant_glob
+ Node.Node.ant_glob=ant_glob
+ def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True,quiet=False):
+ if remove:
+ try:
+ if self.is_child_of(self.ctx.bldnode)and not quiet:
+ quiet=True
+ Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False',self)
+ except AttributeError:
+ pass
+ return self.old_ant_iter(accept,maxdepth,pats,dir,src,remove,quiet)
+ Node.Node.old_ant_iter=Node.Node.ant_iter
+ Node.Node.ant_iter=ant_iter
+ old=Task.is_before
+ def is_before(t1,t2):
+ ret=old(t1,t2)
+ if ret and old(t2,t1):
+ Logs.error('Contradictory order constraints in classes %r %r',t1,t2)
+ return ret
+ Task.is_before=is_before
+ def check_err_features(self):
+ lst=self.to_list(self.features)
+ if'shlib'in lst:
+ Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
+ for x in('c','cxx','d','fc'):
+ if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]:
+ Logs.error('%r features is probably missing %r',self,x)
+ TaskGen.feature('*')(check_err_features)
+ def check_err_order(self):
+ if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features):
+ for x in('before','after','ext_in','ext_out'):
+ if hasattr(self,x):
+ Logs.warn('Erroneous order constraint %r on non-rule based task generator %r',x,self)
+ else:
+ for x in('before','after'):
+ for y in self.to_list(getattr(self,x,[])):
+ if not Task.classes.get(y):
+ Logs.error('Erroneous order constraint %s=%r on %r (no such class)',x,y,self)
+ TaskGen.feature('*')(check_err_order)
+ def check_compile(self):
+ check_invalid_constraints(self)
+ try:
+ ret=self.orig_compile()
+ finally:
+ check_same_targets(self)
+ return ret
+ Build.BuildContext.orig_compile=Build.BuildContext.compile
+ Build.BuildContext.compile=check_compile
+ def use_rec(self,name,**kw):
+ try:
+ y=self.bld.get_tgen_by_name(name)
+ except Errors.WafError:
+ pass
+ else:
+ idx=self.bld.get_group_idx(self)
+ odx=self.bld.get_group_idx(y)
+ if odx>idx:
+ msg="Invalid 'use' across build groups:"
+ if Logs.verbose>1:
+ msg+='\n target %r\n uses:\n %r'%(self,y)
+ else:
+ msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name)
+ raise Errors.WafError(msg)
+ self.orig_use_rec(name,**kw)
+ TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec
+ TaskGen.task_gen.use_rec=use_rec
+ def _getattr(self,name,default=None):
+ if name=='append'or name=='add':
+ raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
+ elif name=='prepend':
+ raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
+ if name in self.__slots__:
+ return super(ConfigSet.ConfigSet,self).__getattr__(name,default)
+ else:
+ return self[name]
+ ConfigSet.ConfigSet.__getattr__=_getattr
+def options(opt):
+ enhance_lib()
diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py
new file mode 100644
index 0000000..7e63b7c
--- /dev/null
+++ b/waflib/Tools/fc.py
@@ -0,0 +1,108 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Utils,Task,Errors
+from waflib.Tools import ccroot,fc_config,fc_scan
+from waflib.TaskGen import extension
+from waflib.Configure import conf
+ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES','FCPPFLAGS'])
+ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
+ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS'])
+@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
+def fc_hook(self,node):
+ return self.create_compiled_task('fc',node)
+@conf
+def modfile(conf,name):
+ return{'lower':name.lower()+'.mod','lower.MOD':name.lower()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower']
+def get_fortran_tasks(tsk):
+ bld=tsk.generator.bld
+ tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator))
+ return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)]
+class fc(Task.Task):
+ color='GREEN'
+ run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
+ vars=["FORTRANMODPATHFLAG"]
+ def scan(self):
+ tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
+ tmp.task=self
+ tmp.start(self.inputs[0])
+ return(tmp.nodes,tmp.names)
+ def runnable_status(self):
+ if getattr(self,'mod_fortran_done',None):
+ return super(fc,self).runnable_status()
+ bld=self.generator.bld
+ lst=get_fortran_tasks(self)
+ for tsk in lst:
+ tsk.mod_fortran_done=True
+ for tsk in lst:
+ ret=tsk.runnable_status()
+ if ret==Task.ASK_LATER:
+ for x in lst:
+ x.mod_fortran_done=None
+ return Task.ASK_LATER
+ ins=Utils.defaultdict(set)
+ outs=Utils.defaultdict(set)
+ for tsk in lst:
+ key=tsk.uid()
+ for x in bld.raw_deps[key]:
+ if x.startswith('MOD@'):
+ name=bld.modfile(x.replace('MOD@',''))
+ node=bld.srcnode.find_or_declare(name)
+ tsk.set_outputs(node)
+ outs[node].add(tsk)
+ for tsk in lst:
+ key=tsk.uid()
+ for x in bld.raw_deps[key]:
+ if x.startswith('USE@'):
+ name=bld.modfile(x.replace('USE@',''))
+ node=bld.srcnode.find_resource(name)
+ if node and node not in tsk.outputs:
+ if not node in bld.node_deps[key]:
+ bld.node_deps[key].append(node)
+ ins[node].add(tsk)
+ for k in ins.keys():
+ for a in ins[k]:
+ a.run_after.update(outs[k])
+ tmp=[]
+ for t in outs[k]:
+ tmp.extend(t.outputs)
+ a.dep_nodes.extend(tmp)
+ a.dep_nodes.sort(key=lambda x:x.abspath())
+ for tsk in lst:
+ try:
+ delattr(tsk,'cache_sig')
+ except AttributeError:
+ pass
+ return super(fc,self).runnable_status()
+class fcprogram(ccroot.link_task):
+ color='YELLOW'
+ run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
+ inst_to='${BINDIR}'
+class fcshlib(fcprogram):
+ inst_to='${LIBDIR}'
+class fcstlib(ccroot.stlink_task):
+ pass
+class fcprogram_test(fcprogram):
+ def runnable_status(self):
+ ret=super(fcprogram_test,self).runnable_status()
+ if ret==Task.SKIP_ME:
+ ret=Task.RUN_ME
+ return ret
+ def exec_command(self,cmd,**kw):
+ bld=self.generator.bld
+ kw['shell']=isinstance(cmd,str)
+ kw['stdout']=kw['stderr']=Utils.subprocess.PIPE
+ kw['cwd']=self.get_cwd()
+ bld.out=bld.err=''
+ bld.to_log('command: %s\n'%cmd)
+ kw['output']=0
+ try:
+ (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw)
+ except Errors.WafError:
+ return-1
+ if bld.out:
+ bld.to_log('out: %s\n'%bld.out)
+ if bld.err:
+ bld.to_log('err: %s\n'%bld.err)
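fc.py maps Fortran sources (.f, .f90, ...) onto fc compile tasks and provides the fcprogram/fcshlib/fcstlib link tasks; module dependencies are ordered in runnable_status() from the MOD@/USE@ names collected by the scanner in fc_scan.py. A minimal usage sketch, assuming one of the Fortran compiler tools from this patch (for example gfortran, further below) is used; file and target names are hypothetical:

    def configure(conf):
        conf.load('gfortran')

    def build(bld):
        bld(features='fc fcstlib',   source='utils.f90', target='utils')
        bld(features='fc fcprogram', source='main.f90',  target='app', use='utils')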
diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py
new file mode 100644
index 0000000..d0d4c45
--- /dev/null
+++ b/waflib/Tools/fc_config.py
@@ -0,0 +1,299 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re,os,sys,shlex
+from waflib.Configure import conf
+from waflib.TaskGen import feature,before_method
+FC_FRAGMENT=' program main\n end program main\n'
+FC_FRAGMENT2=' PROGRAM MAIN\n END\n'
+@conf
+def fc_flags(conf):
+ v=conf.env
+ v.FC_SRC_F=[]
+ v.FC_TGT_F=['-c','-o']
+ v.FCINCPATH_ST='-I%s'
+ v.FCDEFINES_ST='-D%s'
+ if not v.LINK_FC:
+ v.LINK_FC=v.FC
+ v.FCLNK_SRC_F=[]
+ v.FCLNK_TGT_F=['-o']
+ v.FCFLAGS_fcshlib=['-fpic']
+ v.LINKFLAGS_fcshlib=['-shared']
+ v.fcshlib_PATTERN='lib%s.so'
+ v.fcstlib_PATTERN='lib%s.a'
+ v.FCLIB_ST='-l%s'
+ v.FCLIBPATH_ST='-L%s'
+ v.FCSTLIB_ST='-l%s'
+ v.FCSTLIBPATH_ST='-L%s'
+ v.FCSTLIB_MARKER='-Wl,-Bstatic'
+ v.FCSHLIB_MARKER='-Wl,-Bdynamic'
+ v.SONAME_ST='-Wl,-h,%s'
+@conf
+def fc_add_flags(conf):
+ conf.add_os_flags('FCPPFLAGS',dup=False)
+ conf.add_os_flags('FCFLAGS',dup=False)
+ conf.add_os_flags('LINKFLAGS',dup=False)
+ conf.add_os_flags('LDFLAGS',dup=False)
+@conf
+def check_fortran(self,*k,**kw):
+ self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app')
+@conf
+def check_fc(self,*k,**kw):
+ kw['compiler']='fc'
+ if not'compile_mode'in kw:
+ kw['compile_mode']='fc'
+ if not'type'in kw:
+ kw['type']='fcprogram'
+ if not'compile_filename'in kw:
+ kw['compile_filename']='test.f90'
+ if not'code'in kw:
+ kw['code']=FC_FRAGMENT
+ return self.check(*k,**kw)
+@conf
+def fortran_modifier_darwin(conf):
+ v=conf.env
+ v.FCFLAGS_fcshlib=['-fPIC']
+ v.LINKFLAGS_fcshlib=['-dynamiclib']
+ v.fcshlib_PATTERN='lib%s.dylib'
+ v.FRAMEWORKPATH_ST='-F%s'
+ v.FRAMEWORK_ST=['-framework']
+ v.LINKFLAGS_fcstlib=[]
+ v.FCSHLIB_MARKER=''
+ v.FCSTLIB_MARKER=''
+ v.SONAME_ST=''
+@conf
+def fortran_modifier_win32(conf):
+ v=conf.env
+ v.fcprogram_PATTERN=v.fcprogram_test_PATTERN='%s.exe'
+ v.fcshlib_PATTERN='%s.dll'
+ v.implib_PATTERN='%s.dll.a'
+ v.IMPLIB_ST='-Wl,--out-implib,%s'
+ v.FCFLAGS_fcshlib=[]
+ v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
+def fortran_modifier_cygwin(conf):
+ fortran_modifier_win32(conf)
+ v=conf.env
+ v.fcshlib_PATTERN='cyg%s.dll'
+ v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base'])
+ v.FCFLAGS_fcshlib=[]
+@conf
+def check_fortran_dummy_main(self,*k,**kw):
+ if not self.env.CC:
+ self.fatal('A c compiler is required for check_fortran_dummy_main')
+ lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN']
+ lst.extend([m.lower()for m in lst])
+ lst.append('')
+ self.start_msg('Detecting whether we need a dummy main')
+ for main in lst:
+ kw['fortran_main']=main
+ try:
+ self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True)
+ if not main:
+ self.env.FC_MAIN=-1
+ self.end_msg('no')
+ else:
+ self.env.FC_MAIN=main
+ self.end_msg('yes %s'%main)
+ break
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ self.end_msg('not found')
+ self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
+GCC_DRIVER_LINE=re.compile('^Driving:')
+POSIX_STATIC_EXT=re.compile('\S+\.a')
+POSIX_LIB_FLAGS=re.compile('-l\S+')
+@conf
+def is_link_verbose(self,txt):
+ assert isinstance(txt,str)
+ for line in txt.splitlines():
+ if not GCC_DRIVER_LINE.search(line):
+ if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line):
+ return True
+ return False
+@conf
+def check_fortran_verbose_flag(self,*k,**kw):
+ self.start_msg('fortran link verbose flag')
+ for x in('-v','--verbose','-verbose','-V'):
+ try:
+ self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out):
+ self.end_msg(x)
+ break
+ else:
+ self.end_msg('failure')
+ self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
+ self.env.FC_VERBOSE_FLAG=x
+ return x
+LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*']
+if os.name=='nt':
+ LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname'])
+else:
+ LINKFLAGS_IGNORED.append(r'-lgcc*')
+RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED]
+def _match_ignore(line):
+ for i in RLINKFLAGS_IGNORED:
+ if i.match(line):
+ return True
+ return False
+def parse_fortran_link(lines):
+ final_flags=[]
+ for line in lines:
+ if not GCC_DRIVER_LINE.match(line):
+ _parse_flink_line(line,final_flags)
+ return final_flags
+SPACE_OPTS=re.compile('^-[LRuYz]$')
+NOSPACE_OPTS=re.compile('^-[RL]')
+def _parse_flink_token(lexer,token,tmp_flags):
+ if _match_ignore(token):
+ pass
+ elif token.startswith('-lkernel32')and sys.platform=='cygwin':
+ tmp_flags.append(token)
+ elif SPACE_OPTS.match(token):
+ t=lexer.get_token()
+ if t.startswith('P,'):
+ t=t[2:]
+ for opt in t.split(os.pathsep):
+ tmp_flags.append('-L%s'%opt)
+ elif NOSPACE_OPTS.match(token):
+ tmp_flags.append(token)
+ elif POSIX_LIB_FLAGS.match(token):
+ tmp_flags.append(token)
+ else:
+ pass
+ t=lexer.get_token()
+ return t
+def _parse_flink_line(line,final_flags):
+ lexer=shlex.shlex(line,posix=True)
+ lexer.whitespace_split=True
+ t=lexer.get_token()
+ tmp_flags=[]
+ while t:
+ t=_parse_flink_token(lexer,t,tmp_flags)
+ final_flags.extend(tmp_flags)
+ return final_flags
+@conf
+def check_fortran_clib(self,autoadd=True,*k,**kw):
+ if not self.env.FC_VERBOSE_FLAG:
+ self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
+ self.start_msg('Getting fortran runtime link flags')
+ try:
+ self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG])
+ except Exception:
+ self.end_msg(False)
+ if kw.get('mandatory',True):
+ self.fatal('Could not find the c library flags')
+ else:
+ out=self.test_bld.err
+ flags=parse_fortran_link(out.splitlines())
+ self.end_msg('ok (%s)'%' '.join(flags))
+ self.env.LINKFLAGS_CLIB=flags
+ return flags
+ return[]
+def getoutput(conf,cmd,stdin=False):
+ from waflib import Errors
+ if conf.env.env:
+ env=conf.env.env
+ else:
+ env=dict(os.environ)
+ env['LANG']='C'
+ input=stdin and'\n'.encode()or None
+ try:
+ out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input)
+ except Errors.WafError as e:
+ if not(hasattr(e,'stderr')and hasattr(e,'stdout')):
+ raise e
+ else:
+ out=e.stdout
+ err=e.stderr
+ except Exception:
+ conf.fatal('could not determine the compiler version %r'%cmd)
+ return(out,err)
+ROUTINES_CODE="""\
+ subroutine foobar()
+ return
+ end
+ subroutine foo_bar()
+ return
+ end
+"""
+MAIN_CODE="""
+void %(dummy_func_nounder)s(void);
+void %(dummy_func_under)s(void);
+int %(main_func_name)s() {
+ %(dummy_func_nounder)s();
+ %(dummy_func_under)s();
+ return 0;
+}
+"""
+@feature('link_main_routines_func')
+@before_method('process_source')
+def link_main_routines_tg_method(self):
+ def write_test_file(task):
+ task.outputs[0].write(task.generator.code)
+ bld=self.bld
+ bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__)
+ bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE)
+ bld(features='fc fcstlib',source='test.f',target='test')
+ bld(features='c fcprogram',source='main.c',target='app',use='test')
+def mangling_schemes():
+ for u in('_',''):
+ for du in('','_'):
+ for c in("lower","upper"):
+ yield(u,du,c)
+def mangle_name(u,du,c,name):
+ return getattr(name,c)()+u+(name.find('_')!=-1 and du or'')
+@conf
+def check_fortran_mangling(self,*k,**kw):
+ if not self.env.CC:
+ self.fatal('A c compiler is required for link_main_routines')
+ if not self.env.FC:
+ self.fatal('A fortran compiler is required for link_main_routines')
+ if not self.env.FC_MAIN:
+ self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
+ self.start_msg('Getting fortran mangling scheme')
+ for(u,du,c)in mangling_schemes():
+ try:
+ self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',dummy_func_nounder=mangle_name(u,du,c,'foobar'),dummy_func_under=mangle_name(u,du,c,'foo_bar'),main_func_name=self.env.FC_MAIN)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c))
+ self.env.FORTRAN_MANGLING=(u,du,c)
+ break
+ else:
+ self.end_msg(False)
+ self.fatal('mangler not found')
+ return(u,du,c)
+@feature('pyext')
+@before_method('propagate_uselib_vars','apply_link')
+def set_lib_pat(self):
+ self.env.fcshlib_PATTERN=self.env.pyext_PATTERN
+@conf
+def detect_openmp(self):
+ for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
+ try:
+ self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP')
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ break
+ else:
+ self.fatal('Could not find OpenMP')
+@conf
+def check_gfortran_o_space(self):
+ if self.env.FC_NAME!='GFORTRAN'or int(self.env.FC_VERSION[0])>4:
+ return
+ self.env.stash()
+ self.env.FCLNK_TGT_F=['-o','']
+ try:
+ self.check_fc(msg='Checking if the -o link must be split from arguments',fragment=FC_FRAGMENT,features='fc fcshlib')
+ except self.errors.ConfigurationError:
+ self.env.revert()
+ else:
+ self.env.commit()
diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py
new file mode 100644
index 0000000..c07a22d
--- /dev/null
+++ b/waflib/Tools/fc_scan.py
@@ -0,0 +1,64 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
+USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
+re_inc=re.compile(INC_REGEX,re.I)
+re_use=re.compile(USE_REGEX,re.I)
+re_mod=re.compile(MOD_REGEX,re.I)
+class fortran_parser(object):
+ def __init__(self,incpaths):
+ self.seen=[]
+ self.nodes=[]
+ self.names=[]
+ self.incpaths=incpaths
+ def find_deps(self,node):
+ txt=node.read()
+ incs=[]
+ uses=[]
+ mods=[]
+ for line in txt.splitlines():
+ m=re_inc.search(line)
+ if m:
+ incs.append(m.group(1))
+ m=re_use.search(line)
+ if m:
+ uses.append(m.group(1))
+ m=re_mod.search(line)
+ if m:
+ mods.append(m.group(1))
+ return(incs,uses,mods)
+ def start(self,node):
+ self.waiting=[node]
+ while self.waiting:
+ nd=self.waiting.pop(0)
+ self.iter(nd)
+ def iter(self,node):
+ incs,uses,mods=self.find_deps(node)
+ for x in incs:
+ if x in self.seen:
+ continue
+ self.seen.append(x)
+ self.tryfind_header(x)
+ for x in uses:
+ name="USE@%s"%x
+ if not name in self.names:
+ self.names.append(name)
+ for x in mods:
+ name="MOD@%s"%x
+ if not name in self.names:
+ self.names.append(name)
+ def tryfind_header(self,filename):
+ found=None
+ for n in self.incpaths:
+ found=n.find_resource(filename)
+ if found:
+ self.nodes.append(found)
+ self.waiting.append(found)
+ break
+ if not found:
+ if not filename in self.names:
+ self.names.append(filename)
diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py
new file mode 100644
index 0000000..1f1620e
--- /dev/null
+++ b/waflib/Tools/flex.py
@@ -0,0 +1,38 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Task,TaskGen
+from waflib.Tools import ccroot
+def decide_ext(self,node):
+ if'cxx'in self.features:
+ return['.lex.cc']
+ return['.lex.c']
+def flexfun(tsk):
+ env=tsk.env
+ bld=tsk.generator.bld
+ wd=bld.variant_dir
+ def to_list(xx):
+ if isinstance(xx,str):
+ return[xx]
+ return xx
+ tsk.last_cmd=lst=[]
+ lst.extend(to_list(env.FLEX))
+ lst.extend(to_list(env.FLEXFLAGS))
+ inputs=[a.path_from(tsk.get_cwd())for a in tsk.inputs]
+ if env.FLEX_MSYS:
+ inputs=[x.replace(os.sep,'/')for x in inputs]
+ lst.extend(inputs)
+ lst=[x for x in lst if x]
+ txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
+ tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n'))
+TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,)
+Task.classes['flex'].vars=['FLEXFLAGS','FLEX']
+ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
+ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
+def configure(conf):
+ conf.find_program('flex',var='FLEX')
+ conf.env.FLEXFLAGS=['-t']
+ if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]):
+ conf.env.FLEX_MSYS=True
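flex.py declares a chain so that .l sources are passed through flex and the generated .lex.c (or .lex.cc when the cxx feature is present) is compiled along with the other sources. Usage amounts to listing the .l file as an ordinary source; a sketch with hypothetical file names:

    def configure(conf):
        conf.load('gcc flex')

    def build(bld):
        bld.program(source='scanner.l main.c', target='scanner')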
diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py
new file mode 100644
index 0000000..b62adcd
--- /dev/null
+++ b/waflib/Tools/g95.py
@@ -0,0 +1,54 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan,ar
+from waflib.Configure import conf
+@conf
+def find_g95(conf):
+ fc=conf.find_program('g95',var='FC')
+ conf.get_g95_version(fc)
+ conf.env.FC_NAME='G95'
+@conf
+def g95_flags(conf):
+ v=conf.env
+ v.FCFLAGS_fcshlib=['-fPIC']
+ v.FORTRANMODFLAG=['-fmod=','']
+ v.FCFLAGS_DEBUG=['-Werror']
+@conf
+def g95_modifier_win32(conf):
+ fc_config.fortran_modifier_win32(conf)
+@conf
+def g95_modifier_cygwin(conf):
+ fc_config.fortran_modifier_cygwin(conf)
+@conf
+def g95_modifier_darwin(conf):
+ fc_config.fortran_modifier_darwin(conf)
+@conf
+def g95_modifier_platform(conf):
+ dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
+ g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None)
+ if g95_modifier_func:
+ g95_modifier_func()
+@conf
+def get_g95_version(conf,fc):
+ version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
+ cmd=fc+['--version']
+ out,err=fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match=version_re(out)
+ else:
+ match=version_re(err)
+ if not match:
+ conf.fatal('cannot determine g95 version')
+ k=match.groupdict()
+ conf.env.FC_VERSION=(k['major'],k['minor'])
+def configure(conf):
+ conf.find_g95()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.g95_flags()
+ conf.g95_modifier_platform()
diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py
new file mode 100644
index 0000000..4817c23
--- /dev/null
+++ b/waflib/Tools/gas.py
@@ -0,0 +1,12 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import waflib.Tools.asm
+from waflib.Tools import ar
+def configure(conf):
+ conf.find_program(['gas','gcc'],var='AS')
+ conf.env.AS_TGT_F=['-c','-o']
+ conf.env.ASLNK_TGT_F=['-o']
+ conf.find_ar()
+ conf.load('asm')
diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py
new file mode 100644
index 0000000..12afcc6
--- /dev/null
+++ b/waflib/Tools/gcc.py
@@ -0,0 +1,104 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_gcc(conf):
+ cc=conf.find_program(['gcc','cc'],var='CC')
+ conf.get_cc_version(cc,gcc=True)
+ conf.env.CC_NAME='gcc'
+@conf
+def gcc_common_flags(conf):
+ v=conf.env
+ v.CC_SRC_F=[]
+ v.CC_TGT_F=['-c','-o']
+ if not v.LINK_CC:
+ v.LINK_CC=v.CC
+ v.CCLNK_SRC_F=[]
+ v.CCLNK_TGT_F=['-o']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.RPATH_ST='-Wl,-rpath,%s'
+ v.SONAME_ST='-Wl,-h,%s'
+ v.SHLIB_MARKER='-Wl,-Bdynamic'
+ v.STLIB_MARKER='-Wl,-Bstatic'
+ v.cprogram_PATTERN='%s'
+ v.CFLAGS_cshlib=['-fPIC']
+ v.LINKFLAGS_cshlib=['-shared']
+ v.cshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cstlib=['-Wl,-Bstatic']
+ v.cstlib_PATTERN='lib%s.a'
+ v.LINKFLAGS_MACBUNDLE=['-bundle','-undefined','dynamic_lookup']
+ v.CFLAGS_MACBUNDLE=['-fPIC']
+ v.macbundle_PATTERN='%s.bundle'
+@conf
+def gcc_modifier_win32(conf):
+ v=conf.env
+ v.cprogram_PATTERN='%s.exe'
+ v.cshlib_PATTERN='%s.dll'
+ v.implib_PATTERN='%s.dll.a'
+ v.IMPLIB_ST='-Wl,--out-implib,%s'
+ v.CFLAGS_cshlib=[]
+ v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
+def gcc_modifier_cygwin(conf):
+ gcc_modifier_win32(conf)
+ v=conf.env
+ v.cshlib_PATTERN='cyg%s.dll'
+ v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
+ v.CFLAGS_cshlib=[]
+@conf
+def gcc_modifier_darwin(conf):
+ v=conf.env
+ v.CFLAGS_cshlib=['-fPIC']
+ v.LINKFLAGS_cshlib=['-dynamiclib']
+ v.cshlib_PATTERN='lib%s.dylib'
+ v.FRAMEWORKPATH_ST='-F%s'
+ v.FRAMEWORK_ST=['-framework']
+ v.ARCH_ST=['-arch']
+ v.LINKFLAGS_cstlib=[]
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.SONAME_ST=[]
+@conf
+def gcc_modifier_aix(conf):
+ v=conf.env
+ v.LINKFLAGS_cprogram=['-Wl,-brtl']
+ v.LINKFLAGS_cshlib=['-shared','-Wl,-brtl,-bexpfull']
+ v.SHLIB_MARKER=[]
+@conf
+def gcc_modifier_hpux(conf):
+ v=conf.env
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.CFLAGS_cshlib=['-fPIC','-DPIC']
+ v.cshlib_PATTERN='lib%s.sl'
+@conf
+def gcc_modifier_openbsd(conf):
+ conf.env.SONAME_ST=[]
+@conf
+def gcc_modifier_osf1V(conf):
+ v=conf.env
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.SONAME_ST=[]
+@conf
+def gcc_modifier_platform(conf):
+ gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
+ if gcc_modifier_func:
+ gcc_modifier_func()
+def configure(conf):
+ conf.find_gcc()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
+ conf.check_gcc_o_space()
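gcc.py is the usual entry point for C builds: it finds gcc/cc, applies the platform modifiers above and pulls in the generic c support through cc_load_tools(). A minimal sketch; main.c is a hypothetical source:

    def configure(conf):
        conf.load('gcc')

    def build(bld):
        bld.program(source='main.c', target='app')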
diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py
new file mode 100644
index 0000000..c809930
--- /dev/null
+++ b/waflib/Tools/gdc.py
@@ -0,0 +1,35 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+@conf
+def find_gdc(conf):
+ conf.find_program('gdc',var='D')
+ out=conf.cmd_and_log(conf.env.D+['--version'])
+ if out.find("gdc")==-1:
+ conf.fatal("detected compiler is not gdc")
+@conf
+def common_flags_gdc(conf):
+ v=conf.env
+ v.DFLAGS=[]
+ v.D_SRC_F=['-c']
+ v.D_TGT_F='-o%s'
+ v.D_LINKER=v.D
+ v.DLNK_SRC_F=''
+ v.DLNK_TGT_F='-o%s'
+ v.DINC_ST='-I%s'
+ v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+ v.DSTLIB_ST=v.DSHLIB_ST='-l%s'
+ v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L%s'
+ v.LINKFLAGS_dshlib=['-shared']
+ v.DHEADER_ext='.di'
+ v.DFLAGS_d_with_header='-fintfc'
+ v.D_HDR_F='-fintfc-file=%s'
+def configure(conf):
+ conf.find_gdc()
+ conf.load('ar')
+ conf.load('d')
+ conf.common_flags_gdc()
+ conf.d_platform_flags()
diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py
new file mode 100644
index 0000000..47d005a
--- /dev/null
+++ b/waflib/Tools/gfortran.py
@@ -0,0 +1,71 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Utils
+from waflib.Tools import fc,fc_config,fc_scan,ar
+from waflib.Configure import conf
+@conf
+def find_gfortran(conf):
+ fc=conf.find_program(['gfortran','g77'],var='FC')
+ conf.get_gfortran_version(fc)
+ conf.env.FC_NAME='GFORTRAN'
+@conf
+def gfortran_flags(conf):
+ v=conf.env
+ v.FCFLAGS_fcshlib=['-fPIC']
+ v.FORTRANMODFLAG=['-J','']
+ v.FCFLAGS_DEBUG=['-Werror']
+@conf
+def gfortran_modifier_win32(conf):
+ fc_config.fortran_modifier_win32(conf)
+@conf
+def gfortran_modifier_cygwin(conf):
+ fc_config.fortran_modifier_cygwin(conf)
+@conf
+def gfortran_modifier_darwin(conf):
+ fc_config.fortran_modifier_darwin(conf)
+@conf
+def gfortran_modifier_platform(conf):
+ dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
+ gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None)
+ if gfortran_modifier_func:
+ gfortran_modifier_func()
+@conf
+def get_gfortran_version(conf,fc):
+ version_re=re.compile(r"GNU\s*Fortran",re.I).search
+ cmd=fc+['--version']
+ out,err=fc_config.getoutput(conf,cmd,stdin=False)
+ if out:
+ match=version_re(out)
+ else:
+ match=version_re(err)
+ if not match:
+ conf.fatal('Could not determine the compiler type')
+ cmd=fc+['-dM','-E','-']
+ out,err=fc_config.getoutput(conf,cmd,stdin=True)
+ if out.find('__GNUC__')<0:
+ conf.fatal('Could not determine the compiler type')
+ k={}
+ out=out.splitlines()
+ import shlex
+ for line in out:
+ lst=shlex.split(line)
+ if len(lst)>2:
+ key=lst[1]
+ val=lst[2]
+ k[key]=val
+ def isD(var):
+ return var in k
+ def isT(var):
+ return var in k and k[var]!='0'
+ conf.env.FC_VERSION=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
+def configure(conf):
+ conf.find_gfortran()
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.gfortran_flags()
+ conf.gfortran_modifier_platform()
+ conf.check_gfortran_o_space()
diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py
new file mode 100644
index 0000000..ba5a71e
--- /dev/null
+++ b/waflib/Tools/glib2.py
@@ -0,0 +1,242 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os
+import functools
+from waflib import Context,Task,Utils,Options,Errors,Logs
+from waflib.TaskGen import taskgen_method,before_method,feature,extension
+from waflib.Configure import conf
+@taskgen_method
+def add_marshal_file(self,filename,prefix):
+ if not hasattr(self,'marshal_list'):
+ self.marshal_list=[]
+ self.meths.append('process_marshal')
+ self.marshal_list.append((filename,prefix))
+@before_method('process_source')
+def process_marshal(self):
+ for f,prefix in getattr(self,'marshal_list',[]):
+ node=self.path.find_resource(f)
+ if not node:
+ raise Errors.WafError('file not found %r'%f)
+ h_node=node.change_ext('.h')
+ c_node=node.change_ext('.c')
+ task=self.create_task('glib_genmarshal',node,[h_node,c_node])
+ task.env.GLIB_GENMARSHAL_PREFIX=prefix
+ self.source=self.to_nodes(getattr(self,'source',[]))
+ self.source.append(c_node)
+class glib_genmarshal(Task.Task):
+ vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
+ color='BLUE'
+ ext_out=['.h']
+ def run(self):
+ bld=self.generator.bld
+ get=self.env.get_flat
+ cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath())
+ ret=bld.exec_command(cmd1)
+ if ret:
+ return ret
+ c='''#include "%s"\n'''%self.outputs[0].name
+ self.outputs[1].write(c)
+ cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath())
+ return bld.exec_command(cmd2)
+@taskgen_method
+def add_enums_from_template(self,source='',target='',template='',comments=''):
+ if not hasattr(self,'enums_list'):
+ self.enums_list=[]
+ self.meths.append('process_enums')
+ self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments})
+@taskgen_method
+def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
+ if not hasattr(self,'enums_list'):
+ self.enums_list=[]
+ self.meths.append('process_enums')
+ self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments})
+@before_method('process_source')
+def process_enums(self):
+ for enum in getattr(self,'enums_list',[]):
+ task=self.create_task('glib_mkenums')
+ env=task.env
+ inputs=[]
+ source_list=self.to_list(enum['source'])
+ if not source_list:
+ raise Errors.WafError('missing source '+str(enum))
+ source_list=[self.path.find_resource(k)for k in source_list]
+ inputs+=source_list
+ env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list]
+ if not enum['target']:
+ raise Errors.WafError('missing target '+str(enum))
+ tgt_node=self.path.find_or_declare(enum['target'])
+ if tgt_node.name.endswith('.c'):
+ self.source.append(tgt_node)
+ env.GLIB_MKENUMS_TARGET=tgt_node.abspath()
+ options=[]
+ if enum['template']:
+ template_node=self.path.find_resource(enum['template'])
+ options.append('--template %s'%(template_node.abspath()))
+ inputs.append(template_node)
+ params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
+ for param,option in params.items():
+ if enum[param]:
+ options.append('%s %r'%(option,enum[param]))
+ env.GLIB_MKENUMS_OPTIONS=' '.join(options)
+ task.set_inputs(inputs)
+ task.set_outputs(tgt_node)
+class glib_mkenums(Task.Task):
+ run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
+ color='PINK'
+ ext_out=['.h']
+@taskgen_method
+def add_settings_schemas(self,filename_list):
+ if not hasattr(self,'settings_schema_files'):
+ self.settings_schema_files=[]
+ if not isinstance(filename_list,list):
+ filename_list=[filename_list]
+ self.settings_schema_files.extend(filename_list)
+@taskgen_method
+def add_settings_enums(self,namespace,filename_list):
+ if hasattr(self,'settings_enum_namespace'):
+ raise Errors.WafError("Tried to add gsettings enums to %r more than once"%self.name)
+ self.settings_enum_namespace=namespace
+ if not isinstance(filename_list,list):
+ filename_list=[filename_list]
+ self.settings_enum_files=filename_list
+@feature('glib2')
+def process_settings(self):
+ enums_tgt_node=[]
+ install_files=[]
+ settings_schema_files=getattr(self,'settings_schema_files',[])
+ if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
+ raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
+ if hasattr(self,'settings_enum_files'):
+ enums_task=self.create_task('glib_mkenums')
+ source_list=self.settings_enum_files
+ source_list=[self.path.find_resource(k)for k in source_list]
+ enums_task.set_inputs(source_list)
+ enums_task.env.GLIB_MKENUMS_SOURCE=[k.abspath()for k in source_list]
+ target=self.settings_enum_namespace+'.enums.xml'
+ tgt_node=self.path.find_or_declare(target)
+ enums_task.set_outputs(tgt_node)
+ enums_task.env.GLIB_MKENUMS_TARGET=tgt_node.abspath()
+ enums_tgt_node=[tgt_node]
+ install_files.append(tgt_node)
+ options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace)
+ enums_task.env.GLIB_MKENUMS_OPTIONS=options
+ for schema in settings_schema_files:
+ schema_task=self.create_task('glib_validate_schema')
+ schema_node=self.path.find_resource(schema)
+ if not schema_node:
+ raise Errors.WafError("Cannot find the schema file %r"%schema)
+ install_files.append(schema_node)
+ source_list=enums_tgt_node+[schema_node]
+ schema_task.set_inputs(source_list)
+ schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS=[("--schema-file="+k.abspath())for k in source_list]
+ target_node=schema_node.change_ext('.xml.valid')
+ schema_task.set_outputs(target_node)
+ schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT=target_node.abspath()
+ def compile_schemas_callback(bld):
+ if not bld.is_install:
+ return
+ compile_schemas=Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
+ destdir=Options.options.destdir
+ paths=bld._compile_schemas_registered
+ if destdir:
+ paths=(os.path.join(destdir,path.lstrip(os.sep))for path in paths)
+ for path in paths:
+ Logs.pprint('YELLOW','Updating GSettings schema cache %r'%path)
+ if self.bld.exec_command(compile_schemas+[path]):
+ Logs.warn('Could not update GSettings schema cache %r'%path)
+ if self.bld.is_install:
+ schemadir=self.env.GSETTINGSSCHEMADIR
+ if not schemadir:
+ raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
+ if install_files:
+ self.add_install_files(install_to=schemadir,install_from=install_files)
+ registered_schemas=getattr(self.bld,'_compile_schemas_registered',None)
+ if not registered_schemas:
+ registered_schemas=self.bld._compile_schemas_registered=set()
+ self.bld.add_post_fun(compile_schemas_callback)
+ registered_schemas.add(schemadir)
+class glib_validate_schema(Task.Task):
+ run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
+ color='PINK'
+@extension('.gresource.xml')
+def process_gresource_source(self,node):
+ if not self.env.GLIB_COMPILE_RESOURCES:
+ raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure")
+ if'gresource'in self.features:
+ return
+ h_node=node.change_ext('_xml.h')
+ c_node=node.change_ext('_xml.c')
+ self.create_task('glib_gresource_source',node,[h_node,c_node])
+ self.source.append(c_node)
+@feature('gresource')
+def process_gresource_bundle(self):
+ for i in self.to_list(self.source):
+ node=self.path.find_resource(i)
+ task=self.create_task('glib_gresource_bundle',node,node.change_ext(''))
+ inst_to=getattr(self,'install_path',None)
+ if inst_to:
+ self.add_install_files(install_to=inst_to,install_from=task.outputs)
+class glib_gresource_base(Task.Task):
+ color='BLUE'
+ base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
+ def scan(self):
+ bld=self.generator.bld
+ kw={}
+ kw['cwd']=self.get_cwd()
+ kw['quiet']=Context.BOTH
+ cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env)
+ output=bld.cmd_and_log(cmd,**kw)
+ nodes=[]
+ names=[]
+ for dep in output.splitlines():
+ if dep:
+ node=bld.bldnode.find_node(dep)
+ if node:
+ nodes.append(node)
+ else:
+ names.append(dep)
+ return(nodes,names)
+class glib_gresource_source(glib_gresource_base):
+ vars=['GLIB_COMPILE_RESOURCES']
+ fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}')
+ fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}')
+ ext_out=['.h']
+ def run(self):
+ return self.fun_h[0](self)or self.fun_c[0](self)
+class glib_gresource_bundle(glib_gresource_base):
+ run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}'
+ shell=True
+@conf
+def find_glib_genmarshal(conf):
+ conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
+@conf
+def find_glib_mkenums(conf):
+ if not conf.env.PERL:
+ conf.find_program('perl',var='PERL')
+ conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS')
+@conf
+def find_glib_compile_schemas(conf):
+ conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS')
+ def getstr(varname):
+ return getattr(Options.options,varname,getattr(conf.env,varname,''))
+ gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
+ if not gsettingsschemadir:
+ datadir=getstr('DATADIR')
+ if not datadir:
+ prefix=conf.env.PREFIX
+ datadir=os.path.join(prefix,'share')
+ gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
+ conf.env.GSETTINGSSCHEMADIR=gsettingsschemadir
+@conf
+def find_glib_compile_resources(conf):
+ conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES')
+def configure(conf):
+ conf.find_glib_genmarshal()
+ conf.find_glib_mkenums()
+ conf.find_glib_compile_schemas(mandatory=False)
+ conf.find_glib_compile_resources(mandatory=False)
+def options(opt):
+ gr=opt.add_option_group('Installation directories')
+ gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
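glib2.py groups several GLib helpers: add_marshal_file() drives glib-genmarshal, add_enums()/add_enums_from_template() drive glib-mkenums, the glib2 feature together with add_settings_schemas()/add_settings_enums() compiles and installs GSettings schemas, and *.gresource.xml sources are handled through glib-compile-resources. A usage sketch; file names, prefix and schema id are hypothetical:

    def configure(conf):
        conf.load('gcc glib2')

    def build(bld):
        tg = bld.program(source='main.c', target='app', features='glib2')
        tg.add_marshal_file('marshal.list', 'app_marshal')
        tg.add_settings_schemas(['org.example.app.gschema.xml'])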
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
new file mode 100644
index 0000000..21a6288
--- /dev/null
+++ b/waflib/Tools/gnu_dirs.py
@@ -0,0 +1,66 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Utils,Options,Context
+gnuopts='''
+bindir, user commands, ${EXEC_PREFIX}/bin
+sbindir, system binaries, ${EXEC_PREFIX}/sbin
+libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
+sysconfdir, host-specific configuration, ${PREFIX}/etc
+sharedstatedir, architecture-independent variable data, ${PREFIX}/com
+localstatedir, variable data, ${PREFIX}/var
+libdir, object code libraries, ${EXEC_PREFIX}/lib%s
+includedir, header files, ${PREFIX}/include
+oldincludedir, header files for non-GCC compilers, /usr/include
+datarootdir, architecture-independent data root, ${PREFIX}/share
+datadir, architecture-independent data, ${DATAROOTDIR}
+infodir, GNU "info" documentation, ${DATAROOTDIR}/info
+localedir, locale-dependent data, ${DATAROOTDIR}/locale
+mandir, manual pages, ${DATAROOTDIR}/man
+docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
+htmldir, HTML documentation, ${DOCDIR}
+dvidir, DVI documentation, ${DOCDIR}
+pdfdir, PDF documentation, ${DOCDIR}
+psdir, PostScript documentation, ${DOCDIR}
+'''%Utils.lib64()
+_options=[x.split(', ')for x in gnuopts.splitlines()if x]
+def configure(conf):
+ def get_param(varname,default):
+ return getattr(Options.options,varname,'')or default
+ env=conf.env
+ env.LIBDIR=env.BINDIR=[]
+ env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX)
+ env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE
+ complete=False
+ iter=0
+ while not complete and iter<len(_options)+1:
+ iter+=1
+ complete=True
+ for name,help,default in _options:
+ name=name.upper()
+ if not env[name]:
+ try:
+ env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env)
+ except TypeError:
+ complete=False
+ if not complete:
+ lst=[x for x,_,_ in _options if not env[x.upper()]]
+ raise conf.errors.WafError('Variable substitution failure %r'%lst)
+def options(opt):
+ inst_dir=opt.add_option_group('Installation prefix','By default, "waf install" will put the files in\
+ "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
+ than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
+ for k in('--prefix','--destdir'):
+ option=opt.parser.get_option(k)
+ if option:
+ opt.parser.remove_option(k)
+ inst_dir.add_option(option)
+ inst_dir.add_option('--exec-prefix',help='installation prefix for binaries [PREFIX]',default='',dest='EXEC_PREFIX')
+ dirs_options=opt.add_option_group('Installation directories')
+ for name,help,default in _options:
+ option_name='--'+name
+ str_default=default
+ str_help='%s [%s]'%(help,re.sub(r'\$\{([^}]+)\}',r'\1',str_default))
+ dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
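gnu_dirs.py adds the standard GNU installation-directory options (--bindir, --libdir, --docdir, ...) and resolves them into environment variables during configure. Minimal usage sketch:

    def options(opt):
        opt.load('gnu_dirs')

    def configure(conf):
        conf.load('gnu_dirs')
        # conf.env.BINDIR, conf.env.LIBDIR, conf.env.DOCDIR, ... are now set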
diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py
new file mode 100644
index 0000000..1ba1393
--- /dev/null
+++ b/waflib/Tools/gxx.py
@@ -0,0 +1,104 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_gxx(conf):
+ cxx=conf.find_program(['g++','c++'],var='CXX')
+ conf.get_cc_version(cxx,gcc=True)
+ conf.env.CXX_NAME='gcc'
+@conf
+def gxx_common_flags(conf):
+ v=conf.env
+ v.CXX_SRC_F=[]
+ v.CXX_TGT_F=['-c','-o']
+ if not v.LINK_CXX:
+ v.LINK_CXX=v.CXX
+ v.CXXLNK_SRC_F=[]
+ v.CXXLNK_TGT_F=['-o']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.RPATH_ST='-Wl,-rpath,%s'
+ v.SONAME_ST='-Wl,-h,%s'
+ v.SHLIB_MARKER='-Wl,-Bdynamic'
+ v.STLIB_MARKER='-Wl,-Bstatic'
+ v.cxxprogram_PATTERN='%s'
+ v.CXXFLAGS_cxxshlib=['-fPIC']
+ v.LINKFLAGS_cxxshlib=['-shared']
+ v.cxxshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cxxstlib=['-Wl,-Bstatic']
+ v.cxxstlib_PATTERN='lib%s.a'
+ v.LINKFLAGS_MACBUNDLE=['-bundle','-undefined','dynamic_lookup']
+ v.CXXFLAGS_MACBUNDLE=['-fPIC']
+ v.macbundle_PATTERN='%s.bundle'
+@conf
+def gxx_modifier_win32(conf):
+ v=conf.env
+ v.cxxprogram_PATTERN='%s.exe'
+ v.cxxshlib_PATTERN='%s.dll'
+ v.implib_PATTERN='%s.dll.a'
+ v.IMPLIB_ST='-Wl,--out-implib,%s'
+ v.CXXFLAGS_cxxshlib=[]
+ v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
+@conf
+def gxx_modifier_cygwin(conf):
+ gxx_modifier_win32(conf)
+ v=conf.env
+ v.cxxshlib_PATTERN='cyg%s.dll'
+ v.append_value('LINKFLAGS_cxxshlib',['-Wl,--enable-auto-image-base'])
+ v.CXXFLAGS_cxxshlib=[]
+@conf
+def gxx_modifier_darwin(conf):
+ v=conf.env
+ v.CXXFLAGS_cxxshlib=['-fPIC']
+ v.LINKFLAGS_cxxshlib=['-dynamiclib']
+ v.cxxshlib_PATTERN='lib%s.dylib'
+ v.FRAMEWORKPATH_ST='-F%s'
+ v.FRAMEWORK_ST=['-framework']
+ v.ARCH_ST=['-arch']
+ v.LINKFLAGS_cxxstlib=[]
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.SONAME_ST=[]
+@conf
+def gxx_modifier_aix(conf):
+ v=conf.env
+ v.LINKFLAGS_cxxprogram=['-Wl,-brtl']
+ v.LINKFLAGS_cxxshlib=['-shared','-Wl,-brtl,-bexpfull']
+ v.SHLIB_MARKER=[]
+@conf
+def gxx_modifier_hpux(conf):
+ v=conf.env
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.CXXFLAGS_cxxshlib=['-fPIC','-DPIC']
+ v.cxxshlib_PATTERN='lib%s.sl'
+@conf
+def gxx_modifier_openbsd(conf):
+ conf.env.SONAME_ST=[]
+@conf
+def gxx_modifier_osf1V(conf):
+ v=conf.env
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.SONAME_ST=[]
+@conf
+def gxx_modifier_platform(conf):
+ gxx_modifier_func=getattr(conf,'gxx_modifier_'+conf.env.DEST_OS,None)
+ if gxx_modifier_func:
+ gxx_modifier_func()
+def configure(conf):
+ conf.find_gxx()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
+ conf.check_gcc_o_space('cxx')
diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py
new file mode 100644
index 0000000..bffd7d1
--- /dev/null
+++ b/waflib/Tools/icc.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ccroot,ar,gcc
+from waflib.Configure import conf
+@conf
+def find_icc(conf):
+ cc=conf.find_program(['icc','ICL'],var='CC')
+ conf.get_cc_version(cc,icc=True)
+ conf.env.CC_NAME='icc'
+def configure(conf):
+ conf.find_icc()
+ conf.find_ar()
+ conf.gcc_common_flags()
+ conf.gcc_modifier_platform()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py
new file mode 100644
index 0000000..f7bdb0f
--- /dev/null
+++ b/waflib/Tools/icpc.py
@@ -0,0 +1,20 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import sys
+from waflib.Tools import ccroot,ar,gxx
+from waflib.Configure import conf
+@conf
+def find_icpc(conf):
+ cxx=conf.find_program('icpc',var='CXX')
+ conf.get_cc_version(cxx,icc=True)
+ conf.env.CXX_NAME='icc'
+def configure(conf):
+ conf.find_icpc()
+ conf.find_ar()
+ conf.gxx_common_flags()
+ conf.gxx_modifier_platform()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py
new file mode 100644
index 0000000..2cbae10
--- /dev/null
+++ b/waflib/Tools/ifort.py
@@ -0,0 +1,303 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re,traceback
+from waflib import Utils,Logs,Errors
+from waflib.Tools import fc,fc_config,fc_scan,ar,ccroot
+from waflib.Configure import conf
+from waflib.TaskGen import after_method,feature
+@conf
+def find_ifort(conf):
+ fc=conf.find_program('ifort',var='FC')
+ conf.get_ifort_version(fc)
+ conf.env.FC_NAME='IFORT'
+@conf
+def ifort_modifier_win32(self):
+ v=self.env
+ v.IFORT_WIN32=True
+ v.FCSTLIB_MARKER=''
+ v.FCSHLIB_MARKER=''
+ v.FCLIB_ST=v.FCSTLIB_ST='%s.lib'
+ v.FCLIBPATH_ST=v.STLIBPATH_ST='/LIBPATH:%s'
+ v.FCINCPATH_ST='/I%s'
+ v.FCDEFINES_ST='/D%s'
+ v.fcprogram_PATTERN=v.fcprogram_test_PATTERN='%s.exe'
+ v.fcshlib_PATTERN='%s.dll'
+ v.fcstlib_PATTERN=v.implib_PATTERN='%s.lib'
+ v.FCLNK_TGT_F='/out:'
+ v.FC_TGT_F=['/c','/o','']
+ v.FCFLAGS_fcshlib=''
+ v.LINKFLAGS_fcshlib='/DLL'
+ v.AR_TGT_F='/out:'
+ v.IMPLIB_ST='/IMPLIB:%s'
+ v.append_value('LINKFLAGS','/subsystem:console')
+ if v.IFORT_MANIFEST:
+ v.append_value('LINKFLAGS',['/MANIFEST'])
+@conf
+def ifort_modifier_darwin(conf):
+ fc_config.fortran_modifier_darwin(conf)
+@conf
+def ifort_modifier_platform(conf):
+ dest_os=conf.env.DEST_OS or Utils.unversioned_sys_platform()
+ ifort_modifier_func=getattr(conf,'ifort_modifier_'+dest_os,None)
+ if ifort_modifier_func:
+ ifort_modifier_func()
+@conf
+def get_ifort_version(conf,fc):
+ version_re=re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
+ if Utils.is_win32:
+ cmd=fc
+ else:
+ cmd=fc+['-logo']
+ out,err=fc_config.getoutput(conf,cmd,stdin=False)
+ match=version_re(out)or version_re(err)
+ if not match:
+ conf.fatal('cannot determine ifort version.')
+ k=match.groupdict()
+ conf.env.FC_VERSION=(k['major'],k['minor'])
+def configure(conf):
+ if Utils.is_win32:
+ compiler,version,path,includes,libdirs,arch=conf.detect_ifort()
+ v=conf.env
+ v.DEST_CPU=arch
+ v.PATH=path
+ v.INCLUDES=includes
+ v.LIBPATH=libdirs
+ v.MSVC_COMPILER=compiler
+ try:
+ v.MSVC_VERSION=float(version)
+ except ValueError:
+ v.MSVC_VERSION=float(version[:-3])
+ conf.find_ifort_win32()
+ conf.ifort_modifier_win32()
+ else:
+ conf.find_ifort()
+ conf.find_program('xiar',var='AR')
+ conf.find_ar()
+ conf.fc_flags()
+ conf.fc_add_flags()
+ conf.ifort_modifier_platform()
+all_ifort_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
+@conf
+def gather_ifort_versions(conf,versions):
+ version_pattern=re.compile('^...?.?\....?.?')
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
+ except OSError:
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\Fortran')
+ except OSError:
+ return
+ index=0
+ while 1:
+ try:
+ version=Utils.winreg.EnumKey(all_versions,index)
+ except OSError:
+ break
+ index+=1
+ if not version_pattern.match(version):
+ continue
+ targets={}
+ for target,arch in all_ifort_platforms:
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
+ try:
+ Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+ icl_version=Utils.winreg.OpenKey(all_versions,version)
+ path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+ except OSError:
+ pass
+ else:
+ batch_file=os.path.join(path,'bin','ifortvars.bat')
+ if os.path.isfile(batch_file):
+ targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+ for target,arch in all_ifort_platforms:
+ try:
+ icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
+ path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+ except OSError:
+ continue
+ else:
+ batch_file=os.path.join(path,'bin','ifortvars.bat')
+ if os.path.isfile(batch_file):
+ targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+ major=version[0:2]
+ versions['intel '+major]=targets
+@conf
+def setup_ifort(conf,versiondict):
+ platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_ifort_platforms]
+ desired_versions=conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
+ for version in desired_versions:
+ try:
+ targets=versiondict[version]
+ except KeyError:
+ continue
+ for arch in platforms:
+ try:
+ cfg=targets[arch]
+ except KeyError:
+ continue
+ cfg.evaluate()
+ if cfg.is_valid:
+ compiler,revision=version.rsplit(' ',1)
+ return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+ conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys())))
+@conf
+def get_ifort_version_win32(conf,compiler,version,target,vcvars):
+ try:
+ conf.msvc_cnt+=1
+ except AttributeError:
+ conf.msvc_cnt=1
+ batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
+ batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+"""%(vcvars,target))
+ sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
+ batfile.delete()
+ lines=sout.splitlines()
+ if not lines[0]:
+ lines.pop(0)
+ MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
+ for line in lines:
+ if line.startswith('PATH='):
+ path=line[5:]
+ MSVC_PATH=path.split(';')
+ elif line.startswith('INCLUDE='):
+ MSVC_INCDIR=[i for i in line[8:].split(';')if i]
+ elif line.startswith('LIB='):
+ MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
+ if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
+ conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
+ env=dict(os.environ)
+ env.update(PATH=path)
+ compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+ fc=conf.find_program(compiler_name,path_list=MSVC_PATH)
+ if'CL'in env:
+ del(env['CL'])
+ try:
+ conf.cmd_and_log(fc+['/help'],env=env)
+ except UnicodeError:
+ st=traceback.format_exc()
+ if conf.logger:
+ conf.logger.error(st)
+ conf.fatal('ifort: Unicode error - check the code page?')
+ except Exception as e:
+ Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s',compiler,version,target,str(e))
+ conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
+ else:
+ Logs.debug('ifort: get_ifort_version: %r %r %r -> OK',compiler,version,target)
+ finally:
+ conf.env[compiler_name]=''
+ return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
+class target_compiler(object):
+ def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None):
+ self.conf=ctx
+ self.name=None
+ self.is_valid=False
+ self.is_done=False
+ self.compiler=compiler
+ self.cpu=cpu
+ self.version=version
+ self.bat_target=bat_target
+ self.bat=bat
+ self.callback=callback
+ def evaluate(self):
+ if self.is_done:
+ return
+ self.is_done=True
+ try:
+ vs=self.conf.get_ifort_version_win32(self.compiler,self.version,self.bat_target,self.bat)
+ except Errors.ConfigurationError:
+ self.is_valid=False
+ return
+ if self.callback:
+ vs=self.callback(self,vs)
+ self.is_valid=True
+ (self.bindirs,self.incdirs,self.libdirs)=vs
+ def __str__(self):
+ return str((self.bindirs,self.incdirs,self.libdirs))
+ def __repr__(self):
+ return repr((self.bindirs,self.incdirs,self.libdirs))
+@conf
+def detect_ifort(self):
+ return self.setup_ifort(self.get_ifort_versions(False))
+@conf
+def get_ifort_versions(self,eval_and_save=True):
+ dct={}
+ self.gather_ifort_versions(dct)
+ return dct
+def _get_prog_names(self,compiler):
+ if compiler=='intel':
+ compiler_name='ifort'
+ linker_name='XILINK'
+ lib_name='XILIB'
+ else:
+ compiler_name='CL'
+ linker_name='LINK'
+ lib_name='LIB'
+ return compiler_name,linker_name,lib_name
+@conf
+def find_ifort_win32(conf):
+ v=conf.env
+ path=v.PATH
+ compiler=v.MSVC_COMPILER
+ version=v.MSVC_VERSION
+ compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+ v.IFORT_MANIFEST=(compiler=='intel'and version>=11)
+ fc=conf.find_program(compiler_name,var='FC',path_list=path)
+ env=dict(conf.environ)
+ if path:
+ env.update(PATH=';'.join(path))
+ if not conf.cmd_and_log(fc+['/nologo','/help'],env=env):
+ conf.fatal('the intel fortran compiler could not be identified')
+ v.FC_NAME='IFORT'
+ if not v.LINK_FC:
+ conf.find_program(linker_name,var='LINK_FC',path_list=path,mandatory=True)
+ if not v.AR:
+ conf.find_program(lib_name,path_list=path,var='AR',mandatory=True)
+ v.ARFLAGS=['/nologo']
+ if v.IFORT_MANIFEST:
+ conf.find_program('MT',path_list=path,var='MT')
+ v.MTFLAGS=['/nologo']
+ try:
+ conf.load('winres')
+ except Errors.WafError:
+ Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+@after_method('apply_link')
+@feature('fc')
+def apply_flags_ifort(self):
+ if not self.env.IFORT_WIN32 or not getattr(self,'link_task',None):
+ return
+ is_static=isinstance(self.link_task,ccroot.stlink_task)
+ subsystem=getattr(self,'subsystem','')
+ if subsystem:
+ subsystem='/subsystem:%s'%subsystem
+ flags=is_static and'ARFLAGS'or'LINKFLAGS'
+ self.env.append_value(flags,subsystem)
+ if not is_static:
+ for f in self.env.LINKFLAGS:
+ d=f.lower()
+ if d[1:]=='debug':
+ pdbnode=self.link_task.outputs[0].change_ext('.pdb')
+ self.link_task.outputs.append(pdbnode)
+ if getattr(self,'install_task',None):
+ self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode)
+ break
+@feature('fcprogram','fcshlib','fcprogram_test')
+@after_method('apply_link')
+def apply_manifest_ifort(self):
+ if self.env.IFORT_WIN32 and getattr(self,'link_task',None):
+ self.link_task.env.FC=self.env.LINK_FC
+ if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self,'link_task',None):
+ out_node=self.link_task.outputs[0]
+ man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
+ self.link_task.outputs.append(man_node)
+ self.env.DO_MANIFEST=True
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
new file mode 100644
index 0000000..d799402
--- /dev/null
+++ b/waflib/Tools/intltool.py
@@ -0,0 +1,101 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from __future__ import with_statement
+import os,re
+from waflib import Context,Task,Utils,Logs
+import waflib.Tools.ccroot
+from waflib.TaskGen import feature,before_method,taskgen_method
+from waflib.Logs import error
+from waflib.Configure import conf
+_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',}
+@taskgen_method
+def ensure_localedir(self):
+ if not self.env.LOCALEDIR:
+ if self.env.DATAROOTDIR:
+ self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale')
+ else:
+ self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale')
+@before_method('process_source')
+@feature('intltool_in')
+def apply_intltool_in_f(self):
+ try:
+ self.meths.remove('process_source')
+ except ValueError:
+ pass
+ self.ensure_localedir()
+ podir=getattr(self,'podir','.')
+ podirnode=self.path.find_dir(podir)
+ if not podirnode:
+ error("could not find the podir %r"%podir)
+ return
+ cache=getattr(self,'intlcache','.intlcache')
+ self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)]
+ self.env.INTLPODIR=podirnode.bldpath()
+ self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT))
+ if'-c'in self.env.INTLFLAGS:
+ self.bld.fatal('Redundant -c flag in intltool task %r'%self)
+ style=getattr(self,'style',None)
+ if style:
+ try:
+ style_flag=_style_flags[style]
+ except KeyError:
+ self.bld.fatal('intltool_in style "%s" is not valid'%style)
+ self.env.append_unique('INTLFLAGS',[style_flag])
+ for i in self.to_list(self.source):
+ node=self.path.find_resource(i)
+ task=self.create_task('intltool',node,node.change_ext(''))
+ inst=getattr(self,'install_path',None)
+ if inst:
+ self.add_install_files(install_to=inst,install_from=task.outputs)
+@feature('intltool_po')
+def apply_intltool_po(self):
+ try:
+ self.meths.remove('process_source')
+ except ValueError:
+ pass
+ self.ensure_localedir()
+ appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name'))
+ podir=getattr(self,'podir','.')
+ inst=getattr(self,'install_path','${LOCALEDIR}')
+ linguas=self.path.find_node(os.path.join(podir,'LINGUAS'))
+ if linguas:
+ with open(linguas.abspath())as f:
+ langs=[]
+ for line in f.readlines():
+ if not line.startswith('#'):
+ langs+=line.split()
+ re_linguas=re.compile('[-a-zA-Z_@.]+')
+ for lang in langs:
+ if re_linguas.match(lang):
+ node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po'))
+ task=self.create_task('po',node,node.change_ext('.mo'))
+ if inst:
+ filename=task.outputs[0].name
+ (langname,ext)=os.path.splitext(filename)
+ inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo'
+ self.add_install_as(install_to=inst_file,install_from=task.outputs[0],chmod=getattr(self,'chmod',Utils.O644))
+ else:
+ Logs.pprint('RED',"Error no LINGUAS file found in po directory")
+class po(Task.Task):
+ run_str='${MSGFMT} -o ${TGT} ${SRC}'
+ color='BLUE'
+class intltool(Task.Task):
+ run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
+ color='BLUE'
+@conf
+def find_msgfmt(conf):
+ conf.find_program('msgfmt',var='MSGFMT')
+@conf
+def find_intltool_merge(conf):
+ if not conf.env.PERL:
+ conf.find_program('perl',var='PERL')
+ conf.env.INTLCACHE_ST='--cache=%s'
+ conf.env.INTLFLAGS_DEFAULT=['-q','-u']
+ conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL')
+def configure(conf):
+ conf.find_msgfmt()
+ conf.find_intltool_merge()
+ if conf.env.CC or conf.env.CXX:
+ conf.check(header_name='locale.h')
diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py
new file mode 100644
index 0000000..06099ff
--- /dev/null
+++ b/waflib/Tools/irixcc.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Errors
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_irixcc(conf):
+ v=conf.env
+ cc=None
+ if v.CC:
+ cc=v.CC
+ elif'CC'in conf.environ:
+ cc=conf.environ['CC']
+ if not cc:
+ cc=conf.find_program('cc',var='CC')
+ if not cc:
+ conf.fatal('irixcc was not found')
+ try:
+ conf.cmd_and_log(cc+['-version'])
+ except Errors.WafError:
+ conf.fatal('%r -version could not be executed'%cc)
+ v.CC=cc
+ v.CC_NAME='irix'
+@conf
+def irixcc_common_flags(conf):
+ v=conf.env
+ v.CC_SRC_F=''
+ v.CC_TGT_F=['-c','-o']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ if not v.LINK_CC:
+ v.LINK_CC=v.CC
+ v.CCLNK_SRC_F=''
+ v.CCLNK_TGT_F=['-o']
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.cprogram_PATTERN='%s'
+ v.cshlib_PATTERN='lib%s.so'
+ v.cstlib_PATTERN='lib%s.a'
+def configure(conf):
+ conf.find_irixcc()
+ conf.find_cpp()
+ conf.find_ar()
+ conf.irixcc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py
new file mode 100644
index 0000000..8b7ab2a
--- /dev/null
+++ b/waflib/Tools/javaw.py
@@ -0,0 +1,299 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,shutil
+from waflib import Task,Utils,Errors,Node
+from waflib.Configure import conf
+from waflib.TaskGen import feature,before_method,after_method
+from waflib.Tools import ccroot
+ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS'])
+SOURCE_RE='**/*.java'
+JAR_RE='**/*'
+class_check_source='''
+public class Test {
+ public static void main(String[] argv) {
+ Class lib;
+ if (argv.length < 1) {
+ System.err.println("Missing argument");
+ System.exit(77);
+ }
+ try {
+ lib = Class.forName(argv[0]);
+ } catch (ClassNotFoundException e) {
+ System.err.println("ClassNotFoundException");
+ System.exit(1);
+ }
+ lib = null;
+ System.exit(0);
+ }
+}
+'''
+@feature('javac')
+@before_method('process_source')
+def apply_java(self):
+ Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[])
+ outdir=getattr(self,'outdir',None)
+ if outdir:
+ if not isinstance(outdir,Node.Node):
+ outdir=self.path.get_bld().make_node(self.outdir)
+ else:
+ outdir=self.path.get_bld()
+ outdir.mkdir()
+ self.outdir=outdir
+ self.env.OUTDIR=outdir.abspath()
+ self.javac_task=tsk=self.create_task('javac')
+ tmp=[]
+ srcdir=getattr(self,'srcdir','')
+ if isinstance(srcdir,Node.Node):
+ srcdir=[srcdir]
+ for x in Utils.to_list(srcdir):
+ if isinstance(x,Node.Node):
+ y=x
+ else:
+ y=self.path.find_dir(x)
+ if not y:
+ self.bld.fatal('Could not find the folder %s from %s'%(x,self.path))
+ tmp.append(y)
+ tsk.srcdir=tmp
+ if getattr(self,'compat',None):
+ tsk.env.append_value('JAVACFLAGS',['-source',str(self.compat)])
+ if hasattr(self,'sourcepath'):
+ fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)]
+ names=os.pathsep.join([x.srcpath()for x in fold])
+ else:
+ names=[x.srcpath()for x in tsk.srcdir]
+ if names:
+ tsk.env.append_value('JAVACFLAGS',['-sourcepath',names])
+@feature('javac')
+@before_method('propagate_uselib_vars')
+@after_method('apply_java')
+def use_javac_files(self):
+ lst=[]
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ get=self.bld.get_tgen_by_name
+ for x in names:
+ try:
+ y=get(x)
+ except Errors.WafError:
+ self.uselib.append(x)
+ else:
+ y.post()
+ if hasattr(y,'jar_task'):
+ lst.append(y.jar_task.outputs[0].abspath())
+ self.javac_task.set_run_after(y.jar_task)
+ else:
+ for tsk in y.tasks:
+ self.javac_task.set_run_after(tsk)
+ self.env.append_value('CLASSPATH',lst)
+@feature('javac')
+@after_method('apply_java','propagate_uselib_vars','use_javac_files')
+def set_classpath(self):
+ if getattr(self,'classpath',None):
+ self.env.append_unique('CLASSPATH',getattr(self,'classpath',[]))
+ for x in self.tasks:
+ x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep
+@feature('jar')
+@after_method('apply_java','use_javac_files')
+@before_method('process_source')
+def jar_files(self):
+ destfile=getattr(self,'destfile','test.jar')
+ jaropts=getattr(self,'jaropts',[])
+ manifest=getattr(self,'manifest',None)
+ basedir=getattr(self,'basedir',None)
+ if basedir:
+ if not isinstance(self.basedir,Node.Node):
+ basedir=self.path.get_bld().make_node(basedir)
+ else:
+ basedir=self.path.get_bld()
+ if not basedir:
+ self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self))
+ self.jar_task=tsk=self.create_task('jar_create')
+ if manifest:
+ jarcreate=getattr(self,'jarcreate','cfm')
+ if not isinstance(manifest,Node.Node):
+ node=self.path.find_resource(manifest)
+ else:
+ node=manifest
+ if not node:
+ self.bld.fatal('invalid manifest file %r for %r'%(manifest,self))
+ tsk.dep_nodes.append(node)
+ jaropts.insert(0,node.abspath())
+ else:
+ jarcreate=getattr(self,'jarcreate','cf')
+ if not isinstance(destfile,Node.Node):
+ destfile=self.path.find_or_declare(destfile)
+ if not destfile:
+ self.bld.fatal('invalid destfile %r for %r'%(destfile,self))
+ tsk.set_outputs(destfile)
+ tsk.basedir=basedir
+ jaropts.append('-C')
+ jaropts.append(basedir.bldpath())
+ jaropts.append('.')
+ tsk.env.JAROPTS=jaropts
+ tsk.env.JARCREATE=jarcreate
+ if getattr(self,'javac_task',None):
+ tsk.set_run_after(self.javac_task)
+@feature('jar')
+@after_method('jar_files')
+def use_jar_files(self):
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ names=self.to_list(getattr(self,'use',[]))
+ get=self.bld.get_tgen_by_name
+ for x in names:
+ try:
+ y=get(x)
+ except Errors.WafError:
+ self.uselib.append(x)
+ else:
+ y.post()
+ self.jar_task.run_after.update(y.tasks)
+class JTask(Task.Task):
+ def split_argfile(self,cmd):
+ inline=[cmd[0]]
+ infile=[]
+ for x in cmd[1:]:
+ if x.startswith('-J'):
+ inline.append(x)
+ else:
+ infile.append(self.quote_flag(x))
+ return(inline,infile)
+class jar_create(JTask):
+ color='GREEN'
+ run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ if not self.inputs:
+ try:
+ self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])]
+ except Exception:
+ raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self))
+ return super(jar_create,self).runnable_status()
+class javac(JTask):
+ color='BLUE'
+ run_str='${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
+ vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR']
+ def uid(self):
+ lst=[self.__class__.__name__,self.generator.outdir.abspath()]
+ for x in self.srcdir:
+ lst.append(x.abspath())
+ return Utils.h_list(lst)
+ def runnable_status(self):
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ if not self.inputs:
+ self.inputs=[]
+ for x in self.srcdir:
+ if x.exists():
+ self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False))
+ return super(javac,self).runnable_status()
+ def post_run(self):
+ for node in self.generator.outdir.ant_glob('**/*.class'):
+ self.generator.bld.node_sigs[node]=self.uid()
+ self.generator.bld.task_sigs[self.uid()]=self.cache_sig
+@feature('javadoc')
+@after_method('process_rule')
+def create_javadoc(self):
+ tsk=self.create_task('javadoc')
+ tsk.classpath=getattr(self,'classpath',[])
+ self.javadoc_package=Utils.to_list(self.javadoc_package)
+ if not isinstance(self.javadoc_output,Node.Node):
+ self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output)
+class javadoc(Task.Task):
+ color='BLUE'
+ def __str__(self):
+ return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output)
+ def run(self):
+ env=self.env
+ bld=self.generator.bld
+ wd=bld.bldnode
+ srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir
+ srcpath+=os.pathsep
+ srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir
+ classpath=env.CLASSPATH
+ classpath+=os.pathsep
+ classpath+=os.pathsep.join(self.classpath)
+ classpath="".join(classpath)
+ self.last_cmd=lst=[]
+ lst.extend(Utils.to_list(env.JAVADOC))
+ lst.extend(['-d',self.generator.javadoc_output.abspath()])
+ lst.extend(['-sourcepath',srcpath])
+ lst.extend(['-classpath',classpath])
+ lst.extend(['-subpackages'])
+ lst.extend(self.generator.javadoc_package)
+ lst=[x for x in lst if x]
+ self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0)
+ def post_run(self):
+ nodes=self.generator.javadoc_output.ant_glob('**')
+ for node in nodes:
+ self.generator.bld.node_sigs[node]=self.uid()
+ self.generator.bld.task_sigs[self.uid()]=self.cache_sig
+def configure(self):
+ java_path=self.environ['PATH'].split(os.pathsep)
+ v=self.env
+ if'JAVA_HOME'in self.environ:
+ java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path
+ self.env.JAVA_HOME=[self.environ['JAVA_HOME']]
+ for x in'javac java jar javadoc'.split():
+ self.find_program(x,var=x.upper(),path_list=java_path)
+ if'CLASSPATH'in self.environ:
+ v.CLASSPATH=self.environ['CLASSPATH']
+ if not v.JAR:
+ self.fatal('jar is required for making java packages')
+ if not v.JAVAC:
+ self.fatal('javac is required for compiling java classes')
+ v.JARCREATE='cf'
+ v.JAVACFLAGS=[]
+@conf
+def check_java_class(self,classname,with_classpath=None):
+ javatestdir='.waf-javatest'
+ classpath=javatestdir
+ if self.env.CLASSPATH:
+ classpath+=os.pathsep+self.env.CLASSPATH
+ if isinstance(with_classpath,str):
+ classpath+=os.pathsep+with_classpath
+ shutil.rmtree(javatestdir,True)
+ os.mkdir(javatestdir)
+ Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source)
+ self.exec_command(self.env.JAVAC+[os.path.join(javatestdir,'Test.java')],shell=False)
+ cmd=self.env.JAVA+['-cp',classpath,'Test',classname]
+ self.to_log("%s\n"%str(cmd))
+ found=self.exec_command(cmd,shell=False)
+ self.msg('Checking for java class %s'%classname,not found)
+ shutil.rmtree(javatestdir,True)
+ return found
+@conf
+def check_jni_headers(conf):
+ if not conf.env.CC_NAME and not conf.env.CXX_NAME:
+ conf.fatal('load a compiler first (gcc, g++, ..)')
+ if not conf.env.JAVA_HOME:
+ conf.fatal('set JAVA_HOME in the system environment')
+ javaHome=conf.env.JAVA_HOME[0]
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include')
+ if dir is None:
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers')
+ if dir is None:
+ conf.fatal('JAVA_HOME does not seem to be set properly')
+ f=dir.ant_glob('**/(jni|jni_md).h')
+ incDirs=[x.parent.abspath()for x in f]
+ dir=conf.root.find_dir(conf.env.JAVA_HOME[0])
+ f=dir.ant_glob('**/*jvm.(so|dll|dylib)')
+ libDirs=[x.parent.abspath()for x in f]or[javaHome]
+ f=dir.ant_glob('**/*jvm.(lib)')
+ if f:
+ libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f]
+ if conf.env.DEST_OS=='freebsd':
+ conf.env.append_unique('LINKFLAGS_JAVA','-pthread')
+ for d in libDirs:
+ try:
+ conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA')
+ except Exception:
+ pass
+ else:
+ break
+ else:
+ conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs)
diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py
new file mode 100644
index 0000000..40d435e
--- /dev/null
+++ b/waflib/Tools/ldc2.py
@@ -0,0 +1,36 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.Tools import ar,d
+from waflib.Configure import conf
+@conf
+def find_ldc2(conf):
+ conf.find_program(['ldc2'],var='D')
+ out=conf.cmd_and_log(conf.env.D+['-version'])
+ if out.find("based on DMD v2.")==-1:
+ conf.fatal("detected compiler is not ldc2")
+@conf
+def common_flags_ldc2(conf):
+ v=conf.env
+ v.D_SRC_F=['-c']
+ v.D_TGT_F='-of%s'
+ v.D_LINKER=v.D
+ v.DLNK_SRC_F=''
+ v.DLNK_TGT_F='-of%s'
+ v.DINC_ST='-I%s'
+ v.DSHLIB_MARKER=v.DSTLIB_MARKER=''
+ v.DSTLIB_ST=v.DSHLIB_ST='-L-l%s'
+ v.DSTLIBPATH_ST=v.DLIBPATH_ST='-L-L%s'
+ v.LINKFLAGS_dshlib=['-L-shared']
+ v.DHEADER_ext='.di'
+ v.DFLAGS_d_with_header=['-H','-Hf']
+ v.D_HDR_F='%s'
+ v.LINKFLAGS=[]
+ v.DFLAGS_dshlib=['-relocation-model=pic']
+def configure(conf):
+ conf.find_ldc2()
+ conf.load('ar')
+ conf.load('d')
+ conf.common_flags_ldc2()
+ conf.d_platform_flags()
diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py
new file mode 100644
index 0000000..7c6a682
--- /dev/null
+++ b/waflib/Tools/lua.py
@@ -0,0 +1,18 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib.TaskGen import extension
+from waflib import Task
+@extension('.lua')
+def add_lua(self,node):
+ tsk=self.create_task('luac',node,node.change_ext('.luac'))
+ inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None)
+ if inst_to:
+ self.add_install_files(install_to=inst_to,install_from=tsk.outputs)
+ return tsk
+class luac(Task.Task):
+ run_str='${LUAC} -s -o ${TGT} ${SRC}'
+ color='PINK'
+def configure(conf):
+ conf.find_program('luac',var='LUAC')
diff --git a/waflib/Tools/md5_tstamp.py b/waflib/Tools/md5_tstamp.py
new file mode 100644
index 0000000..0d0faa0
--- /dev/null
+++ b/waflib/Tools/md5_tstamp.py
@@ -0,0 +1,24 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,stat
+from waflib import Utils,Build,Node
+STRONGEST=True
+Build.SAVED_ATTRS.append('hashes_md5_tstamp')
+def h_file(self):
+ filename=self.abspath()
+ st=os.stat(filename)
+ cache=self.ctx.hashes_md5_tstamp
+ if filename in cache and cache[filename][0]==st.st_mtime:
+ return cache[filename][1]
+ if STRONGEST:
+ ret=Utils.h_file(filename)
+ else:
+ if stat.S_ISDIR(st[stat.ST_MODE]):
+ raise IOError('Not a file')
+ ret=Utils.md5(str((st.st_mtime,st.st_size)).encode()).digest()
+ cache[filename]=(st.st_mtime,ret)
+ return ret
+h_file.__doc__=Node.Node.h_file.__doc__
+Node.Node.h_file=h_file
diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py
new file mode 100644
index 0000000..662fa61
--- /dev/null
+++ b/waflib/Tools/msvc.py
@@ -0,0 +1,704 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,sys,re,traceback
+from waflib import Utils,Logs,Options,Errors
+from waflib.TaskGen import after_method,feature
+from waflib.Configure import conf
+from waflib.Tools import ccroot,c,cxx,ar
+g_msvc_systemlibs='''
+aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
+cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
+credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
+ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
+faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
+gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
+kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
+mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
+msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
+netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
+odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
+osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
+ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
+rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
+shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
+traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
+version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
+wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
+'''.split()
+all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('x86_arm64','arm64'),('amd64_x86','x86'),('amd64_arm','arm'),('amd64_arm64','arm64')]
+all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
+all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
+def options(opt):
+ opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
+ opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
+ opt.add_option('--no-msvc-lazy',action='store_false',help='lazily check msvc target environments',default=True,dest='msvc_lazy')
+@conf
+def setup_msvc(conf,versiondict):
+ platforms=getattr(Options.options,'msvc_targets','').split(',')
+ if platforms==['']:
+ platforms=Utils.to_list(conf.env.MSVC_TARGETS)or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
+ desired_versions=getattr(Options.options,'msvc_version','').split(',')
+ if desired_versions==['']:
+ desired_versions=conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
+ lazy_detect=getattr(Options.options,'msvc_lazy',True)
+ if conf.env.MSVC_LAZY_AUTODETECT is False:
+ lazy_detect=False
+ if not lazy_detect:
+ for val in versiondict.values():
+ for arch in list(val.keys()):
+ cfg=val[arch]
+ cfg.evaluate()
+ if not cfg.is_valid:
+ del val[arch]
+ conf.env.MSVC_INSTALLED_VERSIONS=versiondict
+ for version in desired_versions:
+ Logs.debug('msvc: detecting %r - %r',version,desired_versions)
+ try:
+ targets=versiondict[version]
+ except KeyError:
+ continue
+ seen=set()
+ for arch in platforms:
+ if arch in seen:
+ continue
+ else:
+ seen.add(arch)
+ try:
+ cfg=targets[arch]
+ except KeyError:
+ continue
+ cfg.evaluate()
+ if cfg.is_valid:
+ compiler,revision=version.rsplit(' ',1)
+ return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
+ conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r'%(desired_versions,list(versiondict.keys())))
+@conf
+def get_msvc_version(conf,compiler,version,target,vcvars):
+ Logs.debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
+ try:
+ conf.msvc_cnt+=1
+ except AttributeError:
+ conf.msvc_cnt=1
+ batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt)
+ batfile.write("""@echo off
+set INCLUDE=
+set LIB=
+call "%s" %s
+echo PATH=%%PATH%%
+echo INCLUDE=%%INCLUDE%%
+echo LIB=%%LIB%%;%%LIBPATH%%
+"""%(vcvars,target))
+ sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()])
+ lines=sout.splitlines()
+ if not lines[0]:
+ lines.pop(0)
+ MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
+ for line in lines:
+ if line.startswith('PATH='):
+ path=line[5:]
+ MSVC_PATH=path.split(';')
+ elif line.startswith('INCLUDE='):
+ MSVC_INCDIR=[i for i in line[8:].split(';')if i]
+ elif line.startswith('LIB='):
+ MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
+ if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
+ conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
+ env=dict(os.environ)
+ env.update(PATH=path)
+ compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+ cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
+ if'CL'in env:
+ del(env['CL'])
+ try:
+ conf.cmd_and_log(cxx+['/help'],env=env)
+ except UnicodeError:
+ st=traceback.format_exc()
+ if conf.logger:
+ conf.logger.error(st)
+ conf.fatal('msvc: Unicode error - check the code page?')
+ except Exception as e:
+ Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s',compiler,version,target,str(e))
+ conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
+ else:
+ Logs.debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
+ finally:
+ conf.env[compiler_name]=''
+ return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
+def gather_wince_supported_platforms():
+ supported_wince_platforms=[]
+ try:
+ ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
+ except OSError:
+ try:
+ ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
+ except OSError:
+ ce_sdk=''
+ if not ce_sdk:
+ return supported_wince_platforms
+ index=0
+ while 1:
+ try:
+ sdk_device=Utils.winreg.EnumKey(ce_sdk,index)
+ sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
+ except OSError:
+ break
+ index+=1
+ try:
+ path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
+ except OSError:
+ try:
+ path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
+ except OSError:
+ continue
+ path,xml=os.path.split(path)
+ path=str(path)
+ path,device=os.path.split(path)
+ if not device:
+ path,device=os.path.split(path)
+ platforms=[]
+ for arch,compiler in all_wince_platforms:
+ if os.path.isdir(os.path.join(path,device,'Lib',arch)):
+ platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
+ if platforms:
+ supported_wince_platforms.append((device,platforms))
+ return supported_wince_platforms
+def gather_msvc_detected_versions():
+ version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
+ detected_versions=[]
+ for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')):
+ prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
+ except OSError:
+ prefix='SOFTWARE\\Microsoft\\'+vcver
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
+ except OSError:
+ continue
+ index=0
+ while 1:
+ try:
+ version=Utils.winreg.EnumKey(all_versions,index)
+ except OSError:
+ break
+ index+=1
+ match=version_pattern.match(version)
+ if match:
+ versionnumber=float(match.group(1))
+ else:
+ continue
+ detected_versions.append((versionnumber,version+vcvar,prefix+'\\'+version))
+ def fun(tup):
+ return tup[0]
+ detected_versions.sort(key=fun)
+ return detected_versions
+class target_compiler(object):
+ def __init__(self,ctx,compiler,cpu,version,bat_target,bat,callback=None):
+ self.conf=ctx
+ self.name=None
+ self.is_valid=False
+ self.is_done=False
+ self.compiler=compiler
+ self.cpu=cpu
+ self.version=version
+ self.bat_target=bat_target
+ self.bat=bat
+ self.callback=callback
+ def evaluate(self):
+ if self.is_done:
+ return
+ self.is_done=True
+ try:
+ vs=self.conf.get_msvc_version(self.compiler,self.version,self.bat_target,self.bat)
+ except Errors.ConfigurationError:
+ self.is_valid=False
+ return
+ if self.callback:
+ vs=self.callback(self,vs)
+ self.is_valid=True
+ (self.bindirs,self.incdirs,self.libdirs)=vs
+ def __str__(self):
+ return str((self.compiler,self.cpu,self.version,self.bat_target,self.bat))
+ def __repr__(self):
+ return repr((self.compiler,self.cpu,self.version,self.bat_target,self.bat))
+@conf
+def gather_wsdk_versions(conf,versions):
+ version_pattern=re.compile('^v..?.?\...?.?')
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
+ except OSError:
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
+ except OSError:
+ return
+ index=0
+ while 1:
+ try:
+ version=Utils.winreg.EnumKey(all_versions,index)
+ except OSError:
+ break
+ index+=1
+ if not version_pattern.match(version):
+ continue
+ try:
+ msvc_version=Utils.winreg.OpenKey(all_versions,version)
+ path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
+ except OSError:
+ continue
+ if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
+ targets={}
+ for target,arch in all_msvc_platforms:
+ targets[target]=target_compiler(conf,'wsdk',arch,version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))
+ versions['wsdk '+version[1:]]=targets
+@conf
+def gather_msvc_targets(conf,versions,version,vc_path):
+ targets={}
+ if os.path.isfile(os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat')):
+ for target,realtarget in all_msvc_platforms[::-1]:
+ targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'VC','Auxiliary','Build','vcvarsall.bat'))
+ elif os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
+ for target,realtarget in all_msvc_platforms[::-1]:
+ targets[target]=target_compiler(conf,'msvc',realtarget,version,target,os.path.join(vc_path,'vcvarsall.bat'))
+ elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
+ targets['x86']=target_compiler(conf,'msvc','x86',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))
+ elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
+ targets['x86']=target_compiler(conf,'msvc','x86',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))
+ if targets:
+ versions['msvc %s'%version]=targets
+@conf
+def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
+ for device,platforms in supported_platforms:
+ targets={}
+ for platform,compiler,include,lib in platforms:
+ winCEpath=os.path.join(vc_path,'ce')
+ if not os.path.isdir(winCEpath):
+ continue
+ if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
+ bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]
+ incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
+ libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
+ def combine_common(obj,compiler_env):
+ (common_bindirs,_1,_2)=compiler_env
+ return(bindirs+common_bindirs,incdirs,libdirs)
+ targets[platform]=target_compiler(conf,'msvc',platform,version,'x86',vsvars,combine_common)
+ if targets:
+ versions[device+' '+version]=targets
+@conf
+def gather_winphone_targets(conf,versions,version,vc_path,vsvars):
+ targets={}
+ for target,realtarget in all_msvc_platforms[::-1]:
+ targets[target]=target_compiler(conf,'winphone',realtarget,version,target,vsvars)
+ if targets:
+ versions['winphone '+version]=targets
+@conf
+def gather_vswhere_versions(conf,versions):
+ try:
+ import json
+ except ImportError:
+ Logs.error('Visual Studio 2017 detection requires Python 2.6')
+ return
+ prg_path=os.environ.get('ProgramFiles(x86)',os.environ.get('ProgramFiles','C:\\Program Files (x86)'))
+ vswhere=os.path.join(prg_path,'Microsoft Visual Studio','Installer','vswhere.exe')
+ args=[vswhere,'-products','*','-legacy','-format','json']
+ try:
+ txt=conf.cmd_and_log(args)
+ except Errors.WafError as e:
+ Logs.debug('msvc: vswhere.exe failed %s',e)
+ return
+ if sys.version_info[0]<3:
+ txt=txt.decode(Utils.console_encoding())
+ arr=json.loads(txt)
+ arr.sort(key=lambda x:x['installationVersion'])
+ for entry in arr:
+ ver=entry['installationVersion']
+ ver=str('.'.join(ver.split('.')[:2]))
+ path=str(os.path.abspath(entry['installationPath']))
+ if os.path.exists(path)and('msvc %s'%ver)not in versions:
+ conf.gather_msvc_targets(versions,ver,path)
+@conf
+def gather_msvc_versions(conf,versions):
+ vc_paths=[]
+ for(v,version,reg)in gather_msvc_detected_versions():
+ try:
+ try:
+ msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
+ except OSError:
+ msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
+ path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
+ except OSError:
+ try:
+ msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,"SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
+ path,type=Utils.winreg.QueryValueEx(msvc_version,version)
+ except OSError:
+ continue
+ else:
+ vc_paths.append((version,os.path.abspath(str(path))))
+ continue
+ else:
+ vc_paths.append((version,os.path.abspath(str(path))))
+ wince_supported_platforms=gather_wince_supported_platforms()
+ for version,vc_path in vc_paths:
+ vs_path=os.path.dirname(vc_path)
+ vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat')
+ if wince_supported_platforms and os.path.isfile(vsvars):
+ conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
+ for version,vc_path in vc_paths:
+ vs_path=os.path.dirname(vc_path)
+ vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat')
+ if os.path.isfile(vsvars):
+ conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars)
+ break
+ for version,vc_path in vc_paths:
+ vs_path=os.path.dirname(vc_path)
+ conf.gather_msvc_targets(versions,version,vc_path)
+@conf
+def gather_icl_versions(conf,versions):
+ version_pattern=re.compile('^...?.?\....?.?')
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
+ except OSError:
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
+ except OSError:
+ return
+ index=0
+ while 1:
+ try:
+ version=Utils.winreg.EnumKey(all_versions,index)
+ except OSError:
+ break
+ index+=1
+ if not version_pattern.match(version):
+ continue
+ targets={}
+ for target,arch in all_icl_platforms:
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
+ try:
+ Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
+ icl_version=Utils.winreg.OpenKey(all_versions,version)
+ path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+ except OSError:
+ pass
+ else:
+ batch_file=os.path.join(path,'bin','iclvars.bat')
+ if os.path.isfile(batch_file):
+ targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+ for target,arch in all_icl_platforms:
+ try:
+ icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
+ path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+ except OSError:
+ continue
+ else:
+ batch_file=os.path.join(path,'bin','iclvars.bat')
+ if os.path.isfile(batch_file):
+ targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+ major=version[0:2]
+ versions['intel '+major]=targets
+@conf
+def gather_intel_composer_versions(conf,versions):
+ version_pattern=re.compile('^...?.?\...?.?.?')
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
+ except OSError:
+ try:
+ all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
+ except OSError:
+ return
+ index=0
+ while 1:
+ try:
+ version=Utils.winreg.EnumKey(all_versions,index)
+ except OSError:
+ break
+ index+=1
+ if not version_pattern.match(version):
+ continue
+ targets={}
+ for target,arch in all_icl_platforms:
+ if target=='intel64':
+ targetDir='EM64T_NATIVE'
+ else:
+ targetDir=target
+ try:
+ try:
+ defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
+ except OSError:
+ if targetDir=='EM64T_NATIVE':
+ defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
+ else:
+ raise
+ uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
+ Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
+ icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
+ path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
+ except OSError:
+ pass
+ else:
+ batch_file=os.path.join(path,'bin','iclvars.bat')
+ if os.path.isfile(batch_file):
+ targets[target]=target_compiler(conf,'intel',arch,version,target,batch_file)
+ compilervars_warning_attr='_compilervars_warning_key'
+ if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
+ setattr(conf,compilervars_warning_attr,False)
+ patch_url='http://software.intel.com/en-us/forums/topic/328487'
+ compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
+ for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'):
+ if vscomntool in os.environ:
+ vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe'
+ dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe'
+ if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
+ Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r ''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
+ major=version[0:2]
+ versions['intel '+major]=targets
+@conf
+def detect_msvc(self):
+ return self.setup_msvc(self.get_msvc_versions())
+@conf
+def get_msvc_versions(self):
+ dct=Utils.ordered_iter_dict()
+ self.gather_icl_versions(dct)
+ self.gather_intel_composer_versions(dct)
+ self.gather_wsdk_versions(dct)
+ self.gather_msvc_versions(dct)
+ self.gather_vswhere_versions(dct)
+ Logs.debug('msvc: detected versions %r',list(dct.keys()))
+ return dct
+@conf
+def find_lt_names_msvc(self,libname,is_static=False):
+ lt_names=['lib%s.la'%libname,'%s.la'%libname,]
+ for path in self.env.LIBPATH:
+ for la in lt_names:
+ laf=os.path.join(path,la)
+ dll=None
+ if os.path.exists(laf):
+ ltdict=Utils.read_la_file(laf)
+ lt_libdir=None
+ if ltdict.get('libdir',''):
+ lt_libdir=ltdict['libdir']
+ if not is_static and ltdict.get('library_names',''):
+ dllnames=ltdict['library_names'].split()
+ dll=dllnames[0].lower()
+ dll=re.sub('\.dll$','',dll)
+ return(lt_libdir,dll,False)
+ elif ltdict.get('old_library',''):
+ olib=ltdict['old_library']
+ if os.path.exists(os.path.join(path,olib)):
+ return(path,olib,True)
+ elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
+ return(lt_libdir,olib,True)
+ else:
+ return(None,olib,True)
+ else:
+ raise self.errors.WafError('invalid libtool object file: %s'%laf)
+ return(None,None,None)
+@conf
+def libname_msvc(self,libname,is_static=False):
+ lib=libname.lower()
+ lib=re.sub('\.lib$','',lib)
+ if lib in g_msvc_systemlibs:
+ return lib
+ lib=re.sub('^lib','',lib)
+ if lib=='m':
+ return None
+ (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
+ if lt_path!=None and lt_libname!=None:
+ if lt_static:
+ return os.path.join(lt_path,lt_libname)
+ if lt_path!=None:
+ _libpaths=[lt_path]+self.env.LIBPATH
+ else:
+ _libpaths=self.env.LIBPATH
+ static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
+ dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
+ libnames=static_libs
+ if not is_static:
+ libnames=dynamic_libs+static_libs
+ for path in _libpaths:
+ for libn in libnames:
+ if os.path.exists(os.path.join(path,libn)):
+ Logs.debug('msvc: lib found: %s',os.path.join(path,libn))
+ return re.sub('\.lib$','',libn)
+ self.fatal('The library %r could not be found'%libname)
+ return re.sub('\.lib$','',libname)
+@conf
+def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
+ libn=self.libname_msvc(libname,is_static)
+ if not uselib_store:
+ uselib_store=libname.upper()
+ if False and is_static:
+ self.env['STLIB_'+uselib_store]=[libn]
+ else:
+ self.env['LIB_'+uselib_store]=[libn]
+@conf
+def check_libs_msvc(self,libnames,is_static=False):
+ for libname in Utils.to_list(libnames):
+ self.check_lib_msvc(libname,is_static)
+def configure(conf):
+ conf.autodetect(True)
+ conf.find_msvc()
+ conf.msvc_common_flags()
+ conf.cc_load_tools()
+ conf.cxx_load_tools()
+ conf.cc_add_flags()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
+ conf.visual_studio_add_flags()
+@conf
+def no_autodetect(conf):
+ conf.env.NO_MSVC_DETECT=1
+ configure(conf)
+@conf
+def autodetect(conf,arch=False):
+ v=conf.env
+ if v.NO_MSVC_DETECT:
+ return
+ compiler,version,path,includes,libdirs,cpu=conf.detect_msvc()
+ if arch:
+ v.DEST_CPU=cpu
+ v.PATH=path
+ v.INCLUDES=includes
+ v.LIBPATH=libdirs
+ v.MSVC_COMPILER=compiler
+ try:
+ v.MSVC_VERSION=float(version)
+ except ValueError:
+ v.MSVC_VERSION=float(version[:-3])
+def _get_prog_names(conf,compiler):
+ if compiler=='intel':
+ compiler_name='ICL'
+ linker_name='XILINK'
+ lib_name='XILIB'
+ else:
+ compiler_name='CL'
+ linker_name='LINK'
+ lib_name='LIB'
+ return compiler_name,linker_name,lib_name
+@conf
+def find_msvc(conf):
+ if sys.platform=='cygwin':
+ conf.fatal('MSVC module does not work under cygwin Python!')
+ v=conf.env
+ path=v.PATH
+ compiler=v.MSVC_COMPILER
+ version=v.MSVC_VERSION
+ compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
+ v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
+ cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
+ env=dict(conf.environ)
+ if path:
+ env.update(PATH=';'.join(path))
+ if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
+ conf.fatal('the msvc compiler could not be identified')
+ v.CC=v.CXX=cxx
+ v.CC_NAME=v.CXX_NAME='msvc'
+ if not v.LINK_CXX:
+ conf.find_program(linker_name,path_list=path,errmsg='%s was not found (linker)'%linker_name,var='LINK_CXX')
+ if not v.LINK_CC:
+ v.LINK_CC=v.LINK_CXX
+ if not v.AR:
+ stliblink=conf.find_program(lib_name,path_list=path,var='AR')
+ if not stliblink:
+ return
+ v.ARFLAGS=['/nologo']
+ if v.MSVC_MANIFEST:
+ conf.find_program('MT',path_list=path,var='MT')
+ v.MTFLAGS=['/nologo']
+ try:
+ conf.load('winres')
+ except Errors.ConfigurationError:
+ Logs.warn('Resource compiler not found. Compiling resource file is disabled')
+@conf
+def visual_studio_add_flags(self):
+ v=self.env
+ if self.environ.get('INCLUDE'):
+ v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
+ if self.environ.get('LIB'):
+ v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
+@conf
+def msvc_common_flags(conf):
+ v=conf.env
+ v.DEST_BINFMT='pe'
+ v.append_value('CFLAGS',['/nologo'])
+ v.append_value('CXXFLAGS',['/nologo'])
+ v.append_value('LINKFLAGS',['/nologo'])
+ v.DEFINES_ST='/D%s'
+ v.CC_SRC_F=''
+ v.CC_TGT_F=['/c','/Fo']
+ v.CXX_SRC_F=''
+ v.CXX_TGT_F=['/c','/Fo']
+ if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6):
+ v.CC_TGT_F=['/FC']+v.CC_TGT_F
+ v.CXX_TGT_F=['/FC']+v.CXX_TGT_F
+ v.CPPPATH_ST='/I%s'
+ v.AR_TGT_F=v.CCLNK_TGT_F=v.CXXLNK_TGT_F='/OUT:'
+ v.CFLAGS_CRT_MULTITHREADED=v.CXXFLAGS_CRT_MULTITHREADED=['/MT']
+ v.CFLAGS_CRT_MULTITHREADED_DLL=v.CXXFLAGS_CRT_MULTITHREADED_DLL=['/MD']
+ v.CFLAGS_CRT_MULTITHREADED_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DBG=['/MTd']
+ v.CFLAGS_CRT_MULTITHREADED_DLL_DBG=v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG=['/MDd']
+ v.LIB_ST='%s.lib'
+ v.LIBPATH_ST='/LIBPATH:%s'
+ v.STLIB_ST='%s.lib'
+ v.STLIBPATH_ST='/LIBPATH:%s'
+ if v.MSVC_MANIFEST:
+ v.append_value('LINKFLAGS',['/MANIFEST'])
+ v.CFLAGS_cshlib=[]
+ v.CXXFLAGS_cxxshlib=[]
+ v.LINKFLAGS_cshlib=v.LINKFLAGS_cxxshlib=['/DLL']
+ v.cshlib_PATTERN=v.cxxshlib_PATTERN='%s.dll'
+ v.implib_PATTERN='%s.lib'
+ v.IMPLIB_ST='/IMPLIB:%s'
+ v.LINKFLAGS_cstlib=[]
+ v.cstlib_PATTERN=v.cxxstlib_PATTERN='%s.lib'
+ v.cprogram_PATTERN=v.cxxprogram_PATTERN='%s.exe'
+ v.def_PATTERN='/def:%s'
+@after_method('apply_link')
+@feature('c','cxx')
+def apply_flags_msvc(self):
+ if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
+ return
+ is_static=isinstance(self.link_task,ccroot.stlink_task)
+ subsystem=getattr(self,'subsystem','')
+ if subsystem:
+ subsystem='/subsystem:%s'%subsystem
+ flags=is_static and'ARFLAGS'or'LINKFLAGS'
+ self.env.append_value(flags,subsystem)
+ if not is_static:
+ for f in self.env.LINKFLAGS:
+ d=f.lower()
+ if d[1:]=='debug':
+ pdbnode=self.link_task.outputs[0].change_ext('.pdb')
+ self.link_task.outputs.append(pdbnode)
+ if getattr(self,'install_task',None):
+ self.pdb_install_task=self.add_install_files(install_to=self.install_task.install_to,install_from=pdbnode)
+ break
+@feature('cprogram','cshlib','cxxprogram','cxxshlib')
+@after_method('apply_link')
+def apply_manifest(self):
+ if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
+ out_node=self.link_task.outputs[0]
+ man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
+ self.link_task.outputs.append(man_node)
+ self.env.DO_MANIFEST=True
+def make_winapp(self,family):
+ append=self.env.append_unique
+ append('DEFINES','WINAPI_FAMILY=%s'%family)
+ append('CXXFLAGS',['/ZW','/TP'])
+ for lib_path in self.env.LIBPATH:
+ append('CXXFLAGS','/AI%s'%lib_path)
+@feature('winphoneapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_winphone_app(self):
+ make_winapp(self,'WINAPI_FAMILY_PHONE_APP')
+ self.env.append_unique('LINKFLAGS',['/NODEFAULTLIB:ole32.lib','PhoneAppModelHost.lib'])
+@feature('winapp')
+@after_method('process_use')
+@after_method('propagate_uselib_vars')
+def make_windows_app(self):
+ make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP')
diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py
new file mode 100644
index 0000000..a107298
--- /dev/null
+++ b/waflib/Tools/nasm.py
@@ -0,0 +1,16 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os
+import waflib.Tools.asm
+from waflib.TaskGen import feature
+@feature('asm')
+def apply_nasm_vars(self):
+ self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[])))
+def configure(conf):
+ conf.find_program(['nasm','yasm'],var='AS')
+ conf.env.AS_TGT_F=['-o']
+ conf.env.ASLNK_TGT_F=['-o']
+ conf.load('asm')
+ conf.env.ASMPATH_ST='-I%s'+os.sep
diff --git a/waflib/Tools/nobuild.py b/waflib/Tools/nobuild.py
new file mode 100644
index 0000000..beb2217
--- /dev/null
+++ b/waflib/Tools/nobuild.py
@@ -0,0 +1,11 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Task
+def build(bld):
+ def run(self):
+ for x in self.outputs:
+ x.write('')
+ for(name,cls)in Task.classes.items():
+ cls.run=run
diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py
new file mode 100644
index 0000000..ee86113
--- /dev/null
+++ b/waflib/Tools/perl.py
@@ -0,0 +1,85 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os
+from waflib import Task,Options,Utils,Errors
+from waflib.Configure import conf
+from waflib.TaskGen import extension,feature,before_method
+@before_method('apply_incpaths','apply_link','propagate_uselib_vars')
+@feature('perlext')
+def init_perlext(self):
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ if not'PERLEXT'in self.uselib:
+ self.uselib.append('PERLEXT')
+ self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.perlext_PATTERN
+@extension('.xs')
+def xsubpp_file(self,node):
+ outnode=node.change_ext('.c')
+ self.create_task('xsubpp',node,outnode)
+ self.source.append(outnode)
+class xsubpp(Task.Task):
+ run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
+ color='BLUE'
+ ext_out=['.h']
+@conf
+def check_perl_version(self,minver=None):
+ res=True
+ if minver:
+ cver='.'.join(map(str,minver))
+ else:
+ cver=''
+ self.start_msg('Checking for minimum perl version %s'%cver)
+ perl=self.find_program('perl',var='PERL',value=getattr(Options.options,'perlbinary',None))
+ version=self.cmd_and_log(perl+["-e",'printf \"%vd\", $^V'])
+ if not version:
+ res=False
+ version="Unknown"
+ elif not minver is None:
+ ver=tuple(map(int,version.split(".")))
+ if ver<minver:
+ res=False
+ self.end_msg(version,color=res and'GREEN'or'YELLOW')
+ return res
+@conf
+def check_perl_module(self,module):
+ cmd=self.env.PERL+['-e','use %s'%module]
+ self.start_msg('perl module %s'%module)
+ try:
+ r=self.cmd_and_log(cmd)
+ except Errors.WafError:
+ self.end_msg(False)
+ return None
+ self.end_msg(r or True)
+ return r
+@conf
+def check_perl_ext_devel(self):
+ env=self.env
+ perl=env.PERL
+ if not perl:
+ self.fatal('find perl first')
+ def cmd_perl_config(s):
+ return perl+['-MConfig','-e','print \"%s\"'%s]
+ def cfg_str(cfg):
+ return self.cmd_and_log(cmd_perl_config(cfg))
+ def cfg_lst(cfg):
+ return Utils.to_list(cfg_str(cfg))
+ def find_xsubpp():
+ for var in('privlib','vendorlib'):
+ xsubpp=cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}'%var)
+ if xsubpp and os.path.isfile(xsubpp[0]):
+ return xsubpp
+ return self.find_program('xsubpp')
+ env.LINKFLAGS_PERLEXT=cfg_lst('$Config{lddlflags}')
+ env.INCLUDES_PERLEXT=cfg_lst('$Config{archlib}/CORE')
+ env.CFLAGS_PERLEXT=cfg_lst('$Config{ccflags} $Config{cccdlflags}')
+ env.EXTUTILS_TYPEMAP=cfg_lst('$Config{privlib}/ExtUtils/typemap')
+ env.XSUBPP=find_xsubpp()
+ if not getattr(Options.options,'perlarchdir',None):
+ env.ARCHDIR_PERL=cfg_str('$Config{sitearch}')
+ else:
+ env.ARCHDIR_PERL=getattr(Options.options,'perlarchdir')
+ env.perlext_PATTERN='%s.'+cfg_str('$Config{dlext}')
+def options(opt):
+ opt.add_option('--with-perl-binary',type='string',dest='perlbinary',help='Specify alternate perl binary',default=None)
+ opt.add_option('--with-perl-archdir',type='string',dest='perlarchdir',help='Specify directory where to install arch specific files',default=None)
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
new file mode 100644
index 0000000..3b7947f
--- /dev/null
+++ b/waflib/Tools/python.py
@@ -0,0 +1,410 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,sys
+from waflib import Errors,Logs,Node,Options,Task,Utils
+from waflib.TaskGen import extension,before_method,after_method,feature
+from waflib.Configure import conf
+FRAG='''
+#include <Python.h>
+#ifdef __cplusplus
+extern "C" {
+#endif
+ void Py_Initialize(void);
+ void Py_Finalize(void);
+#ifdef __cplusplus
+}
+#endif
+int main(int argc, char **argv)
+{
+ (void)argc; (void)argv;
+ Py_Initialize();
+ Py_Finalize();
+ return 0;
+}
+'''
+INST='''
+import sys, py_compile
+py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
+'''
+DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib']
+@before_method('process_source')
+@feature('py')
+def feature_py(self):
+ self.install_path=getattr(self,'install_path','${PYTHONDIR}')
+ install_from=getattr(self,'install_from',None)
+ if install_from and not isinstance(install_from,Node.Node):
+ install_from=self.path.find_dir(install_from)
+ self.install_from=install_from
+ ver=self.env.PYTHON_VERSION
+ if not ver:
+ self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
+ if int(ver.replace('.',''))>31:
+ self.install_32=True
+@extension('.py')
+def process_py(self,node):
+ assert(hasattr(self,'install_path')),'add features="py"'
+ if self.install_path:
+ if self.install_from:
+ self.add_install_files(install_to=self.install_path,install_from=node,cwd=self.install_from,relative_trick=True)
+ else:
+ self.add_install_files(install_to=self.install_path,install_from=node,relative_trick=True)
+ lst=[]
+ if self.env.PYC:
+ lst.append('pyc')
+ if self.env.PYO:
+ lst.append('pyo')
+ if self.install_path:
+ if self.install_from:
+ pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env)
+ else:
+ pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env)
+ else:
+ pyd=node.abspath()
+ for ext in lst:
+ if self.env.PYTAG and not self.env.NOPYCACHE:
+ name=node.name[:-3]
+ pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext))
+ pyobj.parent.mkdir()
+ else:
+ pyobj=node.change_ext(".%s"%ext)
+ tsk=self.create_task(ext,node,pyobj)
+ tsk.pyd=pyd
+ if self.install_path:
+ self.add_install_files(install_to=os.path.dirname(pyd),install_from=pyobj,cwd=node.parent.get_bld(),relative_trick=True)
+class pyc(Task.Task):
+ color='PINK'
+ def __str__(self):
+ node=self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
+ def run(self):
+ cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+ ret=self.generator.bld.exec_command(cmd)
+ return ret
+class pyo(Task.Task):
+ color='PINK'
+ def __str__(self):
+ node=self.outputs[0]
+ return node.path_from(node.ctx.launch_node())
+ def run(self):
+ cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd]
+ ret=self.generator.bld.exec_command(cmd)
+ return ret
+@feature('pyext')
+@before_method('propagate_uselib_vars','apply_link')
+@after_method('apply_bundle')
+def init_pyext(self):
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ if not'PYEXT'in self.uselib:
+ self.uselib.append('PYEXT')
+ self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN
+ self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN
+ try:
+ if not self.install_path:
+ return
+ except AttributeError:
+ self.install_path='${PYTHONARCHDIR}'
+@feature('pyext')
+@before_method('apply_link','apply_bundle')
+def set_bundle(self):
+ if Utils.unversioned_sys_platform()=='darwin':
+ self.mac_bundle=True
+@before_method('propagate_uselib_vars')
+@feature('pyembed')
+def init_pyembed(self):
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ if not'PYEMBED'in self.uselib:
+ self.uselib.append('PYEMBED')
+@conf
+def get_python_variables(self,variables,imports=None):
+ if not imports:
+ try:
+ imports=self.python_imports
+ except AttributeError:
+ imports=DISTUTILS_IMP
+ program=list(imports)
+ program.append('')
+ for v in variables:
+ program.append("print(repr(%s))"%v)
+ os_env=dict(os.environ)
+ try:
+ del os_env['MACOSX_DEPLOYMENT_TARGET']
+ except KeyError:
+ pass
+ try:
+ out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env)
+ except Errors.WafError:
+ self.fatal('The distutils module is unusable: install "python-devel"?')
+ self.to_log(out)
+ return_values=[]
+ for s in out.splitlines():
+ s=s.strip()
+ if not s:
+ continue
+ if s=='None':
+ return_values.append(None)
+ elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'):
+ return_values.append(eval(s))
+ elif s[0].isdigit():
+ return_values.append(int(s))
+ else:break
+ return return_values
+@conf
+def test_pyembed(self,mode,msg='Testing pyembed configuration'):
+ self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram pyembed'%(mode,mode))
+@conf
+def test_pyext(self,mode,msg='Testing pyext configuration'):
+ self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode))
+@conf
+def python_cross_compile(self,features='pyembed pyext'):
+ features=Utils.to_list(features)
+ if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ):
+ return False
+ for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
+ if not x in self.environ:
+ self.fatal('Please set %s in the os environment'%x)
+ else:
+ self.env[x]=self.environ[x]
+ xx=self.env.CXX_NAME and'cxx'or'c'
+ if'pyext'in features:
+ flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS'))
+ if flags is None:
+ self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
+ else:
+ self.parse_flags(flags,'PYEXT')
+ self.test_pyext(xx)
+ if'pyembed'in features:
+ flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS'))
+ if flags is None:
+ self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
+ else:
+ self.parse_flags(flags,'PYEMBED')
+ self.test_pyembed(xx)
+ return True
+@conf
+def check_python_headers(conf,features='pyembed pyext'):
+ features=Utils.to_list(features)
+ assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'"
+ env=conf.env
+ if not env.CC_NAME and not env.CXX_NAME:
+ conf.fatal('load a compiler first (gcc, g++, ..)')
+ if conf.python_cross_compile(features):
+ return
+ if not env.PYTHON_VERSION:
+ conf.check_python_version()
+ pybin=env.PYTHON
+ if not pybin:
+ conf.fatal('Could not find the python executable')
+ v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
+ try:
+ lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v])
+ except RuntimeError:
+ conf.fatal("Python development headers not found (-v for details).")
+ vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)]
+ conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals)))
+ dct=dict(zip(v,lst))
+ x='MACOSX_DEPLOYMENT_TARGET'
+ if dct[x]:
+ env[x]=conf.environ[x]=dct[x]
+ env.pyext_PATTERN='%s'+dct['SO']
+ num='.'.join(env.PYTHON_VERSION.split('.')[:2])
+ conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False)
+ if env.PYTHON_CONFIG:
+ all_flags=[['--cflags','--libs','--ldflags']]
+ if sys.hexversion<0x2070000:
+ all_flags=[[k]for k in all_flags[0]]
+ xx=env.CXX_NAME and'cxx'or'c'
+ if'pyembed'in features:
+ for flags in all_flags:
+ conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags)
+ try:
+ conf.test_pyembed(xx)
+ except conf.errors.ConfigurationError:
+ if dct['Py_ENABLE_SHARED']and dct['LIBDIR']:
+ env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']])
+ conf.test_pyembed(xx)
+ else:
+ raise
+ if'pyext'in features:
+ for flags in all_flags:
+ conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags)
+ try:
+ conf.test_pyext(xx)
+ except conf.errors.ConfigurationError:
+ if dct['Py_ENABLE_SHARED']and dct['LIBDIR']:
+ env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']])
+ conf.test_pyext(xx)
+ else:
+ raise
+ conf.define('HAVE_PYTHON_H',1)
+ return
+ all_flags=dct['LDFLAGS']+' '+dct['CFLAGS']
+ conf.parse_flags(all_flags,'PYEMBED')
+ all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS']
+ conf.parse_flags(all_flags,'PYEXT')
+ result=None
+ if not dct["LDVERSION"]:
+ dct["LDVERSION"]=env.PYTHON_VERSION
+ for name in('python'+dct['LDVERSION'],'python'+env.PYTHON_VERSION+'m','python'+env.PYTHON_VERSION.replace('.','')):
+ if not result and env.LIBPATH_PYEMBED:
+ path=env.LIBPATH_PYEMBED
+ conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path)
+ result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name)
+ if not result and dct['LIBDIR']:
+ path=[dct['LIBDIR']]
+ conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path)
+ result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name)
+ if not result and dct['LIBPL']:
+ path=[dct['LIBPL']]
+ conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
+ result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name)
+ if not result:
+ path=[os.path.join(dct['prefix'],"libs")]
+ conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
+ result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name)
+ if result:
+ break
+ if result:
+ env.LIBPATH_PYEMBED=path
+ env.append_value('LIB_PYEMBED',[name])
+ else:
+ conf.to_log("\n\n### LIB NOT FOUND\n")
+ if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
+ env.LIBPATH_PYEXT=env.LIBPATH_PYEMBED
+ env.LIB_PYEXT=env.LIB_PYEMBED
+ conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],))
+ env.INCLUDES_PYEXT=[dct['INCLUDEPY']]
+ env.INCLUDES_PYEMBED=[dct['INCLUDEPY']]
+ if env.CC_NAME=='gcc':
+ env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing'])
+ env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing'])
+ if env.CXX_NAME=='gcc':
+ env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing'])
+ env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing'])
+ if env.CC_NAME=="msvc":
+ from distutils.msvccompiler import MSVCCompiler
+ dist_compiler=MSVCCompiler()
+ dist_compiler.initialize()
+ env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options)
+ env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options)
+ env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared)
+ conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!')
+@conf
+def check_python_version(conf,minver=None):
+ assert minver is None or isinstance(minver,tuple)
+ pybin=conf.env.PYTHON
+ if not pybin:
+ conf.fatal('could not find the python executable')
+ cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))']
+ Logs.debug('python: Running python command %r',cmd)
+ lines=conf.cmd_and_log(cmd).split()
+ assert len(lines)==5,"found %r lines, expected 5: %r"%(len(lines),lines)
+ pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4]))
+ result=(minver is None)or(pyver_tuple>=minver)
+ if result:
+ pyver='.'.join([str(x)for x in pyver_tuple[:2]])
+ conf.env.PYTHON_VERSION=pyver
+ if'PYTHONDIR'in conf.env:
+ pydir=conf.env.PYTHONDIR
+ elif'PYTHONDIR'in conf.environ:
+ pydir=conf.environ['PYTHONDIR']
+ else:
+ if Utils.is_win32:
+ (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"])
+ else:
+ python_LIBDEST=None
+ (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX])
+ if python_LIBDEST is None:
+ if conf.env.LIBDIR:
+ python_LIBDEST=os.path.join(conf.env.LIBDIR,'python'+pyver)
+ else:
+ python_LIBDEST=os.path.join(conf.env.PREFIX,'lib','python'+pyver)
+ if'PYTHONARCHDIR'in conf.env:
+ pyarchdir=conf.env.PYTHONARCHDIR
+ elif'PYTHONARCHDIR'in conf.environ:
+ pyarchdir=conf.environ['PYTHONARCHDIR']
+ else:
+ (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX])
+ if not pyarchdir:
+ pyarchdir=pydir
+ if hasattr(conf,'define'):
+ conf.define('PYTHONDIR',pydir)
+ conf.define('PYTHONARCHDIR',pyarchdir)
+ conf.env.PYTHONDIR=pydir
+ conf.env.PYTHONARCHDIR=pyarchdir
+ pyver_full='.'.join(map(str,pyver_tuple[:3]))
+ if minver is None:
+ conf.msg('Checking for python version',pyver_full)
+ else:
+ minver_str='.'.join(map(str,minver))
+ conf.msg('Checking for python version >= %s'%(minver_str,),pyver_full,color=result and'GREEN'or'YELLOW')
+ if not result:
+ conf.fatal('The python version is too old, expecting %r'%(minver,))
+PYTHON_MODULE_TEMPLATE='''
+import %s as current_module
+version = getattr(current_module, '__version__', None)
+if version is not None:
+ print(str(version))
+else:
+ print('unknown version')
+'''
+@conf
+def check_python_module(conf,module_name,condition=''):
+ msg="Checking for python module %r"%module_name
+ if condition:
+ msg='%s (%s)'%(msg,condition)
+ conf.start_msg(msg)
+ try:
+ ret=conf.cmd_and_log(conf.env.PYTHON+['-c',PYTHON_MODULE_TEMPLATE%module_name])
+ except Errors.WafError:
+ conf.end_msg(False)
+ conf.fatal('Could not find the python module %r'%module_name)
+ ret=ret.strip()
+ if condition:
+ conf.end_msg(ret)
+ if ret=='unknown version':
+ conf.fatal('Could not check the %s version'%module_name)
+ from distutils.version import LooseVersion
+ def num(*k):
+ if isinstance(k[0],int):
+ return LooseVersion('.'.join([str(x)for x in k]))
+ else:
+ return LooseVersion(k[0])
+ d={'num':num,'ver':LooseVersion(ret)}
+ ev=eval(condition,{},d)
+ if not ev:
+ conf.fatal('The %s version does not satisfy the requirements'%module_name)
+ else:
+ if ret=='unknown version':
+ conf.end_msg(True)
+ else:
+ conf.end_msg(ret)
+def configure(conf):
+ v=conf.env
+ if getattr(Options.options,'pythondir',None):
+ v.PYTHONDIR=Options.options.pythondir
+ if getattr(Options.options,'pythonarchdir',None):
+ v.PYTHONARCHDIR=Options.options.pythonarchdir
+ if getattr(Options.options,'nopycache',None):
+ v.NOPYCACHE=Options.options.nopycache
+ if not v.PYTHON:
+ v.PYTHON=[getattr(Options.options,'python',None)or sys.executable]
+ v.PYTHON=Utils.to_list(v.PYTHON)
+ conf.find_program('python',var='PYTHON')
+ v.PYFLAGS=''
+ v.PYFLAGS_OPT='-O'
+ v.PYC=getattr(Options.options,'pyc',1)
+ v.PYO=getattr(Options.options,'pyo',1)
+ try:
+ v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip()
+ except Errors.WafError:
+ pass
+def options(opt):
+ pyopt=opt.add_option_group("Python Options")
+ pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
+ pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
+ pyopt.add_option('--nopycache',dest='nopycache',action='store_true',help='Do not use __pycache__ directory to install objects [Default:auto]')
+ pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable)
+ pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)')
+ pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
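A typical wscript fragment exercising the py and pyext features above (a sketch; the module and file names are placeholders):

def options(opt):
    opt.load('compiler_c python')

def configure(conf):
    conf.load('compiler_c python')
    conf.check_python_version((2, 7, 0))
    conf.check_python_headers()   # only needed when building pyext/pyembed code

def build(bld):
    # pure-python files are installed (with .pyc/.pyo) under ${PYTHONDIR}
    bld(features='py', source=bld.path.ant_glob('lib/**/*.py'), install_from='lib')
    # a C extension module, installed under ${PYTHONARCHDIR}
    bld(features='c cshlib pyext', source='spam.c', target='spam')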
diff --git a/waflib/Tools/qt5.py b/waflib/Tools/qt5.py
new file mode 100644
index 0000000..6f5f136
--- /dev/null
+++ b/waflib/Tools/qt5.py
@@ -0,0 +1,497 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from __future__ import with_statement
+try:
+ from xml.sax import make_parser
+ from xml.sax.handler import ContentHandler
+except ImportError:
+ has_xml=False
+ ContentHandler=object
+else:
+ has_xml=True
+import os,sys,re
+from waflib.Tools import cxx
+from waflib import Task,Utils,Options,Errors,Context
+from waflib.TaskGen import feature,after_method,extension,before_method
+from waflib.Configure import conf
+from waflib import Logs
+MOC_H=['.h','.hpp','.hxx','.hh']
+EXT_RCC=['.qrc']
+EXT_UI=['.ui']
+EXT_QT5=['.cpp','.cc','.cxx','.C']
+class qxx(Task.classes['cxx']):
+ def __init__(self,*k,**kw):
+ Task.Task.__init__(self,*k,**kw)
+ self.moc_done=0
+ def runnable_status(self):
+ if self.moc_done:
+ return Task.Task.runnable_status(self)
+ else:
+ for t in self.run_after:
+ if not t.hasrun:
+ return Task.ASK_LATER
+ self.add_moc_tasks()
+ return Task.Task.runnable_status(self)
+ def create_moc_task(self,h_node,m_node):
+ try:
+ moc_cache=self.generator.bld.moc_cache
+ except AttributeError:
+ moc_cache=self.generator.bld.moc_cache={}
+ try:
+ return moc_cache[h_node]
+ except KeyError:
+ tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator)
+ tsk.set_inputs(h_node)
+ tsk.set_outputs(m_node)
+ tsk.env.append_unique('MOC_FLAGS','-i')
+ if self.generator:
+ self.generator.tasks.append(tsk)
+ gen=self.generator.bld.producer
+ gen.outstanding.append(tsk)
+ gen.total+=1
+ return tsk
+ else:
+ delattr(self,'cache_sig')
+ def add_moc_tasks(self):
+ node=self.inputs[0]
+ bld=self.generator.bld
+ try:
+ self.signature()
+ except KeyError:
+ pass
+ else:
+ delattr(self,'cache_sig')
+ include_nodes=[node.parent]+self.generator.includes_nodes
+ moctasks=[]
+ mocfiles=set()
+ for d in bld.raw_deps.get(self.uid(),[]):
+ if not d.endswith('.moc'):
+ continue
+ if d in mocfiles:
+ continue
+ mocfiles.add(d)
+ h_node=None
+ base2=d[:-4]
+ prefix=node.name[:node.name.rfind('.')]
+ if base2==prefix:
+ h_node=node
+ else:
+ for x in include_nodes:
+ for e in MOC_H:
+ h_node=x.find_node(base2+e)
+ if h_node:
+ break
+ else:
+ continue
+ break
+ if h_node:
+ m_node=h_node.change_ext('.moc')
+ else:
+ raise Errors.WafError('No source found for %r which is a moc file'%d)
+ task=self.create_moc_task(h_node,m_node)
+ moctasks.append(task)
+ self.run_after.update(set(moctasks))
+ self.moc_done=1
+class trans_update(Task.Task):
+ run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}'
+ color='BLUE'
+class XMLHandler(ContentHandler):
+ def __init__(self):
+ ContentHandler.__init__(self)
+ self.buf=[]
+ self.files=[]
+ def startElement(self,name,attrs):
+ if name=='file':
+ self.buf=[]
+ def endElement(self,name):
+ if name=='file':
+ self.files.append(str(''.join(self.buf)))
+ def characters(self,cars):
+ self.buf.append(cars)
+@extension(*EXT_RCC)
+def create_rcc_task(self,node):
+ rcnode=node.change_ext('_rc.%d.cpp'%self.idx)
+ self.create_task('rcc',node,rcnode)
+ cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o'))
+ try:
+ self.compiled_tasks.append(cpptask)
+ except AttributeError:
+ self.compiled_tasks=[cpptask]
+ return cpptask
+@extension(*EXT_UI)
+def create_uic_task(self,node):
+ try:
+ uic_cache=self.bld.uic_cache
+ except AttributeError:
+ uic_cache=self.bld.uic_cache={}
+ if node not in uic_cache:
+ uictask=uic_cache[node]=self.create_task('ui5',node)
+ uictask.outputs=[node.parent.find_or_declare(self.env.ui_PATTERN%node.name[:-3])]
+@extension('.ts')
+def add_lang(self,node):
+ self.lang=self.to_list(getattr(self,'lang',[]))+[node]
+@feature('qt5')
+@before_method('process_source')
+def process_mocs(self):
+ lst=self.to_nodes(getattr(self,'moc',[]))
+ self.source=self.to_list(getattr(self,'source',[]))
+ for x in lst:
+ prefix=x.name[:x.name.rfind('.')]
+ moc_target='moc_%s.%d.cpp'%(prefix,self.idx)
+ moc_node=x.parent.find_or_declare(moc_target)
+ self.source.append(moc_node)
+ self.create_task('moc',x,moc_node)
+@feature('qt5')
+@after_method('apply_link')
+def apply_qt5(self):
+ if getattr(self,'lang',None):
+ qmtasks=[]
+ for x in self.to_list(self.lang):
+ if isinstance(x,str):
+ x=self.path.find_resource(x+'.ts')
+ qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.%d.qm'%self.idx)))
+ if getattr(self,'update',None)and Options.options.trans_qt5:
+ cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
+ for x in qmtasks:
+ self.create_task('trans_update',cxxnodes,x.inputs)
+ if getattr(self,'langname',None):
+ qmnodes=[x.outputs[0]for x in qmtasks]
+ rcnode=self.langname
+ if isinstance(rcnode,str):
+ rcnode=self.path.find_or_declare(rcnode+('.%d.qrc'%self.idx))
+ t=self.create_task('qm2rcc',qmnodes,rcnode)
+ k=create_rcc_task(self,t.outputs[0])
+ self.link_task.inputs.append(k.outputs[0])
+ lst=[]
+ for flag in self.to_list(self.env.CXXFLAGS):
+ if len(flag)<2:
+ continue
+ f=flag[0:2]
+ if f in('-D','-I','/D','/I'):
+ if(f[0]=='/'):
+ lst.append('-'+flag[1:])
+ else:
+ lst.append(flag)
+ self.env.append_value('MOC_FLAGS',lst)
+@extension(*EXT_QT5)
+def cxx_hook(self,node):
+ return self.create_compiled_task('qxx',node)
+class rcc(Task.Task):
+ color='BLUE'
+ run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
+ ext_out=['.h']
+ def rcname(self):
+ return os.path.splitext(self.inputs[0].name)[0]
+ def scan(self):
+ if not has_xml:
+ Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+ return([],[])
+ parser=make_parser()
+ curHandler=XMLHandler()
+ parser.setContentHandler(curHandler)
+ with open(self.inputs[0].abspath(),'r')as f:
+ parser.parse(f)
+ nodes=[]
+ names=[]
+ root=self.inputs[0].parent
+ for x in curHandler.files:
+ nd=root.find_resource(x)
+ if nd:
+ nodes.append(nd)
+ else:
+ names.append(x)
+ return(nodes,names)
+ def quote_flag(self,x):
+ return x
+class moc(Task.Task):
+ color='BLUE'
+ run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
+ def quote_flag(self,x):
+ return x
+class ui5(Task.Task):
+ color='BLUE'
+ run_str='${QT_UIC} ${SRC} -o ${TGT}'
+ ext_out=['.h']
+class ts2qm(Task.Task):
+ color='BLUE'
+ run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
+class qm2rcc(Task.Task):
+ color='BLUE'
+ after='ts2qm'
+ def run(self):
+ txt='\n'.join(['<file>%s</file>'%k.path_from(self.outputs[0].parent)for k in self.inputs])
+ code='<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>'%txt
+ self.outputs[0].write(code)
+def configure(self):
+ self.find_qt5_binaries()
+ self.set_qt5_libs_dir()
+ self.set_qt5_libs_to_check()
+ self.set_qt5_defines()
+ self.find_qt5_libraries()
+ self.add_qt5_rpath()
+ self.simplify_qt5_libs()
+ if not has_xml:
+ Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
+ if'COMPILER_CXX'not in self.env:
+ self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+ frag='#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
+ uses='QT5CORE QT5WIDGETS QT5GUI'
+ for flag in[[],'-fPIE','-fPIC','-std=c++11',['-std=c++11','-fPIE'],['-std=c++11','-fPIC']]:
+ msg='See if Qt files compile '
+ if flag:
+ msg+='with %s'%flag
+ try:
+ self.check(features='qt5 cxx',use=uses,uselib_store='qt5',cxxflags=flag,fragment=frag,msg=msg)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ break
+ else:
+ self.fatal('Could not build a simple Qt application')
+ if Utils.unversioned_sys_platform()=='freebsd':
+ frag='#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
+ try:
+ self.check(features='qt5 cxx cxxprogram',use=uses,fragment=frag,msg='Can we link Qt programs on FreeBSD directly?')
+ except self.errors.ConfigurationError:
+ self.check(features='qt5 cxx cxxprogram',use=uses,uselib_store='qt5',libpath='/usr/local/lib',fragment=frag,msg='Is /usr/local/lib required?')
+@conf
+def find_qt5_binaries(self):
+ env=self.env
+ opt=Options.options
+ qtdir=getattr(opt,'qtdir','')
+ qtbin=getattr(opt,'qtbin','')
+ paths=[]
+ if qtdir:
+ qtbin=os.path.join(qtdir,'bin')
+ if not qtdir:
+ qtdir=self.environ.get('QT5_ROOT','')
+ qtbin=self.environ.get('QT5_BIN')or os.path.join(qtdir,'bin')
+ if qtbin:
+ paths=[qtbin]
+ if not qtdir:
+ paths=self.environ.get('PATH','').split(os.pathsep)
+ paths.extend(['/usr/share/qt5/bin','/usr/local/lib/qt5/bin'])
+ try:
+ lst=Utils.listdir('/usr/local/Trolltech/')
+ except OSError:
+ pass
+ else:
+ if lst:
+ lst.sort()
+ lst.reverse()
+ qtdir='/usr/local/Trolltech/%s/'%lst[0]
+ qtbin=os.path.join(qtdir,'bin')
+ paths.append(qtbin)
+ cand=None
+ prev_ver=['5','0','0']
+ for qmk in('qmake-qt5','qmake5','qmake'):
+ try:
+ qmake=self.find_program(qmk,path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ try:
+ version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip()
+ except self.errors.WafError:
+ pass
+ else:
+ if version:
+ new_ver=version.split('.')
+ if new_ver>prev_ver:
+ cand=qmake
+ prev_ver=new_ver
+ if not cand:
+ try:
+ self.find_program('qtchooser')
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake']
+ try:
+ version=self.cmd_and_log(cmd+['-query','QT_VERSION'])
+ except self.errors.WafError:
+ pass
+ else:
+ cand=cmd
+ if cand:
+ self.env.QMAKE=cand
+ else:
+ self.fatal('Could not find qmake for qt5')
+ self.env.QT_HOST_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_HOST_BINS']).strip()
+ paths.insert(0,qtbin)
+ def find_bin(lst,var):
+ if var in env:
+ return
+ for f in lst:
+ try:
+ ret=self.find_program(f,path_list=paths)
+ except self.errors.ConfigurationError:
+ pass
+ else:
+ env[var]=ret
+ break
+ find_bin(['uic-qt5','uic'],'QT_UIC')
+ if not env.QT_UIC:
+ self.fatal('cannot find the uic compiler for qt5')
+ self.start_msg('Checking for uic version')
+ uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH)
+ uicver=''.join(uicver).strip()
+ uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','')
+ self.end_msg(uicver)
+ if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1:
+ self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
+ find_bin(['moc-qt5','moc'],'QT_MOC')
+ find_bin(['rcc-qt5','rcc'],'QT_RCC')
+ find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE')
+ find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE')
+ env.UIC_ST='%s -o %s'
+ env.MOC_ST='-o'
+ env.ui_PATTERN='ui_%s.h'
+ env.QT_LRELEASE_FLAGS=['-silent']
+ env.MOCCPPPATH_ST='-I%s'
+ env.MOCDEFINES_ST='-D%s'
+@conf
+def set_qt5_libs_dir(self):
+ env=self.env
+ qtlibs=getattr(Options.options,'qtlibs',None)or self.environ.get('QT5_LIBDIR')
+ if not qtlibs:
+ try:
+ qtlibs=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_LIBS']).strip()
+ except Errors.WafError:
+ qtdir=self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()
+ qtlibs=os.path.join(qtdir,'lib')
+ self.msg('Found the Qt5 libraries in',qtlibs)
+ env.QTLIBS=qtlibs
+@conf
+def find_single_qt5_lib(self,name,uselib,qtlibs,qtincludes,force_static):
+ env=self.env
+ if force_static:
+ exts=('.a','.lib')
+ prefix='STLIB'
+ else:
+ exts=('.so','.lib')
+ prefix='LIB'
+ def lib_names():
+ for x in exts:
+ for k in('','5')if Utils.is_win32 else['']:
+ for p in('lib',''):
+ yield(p,name,k,x)
+ for tup in lib_names():
+ k=''.join(tup)
+ path=os.path.join(qtlibs,k)
+ if os.path.exists(path):
+ if env.DEST_OS=='win32':
+ libval=''.join(tup[:-1])
+ else:
+ libval=name
+ env.append_unique(prefix+'_'+uselib,libval)
+ env.append_unique('%sPATH_%s'%(prefix,uselib),qtlibs)
+ env.append_unique('INCLUDES_'+uselib,qtincludes)
+ env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,name.replace('Qt5','Qt')))
+ return k
+ return False
+@conf
+def find_qt5_libraries(self):
+ env=self.env
+ qtincludes=self.environ.get('QT5_INCLUDES')or self.cmd_and_log(env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip()
+ force_static=self.environ.get('QT5_FORCE_STATIC')
+ try:
+ if self.environ.get('QT5_XCOMPILE'):
+ self.fatal('QT5_XCOMPILE disables pkg-config detection')
+ self.check_cfg(atleast_pkgconfig_version='0.1')
+ except self.errors.ConfigurationError:
+ for i in self.qt5_vars:
+ uselib=i.upper()
+ if Utils.unversioned_sys_platform()=='darwin':
+ fwk=i.replace('Qt5','Qt')
+ frameworkName=fwk+'.framework'
+ qtDynamicLib=os.path.join(env.QTLIBS,frameworkName,fwk)
+ if os.path.exists(qtDynamicLib):
+ env.append_unique('FRAMEWORK_'+uselib,fwk)
+ env.append_unique('FRAMEWORKPATH_'+uselib,env.QTLIBS)
+ self.msg('Checking for %s'%i,qtDynamicLib,'GREEN')
+ else:
+ self.msg('Checking for %s'%i,False,'YELLOW')
+ env.append_unique('INCLUDES_'+uselib,os.path.join(env.QTLIBS,frameworkName,'Headers'))
+ else:
+ ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,force_static)
+ if not force_static and not ret:
+ ret=self.find_single_qt5_lib(i,uselib,env.QTLIBS,qtincludes,True)
+ self.msg('Checking for %s'%i,ret,'GREEN'if ret else'YELLOW')
+ else:
+ path='%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(self.environ.get('PKG_CONFIG_PATH',''),env.QTLIBS,env.QTLIBS)
+ for i in self.qt5_vars:
+ self.check_cfg(package=i,args='--cflags --libs',mandatory=False,force_static=force_static,pkg_config_path=path)
+@conf
+def simplify_qt5_libs(self):
+ env=self.env
+ def process_lib(vars_,coreval):
+ for d in vars_:
+ var=d.upper()
+ if var=='QTCORE':
+ continue
+ value=env['LIBPATH_'+var]
+ if value:
+ core=env[coreval]
+ accu=[]
+ for lib in value:
+ if lib in core:
+ continue
+ accu.append(lib)
+ env['LIBPATH_'+var]=accu
+ process_lib(self.qt5_vars,'LIBPATH_QTCORE')
+@conf
+def add_qt5_rpath(self):
+ env=self.env
+ if getattr(Options.options,'want_rpath',False):
+ def process_rpath(vars_,coreval):
+ for d in vars_:
+ var=d.upper()
+ value=env['LIBPATH_'+var]
+ if value:
+ core=env[coreval]
+ accu=[]
+ for lib in value:
+ if var!='QTCORE':
+ if lib in core:
+ continue
+ accu.append('-Wl,--rpath='+lib)
+ env['RPATH_'+var]=accu
+ process_rpath(self.qt5_vars,'LIBPATH_QTCORE')
+@conf
+def set_qt5_libs_to_check(self):
+ self.qt5_vars=Utils.to_list(getattr(self,'qt5_vars',[]))
+ if not self.qt5_vars:
+ dirlst=Utils.listdir(self.env.QTLIBS)
+ pat=self.env.cxxshlib_PATTERN
+ if Utils.is_win32:
+ pat=pat.replace('.dll','.lib')
+ if self.environ.get('QT5_FORCE_STATIC'):
+ pat=self.env.cxxstlib_PATTERN
+ if Utils.unversioned_sys_platform()=='darwin':
+ pat="%s\.framework"
+ re_qt=re.compile(pat%'Qt5?(?P<name>.*)'+'$')
+ for x in dirlst:
+ m=re_qt.match(x)
+ if m:
+ self.qt5_vars.append("Qt5%s"%m.group('name'))
+ if not self.qt5_vars:
+ self.fatal('cannot find any Qt5 library (%r)'%self.env.QTLIBS)
+ qtextralibs=getattr(Options.options,'qtextralibs',None)
+ if qtextralibs:
+ self.qt5_vars.extend(qtextralibs.split(','))
+@conf
+def set_qt5_defines(self):
+ if sys.platform!='win32':
+ return
+ for x in self.qt5_vars:
+ y=x.replace('Qt5','Qt')[2:].upper()
+ self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y)
+def options(opt):
+ opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries')
+ for i in'qtdir qtbin qtlibs'.split():
+ opt.add_option('--'+i,type='string',default='',dest=i)
+ opt.add_option('--translate',action='store_true',help='collect translation strings',dest='trans_qt5',default=False)
+ opt.add_option('--qtextralibs',type='string',default='',dest='qtextralibs',help='additional qt libraries on the system to add to default ones, comma separated')
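A minimal Qt5 wscript using the tool above (a sketch; the source, form and resource names are invented):

def options(opt):
    opt.load('compiler_cxx qt5')

def configure(conf):
    conf.load('compiler_cxx qt5')

def build(bld):
    # headers listed in 'moc' are processed by moc; .ui and .qrc files are
    # picked up by the uic/rcc extension hooks defined in the tool
    bld(features='qt5 cxx cxxprogram',
        use='QT5CORE QT5GUI QT5WIDGETS',
        source='main.cpp mainwindow.cpp forms/mainwindow.ui res/app.qrc',
        moc='mainwindow.h',
        target='app')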
diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py
new file mode 100644
index 0000000..887234f
--- /dev/null
+++ b/waflib/Tools/ruby.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os
+from waflib import Errors,Options,Task,Utils
+from waflib.TaskGen import before_method,feature,extension
+from waflib.Configure import conf
+@feature('rubyext')
+@before_method('apply_incpaths','process_source','apply_bundle','apply_link')
+def init_rubyext(self):
+ self.install_path='${ARCHDIR_RUBY}'
+ self.uselib=self.to_list(getattr(self,'uselib',''))
+ if not'RUBY'in self.uselib:
+ self.uselib.append('RUBY')
+ if not'RUBYEXT'in self.uselib:
+ self.uselib.append('RUBYEXT')
+@feature('rubyext')
+@before_method('apply_link','propagate_uselib_vars')
+def apply_ruby_so_name(self):
+ self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.rubyext_PATTERN
+@conf
+def check_ruby_version(self,minver=()):
+ ruby=self.find_program('ruby',var='RUBY',value=Options.options.rubybinary)
+ try:
+ version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
+ except Errors.WafError:
+ self.fatal('could not determine ruby version')
+ self.env.RUBY_VERSION=version
+ try:
+ ver=tuple(map(int,version.split('.')))
+ except Errors.WafError:
+ self.fatal('unsupported ruby version %r'%version)
+ cver=''
+ if minver:
+ cver='> '+'.'.join(str(x)for x in minver)
+ if ver<minver:
+ self.fatal('ruby is too old %r'%ver)
+ self.msg('Checking for ruby version %s'%cver,version)
+@conf
+def check_ruby_ext_devel(self):
+ if not self.env.RUBY:
+ self.fatal('ruby detection is required first')
+ if not self.env.CC_NAME and not self.env.CXX_NAME:
+ self.fatal('load a c/c++ compiler first')
+ version=tuple(map(int,self.env.RUBY_VERSION.split(".")))
+ def read_out(cmd):
+ return Utils.to_list(self.cmd_and_log(self.env.RUBY+['-rrbconfig','-e',cmd]))
+ def read_config(key):
+ return read_out('puts RbConfig::CONFIG[%r]'%key)
+ cpppath=archdir=read_config('archdir')
+ if version>=(1,9,0):
+ ruby_hdrdir=read_config('rubyhdrdir')
+ cpppath+=ruby_hdrdir
+ if version>=(2,0,0):
+ cpppath+=read_config('rubyarchhdrdir')
+ cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])]
+ self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False)
+ self.env.LIBPATH_RUBYEXT=read_config('libdir')
+ self.env.LIBPATH_RUBYEXT+=archdir
+ self.env.INCLUDES_RUBYEXT=cpppath
+ self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS')
+ self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0]
+ flags=read_config('LDSHARED')
+ while flags and flags[0][0]!='-':
+ flags=flags[1:]
+ if len(flags)>1 and flags[1]=="ppc":
+ flags=flags[2:]
+ self.env.LINKFLAGS_RUBYEXT=flags
+ self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS')
+ self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED')
+ if Options.options.rubyarchdir:
+ self.env.ARCHDIR_RUBY=Options.options.rubyarchdir
+ else:
+ self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0]
+ if Options.options.rubylibdir:
+ self.env.LIBDIR_RUBY=Options.options.rubylibdir
+ else:
+ self.env.LIBDIR_RUBY=read_config('sitelibdir')[0]
+@conf
+def check_ruby_module(self,module_name):
+ self.start_msg('Ruby module %s'%module_name)
+ try:
+ self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name])
+ except Errors.WafError:
+ self.end_msg(False)
+ self.fatal('Could not find the ruby module %r'%module_name)
+ self.end_msg(True)
+@extension('.rb')
+def process(self,node):
+ return self.create_task('run_ruby',node)
+class run_ruby(Task.Task):
+ run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
+def options(opt):
+ opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files')
+ opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path')
+ opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary')
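A sketch of a wscript building a ruby C extension with the tool above (file names are placeholders):

def options(opt):
    opt.load('compiler_c ruby')

def configure(conf):
    conf.load('compiler_c ruby')
    conf.check_ruby_version((1, 9, 0))
    conf.check_ruby_ext_devel()

def build(bld):
    # the shared object takes the rubyext pattern and is installed to ${ARCHDIR_RUBY}
    bld(features='c cshlib rubyext', source='ext/mytest.c', target='mytest')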
diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py
new file mode 100644
index 0000000..676c884
--- /dev/null
+++ b/waflib/Tools/suncc.py
@@ -0,0 +1,48 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Errors
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_scc(conf):
+ v=conf.env
+ cc=conf.find_program('cc',var='CC')
+ try:
+ conf.cmd_and_log(cc+['-flags'])
+ except Errors.WafError:
+ conf.fatal('%r is not a Sun compiler'%cc)
+ v.CC_NAME='sun'
+ conf.get_suncc_version(cc)
+@conf
+def scc_common_flags(conf):
+ v=conf.env
+ v.CC_SRC_F=[]
+ v.CC_TGT_F=['-c','-o','']
+ if not v.LINK_CC:
+ v.LINK_CC=v.CC
+ v.CCLNK_SRC_F=''
+ v.CCLNK_TGT_F=['-o','']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.SONAME_ST='-Wl,-h,%s'
+ v.SHLIB_MARKER='-Bdynamic'
+ v.STLIB_MARKER='-Bstatic'
+ v.cprogram_PATTERN='%s'
+ v.CFLAGS_cshlib=['-xcode=pic32','-DPIC']
+ v.LINKFLAGS_cshlib=['-G']
+ v.cshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cstlib=['-Bstatic']
+ v.cstlib_PATTERN='lib%s.a'
+def configure(conf):
+ conf.find_scc()
+ conf.find_ar()
+ conf.scc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py
new file mode 100644
index 0000000..0047098
--- /dev/null
+++ b/waflib/Tools/suncxx.py
@@ -0,0 +1,48 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+from waflib import Errors
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_sxx(conf):
+ v=conf.env
+ cc=conf.find_program(['CC','c++'],var='CXX')
+ try:
+ conf.cmd_and_log(cc+['-flags'])
+ except Errors.WafError:
+ conf.fatal('%r is not a Sun compiler'%cc)
+ v.CXX_NAME='sun'
+ conf.get_suncc_version(cc)
+@conf
+def sxx_common_flags(conf):
+ v=conf.env
+ v.CXX_SRC_F=[]
+ v.CXX_TGT_F=['-c','-o','']
+ if not v.LINK_CXX:
+ v.LINK_CXX=v.CXX
+ v.CXXLNK_SRC_F=[]
+ v.CXXLNK_TGT_F=['-o','']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.SONAME_ST='-Wl,-h,%s'
+ v.SHLIB_MARKER='-Bdynamic'
+ v.STLIB_MARKER='-Bstatic'
+ v.cxxprogram_PATTERN='%s'
+ v.CXXFLAGS_cxxshlib=['-xcode=pic32','-DPIC']
+ v.LINKFLAGS_cxxshlib=['-G']
+ v.cxxshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cxxstlib=['-Bstatic']
+ v.cxxstlib_PATTERN='lib%s.a'
+def configure(conf):
+ conf.find_sxx()
+ conf.find_ar()
+ conf.sxx_common_flags()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
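The two Sun compiler tools are usually reached indirectly through the compiler_c/compiler_cxx selectors rather than loaded by name; either form works (a sketch):

def configure(conf):
    conf.load('compiler_c')      # tries gcc, suncc, ... depending on the platform
    # or force the Sun Studio compilers explicitly:
    # conf.load('suncc')
    # conf.load('suncxx')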
diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py
new file mode 100644
index 0000000..3a208d8
--- /dev/null
+++ b/waflib/Tools/tex.py
@@ -0,0 +1,327 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import os,re
+from waflib import Utils,Task,Errors,Logs,Node
+from waflib.TaskGen import feature,before_method
+re_bibunit=re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
+def bibunitscan(self):
+ node=self.inputs[0]
+ nodes=[]
+ if not node:
+ return nodes
+ code=node.read()
+ for match in re_bibunit.finditer(code):
+ path=match.group('file')
+ if path:
+ found=None
+ for k in('','.bib'):
+ Logs.debug('tex: trying %s%s',path,k)
+ fi=node.parent.find_resource(path+k)
+ if fi:
+ found=True
+ nodes.append(fi)
+ if not found:
+ Logs.debug('tex: could not find %s',path)
+ Logs.debug('tex: found the following bibunit files: %s',nodes)
+ return nodes
+exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty']
+exts_tex=['.ltx','.tex']
+re_tex=re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
+g_bibtex_re=re.compile('bibdata',re.M)
+g_glossaries_re=re.compile('\\@newglossary',re.M)
+class tex(Task.Task):
+ bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False)
+ bibtex_fun.__doc__="""
+ Execute the program **bibtex**
+ """
+ makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False)
+ makeindex_fun.__doc__="""
+ Execute the program **makeindex**
+ """
+ makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False)
+ makeglossaries_fun.__doc__="""
+ Execute the program **makeglossaries**
+ """
+ def exec_command(self,cmd,**kw):
+ if self.env.PROMPT_LATEX:
+ kw['stdout']=kw['stderr']=None
+ return super(tex,self).exec_command(cmd,**kw)
+ def scan_aux(self,node):
+ nodes=[node]
+ re_aux=re.compile(r'\\@input{(?P<file>[^{}]*)}',re.M)
+ def parse_node(node):
+ code=node.read()
+ for match in re_aux.finditer(code):
+ path=match.group('file')
+ found=node.parent.find_or_declare(path)
+ if found and found not in nodes:
+ Logs.debug('tex: found aux node %r',found)
+ nodes.append(found)
+ parse_node(found)
+ parse_node(node)
+ return nodes
+ def scan(self):
+ node=self.inputs[0]
+ nodes=[]
+ names=[]
+ seen=[]
+ if not node:
+ return(nodes,names)
+ def parse_node(node):
+ if node in seen:
+ return
+ seen.append(node)
+ code=node.read()
+ for match in re_tex.finditer(code):
+ multibib=match.group('type')
+ if multibib and multibib.startswith('bibliography'):
+ multibib=multibib[len('bibliography'):]
+ if multibib.startswith('style'):
+ continue
+ else:
+ multibib=None
+ for path in match.group('file').split(','):
+ if path:
+ add_name=True
+ found=None
+ for k in exts_deps_tex:
+ for up in self.texinputs_nodes:
+ Logs.debug('tex: trying %s%s',path,k)
+ found=up.find_resource(path+k)
+ if found:
+ break
+ for tsk in self.generator.tasks:
+ if not found or found in tsk.outputs:
+ break
+ else:
+ nodes.append(found)
+ add_name=False
+ for ext in exts_tex:
+ if found.name.endswith(ext):
+ parse_node(found)
+ break
+ if found and multibib and found.name.endswith('.bib'):
+ try:
+ self.multibibs.append(found)
+ except AttributeError:
+ self.multibibs=[found]
+ if add_name:
+ names.append(path)
+ parse_node(node)
+ for x in nodes:
+ x.parent.get_bld().mkdir()
+ Logs.debug("tex: found the following : %s and names %s",nodes,names)
+ return(nodes,names)
+ def check_status(self,msg,retcode):
+ if retcode!=0:
+ raise Errors.WafError('%r command exit status %r'%(msg,retcode))
+ def info(self,*k,**kw):
+ try:
+ info=self.generator.bld.conf.logger.info
+ except AttributeError:
+ info=Logs.info
+ info(*k,**kw)
+ def bibfile(self):
+ for aux_node in self.aux_nodes:
+ try:
+ ct=aux_node.read()
+ except EnvironmentError:
+ Logs.error('Error reading %s',aux_node.abspath())
+ continue
+ if g_bibtex_re.findall(ct):
+ self.info('calling bibtex')
+ self.env.env={}
+ self.env.env.update(os.environ)
+ self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
+ self.env.SRCFILE=aux_node.name[:-4]
+ self.check_status('error when calling bibtex',self.bibtex_fun())
+ for node in getattr(self,'multibibs',[]):
+ self.env.env={}
+ self.env.env.update(os.environ)
+ self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()})
+ self.env.SRCFILE=node.name[:-4]
+ self.check_status('error when calling bibtex',self.bibtex_fun())
+ def bibunits(self):
+ try:
+ bibunits=bibunitscan(self)
+ except OSError:
+ Logs.error('error bibunitscan')
+ else:
+ if bibunits:
+ fn=['bu'+str(i)for i in range(1,len(bibunits)+1)]
+ if fn:
+ self.info('calling bibtex on bibunits')
+ for f in fn:
+ self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}
+ self.env.SRCFILE=f
+ self.check_status('error when calling bibtex',self.bibtex_fun())
+ def makeindex(self):
+ self.idx_node=self.inputs[0].change_ext('.idx')
+ try:
+ idx_path=self.idx_node.abspath()
+ os.stat(idx_path)
+ except OSError:
+ self.info('index file %s absent, not calling makeindex',idx_path)
+ else:
+ self.info('calling makeindex')
+ self.env.SRCFILE=self.idx_node.name
+ self.env.env={}
+ self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun())
+ def bibtopic(self):
+ p=self.inputs[0].parent.get_bld()
+ if os.path.exists(os.path.join(p.abspath(),'btaux.aux')):
+ self.aux_nodes+=p.ant_glob('*[0-9].aux')
+ def makeglossaries(self):
+ src_file=self.inputs[0].abspath()
+ base_file=os.path.basename(src_file)
+ base,_=os.path.splitext(base_file)
+ for aux_node in self.aux_nodes:
+ try:
+ ct=aux_node.read()
+ except EnvironmentError:
+ Logs.error('Error reading %s',aux_node.abspath())
+ continue
+ if g_glossaries_re.findall(ct):
+ if not self.env.MAKEGLOSSARIES:
+ raise Errors.WafError("The program 'makeglossaries' is missing!")
+ Logs.warn('calling makeglossaries')
+ self.env.SRCFILE=base
+ self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun())
+ return
+ def texinputs(self):
+ return os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep
+ def run(self):
+ env=self.env
+ if not env.PROMPT_LATEX:
+ env.append_value('LATEXFLAGS','-interaction=batchmode')
+ env.append_value('PDFLATEXFLAGS','-interaction=batchmode')
+ env.append_value('XELATEXFLAGS','-interaction=batchmode')
+ self.cwd=self.inputs[0].parent.get_bld()
+ self.info('first pass on %s',self.__class__.__name__)
+ cur_hash=self.hash_aux_nodes()
+ self.call_latex()
+ self.hash_aux_nodes()
+ self.bibtopic()
+ self.bibfile()
+ self.bibunits()
+ self.makeindex()
+ self.makeglossaries()
+ for i in range(10):
+ prev_hash=cur_hash
+ cur_hash=self.hash_aux_nodes()
+ if not cur_hash:
+ Logs.error('No .aux files to process')
+ if cur_hash and cur_hash==prev_hash:
+ break
+ self.info('calling %s',self.__class__.__name__)
+ self.call_latex()
+ def hash_aux_nodes(self):
+ try:
+ self.aux_nodes
+ except AttributeError:
+ try:
+ self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux'))
+ except IOError:
+ return None
+ return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes])
+ def call_latex(self):
+ self.env.env={}
+ self.env.env.update(os.environ)
+ self.env.env.update({'TEXINPUTS':self.texinputs()})
+ self.env.SRCFILE=self.inputs[0].abspath()
+ self.check_status('error when calling latex',self.texfun())
+class latex(tex):
+ texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False)
+class pdflatex(tex):
+ texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False)
+class xelatex(tex):
+ texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False)
+class dvips(Task.Task):
+ run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
+ color='BLUE'
+ after=['latex','pdflatex','xelatex']
+class dvipdf(Task.Task):
+ run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
+ color='BLUE'
+ after=['latex','pdflatex','xelatex']
+class pdf2ps(Task.Task):
+ run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
+ color='BLUE'
+ after=['latex','pdflatex','xelatex']
+@feature('tex')
+@before_method('process_source')
+def apply_tex(self):
+ if not getattr(self,'type',None)in('latex','pdflatex','xelatex'):
+ self.type='pdflatex'
+ outs=Utils.to_list(getattr(self,'outs',[]))
+ try:
+ self.generator.bld.conf
+ except AttributeError:
+ default_prompt=False
+ else:
+ default_prompt=True
+ self.env.PROMPT_LATEX=getattr(self,'prompt',default_prompt)
+ deps_lst=[]
+ if getattr(self,'deps',None):
+ deps=self.to_list(self.deps)
+ for dep in deps:
+ if isinstance(dep,str):
+ n=self.path.find_resource(dep)
+ if not n:
+ self.bld.fatal('Could not find %r for %r'%(dep,self))
+ if not n in deps_lst:
+ deps_lst.append(n)
+ elif isinstance(dep,Node.Node):
+ deps_lst.append(dep)
+ for node in self.to_nodes(self.source):
+ if self.type=='latex':
+ task=self.create_task('latex',node,node.change_ext('.dvi'))
+ elif self.type=='pdflatex':
+ task=self.create_task('pdflatex',node,node.change_ext('.pdf'))
+ elif self.type=='xelatex':
+ task=self.create_task('xelatex',node,node.change_ext('.pdf'))
+ task.env=self.env
+ if deps_lst:
+ for n in deps_lst:
+ if not n in task.dep_nodes:
+ task.dep_nodes.append(n)
+ if hasattr(self,'texinputs_nodes'):
+ task.texinputs_nodes=self.texinputs_nodes
+ else:
+ task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()]
+ lst=os.environ.get('TEXINPUTS','')
+ if self.env.TEXINPUTS:
+ lst+=os.pathsep+self.env.TEXINPUTS
+ if lst:
+ lst=lst.split(os.pathsep)
+ for x in lst:
+ if x:
+ if os.path.isabs(x):
+ p=self.bld.root.find_node(x)
+ if p:
+ task.texinputs_nodes.append(p)
+ else:
+ Logs.error('Invalid TEXINPUTS folder %s',x)
+ else:
+ Logs.error('Cannot resolve relative paths in TEXINPUTS %s',x)
+ if self.type=='latex':
+ if'ps'in outs:
+ tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps'))
+ tsk.env.env=dict(os.environ)
+ if'pdf'in outs:
+ tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf'))
+ tsk.env.env=dict(os.environ)
+ elif self.type=='pdflatex':
+ if'ps'in outs:
+ self.create_task('pdf2ps',task.outputs,node.change_ext('.ps'))
+ self.source=[]
+def configure(self):
+ v=self.env
+ for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
+ try:
+ self.find_program(p,var=p.upper())
+ except self.errors.ConfigurationError:
+ pass
+ v.DVIPSFLAGS='-Ppdf'
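The tex tool above is used through the 'tex' feature; a minimal sketch (document names are illustrative):

def configure(conf):
    conf.load('tex')
    if not conf.env.PDFLATEX:
        conf.fatal('pdflatex is required for this project')

def build(bld):
    # 'type' is one of latex/pdflatex/xelatex; 'outs' requests extra conversions
    bld(features='tex', type='pdflatex', source='doc/report.tex', prompt=0)
    bld(features='tex', type='latex', source='doc/slides.ltx', outs='ps pdf')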
diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py
new file mode 100644
index 0000000..2f5a30d
--- /dev/null
+++ b/waflib/Tools/vala.py
@@ -0,0 +1,218 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
+import re
+from waflib import Build,Context,Errors,Logs,Node,Options,Task,Utils
+from waflib.TaskGen import extension,taskgen_method
+from waflib.Configure import conf
+class valac(Task.Task):
+ vars=["VALAC","VALAC_VERSION","VALAFLAGS"]
+ ext_out=['.h']
+ def run(self):
+ cmd=self.env.VALAC+self.env.VALAFLAGS
+ resources=getattr(self,'vala_exclude',[])
+ cmd.extend([a.abspath()for a in self.inputs if a not in resources])
+ ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath())
+ if ret:
+ return ret
+ if self.generator.dump_deps_node:
+ self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
+ return ret
+@taskgen_method
+def init_vala_task(self):
+ self.profile=getattr(self,'profile','gobject')
+ self.packages=packages=Utils.to_list(getattr(self,'packages',[]))
+ self.use=Utils.to_list(getattr(self,'use',[]))
+ if packages and not self.use:
+ self.use=packages[:]
+ if self.profile=='gobject':
+ if not'GOBJECT'in self.use:
+ self.use.append('GOBJECT')
+ def addflags(flags):
+ self.env.append_value('VALAFLAGS',flags)
+ if self.profile:
+ addflags('--profile=%s'%self.profile)
+ valatask=self.valatask
+ if hasattr(self,'vala_dir'):
+ if isinstance(self.vala_dir,str):
+ valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir)
+ try:
+ valatask.vala_dir_node.mkdir()
+ except OSError:
+ raise self.bld.fatal('Cannot create the vala dir %r'%valatask.vala_dir_node)
+ else:
+ valatask.vala_dir_node=self.vala_dir
+ else:
+ valatask.vala_dir_node=self.path.get_bld()
+ addflags('--directory=%s'%valatask.vala_dir_node.abspath())
+ if hasattr(self,'thread'):
+ if self.profile=='gobject':
+ if not'GTHREAD'in self.use:
+ self.use.append('GTHREAD')
+ else:
+ Logs.warn('Profile %s means no threading support',self.profile)
+ self.thread=False
+ if self.thread:
+ addflags('--thread')
+ self.is_lib='cprogram'not in self.features
+ if self.is_lib:
+ addflags('--library=%s'%self.target)
+ h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target)
+ valatask.outputs.append(h_node)
+ addflags('--header=%s'%h_node.name)
+ valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target))
+ if getattr(self,'gir',None):
+ gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir)
+ addflags('--gir=%s'%gir_node.name)
+ valatask.outputs.append(gir_node)
+ self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None))
+ if self.vala_target_glib:
+ addflags('--target-glib=%s'%self.vala_target_glib)
+ addflags(['--define=%s'%x for x in Utils.to_list(getattr(self,'vala_defines',[]))])
+ packages_private=Utils.to_list(getattr(self,'packages_private',[]))
+ addflags(['--pkg=%s'%x for x in packages_private])
+ def _get_api_version():
+ api_version='1.0'
+ if hasattr(Context.g_module,'API_VERSION'):
+ version=Context.g_module.API_VERSION.split(".")
+ if version[0]=="0":
+ api_version="0."+version[1]
+ else:
+ api_version=version[0]+".0"
+ return api_version
+ self.includes=Utils.to_list(getattr(self,'includes',[]))
+ valatask.install_path=getattr(self,'install_path','')
+ valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi')
+ valatask.pkg_name=getattr(self,'pkg_name',self.env.PACKAGE)
+ valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version()))
+ valatask.install_binding=getattr(self,'install_binding',True)
+ self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[]))
+ if hasattr(self,'use'):
+ local_packages=Utils.to_list(self.use)[:]
+ seen=[]
+ while len(local_packages)>0:
+ package=local_packages.pop()
+ if package in seen:
+ continue
+ seen.append(package)
+ try:
+ package_obj=self.bld.get_tgen_by_name(package)
+ except Errors.WafError:
+ continue
+ package_obj.post()
+ package_name=package_obj.target
+ task=getattr(package_obj,'valatask',None)
+ if task:
+ for output in task.outputs:
+ if output.name==package_name+".vapi":
+ valatask.set_run_after(task)
+ if package_name not in packages:
+ packages.append(package_name)
+ if output.parent not in vapi_dirs:
+ vapi_dirs.append(output.parent)
+ if output.parent not in self.includes:
+ self.includes.append(output.parent)
+ if hasattr(package_obj,'use'):
+ lst=self.to_list(package_obj.use)
+ lst.reverse()
+ local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages
+ addflags(['--pkg=%s'%p for p in packages])
+ for vapi_dir in vapi_dirs:
+ if isinstance(vapi_dir,Node.Node):
+ v_node=vapi_dir
+ else:
+ v_node=self.path.find_dir(vapi_dir)
+ if not v_node:
+ Logs.warn('Unable to locate Vala API directory: %r',vapi_dir)
+ else:
+ addflags('--vapidir=%s'%v_node.abspath())
+ self.dump_deps_node=None
+ if self.is_lib and self.packages:
+ self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target)
+ valatask.outputs.append(self.dump_deps_node)
+ if self.is_lib and valatask.install_binding:
+ headers_list=[o for o in valatask.outputs if o.suffix()==".h"]
+ if headers_list:
+ self.install_vheader=self.add_install_files(install_to=valatask.header_path,install_from=headers_list)
+ vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))]
+ if vapi_list:
+ self.install_vapi=self.add_install_files(install_to=valatask.vapi_path,install_from=vapi_list)
+ gir_list=[o for o in valatask.outputs if o.suffix()=='.gir']
+ if gir_list:
+ self.install_gir=self.add_install_files(install_to=getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),install_from=gir_list)
+ if hasattr(self,'vala_resources'):
+ nodes=self.to_nodes(self.vala_resources)
+ valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes
+ valatask.inputs.extend(nodes)
+ for x in nodes:
+ addflags(['--gresources',x.abspath()])
+@extension('.vala','.gs')
+def vala_file(self,node):
+ try:
+ valatask=self.valatask
+ except AttributeError:
+ valatask=self.valatask=self.create_task('valac')
+ self.init_vala_task()
+ valatask.inputs.append(node)
+ name=node.name[:node.name.rfind('.')]+'.c'
+ c_node=valatask.vala_dir_node.find_or_declare(name)
+ valatask.outputs.append(c_node)
+ self.source.append(c_node)
+@extension('.vapi')
+def vapi_file(self,node):
+ try:
+ valatask=self.valatask
+ except AttributeError:
+ valatask=self.valatask=self.create_task('valac')
+ self.init_vala_task()
+ valatask.inputs.append(node)
+@conf
+def find_valac(self,valac_name,min_version):
+ valac=self.find_program(valac_name,var='VALAC')
+ try:
+ output=self.cmd_and_log(valac+['--version'])
+ except Errors.WafError:
+ valac_version=None
+ else:
+ ver=re.search(r'\d+.\d+.\d+',output).group().split('.')
+ valac_version=tuple([int(x)for x in ver])
+ self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version)
+ if valac and valac_version<min_version:
+ self.fatal("%s version %r is too old, need >= %r"%(valac_name,valac_version,min_version))
+ self.env.VALAC_VERSION=valac_version
+ return valac
+@conf
+def check_vala(self,min_version=(0,8,0),branch=None):
+ if self.env.VALA_MINVER:
+ min_version=self.env.VALA_MINVER
+ if self.env.VALA_MINVER_BRANCH:
+ branch=self.env.VALA_MINVER_BRANCH
+ if not branch:
+ branch=min_version[:2]
+ try:
+ find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version)
+ except self.errors.ConfigurationError:
+ find_valac(self,'valac',min_version)
+@conf
+def check_vala_deps(self):
+ if not self.env.HAVE_GOBJECT:
+ pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'}
+ if getattr(Options.options,'vala_target_glib',None):
+ pkg_args['atleast_version']=Options.options.vala_target_glib
+ self.check_cfg(**pkg_args)
+ if not self.env.HAVE_GTHREAD:
+ pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'}
+ if getattr(Options.options,'vala_target_glib',None):
+ pkg_args['atleast_version']=Options.options.vala_target_glib
+ self.check_cfg(**pkg_args)
+def configure(self):
+ self.load('gnu_dirs')
+ self.check_vala_deps()
+ self.check_vala()
+ self.add_os_flags('VALAFLAGS')
+ self.env.append_unique('VALAFLAGS',['-C'])
+def options(opt):
+ opt.load('gnu_dirs')
+ valaopts=opt.add_option_group('Vala Compiler Options')
+ valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation')
diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py
new file mode 100644
index 0000000..af07b44
--- /dev/null
+++ b/waflib/Tools/waf_unit_test.py
@@ -0,0 +1,172 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
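+# Unit-test support: task generators with the 'test' feature run their link
+# output as a 'utest' task after the build, and 'test_scripts' does the same
+# for interpreted tests. Results are collected in bld.utest_results and
+# reported by the summary()/set_exit_code() post-build callbacks defined below.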
+import os,shlex,sys
+from waflib.TaskGen import feature,after_method,taskgen_method
+from waflib import Utils,Task,Logs,Options
+from waflib.Tools import ccroot
+testlock=Utils.threading.Lock()
+SCRIPT_TEMPLATE="""#! %(python)s
+import subprocess, sys
+cmd = %(cmd)r
+# if you want to debug with gdb:
+#cmd = ['gdb', '-args'] + cmd
+env = %(env)r
+status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
+sys.exit(status)
+"""
+@taskgen_method
+def handle_ut_cwd(self,key):
+ cwd=getattr(self,key,None)
+ if cwd:
+ if isinstance(cwd,str):
+ if os.path.isabs(cwd):
+ self.ut_cwd=self.bld.root.make_node(cwd)
+ else:
+ self.ut_cwd=self.path.make_node(cwd)
+@feature('test_scripts')
+def make_interpreted_test(self):
+ for x in['test_scripts_source','test_scripts_template']:
+ if not hasattr(self,x):
+   Logs.warn('a test_scripts taskgen is missing %s'%x)
+ return
+ self.ut_run,lst=Task.compile_fun(self.test_scripts_template,shell=getattr(self,'test_scripts_shell',False))
+ script_nodes=self.to_nodes(self.test_scripts_source)
+ for script_node in script_nodes:
+ tsk=self.create_task('utest',[script_node])
+ tsk.vars=lst+tsk.vars
+ tsk.env['SCRIPT']=script_node.path_from(tsk.get_cwd())
+ self.handle_ut_cwd('test_scripts_cwd')
+ env=getattr(self,'test_scripts_env',None)
+ if env:
+ self.ut_env=env
+ else:
+ self.ut_env=dict(os.environ)
+ paths=getattr(self,'test_scripts_paths',{})
+ for(k,v)in paths.items():
+ p=self.ut_env.get(k,'').split(os.pathsep)
+ if isinstance(v,str):
+ v=v.split(os.pathsep)
+ self.ut_env[k]=os.pathsep.join(p+v)
+@feature('test')
+@after_method('apply_link','process_use')
+def make_test(self):
+ if not getattr(self,'link_task',None):
+ return
+ tsk=self.create_task('utest',self.link_task.outputs)
+ if getattr(self,'ut_str',None):
+ self.ut_run,lst=Task.compile_fun(self.ut_str,shell=getattr(self,'ut_shell',False))
+ tsk.vars=lst+tsk.vars
+ self.handle_ut_cwd('ut_cwd')
+ if not hasattr(self,'ut_paths'):
+ paths=[]
+ for x in self.tmp_use_sorted:
+ try:
+ y=self.bld.get_tgen_by_name(x).link_task
+ except AttributeError:
+ pass
+ else:
+ if not isinstance(y,ccroot.stlink_task):
+ paths.append(y.outputs[0].parent.abspath())
+ self.ut_paths=os.pathsep.join(paths)+os.pathsep
+ if not hasattr(self,'ut_env'):
+ self.ut_env=dct=dict(os.environ)
+ def add_path(var):
+ dct[var]=self.ut_paths+dct.get(var,'')
+ if Utils.is_win32:
+ add_path('PATH')
+ elif Utils.unversioned_sys_platform()=='darwin':
+ add_path('DYLD_LIBRARY_PATH')
+ add_path('LD_LIBRARY_PATH')
+ else:
+ add_path('LD_LIBRARY_PATH')
+ if not hasattr(self,'ut_cmd'):
+ self.ut_cmd=getattr(Options.options,'testcmd',False)
+@taskgen_method
+def add_test_results(self,tup):
+ Logs.debug("ut: %r",tup)
+ try:
+ self.utest_results.append(tup)
+ except AttributeError:
+ self.utest_results=[tup]
+ try:
+ self.bld.utest_results.append(tup)
+ except AttributeError:
+ self.bld.utest_results=[tup]
+@Task.deep_inputs
+class utest(Task.Task):
+ color='PINK'
+ after=['vnum','inst']
+ vars=[]
+ def runnable_status(self):
+ if getattr(Options.options,'no_tests',False):
+ return Task.SKIP_ME
+ ret=super(utest,self).runnable_status()
+ if ret==Task.SKIP_ME:
+ if getattr(Options.options,'all_tests',False):
+ return Task.RUN_ME
+ return ret
+ def get_test_env(self):
+ return self.generator.ut_env
+ def post_run(self):
+ super(utest,self).post_run()
+ if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]:
+ self.generator.bld.task_sigs[self.uid()]=None
+ def run(self):
+ if hasattr(self.generator,'ut_run'):
+ return self.generator.ut_run(self)
+ self.ut_exec=getattr(self.generator,'ut_exec',[self.inputs[0].abspath()])
+ ut_cmd=getattr(self.generator,'ut_cmd',False)
+ if ut_cmd:
+ self.ut_exec=shlex.split(ut_cmd%' '.join(self.ut_exec))
+ return self.exec_command(self.ut_exec)
+ def exec_command(self,cmd,**kw):
+ Logs.debug('runner: %r',cmd)
+ if getattr(Options.options,'dump_test_scripts',False):
+ script_code=SCRIPT_TEMPLATE%{'python':sys.executable,'env':self.get_test_env(),'cwd':self.get_cwd().abspath(),'cmd':cmd}
+ script_file=self.inputs[0].abspath()+'_run.py'
+ Utils.writef(script_file,script_code)
+ os.chmod(script_file,Utils.O755)
+ if Logs.verbose>1:
+ Logs.info('Test debug file written as %r'%script_file)
+ proc=Utils.subprocess.Popen(cmd,cwd=self.get_cwd().abspath(),env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,shell=isinstance(cmd,str))
+ (stdout,stderr)=proc.communicate()
+ self.waf_unit_test_results=tup=(self.inputs[0].abspath(),proc.returncode,stdout,stderr)
+ testlock.acquire()
+ try:
+ return self.generator.add_test_results(tup)
+ finally:
+ testlock.release()
+ def get_cwd(self):
+ return getattr(self.generator,'ut_cwd',self.inputs[0].parent)
+def summary(bld):
+ lst=getattr(bld,'utest_results',[])
+ if lst:
+ Logs.pprint('CYAN','execution summary')
+ total=len(lst)
+ tfail=len([x for x in lst if x[1]])
+ Logs.pprint('GREEN',' tests that pass %d/%d'%(total-tfail,total))
+ for(f,code,out,err)in lst:
+ if not code:
+ Logs.pprint('GREEN',' %s'%f)
+ Logs.pprint('GREEN'if tfail==0 else'RED',' tests that fail %d/%d'%(tfail,total))
+ for(f,code,out,err)in lst:
+ if code:
+ Logs.pprint('RED',' %s'%f)
+def set_exit_code(bld):
+ lst=getattr(bld,'utest_results',[])
+ for(f,code,out,err)in lst:
+ if code:
+ msg=[]
+ if out:
+ msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8')))
+ if err:
+ msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8')))
+ bld.fatal(os.linesep.join(msg))
+def options(opt):
+ opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests')
+ opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests')
+ opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests')
+ opt.add_option('--testcmd',action='store',default=False,dest='testcmd',help='Run the unit tests using the given command template, for example --testcmd="valgrind --error-exitcode=1 %s" to run them under valgrind')
+ opt.add_option('--dump-test-scripts',action='store_true',default=False,help='Create python scripts to help debug tests',dest='dump_test_scripts')
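+# A minimal wscript sketch using this tool (the names 'test_foo' and 'mylib'
+# are placeholders, not part of waf itself):
+#
+#   def options(opt):
+#       opt.load('waf_unit_test')
+#   def configure(conf):
+#       conf.load('waf_unit_test')
+#   def build(bld):
+#       bld.program(features='test', source='test_foo.c', target='test_foo', use='mylib')
+#       from waflib.Tools import waf_unit_test
+#       bld.add_post_fun(waf_unit_test.summary)
+#
+# Running "waf build --alltests" forces all tests to run again; "--notests" skips them.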
diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py
new file mode 100644
index 0000000..ecb362b
--- /dev/null
+++ b/waflib/Tools/winres.py
@@ -0,0 +1,52 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
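+# Windows resource files: the '.rc' extension hook below compiles resources
+# with windres (MinGW) or RC.EXE (MSVC), and rc_parser reuses the C
+# preprocessor scanner to track #include/ICON/BITMAP/... dependencies.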
+import re
+from waflib import Task
+from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+@extension('.rc')
+def rc_file(self,node):
+ obj_ext='.rc.o'
+ if self.env.WINRC_TGT_F=='/fo':
+ obj_ext='.res'
+ rctask=self.create_task('winrc',node,node.change_ext(obj_ext))
+ try:
+ self.compiled_tasks.append(rctask)
+ except AttributeError:
+ self.compiled_tasks=[rctask]
+re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE)
+class rc_parser(c_preproc.c_parser):
+ def filter_comments(self,node):
+ code=node.read()
+ if c_preproc.use_trigraphs:
+ for(a,b)in c_preproc.trig_def:
+ code=code.split(a).join(b)
+ code=c_preproc.re_nl.sub('',code)
+ code=c_preproc.re_cpp.sub(c_preproc.repl,code)
+ ret=[]
+ for m in re.finditer(re_lines,code):
+ if m.group(2):
+ ret.append((m.group(2),m.group(3)))
+ else:
+ ret.append(('include',m.group(5)))
+ return ret
+class winrc(Task.Task):
+ run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
+ color='BLUE'
+ def scan(self):
+ tmp=rc_parser(self.generator.includes_nodes)
+ tmp.start(self.inputs[0],self.env)
+ return(tmp.nodes,tmp.names)
+def configure(conf):
+ v=conf.env
+ if not v.WINRC:
+ if v.CC_NAME=='msvc':
+ conf.find_program('RC',var='WINRC',path_list=v.PATH)
+ v.WINRC_TGT_F='/fo'
+ v.WINRC_SRC_F=''
+ else:
+ conf.find_program('windres',var='WINRC',path_list=v.PATH)
+ v.WINRC_TGT_F='-o'
+ v.WINRC_SRC_F='-i'
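+# A minimal usage sketch (file names are placeholders): listing a resource
+# script among the sources is enough, the extension hook creates the winrc task:
+#
+#   bld.program(source='main.c app.rc', target='app')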
diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py
new file mode 100644
index 0000000..a86010d
--- /dev/null
+++ b/waflib/Tools/xlc.py
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
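+# IBM XL C support: find xlc_r/xlc, then set the compile/link flag patterns
+# (AIX-style -brtl runtime linking, lib%s.a / lib%s.so naming).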
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_xlc(conf):
+ cc=conf.find_program(['xlc_r','xlc'],var='CC')
+ conf.get_xlc_version(cc)
+ conf.env.CC_NAME='xlc'
+@conf
+def xlc_common_flags(conf):
+ v=conf.env
+ v.CC_SRC_F=[]
+ v.CC_TGT_F=['-c','-o']
+ if not v.LINK_CC:
+ v.LINK_CC=v.CC
+ v.CCLNK_SRC_F=[]
+ v.CCLNK_TGT_F=['-o']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.RPATH_ST='-Wl,-rpath,%s'
+ v.SONAME_ST=[]
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.LINKFLAGS_cprogram=['-Wl,-brtl']
+ v.cprogram_PATTERN='%s'
+ v.CFLAGS_cshlib=['-fPIC']
+ v.LINKFLAGS_cshlib=['-G','-Wl,-brtl,-bexpfull']
+ v.cshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cstlib=[]
+ v.cstlib_PATTERN='lib%s.a'
+def configure(conf):
+ conf.find_xlc()
+ conf.find_ar()
+ conf.xlc_common_flags()
+ conf.cc_load_tools()
+ conf.cc_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py
new file mode 100644
index 0000000..8a081b6
--- /dev/null
+++ b/waflib/Tools/xlcxx.py
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
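+# IBM XL C++ support: the C++ counterpart of xlc.py (xlc++_r/xlc++ with the
+# same AIX-style link flags).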
+from waflib.Tools import ccroot,ar
+from waflib.Configure import conf
+@conf
+def find_xlcxx(conf):
+ cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX')
+ conf.get_xlc_version(cxx)
+ conf.env.CXX_NAME='xlc++'
+@conf
+def xlcxx_common_flags(conf):
+ v=conf.env
+ v.CXX_SRC_F=[]
+ v.CXX_TGT_F=['-c','-o']
+ if not v.LINK_CXX:
+ v.LINK_CXX=v.CXX
+ v.CXXLNK_SRC_F=[]
+ v.CXXLNK_TGT_F=['-o']
+ v.CPPPATH_ST='-I%s'
+ v.DEFINES_ST='-D%s'
+ v.LIB_ST='-l%s'
+ v.LIBPATH_ST='-L%s'
+ v.STLIB_ST='-l%s'
+ v.STLIBPATH_ST='-L%s'
+ v.RPATH_ST='-Wl,-rpath,%s'
+ v.SONAME_ST=[]
+ v.SHLIB_MARKER=[]
+ v.STLIB_MARKER=[]
+ v.LINKFLAGS_cxxprogram=['-Wl,-brtl']
+ v.cxxprogram_PATTERN='%s'
+ v.CXXFLAGS_cxxshlib=['-fPIC']
+ v.LINKFLAGS_cxxshlib=['-G','-Wl,-brtl,-bexpfull']
+ v.cxxshlib_PATTERN='lib%s.so'
+ v.LINKFLAGS_cxxstlib=[]
+ v.cxxstlib_PATTERN='lib%s.a'
+def configure(conf):
+ conf.find_xlcxx()
+ conf.find_ar()
+ conf.xlcxx_common_flags()
+ conf.cxx_load_tools()
+ conf.cxx_add_flags()
+ conf.link_add_flags()
diff --git a/waflib/Utils.py b/waflib/Utils.py
new file mode 100644
index 0000000..924d1f1
--- /dev/null
+++ b/waflib/Utils.py
@@ -0,0 +1,615 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
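+# General-purpose helpers used across waflib: file reading/writing and hashing
+# (readf, writef, h_file), per-platform path splitting, small caches
+# (lru_cache, run_once), and the subprocess/prefork machinery (run_process and
+# friends).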
+from __future__ import with_statement
+import atexit,os,sys,errno,inspect,re,datetime,platform,base64,signal,functools,time
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+if os.name=='posix'and sys.version_info[0]<3:
+ try:
+ import subprocess32 as subprocess
+ except ImportError:
+ import subprocess
+else:
+ import subprocess
+try:
+ TimeoutExpired=subprocess.TimeoutExpired
+except AttributeError:
+ class TimeoutExpired(Exception):
+ pass
+from collections import deque,defaultdict
+try:
+ import _winreg as winreg
+except ImportError:
+ try:
+ import winreg
+ except ImportError:
+ winreg=None
+from waflib import Errors
+try:
+ from hashlib import md5
+except ImportError:
+ try:
+ from md5 import md5
+ except ImportError:
+ pass
+try:
+ import threading
+except ImportError:
+ if not'JOBS'in os.environ:
+ os.environ['JOBS']='1'
+ class threading(object):
+ pass
+ class Lock(object):
+ def acquire(self):
+ pass
+ def release(self):
+ pass
+ threading.Lock=threading.Thread=Lock
+SIG_NIL='SIG_NIL_SIG_NIL_'.encode()
+O644=420
+O755=493
+rot_chr=['\\','|','/','-']
+rot_idx=0
+class ordered_iter_dict(dict):
+ def __init__(self,*k,**kw):
+ self.lst=deque()
+ dict.__init__(self,*k,**kw)
+ def clear(self):
+ dict.clear(self)
+ self.lst=deque()
+ def __setitem__(self,key,value):
+ if key in dict.keys(self):
+ self.lst.remove(key)
+ dict.__setitem__(self,key,value)
+ self.lst.append(key)
+ def __delitem__(self,key):
+ dict.__delitem__(self,key)
+ try:
+ self.lst.remove(key)
+ except ValueError:
+ pass
+ def __iter__(self):
+ return reversed(self.lst)
+ def keys(self):
+ return reversed(self.lst)
+class lru_node(object):
+ __slots__=('next','prev','key','val')
+ def __init__(self):
+ self.next=self
+ self.prev=self
+ self.key=None
+ self.val=None
+class lru_cache(object):
+ __slots__=('maxlen','table','head')
+ def __init__(self,maxlen=100):
+ self.maxlen=maxlen
+ self.table={}
+ self.head=lru_node()
+ self.head.next=self.head
+ self.head.prev=self.head
+ def __getitem__(self,key):
+ node=self.table[key]
+ if node is self.head:
+ return node.val
+ node.prev.next=node.next
+ node.next.prev=node.prev
+ node.next=self.head.next
+ node.prev=self.head
+ self.head=node.next.prev=node.prev.next=node
+ return node.val
+ def __setitem__(self,key,val):
+ if key in self.table:
+ node=self.table[key]
+ node.val=val
+ self.__getitem__(key)
+ else:
+ if len(self.table)<self.maxlen:
+ node=lru_node()
+ node.prev=self.head
+ node.next=self.head.next
+ node.prev.next=node.next.prev=node
+ else:
+ node=self.head=self.head.next
+ try:
+ del self.table[node.key]
+ except KeyError:
+ pass
+ node.key=key
+ node.val=val
+ self.table[key]=node
+class lazy_generator(object):
+ def __init__(self,fun,params):
+ self.fun=fun
+ self.params=params
+ def __iter__(self):
+ return self
+ def __next__(self):
+ try:
+ it=self.it
+ except AttributeError:
+ it=self.it=self.fun(*self.params)
+ return next(it)
+ next=__next__
+is_win32=os.sep=='\\'or sys.platform=='win32'
+def readf(fname,m='r',encoding='latin-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ m+='b'
+ with open(fname,m)as f:
+ txt=f.read()
+ if encoding:
+ txt=txt.decode(encoding)
+ else:
+ txt=txt.decode()
+ else:
+ with open(fname,m)as f:
+ txt=f.read()
+ return txt
+def writef(fname,data,m='w',encoding='latin-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ data=data.encode(encoding)
+ m+='b'
+ with open(fname,m)as f:
+ f.write(data)
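+# h_file computes the md5 digest of a file by reading it in 200000-byte
+# chunks; these digests are what waf uses as file signatures.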
+def h_file(fname):
+ m=md5()
+ with open(fname,'rb')as f:
+ while fname:
+ fname=f.read(200000)
+ m.update(fname)
+ return m.digest()
+def readf_win32(f,m='r',encoding='latin-1'):
+ flags=os.O_NOINHERIT|os.O_RDONLY
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise IOError('Cannot read from %r'%f)
+ if sys.hexversion>0x3000000 and not'b'in m:
+ m+='b'
+ with os.fdopen(fd,m)as f:
+ txt=f.read()
+ if encoding:
+ txt=txt.decode(encoding)
+ else:
+ txt=txt.decode()
+ else:
+ with os.fdopen(fd,m)as f:
+ txt=f.read()
+ return txt
+def writef_win32(f,data,m='w',encoding='latin-1'):
+ if sys.hexversion>0x3000000 and not'b'in m:
+ data=data.encode(encoding)
+ m+='b'
+ flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT
+ if'b'in m:
+ flags|=os.O_BINARY
+ if'+'in m:
+ flags|=os.O_RDWR
+ try:
+ fd=os.open(f,flags)
+ except OSError:
+ raise OSError('Cannot write to %r'%f)
+ with os.fdopen(fd,m)as f:
+ f.write(data)
+def h_file_win32(fname):
+ try:
+ fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT)
+ except OSError:
+ raise OSError('Cannot read from %r'%fname)
+ m=md5()
+ with os.fdopen(fd,'rb')as f:
+ while fname:
+ fname=f.read(200000)
+ m.update(fname)
+ return m.digest()
+readf_unix=readf
+writef_unix=writef
+h_file_unix=h_file
+if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000:
+ readf=readf_win32
+ writef=writef_win32
+ h_file=h_file_win32
+try:
+ x=''.encode('hex')
+except LookupError:
+ import binascii
+ def to_hex(s):
+ ret=binascii.hexlify(s)
+ if not isinstance(ret,str):
+ ret=ret.decode('utf-8')
+ return ret
+else:
+ def to_hex(s):
+ return s.encode('hex')
+to_hex.__doc__="""
+Return the hexadecimal representation of a string
+
+:param s: string to convert
+:type s: string
+"""
+def listdir_win32(s):
+ if not s:
+ try:
+ import ctypes
+ except ImportError:
+ return[x+':\\'for x in'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
+ else:
+ dlen=4
+ maxdrives=26
+ buf=ctypes.create_string_buffer(maxdrives*dlen)
+ ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf))
+ return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))]
+ if len(s)==2 and s[1]==":":
+ s+=os.sep
+ if not os.path.isdir(s):
+ e=OSError('%s is not a directory'%s)
+ e.errno=errno.ENOENT
+ raise e
+ return os.listdir(s)
+listdir=os.listdir
+if is_win32:
+ listdir=listdir_win32
+def num2ver(ver):
+ if isinstance(ver,str):
+ ver=tuple(ver.split('.'))
+ if isinstance(ver,tuple):
+ ret=0
+ for i in range(4):
+ if i<len(ver):
+ ret+=256**(3-i)*int(ver[i])
+ return ret
+ return ver
+def to_list(val):
+ if isinstance(val,str):
+ return val.split()
+ else:
+ return val
+def console_encoding():
+ try:
+ import ctypes
+ except ImportError:
+ pass
+ else:
+ try:
+ codepage=ctypes.windll.kernel32.GetConsoleCP()
+ except AttributeError:
+ pass
+ else:
+ if codepage:
+ return'cp%d'%codepage
+ return sys.stdout.encoding or('cp1252'if is_win32 else'latin-1')
+def split_path_unix(path):
+ return path.split('/')
+def split_path_cygwin(path):
+ if path.startswith('//'):
+ ret=path.split('/')[2:]
+ ret[0]='/'+ret[0]
+ return ret
+ return path.split('/')
+re_sp=re.compile('[/\\\\]+')
+def split_path_win32(path):
+ if path.startswith('\\\\'):
+ ret=re_sp.split(path)[1:]
+ ret[0]='\\\\'+ret[0]
+ if ret[0]=='\\\\?':
+ return ret[1:]
+ return ret
+ return re_sp.split(path)
+msysroot=None
+def split_path_msys(path):
+ if path.startswith(('/','\\'))and not path.startswith(('//','\\\\')):
+ global msysroot
+ if not msysroot:
+ msysroot=subprocess.check_output(['cygpath','-w','/']).decode(sys.stdout.encoding or'latin-1')
+ msysroot=msysroot.strip()
+ path=os.path.normpath(msysroot+os.sep+path)
+ return split_path_win32(path)
+if sys.platform=='cygwin':
+ split_path=split_path_cygwin
+elif is_win32:
+ if os.environ.get('MSYSTEM'):
+ split_path=split_path_msys
+ else:
+ split_path=split_path_win32
+else:
+ split_path=split_path_unix
+split_path.__doc__="""
+Splits a path by / or \\; do not confuse this function with ``os.path.split``
+
+:type path: string
+:param path: path to split
+:return: list of string
+"""
+def check_dir(path):
+ if not os.path.isdir(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if not os.path.isdir(path):
+ raise Errors.WafError('Cannot create the folder %r'%path,ex=e)
+def check_exe(name,env=None):
+ if not name:
+ raise ValueError('Cannot execute an empty string!')
+ def is_exe(fpath):
+ return os.path.isfile(fpath)and os.access(fpath,os.X_OK)
+ fpath,fname=os.path.split(name)
+ if fpath and is_exe(name):
+ return os.path.abspath(name)
+ else:
+ env=env or os.environ
+ for path in env['PATH'].split(os.pathsep):
+ path=path.strip('"')
+ exe_file=os.path.join(path,name)
+ if is_exe(exe_file):
+ return os.path.abspath(exe_file)
+ return None
+def def_attrs(cls,**kw):
+ for k,v in kw.items():
+ if not hasattr(cls,k):
+ setattr(cls,k,v)
+def quote_define_name(s):
+ fu=re.sub('[^a-zA-Z0-9]','_',s)
+ fu=re.sub('_+','_',fu)
+ fu=fu.upper()
+ return fu
+re_sh=re.compile('\\s|\'|"')
+def shell_escape(cmd):
+ if isinstance(cmd,str):
+ return cmd
+ return' '.join(repr(x)if re_sh.search(x)else x for x in cmd)
+def h_list(lst):
+ return md5(repr(lst).encode()).digest()
+def h_fun(fun):
+ try:
+ return fun.code
+ except AttributeError:
+ if isinstance(fun,functools.partial):
+ code=list(fun.args)
+ code.extend(sorted(fun.keywords.items()))
+ code.append(h_fun(fun.func))
+ fun.code=h_list(code)
+ return fun.code
+ try:
+ h=inspect.getsource(fun)
+ except EnvironmentError:
+ h='nocode'
+ try:
+ fun.code=h
+ except AttributeError:
+ pass
+ return h
+def h_cmd(ins):
+ if isinstance(ins,str):
+ ret=ins
+ elif isinstance(ins,list)or isinstance(ins,tuple):
+ ret=str([h_cmd(x)for x in ins])
+ else:
+ ret=str(h_fun(ins))
+ if sys.hexversion>0x3000000:
+ ret=ret.encode('latin-1','xmlcharrefreplace')
+ return ret
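+# subst_vars expands ${VAR} references from a ConfigSet (via get_flat) or a
+# plain dict, and turns "$$" into a literal "$". A quick sketch, assuming
+# env.PREFIX is '/usr/local':
+#   subst_vars('${PREFIX}/share', env)  ->  '/usr/local/share'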
+reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
+def subst_vars(expr,params):
+ def repl_var(m):
+ if m.group(1):
+ return'\\'
+ if m.group(2):
+ return'$'
+ try:
+ return params.get_flat(m.group(3))
+ except AttributeError:
+ return params[m.group(3)]
+ return reg_subst.sub(repl_var,expr)
+def destos_to_binfmt(key):
+ if key=='darwin':
+ return'mac-o'
+ elif key in('win32','cygwin','uwin','msys'):
+ return'pe'
+ return'elf'
+def unversioned_sys_platform():
+ s=sys.platform
+ if s.startswith('java'):
+ from java.lang import System
+ s=System.getProperty('os.name')
+ if s=='Mac OS X':
+ return'darwin'
+ elif s.startswith('Windows '):
+ return'win32'
+ elif s=='OS/2':
+ return'os2'
+ elif s=='HP-UX':
+ return'hp-ux'
+ elif s in('SunOS','Solaris'):
+ return'sunos'
+ else:s=s.lower()
+ if s=='powerpc':
+ return'darwin'
+ if s=='win32'or s=='os2':
+ return s
+ if s=='cli'and os.name=='nt':
+ return'win32'
+ return re.split('\d+$',s)[0]
+def nada(*k,**kw):
+ pass
+class Timer(object):
+ def __init__(self):
+ self.start_time=self.now()
+ def __str__(self):
+ delta=self.now()-self.start_time
+ if not isinstance(delta,datetime.timedelta):
+ delta=datetime.timedelta(seconds=delta)
+ days=delta.days
+ hours,rem=divmod(delta.seconds,3600)
+ minutes,seconds=divmod(rem,60)
+ seconds+=delta.microseconds*1e-6
+ result=''
+ if days:
+ result+='%dd'%days
+ if days or hours:
+ result+='%dh'%hours
+ if days or hours or minutes:
+ result+='%dm'%minutes
+ return'%s%.3fs'%(result,seconds)
+ def now(self):
+ return datetime.datetime.utcnow()
+ if hasattr(time,'perf_counter'):
+ def now(self):
+ return time.perf_counter()
+def read_la_file(path):
+ sp=re.compile(r'^([^=]+)=\'(.*)\'$')
+ dc={}
+ for line in readf(path).splitlines():
+ try:
+ _,left,right,_=sp.split(line.strip())
+ dc[left]=right
+ except ValueError:
+ pass
+ return dc
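+# run_once memoizes a function on its positional arguments (which must be
+# hashable); it is intended for expensive helpers such as configuration checks
+# so repeated calls are answered from the cache.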
+def run_once(fun):
+ cache={}
+ def wrap(*k):
+ try:
+ return cache[k]
+ except KeyError:
+ ret=fun(*k)
+ cache[k]=ret
+ return ret
+ wrap.__cache__=cache
+ wrap.__name__=fun.__name__
+ return wrap
+def get_registry_app_path(key,filename):
+ if not winreg:
+ return None
+ try:
+ result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0])
+ except OSError:
+ pass
+ else:
+ if os.path.isfile(result):
+ return result
+def lib64():
+ if os.sep=='/':
+ if platform.architecture()[0]=='64bit':
+ if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'):
+ return'64'
+ return''
+def sane_path(p):
+ return os.path.abspath(os.path.expanduser(p))
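+# Prefork process pool: get_process() starts "python -c <processor.py>"
+# workers, and run_prefork_process() pickles [cmd, kwargs, cargs] to a pooled
+# worker instead of spawning a fresh interpreter for every command. If
+# pickling fails or no worker can be obtained, run_regular_process() is used
+# instead.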
+process_pool=[]
+def get_process():
+ try:
+ return process_pool.pop()
+ except IndexError:
+ filepath=os.path.dirname(os.path.abspath(__file__))+os.sep+'processor.py'
+ cmd=[sys.executable,'-c',readf(filepath)]
+ return subprocess.Popen(cmd,stdout=subprocess.PIPE,stdin=subprocess.PIPE,bufsize=0)
+def run_prefork_process(cmd,kwargs,cargs):
+ if not'env'in kwargs:
+ kwargs['env']=dict(os.environ)
+ try:
+ obj=base64.b64encode(cPickle.dumps([cmd,kwargs,cargs]))
+ except(TypeError,AttributeError):
+ return run_regular_process(cmd,kwargs,cargs)
+ proc=get_process()
+ if not proc:
+ return run_regular_process(cmd,kwargs,cargs)
+ proc.stdin.write(obj)
+ proc.stdin.write('\n'.encode())
+ proc.stdin.flush()
+ obj=proc.stdout.readline()
+ if not obj:
+ raise OSError('Preforked sub-process %r died'%proc.pid)
+ process_pool.append(proc)
+ lst=cPickle.loads(base64.b64decode(obj))
+ assert len(lst)==5
+ ret,out,err,ex,trace=lst
+ if ex:
+ if ex=='OSError':
+ raise OSError(trace)
+ elif ex=='ValueError':
+ raise ValueError(trace)
+ elif ex=='TimeoutExpired':
+ exc=TimeoutExpired(cmd,timeout=cargs['timeout'],output=out)
+ exc.stderr=err
+ raise exc
+ else:
+ raise Exception(trace)
+ return ret,out,err
+def lchown(path,user=-1,group=-1):
+ if isinstance(user,str):
+ import pwd
+ entry=pwd.getpwnam(user)
+ if not entry:
+ raise OSError('Unknown user %r'%user)
+ user=entry[2]
+ if isinstance(group,str):
+ import grp
+ entry=grp.getgrnam(group)
+ if not entry:
+ raise OSError('Unknown group %r'%group)
+ group=entry[2]
+ return os.lchown(path,user,group)
+def run_regular_process(cmd,kwargs,cargs={}):
+ proc=subprocess.Popen(cmd,**kwargs)
+ if kwargs.get('stdout')or kwargs.get('stderr'):
+ try:
+ out,err=proc.communicate(**cargs)
+ except TimeoutExpired:
+ if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+ os.killpg(proc.pid,signal.SIGKILL)
+ else:
+ proc.kill()
+ out,err=proc.communicate()
+ exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out)
+ exc.stderr=err
+ raise exc
+ status=proc.returncode
+ else:
+ out,err=(None,None)
+ try:
+ status=proc.wait(**cargs)
+ except TimeoutExpired as e:
+ if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+ os.killpg(proc.pid,signal.SIGKILL)
+ else:
+ proc.kill()
+ proc.wait()
+ raise e
+ return status,out,err
+def run_process(cmd,kwargs,cargs={}):
+ if kwargs.get('stdout')and kwargs.get('stderr'):
+ return run_prefork_process(cmd,kwargs,cargs)
+ else:
+ return run_regular_process(cmd,kwargs,cargs)
+def alloc_process_pool(n,force=False):
+ global run_process,get_process,alloc_process_pool
+ if not force:
+ n=max(n-len(process_pool),0)
+ try:
+ lst=[get_process()for x in range(n)]
+ except OSError:
+ run_process=run_regular_process
+ get_process=alloc_process_pool=nada
+ else:
+ for x in lst:
+ process_pool.append(x)
+def atexit_pool():
+ for k in process_pool:
+ try:
+ os.kill(k.pid,9)
+ except OSError:
+ pass
+ else:
+ k.wait()
+if(sys.hexversion<0x207000f and not is_win32)or sys.hexversion>=0x306000f:
+ atexit.register(atexit_pool)
+if os.environ.get('WAF_NO_PREFORK')or sys.platform=='cli'or not sys.executable:
+ run_process=run_regular_process
+ get_process=alloc_process_pool=nada
diff --git a/waflib/__init__.py b/waflib/__init__.py
new file mode 100644
index 0000000..55e850d
--- /dev/null
+++ b/waflib/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py
new file mode 100644
index 0000000..1d8bc78
--- /dev/null
+++ b/waflib/ansiterm.py
@@ -0,0 +1,238 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
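+# Terminal handling for Windows consoles: when ctypes is available, AnsiTerm
+# translates ANSI escape sequences (colors, cursor movement, clearing) into
+# kernel32 console API calls; otherwise it degrades to a thread-safe
+# pass-through wrapper around the underlying stream.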
+import os,re,sys
+from waflib import Utils
+wlock=Utils.threading.Lock()
+try:
+ from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long
+except ImportError:
+ class AnsiTerm(object):
+ def __init__(self,stream):
+ self.stream=stream
+ try:
+ self.errors=self.stream.errors
+ except AttributeError:
+ pass
+ self.encoding=self.stream.encoding
+ def write(self,txt):
+ try:
+ wlock.acquire()
+ self.stream.write(txt)
+ self.stream.flush()
+ finally:
+ wlock.release()
+ def fileno(self):
+ return self.stream.fileno()
+ def flush(self):
+ self.stream.flush()
+ def isatty(self):
+ return self.stream.isatty()
+else:
+ class COORD(Structure):
+ _fields_=[("X",c_short),("Y",c_short)]
+ class SMALL_RECT(Structure):
+ _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)]
+ class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+ _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)]
+ class CONSOLE_CURSOR_INFO(Structure):
+ _fields_=[('dwSize',c_ulong),('bVisible',c_int)]
+ try:
+ _type=unicode
+ except NameError:
+ _type=str
+ to_int=lambda number,default:number and int(number)or default
+ STD_OUTPUT_HANDLE=-11
+ STD_ERROR_HANDLE=-12
+ windll.kernel32.GetStdHandle.argtypes=[c_ulong]
+ windll.kernel32.GetStdHandle.restype=c_ulong
+ windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long
+ windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort]
+ windll.kernel32.SetConsoleTextAttribute.restype=c_long
+ windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)]
+ windll.kernel32.FillConsoleOutputCharacterW.restype=c_long
+ windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)]
+ windll.kernel32.FillConsoleOutputAttribute.restype=c_long
+ windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)]
+ windll.kernel32.SetConsoleCursorPosition.restype=c_long
+ windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)]
+ windll.kernel32.SetConsoleCursorInfo.restype=c_long
+ class AnsiTerm(object):
+ def __init__(self,s):
+ self.stream=s
+ try:
+ self.errors=s.errors
+ except AttributeError:
+ pass
+ self.encoding=s.encoding
+ self.cursor_history=[]
+ handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
+ self.hconsole=windll.kernel32.GetStdHandle(handle)
+ self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ self._csinfo=CONSOLE_CURSOR_INFO()
+ windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
+ self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo))
+ self._isatty=r==1
+ def screen_buffer_info(self):
+ windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo))
+ return self._sbinfo
+ def clear_line(self,param):
+ mode=param and int(param)or 0
+ sbinfo=self.screen_buffer_info()
+ if mode==1:
+ line_start=COORD(0,sbinfo.CursorPosition.Y)
+ line_length=sbinfo.Size.X
+ elif mode==2:
+ line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y)
+ line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+ else:
+ line_start=sbinfo.CursorPosition
+ line_length=sbinfo.Size.X-sbinfo.CursorPosition.X
+ chars_written=c_ulong()
+ windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written))
+ def clear_screen(self,param):
+ mode=to_int(param,0)
+ sbinfo=self.screen_buffer_info()
+ if mode==1:
+ clear_start=COORD(0,0)
+ clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y
+ elif mode==2:
+ clear_start=COORD(0,0)
+ clear_length=sbinfo.Size.X*sbinfo.Size.Y
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start)
+ else:
+ clear_start=sbinfo.CursorPosition
+ clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y))
+ chars_written=c_ulong()
+ windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written))
+ windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written))
+ def push_cursor(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.cursor_history.append(sbinfo.CursorPosition)
+ def pop_cursor(self,param):
+ if self.cursor_history:
+ old_pos=self.cursor_history.pop()
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos)
+ def set_cursor(self,param):
+ y,sep,x=param.partition(';')
+ x=to_int(x,1)-1
+ y=to_int(y,1)-1
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y))
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def set_column(self,param):
+ x=to_int(param,1)-1
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y)
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def move_cursor(self,x_offset=0,y_offset=0):
+ sbinfo=self.screen_buffer_info()
+ new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y))
+ windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos)
+ def move_up(self,param):
+ self.move_cursor(y_offset=-to_int(param,1))
+ def move_down(self,param):
+ self.move_cursor(y_offset=to_int(param,1))
+ def move_left(self,param):
+ self.move_cursor(x_offset=-to_int(param,1))
+ def move_right(self,param):
+ self.move_cursor(x_offset=to_int(param,1))
+ def next_line(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1))
+ def prev_line(self,param):
+ sbinfo=self.screen_buffer_info()
+ self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1))
+ def rgb2bgr(self,c):
+ return((c&1)<<2)|(c&2)|((c&4)>>2)
+ def set_color(self,param):
+ cols=param.split(';')
+ sbinfo=self.screen_buffer_info()
+ attr=sbinfo.Attributes
+ for c in cols:
+ c=to_int(c,0)
+ if 29<c<38:
+ attr=(attr&0xfff0)|self.rgb2bgr(c-30)
+ elif 39<c<48:
+ attr=(attr&0xff0f)|(self.rgb2bgr(c-40)<<4)
+ elif c==0:
+ attr=self._orig_sbinfo.Attributes
+ elif c==1:
+ attr|=0x08
+ elif c==4:
+ attr|=0x80
+ elif c==7:
+ attr=(attr&0xff88)|((attr&0x70)>>4)|((attr&0x07)<<4)
+ windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr)
+ def show_cursor(self,param):
+ self._csinfo.bVisible=1
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
+ def hide_cursor(self,param):
+ self._csinfo.bVisible=0
+ windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo))
+ ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,}
+ ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
+ def write(self,text):
+ try:
+ wlock.acquire()
+ if self._isatty:
+ for param,cmd,txt in self.ansi_tokens.findall(text):
+ if cmd:
+ cmd_func=self.ansi_command_table.get(cmd)
+ if cmd_func:
+ cmd_func(self,param)
+ else:
+ self.writeconsole(txt)
+ else:
+ self.stream.write(text)
+ finally:
+ wlock.release()
+ def writeconsole(self,txt):
+ chars_written=c_ulong()
+ writeconsole=windll.kernel32.WriteConsoleA
+ if isinstance(txt,_type):
+ writeconsole=windll.kernel32.WriteConsoleW
+ done=0
+ todo=len(txt)
+ chunk=32<<10
+ while todo!=0:
+ doing=min(chunk,todo)
+ buf=txt[done:done+doing]
+ r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None)
+ if r==0:
+ chunk>>=1
+ continue
+ done+=doing
+ todo-=doing
+ def fileno(self):
+ return self.stream.fileno()
+ def flush(self):
+ pass
+ def isatty(self):
+ return self._isatty
+ if sys.stdout.isatty()or sys.stderr.isatty():
+ handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
+ console=windll.kernel32.GetStdHandle(handle)
+ sbinfo=CONSOLE_SCREEN_BUFFER_INFO()
+ def get_term_cols():
+ windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo))
+ return sbinfo.Size.X-1
+try:
+ import struct,fcntl,termios
+except ImportError:
+ pass
+else:
+ if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'):
+ FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno()
+ def fun():
+ return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1]
+ try:
+ fun()
+ except Exception as e:
+ pass
+ else:
+ get_term_cols=fun
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
new file mode 100644
index 0000000..55e850d
--- /dev/null
+++ b/waflib/extras/__init__.py
@@ -0,0 +1,4 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
new file mode 100644
index 0000000..fb4e578
--- /dev/null
+++ b/waflib/extras/compat15.py
@@ -0,0 +1,305 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
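+# Compatibility layer for waf 1.5 build scripts: it re-registers the old
+# module names (Environment, Options, Utils, ...), maps removed APIs such as
+# uselib_local, set_options and name_to_obj onto their current equivalents,
+# and emits "compat:" warnings when the deprecated spellings are used.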
+import sys
+from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context
+sys.modules['Environment']=ConfigSet
+ConfigSet.Environment=ConfigSet.ConfigSet
+sys.modules['Logs']=Logs
+sys.modules['Options']=Options
+sys.modules['Scripting']=Scripting
+sys.modules['Task']=Task
+sys.modules['Build']=Build
+sys.modules['Configure']=Configure
+sys.modules['Node']=Node
+sys.modules['Runner']=Runner
+sys.modules['TaskGen']=TaskGen
+sys.modules['Utils']=Utils
+sys.modules['Constants']=Context
+Context.SRCDIR=''
+Context.BLDDIR=''
+from waflib.Tools import c_preproc
+sys.modules['preproc']=c_preproc
+from waflib.Tools import c_config
+sys.modules['config_c']=c_config
+ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive
+ConfigSet.ConfigSet.set_variant=Utils.nada
+Utils.pproc=Utils.subprocess
+Build.BuildContext.add_subdirs=Build.BuildContext.recurse
+Build.BuildContext.new_task_gen=Build.BuildContext.__call__
+Build.BuildContext.is_install=0
+Node.Node.relpath_gen=Node.Node.path_from
+Utils.pproc=Utils.subprocess
+Utils.get_term_cols=Logs.get_term_cols
+def cmd_output(cmd,**kw):
+ silent=False
+ if'silent'in kw:
+ silent=kw['silent']
+ del(kw['silent'])
+ if'e'in kw:
+ tmp=kw['e']
+ del(kw['e'])
+ kw['env']=tmp
+ kw['shell']=isinstance(cmd,str)
+ kw['stdout']=Utils.subprocess.PIPE
+ if silent:
+ kw['stderr']=Utils.subprocess.PIPE
+ try:
+ p=Utils.subprocess.Popen(cmd,**kw)
+ output=p.communicate()[0]
+ except OSError as e:
+ raise ValueError(str(e))
+ if p.returncode:
+ if not silent:
+ msg="command execution failed: %s -> %r"%(cmd,str(output))
+ raise ValueError(msg)
+ output=''
+ return output
+Utils.cmd_output=cmd_output
+def name_to_obj(self,s,env=None):
+ if Logs.verbose:
+  Logs.warn('compat: replace "name_to_obj(name, env)" with "get_tgen_by_name(name)"')
+ return self.get_tgen_by_name(s)
+Build.BuildContext.name_to_obj=name_to_obj
+def env_of_name(self,name):
+ try:
+ return self.all_envs[name]
+ except KeyError:
+ Logs.error('no such environment: '+name)
+ return None
+Build.BuildContext.env_of_name=env_of_name
+def set_env_name(self,name,env):
+ self.all_envs[name]=env
+ return env
+Configure.ConfigurationContext.set_env_name=set_env_name
+def retrieve(self,name,fromenv=None):
+ try:
+ env=self.all_envs[name]
+ except KeyError:
+ env=ConfigSet.ConfigSet()
+ self.prepare_env(env)
+ self.all_envs[name]=env
+ else:
+ if fromenv:
+ Logs.warn('The environment %s may have been configured already',name)
+ return env
+Configure.ConfigurationContext.retrieve=retrieve
+Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse
+Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load
+Configure.conftest=Configure.conf
+Configure.ConfigurationError=Errors.ConfigurationError
+Utils.WafError=Errors.WafError
+Options.OptionsContext.sub_options=Options.OptionsContext.recurse
+Options.OptionsContext.tool_options=Context.Context.load
+Options.Handler=Options.OptionsContext
+Task.simple_task_type=Task.task_type_from_func=Task.task_factory
+Task.Task.classes=Task.classes
+def setitem(self,key,value):
+ if key.startswith('CCFLAGS'):
+ key=key[1:]
+ self.table[key]=value
+ConfigSet.ConfigSet.__setitem__=setitem
+@TaskGen.feature('d')
+@TaskGen.before('apply_incpaths')
+def old_importpaths(self):
+ if getattr(self,'importpaths',[]):
+ self.includes=self.importpaths
+from waflib import Context
+eld=Context.load_tool
+def load_tool(*k,**kw):
+ ret=eld(*k,**kw)
+ if'set_options'in ret.__dict__:
+ if Logs.verbose:
+   Logs.warn('compat: rename "set_options" to "options"')
+ ret.options=ret.set_options
+ if'detect'in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "detect" to "configure"')
+ ret.configure=ret.detect
+ return ret
+Context.load_tool=load_tool
+def get_curdir(self):
+ return self.path.abspath()
+Context.Context.curdir=property(get_curdir,Utils.nada)
+def get_srcdir(self):
+ return self.srcnode.abspath()
+Configure.ConfigurationContext.srcdir=property(get_srcdir,Utils.nada)
+def get_blddir(self):
+ return self.bldnode.abspath()
+Configure.ConfigurationContext.blddir=property(get_blddir,Utils.nada)
+Configure.ConfigurationContext.check_message_1=Configure.ConfigurationContext.start_msg
+Configure.ConfigurationContext.check_message_2=Configure.ConfigurationContext.end_msg
+rev=Context.load_module
+def load_module(path,encoding=None):
+ ret=rev(path,encoding)
+ if'set_options'in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "set_options" to "options" (%r)',path)
+ ret.options=ret.set_options
+ if'srcdir'in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "srcdir" to "top" (%r)',path)
+ ret.top=ret.srcdir
+ if'blddir'in ret.__dict__:
+ if Logs.verbose:
+ Logs.warn('compat: rename "blddir" to "out" (%r)',path)
+ ret.out=ret.blddir
+ Utils.g_module=Context.g_module
+ Options.launch_dir=Context.launch_dir
+ return ret
+Context.load_module=load_module
+old_post=TaskGen.task_gen.post
+def post(self):
+ self.features=self.to_list(self.features)
+ if'cc'in self.features:
+ if Logs.verbose:
+ Logs.warn('compat: the feature cc does not exist anymore (use "c")')
+ self.features.remove('cc')
+ self.features.append('c')
+ if'cstaticlib'in self.features:
+ if Logs.verbose:
+ Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
+ self.features.remove('cstaticlib')
+ self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib')
+ if getattr(self,'ccflags',None):
+ if Logs.verbose:
+ Logs.warn('compat: "ccflags" was renamed to "cflags"')
+ self.cflags=self.ccflags
+ return old_post(self)
+TaskGen.task_gen.post=post
+def waf_version(*k,**kw):
+ Logs.warn('wrong version (waf_version was removed in waf 1.6)')
+Utils.waf_version=waf_version
+import os
+@TaskGen.feature('c','cxx','d')
+@TaskGen.before('apply_incpaths','propagate_uselib_vars')
+@TaskGen.after('apply_link','process_source')
+def apply_uselib_local(self):
+ env=self.env
+ from waflib.Tools.ccroot import stlink_task
+ self.uselib=self.to_list(getattr(self,'uselib',[]))
+ self.includes=self.to_list(getattr(self,'includes',[]))
+ names=self.to_list(getattr(self,'uselib_local',[]))
+ get=self.bld.get_tgen_by_name
+ seen=set()
+ seen_uselib=set()
+ tmp=Utils.deque(names)
+ if tmp:
+ if Logs.verbose:
+ Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
+ while tmp:
+ lib_name=tmp.popleft()
+ if lib_name in seen:
+ continue
+ y=get(lib_name)
+ y.post()
+ seen.add(lib_name)
+ if getattr(y,'uselib_local',None):
+ for x in self.to_list(getattr(y,'uselib_local',[])):
+ obj=get(x)
+ obj.post()
+ if getattr(obj,'link_task',None):
+ if not isinstance(obj.link_task,stlink_task):
+ tmp.append(x)
+ if getattr(y,'link_task',None):
+ link_name=y.target[y.target.rfind(os.sep)+1:]
+ if isinstance(y.link_task,stlink_task):
+ env.append_value('STLIB',[link_name])
+ else:
+ env.append_value('LIB',[link_name])
+ self.link_task.set_run_after(y.link_task)
+ self.link_task.dep_nodes+=y.link_task.outputs
+ tmp_path=y.link_task.outputs[0].parent.bldpath()
+ if not tmp_path in env['LIBPATH']:
+ env.prepend_value('LIBPATH',[tmp_path])
+ for v in self.to_list(getattr(y,'uselib',[])):
+ if v not in seen_uselib:
+ seen_uselib.add(v)
+ if not env['STLIB_'+v]:
+ if not v in self.uselib:
+ self.uselib.insert(0,v)
+ if getattr(y,'export_includes',None):
+ self.includes.extend(y.to_incnodes(y.export_includes))
+@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib')
+@TaskGen.after('apply_link')
+def apply_objdeps(self):
+ names=getattr(self,'add_objects',[])
+ if not names:
+ return
+ names=self.to_list(names)
+ get=self.bld.get_tgen_by_name
+ seen=[]
+ while names:
+ x=names[0]
+ if x in seen:
+ names=names[1:]
+ continue
+ y=get(x)
+ if getattr(y,'add_objects',None):
+ added=0
+ lst=y.to_list(y.add_objects)
+ lst.reverse()
+ for u in lst:
+ if u in seen:
+ continue
+ added=1
+ names=[u]+names
+ if added:
+ continue
+ y.post()
+ seen.append(x)
+ for t in getattr(y,'compiled_tasks',[]):
+ self.link_task.inputs.extend(t.outputs)
+@TaskGen.after('apply_link')
+def process_obj_files(self):
+ if not hasattr(self,'obj_files'):
+ return
+ for x in self.obj_files:
+ node=self.path.find_resource(x)
+ self.link_task.inputs.append(node)
+@TaskGen.taskgen_method
+def add_obj_file(self,file):
+ if not hasattr(self,'obj_files'):
+ self.obj_files=[]
+ if not'process_obj_files'in self.meths:
+ self.meths.append('process_obj_files')
+ self.obj_files.append(file)
+old_define=Configure.ConfigurationContext.__dict__['define']
+@Configure.conf
+def define(self,key,val,quote=True,comment=''):
+ old_define(self,key,val,quote,comment)
+ if key.startswith('HAVE_'):
+ self.env[key]=1
+old_undefine=Configure.ConfigurationContext.__dict__['undefine']
+@Configure.conf
+def undefine(self,key,comment=''):
+ old_undefine(self,key,comment)
+ if key.startswith('HAVE_'):
+ self.env[key]=0
+def set_incdirs(self,val):
+ Logs.warn('compat: replace "export_incdirs" with "export_includes"')
+ self.export_includes=val
+TaskGen.task_gen.export_incdirs=property(None,set_incdirs)
+def install_dir(self,path):
+ if not path:
+ return[]
+ destpath=Utils.subst_vars(path,self.env)
+ if self.is_install>0:
+ Logs.info('* creating %s',destpath)
+ Utils.check_dir(destpath)
+ elif self.is_install<0:
+ Logs.info('* removing %s',destpath)
+ try:
+ os.remove(destpath)
+ except OSError:
+ pass
+Build.BuildContext.install_dir=install_dir
+repl={'apply_core':'process_source','apply_lib_vars':'process_source','apply_obj_vars':'propagate_uselib_vars','exec_rule':'process_rule'}
+def after(*k):
+ k=[repl.get(key,key)for key in k]
+ return TaskGen.after_method(*k)
+def before(*k):
+ k=[repl.get(key,key)for key in k]
+ return TaskGen.before_method(*k)
+TaskGen.before=before
diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py
new file mode 100644
index 0000000..9aa8418
--- /dev/null
+++ b/waflib/fixpy2.py
@@ -0,0 +1,47 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
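+# Source fix-ups for old interpreters: fixdir() rewrites the installed waflib
+# modules in place, replacing "except ... as e:" with the legacy comma syntax
+# and dropping .encode()/.decode() calls so the files still load on very old
+# Python 2 releases.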
+from __future__ import with_statement
+import os
+all_modifs={}
+def fixdir(dir):
+ for k in all_modifs:
+ for v in all_modifs[k]:
+ modif(os.path.join(dir,'waflib'),k,v)
+def modif(dir,name,fun):
+ if name=='*':
+ lst=[]
+ for y in'. Tools extras'.split():
+ for x in os.listdir(os.path.join(dir,y)):
+ if x.endswith('.py'):
+ lst.append(y+os.sep+x)
+ for x in lst:
+ modif(dir,x,fun)
+ return
+ filename=os.path.join(dir,name)
+ with open(filename,'r')as f:
+ txt=f.read()
+ txt=fun(txt)
+ with open(filename,'w')as f:
+ f.write(txt)
+def subst(*k):
+ def do_subst(fun):
+ for x in k:
+ try:
+ all_modifs[x].append(fun)
+ except KeyError:
+ all_modifs[x]=[fun]
+ return fun
+ return do_subst
+@subst('*')
+def r1(code):
+ code=code.replace('as e:',',e:')
+ code=code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')",'')
+ return code.replace('.encode()','')
+@subst('Runner.py')
+def r4(code):
+ return code.replace('next(self.biter)','self.biter.next()')
+@subst('Context.py')
+def r5(code):
+ return code.replace("('Execution failure: %s'%str(e),ex=e)","('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
diff --git a/waflib/processor.py b/waflib/processor.py
new file mode 100644
index 0000000..10f7c1b
--- /dev/null
+++ b/waflib/processor.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# encoding: utf-8
+# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
+
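+# Worker loop for the prefork pool in Utils.py: each worker reads one
+# base64-encoded pickle of [cmd, kwargs, cargs] per line from stdin, runs the
+# command with subprocess, and writes back [returncode, stdout, stderr,
+# exception name, traceback] in the same encoding.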
+import os,sys,traceback,base64,signal
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+try:
+ import subprocess32 as subprocess
+except ImportError:
+ import subprocess
+try:
+ TimeoutExpired=subprocess.TimeoutExpired
+except AttributeError:
+ class TimeoutExpired(Exception):
+ pass
+def run():
+ txt=sys.stdin.readline().strip()
+ if not txt:
+ sys.exit(1)
+ [cmd,kwargs,cargs]=cPickle.loads(base64.b64decode(txt))
+ cargs=cargs or{}
+ ret=1
+ out,err,ex,trace=(None,None,None,None)
+ try:
+ proc=subprocess.Popen(cmd,**kwargs)
+ try:
+ out,err=proc.communicate(**cargs)
+ except TimeoutExpired:
+ if kwargs.get('start_new_session')and hasattr(os,'killpg'):
+ os.killpg(proc.pid,signal.SIGKILL)
+ else:
+ proc.kill()
+ out,err=proc.communicate()
+ exc=TimeoutExpired(proc.args,timeout=cargs['timeout'],output=out)
+ exc.stderr=err
+ raise exc
+ ret=proc.returncode
+ except Exception as e:
+ exc_type,exc_value,tb=sys.exc_info()
+ exc_lines=traceback.format_exception(exc_type,exc_value,tb)
+ trace=str(cmd)+'\n'+''.join(exc_lines)
+ ex=e.__class__.__name__
+ tmp=[ret,out,err,ex,trace]
+ obj=base64.b64encode(cPickle.dumps(tmp))
+ sys.stdout.write(obj.decode())
+ sys.stdout.write('\n')
+ sys.stdout.flush()
+while 1:
+ try:
+ run()
+ except KeyboardInterrupt:
+ break